Rewrite Ona Script implementation #43
							
								
								
									
.vscode/settings.json: 21 changed lines (vendored)
							| @ -1,13 +1,14 @@ | ||||
| { | ||||
|     "files.insertFinalNewline": true, | ||||
|     "files.trimTrailingWhitespace": true, | ||||
| 	"files.insertFinalNewline": true, | ||||
| 	"files.trimTrailingWhitespace": true, | ||||
| 	"zig.initialSetupDone": true, | ||||
| 
 | ||||
|     "[zig]": { | ||||
|         "editor.formatOnSave": false, | ||||
|         "files.eol": "\n", | ||||
|         "editor.minimap.maxColumn": 120, | ||||
|         "editor.detectIndentation": false, | ||||
|         "editor.insertSpaces": false, | ||||
|         "editor.rulers": [120], | ||||
|     } | ||||
| 	"[zig]": { | ||||
| 		"editor.formatOnSave": false, | ||||
| 		"files.eol": "\n", | ||||
| 		"editor.minimap.maxColumn": 120, | ||||
| 		"editor.detectIndentation": false, | ||||
| 		"editor.insertSpaces": false, | ||||
| 		"editor.rulers": [120], | ||||
| 	} | ||||
| } | ||||
|  | ||||
| @ -1,15 +1,15 @@ | ||||
| 
 | ||||
| var printer = lambda (pfx): | ||||
| let printer = lambda (): | ||||
| 	return lambda (msg): | ||||
| 		@print("This is a func call") | ||||
| 		@print(msg) | ||||
| 	end | ||||
| end | ||||
| 
 | ||||
| let pr = printer("This is a func call") | ||||
| let pr = printer() | ||||
| var i = 0 | ||||
| 
 | ||||
| pr("") | ||||
| pr("test") | ||||
| 
 | ||||
| while i < 5: | ||||
| 	pr("hello, world") | ||||
|  | ||||
| @ -217,12 +217,12 @@ pub fn all_equals(target: []const Byte, match: Byte) bool { | ||||
| 	return true; | ||||
| } | ||||
| 
 | ||||
| pub fn allocate_copy(allocator: Allocator, source: []const Byte) AllocationError![]Byte { | ||||
| 	const allocation = try allocator.actions.reallocate(allocator.context, @returnAddress(), null, source.len); | ||||
| pub fn allocate_copy(comptime Element: type, allocator: Allocator, source: []const Element) AllocationError![]Element { | ||||
| 	const allocation = try allocator.actions.reallocate(allocator.context, @returnAddress(), null, @sizeOf(Element) * source.len); | ||||
| 
 | ||||
| 	copy(allocation, source); | ||||
| 	copy(allocation, bytes_of(source)); | ||||
| 
 | ||||
| 	return allocation; | ||||
| 	return @as([*]Element, @ptrCast(@alignCast(allocation.ptr)))[0 .. source.len]; | ||||
| } | ||||
| 
 | ||||
| pub fn allocate_many(allocator: Allocator, count: usize, value: anytype) AllocationError![]@TypeOf(value) { | ||||
| @ -332,24 +332,6 @@ pub fn djb2_hash(comptime int: std.builtin.Type.Int, target: []const Byte) math. | ||||
| 	return hash_code; | ||||
| } | ||||
| 
 | ||||
| pub fn ends_with(target: []const Byte, match: []const Byte) bool { | ||||
| 	if (target.len < match.len) { | ||||
| 		return false; | ||||
| 	} | ||||
| 
 | ||||
| 	{ | ||||
| 		var index = @as(usize, 0); | ||||
| 
 | ||||
| 		while (index < match.len) : (index += 1) { | ||||
| 			if (target[target.len - (1 + index)] != match[match.len - (1 + index)]) { | ||||
| 				return false; | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	return true; | ||||
| } | ||||
| 
 | ||||
| pub fn find_first(haystack: []const Byte, needle: Byte) ?usize { | ||||
| 	for (0 .. haystack.len) |i| { | ||||
| 		if (haystack[i] == needle) { | ||||
|  | ||||
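A minimal call-site sketch of the new generic allocate_copy signature, for reference (the coral.io namespace and the method-style deallocate call are assumptions based on other hunks in this change); non-byte element types now work too, since the byte count handed to the underlying reallocate action is scaled by @sizeOf(Element):

	// Hedged sketch: allocator is any coral io.Allocator available at the call site.
	const greeting = try coral.io.allocate_copy(coral.io.Byte, allocator, "hello");

	defer allocator.deallocate(greeting);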
| @ -16,6 +16,16 @@ pub fn Stack(comptime Value: type) type { | ||||
| 			self.values = self.values[0 .. 0]; | ||||
| 		} | ||||
| 
 | ||||
| 		pub fn drop(self: *Self, count: usize) bool { | ||||
| 			if (math.checked_sub(self.values.len, count)) |updated_count| { | ||||
| 				self.values = self.values[0 .. updated_count]; | ||||
| 
 | ||||
| 				return true; | ||||
| 			} | ||||
| 
 | ||||
| 			return false; | ||||
| 		} | ||||
| 
 | ||||
| 		pub fn free(self: *Self) void { | ||||
| 			if (self.capacity == 0) { | ||||
| 				return; | ||||
|  | ||||
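A short usage sketch of the new Stack.drop (make, push_one, and free are taken from how coral.list stacks are used elsewhere in this change; the i32 element type and allocator are illustrative):

	var stack = coral.list.Stack(i32).make(allocator);

	defer stack.free();

	try stack.push_one(10);
	try stack.push_one(20);

	// Dropping within bounds shrinks the value slice; requesting more values
	// than are currently held reports failure instead of underflowing.
	coral.debug.assert(stack.drop(2));
	coral.debug.assert(!stack.drop(1));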
| @ -8,7 +8,7 @@ pub const DecimalFormat = struct { | ||||
| 	delimiter: []const io.Byte, | ||||
| 	positive_prefix: enum {none, plus, space}, | ||||
| 
 | ||||
| 	const default = DecimalFormat{ | ||||
| 	pub const default = DecimalFormat{ | ||||
| 		.delimiter = "", | ||||
| 		.positive_prefix = .none, | ||||
| 	}; | ||||
| @ -113,7 +113,7 @@ pub const DecimalFormat = struct { | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	pub fn print(self: DecimalFormat, writer: io.Writer, value: anytype) PrintError!void { | ||||
| 	pub fn print(self: DecimalFormat, writer: io.Writer, value: anytype) ?usize { | ||||
| 		if (value == 0) { | ||||
| 			return print_string(writer, switch (self.positive_prefix) { | ||||
| 				.none => "0", | ||||
| @ -134,8 +134,8 @@ pub const DecimalFormat = struct { | ||||
| 					var decomposable_value = value; | ||||
| 
 | ||||
| 					while (decomposable_value != 0) : (buffer_start -= 1) { | ||||
| 						buffer[buffer_start] = @intCast((decomposable_value % radix) + '0'); | ||||
| 						decomposable_value = (decomposable_value / radix); | ||||
| 						buffer[buffer_start] = @intCast(@mod(decomposable_value, radix) + '0'); | ||||
| 						decomposable_value = @divTrunc(decomposable_value, radix); | ||||
| 					} | ||||
| 				} | ||||
| 
 | ||||
| @ -149,10 +149,33 @@ pub const DecimalFormat = struct { | ||||
| 					} | ||||
| 				} | ||||
| 
 | ||||
| 				try print_string(writer, buffer[buffer_start ..]); | ||||
| 				return print_string(writer, buffer[buffer_start ..]); | ||||
| 			}, | ||||
| 
 | ||||
| 			else => unformattableMessage(ValueType), | ||||
| 			.Float => |float| { | ||||
| 				var length = @as(usize, 0); | ||||
| 
 | ||||
| 				if (value < 0) { | ||||
| 					length += print_string(writer, "-") orelse return null; | ||||
| 				} | ||||
| 
 | ||||
| 				const Float = @TypeOf(value); | ||||
| 
 | ||||
| 				const Int = math.Int(.{ | ||||
| 					.bits = float.bits, | ||||
| 					.signedness = .unsigned, | ||||
| 				}); | ||||
| 
 | ||||
| 				const integer = @as(Int, @intFromFloat(value)); | ||||
| 
 | ||||
| 				length += self.print(writer, integer) orelse return null; | ||||
| 				length += print_string(writer, ".") orelse return null; | ||||
| 				length += self.print(writer, @as(Int, @intFromFloat((value - @as(Float, @floatFromInt(integer))) * 100))) orelse return null; | ||||
| 
 | ||||
| 				return length; | ||||
| 			}, | ||||
| 
 | ||||
| 			else => @compileError(unformattableMessage(ValueType)), | ||||
| 		} | ||||
| 	} | ||||
| }; | ||||
| @ -168,7 +191,7 @@ pub const HexadecimalFormat = struct { | ||||
| 		.casing = .lower, | ||||
| 	}; | ||||
| 
 | ||||
| 	pub fn print(self: HexadecimalFormat, writer: io.Writer, value: anytype) PrintError!void { | ||||
| 	pub fn print(self: HexadecimalFormat, writer: io.Writer, value: anytype) ?usize { | ||||
| 		// TODO: Implement. | ||||
| 		_ = self; | ||||
| 		_ = writer; | ||||
| @ -176,19 +199,32 @@ pub const HexadecimalFormat = struct { | ||||
| 	} | ||||
| }; | ||||
| 
 | ||||
| pub const PrintError = error { | ||||
| 	PrintFailed, | ||||
| 	PrintIncomplete, | ||||
| }; | ||||
| pub fn alloc_formatted(allocator: io.Allocator, comptime format: []const u8, args: anytype) io.AllocationError![]io.Byte { | ||||
| 	const formatted_len = print_formatted(io.null_writer, format, args); | ||||
| 
 | ||||
| pub fn print_string(writer: io.Writer, utf8: []const io.Byte) PrintError!void { | ||||
| 	if ((writer.invoke(utf8) orelse return error.PrintFailed) != utf8.len) { | ||||
| 		return error.PrintIncomplete; | ||||
| 	debug.assert(formatted_len != null); | ||||
| 
 | ||||
| 	const allocation = try allocator.reallocate(null, formatted_len.?); | ||||
| 
 | ||||
| 	errdefer allocator.deallocate(allocation); | ||||
| 
 | ||||
| 	{ | ||||
| 		var fixed_buffer = io.FixedBuffer{.bytes = allocation}; | ||||
| 
 | ||||
| 		debug.assert(print_formatted(fixed_buffer.as_writer(), format, args) == formatted_len); | ||||
| 	} | ||||
| 
 | ||||
| 	return allocation; | ||||
| } | ||||
| 
 | ||||
| pub fn print_formatted(writer: io.Writer, comptime format: []const u8, arguments: anytype) PrintError!void { | ||||
| 	switch (@typeInfo(@TypeOf(arguments))) { | ||||
| pub fn print_string(writer: io.Writer, utf8: []const u8) ?usize { | ||||
| 	return writer.invoke(utf8); | ||||
| } | ||||
| 
 | ||||
| pub fn print_formatted(writer: io.Writer, comptime format: []const u8, args: anytype) ?usize { | ||||
| 	var length = @as(usize, 0); | ||||
| 
 | ||||
| 	switch (@typeInfo(@TypeOf(args))) { | ||||
| 		.Struct => |arguments_struct| { | ||||
| 			comptime var arg_index = 0; | ||||
| 			comptime var head = 0; | ||||
| @ -204,8 +240,7 @@ pub fn print_formatted(writer: io.Writer, comptime format: []const u8, arguments | ||||
| 
 | ||||
| 					switch (format[tail]) { | ||||
| 						'{' => { | ||||
| 							try print_string(writer, format[head .. (tail - 1)]); | ||||
| 
 | ||||
| 							length += print_string(writer, format[head .. (tail - 1)]) orelse return null; | ||||
| 							tail += 1; | ||||
| 							head = tail; | ||||
| 						}, | ||||
| @ -215,8 +250,7 @@ pub fn print_formatted(writer: io.Writer, comptime format: []const u8, arguments | ||||
| 								@compileError("all format specifiers must be named when using a named struct"); | ||||
| 							} | ||||
| 
 | ||||
| 							try print_string(writer, arguments[arg_index]); | ||||
| 
 | ||||
| 							length += print_string(writer, args[arg_index]) orelse return null; | ||||
| 							arg_index += 1; | ||||
| 							tail += 1; | ||||
| 							head = tail; | ||||
| @ -227,8 +261,7 @@ pub fn print_formatted(writer: io.Writer, comptime format: []const u8, arguments | ||||
| 								@compileError("format specifiers cannot be named when using a tuple struct"); | ||||
| 							} | ||||
| 
 | ||||
| 							try print_string(writer, format[head .. (tail - 1)]); | ||||
| 
 | ||||
| 							length += print_string(writer, format[head .. (tail - 1)]) orelse return null; | ||||
| 							head = tail; | ||||
| 							tail += 1; | ||||
| 
 | ||||
| @ -244,21 +277,24 @@ pub fn print_formatted(writer: io.Writer, comptime format: []const u8, arguments | ||||
| 								debug.assert(tail < format.len); | ||||
| 							} | ||||
| 
 | ||||
| 							try print_value(writer, @field(arguments, format[head .. tail])); | ||||
| 
 | ||||
| 							length += print_value(writer, @field(args, format[head .. tail])) orelse return null; | ||||
| 							tail += 1; | ||||
| 							head = tail; | ||||
| 						} | ||||
| 					} | ||||
| 				} | ||||
| 			} | ||||
| 
 | ||||
| 			length += print_string(writer, format[head .. ]) orelse return null; | ||||
| 		}, | ||||
| 
 | ||||
| 		else => @compileError("`arguments` must be a struct type"), | ||||
| 	} | ||||
| 
 | ||||
| 	return length; | ||||
| } | ||||
| 
 | ||||
| noinline fn print_value(writer: io.Writer, value: anytype) PrintError!void { | ||||
| noinline fn print_value(writer: io.Writer, value: anytype) ?usize { | ||||
| 	const Value = @TypeOf(value); | ||||
| 
 | ||||
| 	return switch (@typeInfo(Value)) { | ||||
|  | ||||
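Since print_string, print_formatted, and DecimalFormat.print now report a byte count (or null on failure) instead of returning a PrintError, call sites measure output and chain with orelse. A rough sketch using the new alloc_formatted helper (the allocator and the values shown are illustrative):

	const line = try coral.utf8.alloc_formatted(allocator, "score: {value}\n", .{.value = @as(u32, 42)});

	defer allocator.deallocate(line);

	// io.null_writer accepts everything it is given, which is what lets
	// alloc_formatted measure the output length before allocating.
	const written = coral.utf8.print_string(coral.io.null_writer, line) orelse @panic("write failed");

	coral.debug.assert(written == line.len);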
| @ -61,88 +61,86 @@ const AllocationNode = struct { | ||||
| const Context = struct { | ||||
| 	head: ?*AllocationNode = null, | ||||
| 
 | ||||
| 	fn deallocate(self: *Context, allocation: []u8) void { | ||||
| 		switch (builtin.mode) { | ||||
| 			.Debug, .ReleaseSafe => { | ||||
| 				const panic_message = "incorrect allocation address for deallocating"; | ||||
| 				var current_node = self.head orelse @panic(panic_message); | ||||
| 	fn deallocate(_: *Context, allocation: []coral.io.Byte) void { | ||||
| 		// switch (builtin.mode) { | ||||
| 		// 	.Debug, .ReleaseSafe => { | ||||
| 		// 		const panic_message = "incorrect allocation address for deallocating"; | ||||
| 		// 		var current_node = self.head orelse @panic(panic_message); | ||||
| 
 | ||||
| 				if (current_node.owns_userdata(allocation)) { | ||||
| 					self.head = current_node.next; | ||||
| 		// 		if (current_node.owns_userdata(allocation)) { | ||||
| 		// 			self.head = current_node.next; | ||||
| 
 | ||||
| 					return current_node.dealloc(); | ||||
| 				} | ||||
| 		// 			return current_node.dealloc(); | ||||
| 		// 		} | ||||
| 
 | ||||
| 				while (true) { | ||||
| 					const next_node = current_node.next orelse @panic(panic_message); | ||||
| 		// 		while (true) { | ||||
| 		// 			const next_node = current_node.next orelse @panic(panic_message); | ||||
| 
 | ||||
| 					if (next_node.owns_userdata(allocation)) { | ||||
| 						current_node.next = next_node.next; | ||||
| 		// 			if (next_node.owns_userdata(allocation)) { | ||||
| 		// 				current_node.next = next_node.next; | ||||
| 
 | ||||
| 						return next_node.dealloc(); | ||||
| 					} | ||||
| 		// 				return next_node.dealloc(); | ||||
| 		// 			} | ||||
| 
 | ||||
| 					current_node = next_node; | ||||
| 				} | ||||
| 			}, | ||||
| 		// 			current_node = next_node; | ||||
| 		// 		} | ||||
| 		// 	}, | ||||
| 
 | ||||
| 			.ReleaseFast, .ReleaseSmall => { | ||||
| 			// .ReleaseFast, .ReleaseSmall => { | ||||
| 				ext.SDL_free(allocation.ptr); | ||||
| 			}, | ||||
| 		} | ||||
| 			// }, | ||||
| 		// } | ||||
| 	} | ||||
| 
 | ||||
| 	fn reallocate(self: *Context, return_address: usize, existing_allocation: ?[]u8, size: usize) coral.io.AllocationError![]u8 { | ||||
| 		switch (builtin.mode) { | ||||
| 			.Debug, .ReleaseSafe => { | ||||
| 				if (existing_allocation) |allocation| { | ||||
| 					const panic_message = "incorrect allocation address for reallocating"; | ||||
| 					var current_node = self.head orelse @panic(panic_message); | ||||
| 	fn reallocate(_: *Context, _: usize, existing_allocation: ?[]coral.io.Byte, size: usize) coral.io.AllocationError![]coral.io.Byte { | ||||
| 		// switch (builtin.mode) { | ||||
| 		// 	.Debug, .ReleaseSafe => { | ||||
| 		// 		if (existing_allocation) |allocation| { | ||||
| 		// 			const panic_message = "incorrect allocation address for reallocating"; | ||||
| 		// 			var current_node = self.head orelse @panic(panic_message); | ||||
| 
 | ||||
| 					if (current_node.owns_userdata(allocation)) { | ||||
| 						const node = current_node.realloc(size, return_address); | ||||
| 		// 			if (current_node.owns_userdata(allocation)) { | ||||
| 		// 				const node = current_node.realloc(size, return_address); | ||||
| 
 | ||||
| 						self.head = node; | ||||
| 		// 				self.head = node; | ||||
| 
 | ||||
| 						return node.userdata(); | ||||
| 					} | ||||
| 		// 				return node.userdata(); | ||||
| 		// 			} | ||||
| 
 | ||||
| 					while (true) { | ||||
| 						const next_node = current_node.next orelse @panic(panic_message); | ||||
| 		// 			while (true) { | ||||
| 		// 				const next_node = current_node.next orelse @panic(panic_message); | ||||
| 
 | ||||
| 						if (next_node.owns_userdata(allocation)) { | ||||
| 							const node = next_node.realloc(size, return_address); | ||||
| 		// 				if (next_node.owns_userdata(allocation)) { | ||||
| 		// 					const node = next_node.realloc(size, return_address); | ||||
| 
 | ||||
| 							current_node.next = node; | ||||
| 		// 					current_node.next = node; | ||||
| 
 | ||||
| 							return node.userdata(); | ||||
| 						} | ||||
| 		// 					return node.userdata(); | ||||
| 		// 				} | ||||
| 
 | ||||
| 						current_node = next_node; | ||||
| 					} | ||||
| 				} else { | ||||
| 					const node = AllocationNode.alloc(size, return_address); | ||||
| 		// 				current_node = next_node; | ||||
| 		// 			} | ||||
| 		// 		} else { | ||||
| 		// 			const node = AllocationNode.alloc(size, return_address); | ||||
| 
 | ||||
| 					if (self.head) |head| { | ||||
| 						node.next = head; | ||||
| 					} | ||||
| 		// 			if (self.head) |head| { | ||||
| 		// 				node.next = head; | ||||
| 		// 			} | ||||
| 
 | ||||
| 					self.head = node; | ||||
| 		// 			self.head = node; | ||||
| 
 | ||||
| 					return node.userdata(); | ||||
| 				} | ||||
| 			}, | ||||
| 		// 			return node.userdata(); | ||||
| 		// 		} | ||||
| 		// 	}, | ||||
| 
 | ||||
| 			.ReleaseFast, .ReleaseSmall => { | ||||
| 		// 	.ReleaseFast, .ReleaseSmall => { | ||||
| 				if (existing_allocation) |allocation | { | ||||
| 					return @as([*]u8, ext.SDL_realloc(allocation.ptr, size) orelse { | ||||
| 						return error.OutOfMemory; | ||||
| 					})[0 .. size]; | ||||
| 					return @as([*]coral.io.Byte, @ptrCast(ext.SDL_realloc(allocation.ptr, size) orelse return error.OutOfMemory))[0 .. size]; | ||||
| 				} | ||||
| 
 | ||||
| 				return @as([*]u8, ext.SDL_malloc(size) orelse return error.OutOfMemory)[0 .. size]; | ||||
| 			}, | ||||
| 		} | ||||
| 				return @as([*]u8, @ptrCast(ext.SDL_malloc(size) orelse return error.OutOfMemory))[0 .. size]; | ||||
| 		// 	}, | ||||
| 		// } | ||||
| 	} | ||||
| }; | ||||
| 
 | ||||
|  | ||||
							
								
								
									
source/ona/kym.zig: 1502 changed lines (file diff suppressed because it is too large)
source/ona/kym/Chunk.zig: 570 changed lines (new file)
							| @ -0,0 +1,570 @@ | ||||
| const Compiler = @import("./Compiler.zig"); | ||||
| 
 | ||||
| const coral = @import("coral"); | ||||
| 
 | ||||
| const file = @import("../file.zig"); | ||||
| 
 | ||||
| const kym = @import("../kym.zig"); | ||||
| 
 | ||||
| const tree = @import("./tree.zig"); | ||||
| 
 | ||||
| name: *kym.RuntimeRef, | ||||
| arity: u8, | ||||
| opcodes: OpcodeList, | ||||
| constants: ConstList, | ||||
| 
 | ||||
| const Builtin = enum { | ||||
| 	import, | ||||
| 	print, | ||||
| 	vec2, | ||||
| 	vec3, | ||||
| }; | ||||
| 
 | ||||
| const ConstList = coral.list.Stack(*kym.RuntimeRef); | ||||
| 
 | ||||
| const OpcodeList = coral.list.Stack(union (enum) { | ||||
| 	pop, | ||||
| 	push_nil, | ||||
| 	push_true, | ||||
| 	push_false, | ||||
| 	push_const: u16, | ||||
| 	push_local: u8, | ||||
| 	push_top, | ||||
| 	push_table: u32, | ||||
| 	push_builtin: Builtin, | ||||
| 	push_closure: u8, | ||||
| 	push_self, | ||||
| 	push_boxed, | ||||
| 	set_local: u8, | ||||
| 	get_dynamic, | ||||
| 	set_dynamic, | ||||
| 	call: u8, | ||||
| 
 | ||||
| 	not, | ||||
| 	neg, | ||||
| 
 | ||||
| 	add, | ||||
| 	sub, | ||||
| 	mul, | ||||
| 	div, | ||||
| 
 | ||||
| 	eql, | ||||
| 	cgt, | ||||
| 	clt, | ||||
| 	cge, | ||||
| 	cle, | ||||
| 
 | ||||
| 	jt: u32, | ||||
| 	jf: u32, | ||||
| }); | ||||
| 
 | ||||
| const Self = @This(); | ||||
| 
 | ||||
| pub fn dump(chunk: Self, env: *kym.RuntimeEnv) kym.RuntimeError!*kym.RuntimeRef { | ||||
| 	var opcode_cursor = @as(u32, 0); | ||||
| 	var buffer = coral.list.ByteStack.make(env.allocator); | ||||
| 
 | ||||
| 	defer buffer.free(); | ||||
| 
 | ||||
| 	const writer = coral.list.stack_as_writer(&buffer); | ||||
| 	_ = coral.utf8.print_string(writer, coral.io.slice_sentineled(@as(coral.io.Byte, 0), try env.unbox_symbol(chunk.name))); | ||||
| 
 | ||||
| 	_ = coral.utf8.print_string(writer, ":\n"); | ||||
| 
 | ||||
| 	while (opcode_cursor < chunk.opcodes.values.len) : (opcode_cursor += 1) { | ||||
| 		_ = coral.utf8.print_formatted(writer, "[{instruction}]: ", .{.instruction = opcode_cursor}); | ||||
| 
 | ||||
| 		_ = switch (chunk.opcodes.values[opcode_cursor]) { | ||||
| 			.pop => coral.utf8.print_string(writer, "pop\n"), | ||||
| 			.push_nil => coral.utf8.print_string(writer, "push nil\n"), | ||||
| 			.push_true => coral.utf8.print_string(writer, "push true\n"), | ||||
| 			.push_false => coral.utf8.print_string(writer, "push false\n"), | ||||
| 
 | ||||
| 			.push_const => |push_const| print: { | ||||
| 				if (push_const >= chunk.constants.values.len) { | ||||
| 					return env.raise(error.IllegalState, "invalid constant"); | ||||
| 				} | ||||
| 
 | ||||
| 				const string_ref = try env.to_string(chunk.constants.values[push_const]); | ||||
| 
 | ||||
| 				defer env.discard(string_ref); | ||||
| 
 | ||||
| 				const string = string_ref.as_string(); | ||||
| 
 | ||||
| 				coral.debug.assert(string != null); | ||||
| 
 | ||||
| 				break: print coral.utf8.print_formatted(writer, "push const <{value}>\n", .{.value = string.?}); | ||||
| 			}, | ||||
| 
 | ||||
| 			.push_local => |push_local| coral.utf8.print_formatted(writer, "push local <{local}>\n", .{ | ||||
| 				.local = push_local, | ||||
| 			}), | ||||
| 
 | ||||
| 			.push_top => coral.utf8.print_string(writer, "push top\n"), | ||||
| 
 | ||||
| 			.push_table => |push_table| coral.utf8.print_formatted(writer, "push table <{count}>\n", .{ | ||||
| 				.count = push_table, | ||||
| 			}), | ||||
| 
 | ||||
| 			.push_boxed => coral.utf8.print_string(writer, "push boxed\n"), | ||||
| 			.push_self => coral.utf8.print_string(writer, "push self\n"), | ||||
| 
 | ||||
| 			.push_closure => |push_closure| coral.utf8.print_formatted(writer, "push closure <{count}>\n", .{ | ||||
| 				.count = push_closure, | ||||
| 			}), | ||||
| 
 | ||||
| 			.push_builtin => |push_builtin| coral.utf8.print_formatted(writer, "push builtin <{builtin}>\n", .{ | ||||
| 				.builtin = switch (push_builtin) { | ||||
| 					.import => "import", | ||||
| 					.print => "print", | ||||
| 					.vec2 => "vec2", | ||||
| 					.vec3 => "vec3", | ||||
| 				}, | ||||
| 			}), | ||||
| 
 | ||||
| 			.set_local => |local_set| coral.utf8.print_formatted(writer, "set local <{local}>\n", .{.local = local_set}), | ||||
| 			.get_dynamic => coral.utf8.print_string(writer, "get dynamic\n"), | ||||
| 			.set_dynamic => coral.utf8.print_string(writer, "set dynamic\n"), | ||||
| 			.call => |call| coral.utf8.print_formatted(writer, "call <{count}>\n", .{.count = call}), | ||||
| 			.not => coral.utf8.print_string(writer, "not\n"), | ||||
| 			.neg => coral.utf8.print_string(writer, "neg\n"), | ||||
| 			.add => coral.utf8.print_string(writer, "add\n"), | ||||
| 			.sub => coral.utf8.print_string(writer, "sub\n"), | ||||
| 			.mul => coral.utf8.print_string(writer, "mul\n"), | ||||
| 			.div => coral.utf8.print_string(writer, "div\n"), | ||||
| 			.eql => coral.utf8.print_string(writer, "eql\n"), | ||||
| 			.cgt => coral.utf8.print_string(writer, "cgt\n"), | ||||
| 			.clt => coral.utf8.print_string(writer, "clt\n"), | ||||
| 			.cge => coral.utf8.print_string(writer, "cge\n"), | ||||
| 			.cle => coral.utf8.print_string(writer, "cle\n"), | ||||
| 			.jf => |jf| coral.utf8.print_formatted(writer, "jf <{instruction}>\n", .{.instruction = jf}), | ||||
| 			.jt => |jt| coral.utf8.print_formatted(writer, "jt <{instruction}>\n", .{.instruction = jt}), | ||||
| 		}; | ||||
| 	} | ||||
| 
 | ||||
| 	return env.new_string(buffer.values); | ||||
| } | ||||
| 
 | ||||
| pub fn execute(chunk: Self, env: *kym.RuntimeEnv, frame: kym.Frame) kym.RuntimeError!?*kym.RuntimeRef { | ||||
| 	var opcode_cursor = @as(u32, 0); | ||||
| 
 | ||||
| 	while (opcode_cursor < chunk.opcodes.values.len) : (opcode_cursor += 1) { | ||||
| 		switch (chunk.opcodes.values[opcode_cursor]) { | ||||
| 			.pop => { | ||||
| 				if (try env.pop_local()) |ref| { | ||||
| 					env.discard(ref); | ||||
| 				} | ||||
| 			}, | ||||
| 
 | ||||
| 			.push_nil => try env.locals.push_one(null), | ||||
| 			.push_true => try env.locals.push_one(try env.new_boolean(true)), | ||||
| 			.push_false => try env.locals.push_one(try env.new_boolean(false)), | ||||
| 
 | ||||
| 			.push_const => |push_const| { | ||||
| 				if (push_const >= chunk.constants.values.len) { | ||||
| 					return env.raise(error.IllegalState, "invalid constant"); | ||||
| 				} | ||||
| 
 | ||||
| 				try env.locals.push_one(chunk.constants.values[push_const].acquire()); | ||||
| 			}, | ||||
| 
 | ||||
| 			.push_local => |push_local| { | ||||
| 				if (push_local >= env.locals.values.len) { | ||||
| 					return env.raise(error.IllegalState, "invalid local"); | ||||
| 				} | ||||
| 
 | ||||
| 				if (env.locals.values[frame.locals_top + push_local]) |local| { | ||||
| 					try env.locals.push_one(local.acquire()); | ||||
| 				} else { | ||||
| 					try env.locals.push_one(null); | ||||
| 				} | ||||
| 			}, | ||||
| 
 | ||||
| 			.push_top => { | ||||
| 				const frame_locals = env.locals.values[frame.locals_top ..]; | ||||
| 
 | ||||
| 				if (frame_locals.len == 0) { | ||||
| 					return env.raise(error.IllegalState, "stack overflow"); | ||||
| 				} | ||||
| 
 | ||||
| 				if (frame_locals[frame_locals.len - 1]) |local| { | ||||
| 					try env.locals.push_one(local.acquire()); | ||||
| 				} else { | ||||
| 					try env.locals.push_one(null); | ||||
| 				} | ||||
| 			}, | ||||
| 
 | ||||
| 			.push_table => |push_table| { | ||||
| 				const table = try env.new_table(); | ||||
| 
 | ||||
| 				errdefer env.discard(table); | ||||
| 
 | ||||
| 				{ | ||||
| 					var popped = @as(usize, 0); | ||||
| 
 | ||||
| 					while (popped < push_table) : (popped += 1) { | ||||
| 						const index = try env.expect(try env.pop_local()); | ||||
| 
 | ||||
| 						defer env.discard(index); | ||||
| 
 | ||||
| 						if (try env.pop_local()) |value| { | ||||
| 							defer env.discard(value); | ||||
| 
 | ||||
| 							try env.set(table, index, value); | ||||
| 						} | ||||
| 					} | ||||
| 				} | ||||
| 
 | ||||
| 				try env.locals.push_one(table); | ||||
| 			}, | ||||
| 
 | ||||
| 			.push_boxed => { | ||||
| 				// TODO: Implement. | ||||
| 				unreachable; | ||||
| 			}, | ||||
| 
 | ||||
| 			.push_self => { | ||||
| 				// TODO: Implement. | ||||
| 				unreachable; | ||||
| 			}, | ||||
| 
 | ||||
| 			.push_closure => |push_closure| { | ||||
| 				// TODO: Implement. | ||||
| 				_ = push_closure; | ||||
| 				unreachable; | ||||
| 			}, | ||||
| 
 | ||||
| 			.push_builtin => |push_builtin| { | ||||
| 				const builtin_syscall = try env.new_syscall(switch (push_builtin) { | ||||
| 					.import => syscall_import, | ||||
| 					.print => syscall_print, | ||||
| 					.vec2 => syscall_vec2, | ||||
| 					.vec3 => syscall_vec3, | ||||
| 				}); | ||||
| 
 | ||||
| 				errdefer env.discard(builtin_syscall); | ||||
| 
 | ||||
| 				try env.locals.push_one(builtin_syscall); | ||||
| 			}, | ||||
| 
 | ||||
| 			.set_local => |local_set| { | ||||
| 				const local = &env.locals.values[frame.locals_top + local_set]; | ||||
| 
 | ||||
| 				if (local.*) |previous_local| { | ||||
| 					env.discard(previous_local); | ||||
| 				} | ||||
| 
 | ||||
| 				local.* = try env.pop_local(); | ||||
| 			}, | ||||
| 
 | ||||
| 			.get_dynamic => { | ||||
| 				const index = try env.expect(try env.pop_local()); | ||||
| 
 | ||||
| 				defer env.discard(index); | ||||
| 
 | ||||
| 				const indexable = try env.expect(try env.pop_local()); | ||||
| 
 | ||||
| 				defer env.discard(indexable); | ||||
| 
 | ||||
| 				const value = try env.get(indexable, index); | ||||
| 
 | ||||
| 				errdefer { | ||||
| 					if (value) |ref| { | ||||
| 						env.discard(ref); | ||||
| 					} | ||||
| 				} | ||||
| 
 | ||||
| 				try env.locals.push_one(value); | ||||
| 			}, | ||||
| 
 | ||||
| 			.set_dynamic => { | ||||
| 				const value = try env.pop_local(); | ||||
| 
 | ||||
| 				defer { | ||||
| 					if (value) |ref| { | ||||
| 						env.discard(ref); | ||||
| 					} | ||||
| 				} | ||||
| 
 | ||||
| 				const index = try env.pop_local() orelse { | ||||
| 					return env.raise(error.TypeMismatch, "nil is not a valid index"); | ||||
| 				}; | ||||
| 
 | ||||
| 				defer env.discard(index); | ||||
| 
 | ||||
| 				const indexable = try env.pop_local() orelse { | ||||
| 					return env.raise(error.TypeMismatch, "nil is not a valid indexable"); | ||||
| 				}; | ||||
| 
 | ||||
| 				defer env.discard(indexable); | ||||
| 
 | ||||
| 				try env.set(indexable, index, value); | ||||
| 			}, | ||||
| 
 | ||||
| 			.call => |call| { | ||||
| 				const result = call: { | ||||
| 					const callable = try env.expect(try env.pop_local()); | ||||
| 
 | ||||
| 					defer env.discard(callable); | ||||
| 
 | ||||
| 					const call_frame = try env.push_frame(call); | ||||
| 
 | ||||
| 					defer env.pop_frame(); | ||||
| 
 | ||||
| 					break: call try env.call_frame(callable, call_frame); | ||||
| 				}; | ||||
| 
 | ||||
| 				errdefer { | ||||
| 					if (result) |ref| { | ||||
| 						env.discard(ref); | ||||
| 					} | ||||
| 				} | ||||
| 
 | ||||
| 				try env.locals.push_one(result); | ||||
| 			}, | ||||
| 
 | ||||
| 			.not => { | ||||
| 				if (try env.pop_local()) |value| { | ||||
| 					defer env.discard(value); | ||||
| 
 | ||||
| 					try env.locals.push_one(try env.new_boolean(!value.is_truthy())); | ||||
| 				} else { | ||||
| 					try env.locals.push_one(try env.new_boolean(true)); | ||||
| 				} | ||||
| 			}, | ||||
| 
 | ||||
| 			.neg => { | ||||
| 				const value = try env.expect(try env.pop_local()); | ||||
| 
 | ||||
| 				defer env.discard(value); | ||||
| 
 | ||||
| 				try env.locals.push_one(try env.neg(value)); | ||||
| 			}, | ||||
| 
 | ||||
| 			.add => { | ||||
| 				const rhs = try env.expect(try env.pop_local()); | ||||
| 
 | ||||
| 				defer env.discard(rhs); | ||||
| 
 | ||||
| 				const lhs = try env.expect(try env.pop_local()); | ||||
| 
 | ||||
| 				defer env.discard(lhs); | ||||
| 
 | ||||
| 				try env.locals.push_one(try env.add(lhs, rhs)); | ||||
| 			}, | ||||
| 
 | ||||
| 			.sub => { | ||||
| 				const rhs = try env.expect(try env.pop_local()); | ||||
| 
 | ||||
| 				defer env.discard(rhs); | ||||
| 
 | ||||
| 				const lhs = try env.expect(try env.pop_local()); | ||||
| 
 | ||||
| 				defer env.discard(lhs); | ||||
| 
 | ||||
| 				try env.locals.push_one(try env.sub(lhs, rhs)); | ||||
| 			}, | ||||
| 
 | ||||
| 			.mul => { | ||||
| 				const rhs = try env.expect(try env.pop_local()); | ||||
| 
 | ||||
| 				defer env.discard(rhs); | ||||
| 
 | ||||
| 				const lhs = try env.expect(try env.pop_local()); | ||||
| 
 | ||||
| 				defer env.discard(lhs); | ||||
| 
 | ||||
| 				try env.locals.push_one(try env.mul(lhs, rhs)); | ||||
| 			}, | ||||
| 
 | ||||
| 			.div => { | ||||
| 				const rhs = try env.expect(try env.pop_local()); | ||||
| 
 | ||||
| 				defer env.discard(rhs); | ||||
| 
 | ||||
| 				const lhs = try env.expect(try env.pop_local()); | ||||
| 
 | ||||
| 				defer env.discard(lhs); | ||||
| 
 | ||||
| 				try env.locals.push_one(try env.div(lhs, rhs)); | ||||
| 			}, | ||||
| 
 | ||||
| 			.eql => { | ||||
| 				if (try env.pop_local()) |rhs| { | ||||
| 					env.discard(rhs); | ||||
| 
 | ||||
| 					if (try env.pop_local()) |lhs| { | ||||
| 						env.discard(lhs); | ||||
| 
 | ||||
| 						try env.locals.push_one(try env.new_boolean(lhs.equals(rhs))); | ||||
| 					} else { | ||||
| 						try env.locals.push_one(try env.new_boolean(false)); | ||||
| 					} | ||||
| 				} else { | ||||
| 					if (try env.pop_local()) |lhs| { | ||||
| 						env.discard(lhs); | ||||
| 
 | ||||
| 						try env.locals.push_one(try env.new_boolean(false)); | ||||
| 					} else { | ||||
| 						try env.locals.push_one(try env.new_boolean(true)); | ||||
| 					} | ||||
| 				} | ||||
| 			}, | ||||
| 
 | ||||
| 			.cgt => { | ||||
| 				const rhs = try env.expect(try env.pop_local()); | ||||
| 
 | ||||
| 				defer env.discard(rhs); | ||||
| 
 | ||||
| 				const lhs = try env.expect(try env.pop_local()); | ||||
| 
 | ||||
| 				defer env.discard(lhs); | ||||
| 
 | ||||
| 				try env.locals.push_one(try env.new_boolean(try env.compare(lhs, rhs) > 0)); | ||||
| 			}, | ||||
| 
 | ||||
| 			.clt => { | ||||
| 				const rhs = try env.expect(try env.pop_local()); | ||||
| 
 | ||||
| 				defer env.discard(rhs); | ||||
| 
 | ||||
| 				const lhs = try env.expect(try env.pop_local()); | ||||
| 
 | ||||
| 				defer env.discard(lhs); | ||||
| 
 | ||||
| 				try env.locals.push_one(try env.new_boolean(try env.compare(lhs, rhs) < 0)); | ||||
| 			}, | ||||
| 
 | ||||
| 			.cge => { | ||||
| 				const rhs = try env.expect(try env.pop_local()); | ||||
| 
 | ||||
| 				defer env.discard(rhs); | ||||
| 
 | ||||
| 				const lhs = try env.expect(try env.pop_local()); | ||||
| 
 | ||||
| 				defer env.discard(lhs); | ||||
| 
 | ||||
| 				try env.locals.push_one(try env.new_boolean(try env.compare(lhs, rhs) >= 0)); | ||||
| 			}, | ||||
| 
 | ||||
| 			.cle => { | ||||
| 				const rhs = try env.expect(try env.pop_local()); | ||||
| 
 | ||||
| 				defer env.discard(rhs); | ||||
| 
 | ||||
| 				const lhs = try env.expect(try env.pop_local()); | ||||
| 
 | ||||
| 				defer env.discard(lhs); | ||||
| 
 | ||||
| 				try env.locals.push_one(try env.new_boolean(try env.compare(lhs, rhs) <= 0)); | ||||
| 			}, | ||||
| 
 | ||||
| 			.jf => |jf| { | ||||
| 				if (try env.pop_local()) |condition| { | ||||
| 					defer env.discard(condition); | ||||
| 
 | ||||
| 					if (!condition.is_truthy()) { | ||||
| 						opcode_cursor = jf; | ||||
| 					} | ||||
| 				} else { | ||||
| 					opcode_cursor = jf; | ||||
| 				} | ||||
| 			}, | ||||
| 
 | ||||
| 			.jt => |jt| { | ||||
| 				if (try env.pop_local()) |condition| { | ||||
| 					defer env.discard(condition); | ||||
| 
 | ||||
| 					if (condition.is_truthy()) { | ||||
| 						opcode_cursor = jt; | ||||
| 					} | ||||
| 				} | ||||
| 			}, | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	return env.pop_local(); | ||||
| } | ||||
| 
 | ||||
| pub fn free(self: *Self, env: *kym.RuntimeEnv) void { | ||||
| 	while (self.constants.pop()) |constant| { | ||||
| 		env.discard(constant); | ||||
| 	} | ||||
| 
 | ||||
| 	self.constants.free(); | ||||
| 	self.opcodes.free(); | ||||
| 	env.discard(self.name); | ||||
| } | ||||
| 
 | ||||
| pub fn make(env: *kym.RuntimeEnv, name: []const coral.io.Byte, environment: *const tree.Environment) kym.RuntimeError!Self { | ||||
| 	var chunk = Self{ | ||||
| 		.name = try env.new_symbol(name), | ||||
| 		.opcodes = OpcodeList.make(env.allocator), | ||||
| 		.constants = ConstList.make(env.allocator), | ||||
| 		.arity = 0, | ||||
| 	}; | ||||
| 
 | ||||
| 	var compiler = Compiler{ | ||||
| 		.chunk = &chunk, | ||||
| 		.env = env, | ||||
| 	}; | ||||
| 
 | ||||
| 	try compiler.compile_environment(environment); | ||||
| 
 | ||||
| 	return chunk; | ||||
| } | ||||
| 
 | ||||
| fn syscall_import(env: *kym.RuntimeEnv, frame: kym.Frame) kym.RuntimeError!?*kym.RuntimeRef { | ||||
| 	return env.import(file.Path.from(&.{try env.unbox_string(try frame.get_arg(env, 0))})); | ||||
| } | ||||
| 
 | ||||
| fn syscall_print(env: *kym.RuntimeEnv, frame: kym.Frame) kym.RuntimeError!?*kym.RuntimeRef { | ||||
| 	env.print(try env.unbox_string(try frame.get_arg(env, 0))); | ||||
| 
 | ||||
| 	return null; | ||||
| } | ||||
| 
 | ||||
| fn syscall_vec2(env: *kym.RuntimeEnv, frame: kym.Frame) kym.RuntimeError!?*kym.RuntimeRef { | ||||
| 	const x = @as(f32, @floatCast(try env.unbox_float(try frame.get_arg(env, 0)))); | ||||
| 
 | ||||
| 	if (frame.has_arg(env, 1)) |y| { | ||||
| 		return env.new_vector2(x, @floatCast(try env.unbox_float(y))); | ||||
| 	} | ||||
| 
 | ||||
| 	return env.new_vector2(x, x); | ||||
| } | ||||
| 
 | ||||
| fn syscall_vec3(env: *kym.RuntimeEnv, frame: kym.Frame) kym.RuntimeError!?*kym.RuntimeRef { | ||||
| 	const x = @as(f32, @floatCast(try env.unbox_float(try frame.get_arg(env, 0)))); | ||||
| 
 | ||||
| 	if (frame.has_arg(env, 1)) |y| { | ||||
| 		return env.new_vector3( | ||||
| 			x, | ||||
| 			@floatCast(try env.unbox_float(y)), | ||||
| 			@floatCast(try env.unbox_float(try frame.get_arg(env, 2))), | ||||
| 		); | ||||
| 	} | ||||
| 
 | ||||
| 	return env.new_vector3(x, x, x); | ||||
| } | ||||
| 
 | ||||
| pub const typeinfo = &kym.Typeinfo{ | ||||
| 	.size = @sizeOf(Self), | ||||
| 	.name = "lambda", | ||||
| 	.destruct = typeinfo_destruct, | ||||
| 	.call = typeinfo_call, | ||||
| }; | ||||
| 
 | ||||
| fn typeinfo_call(env: *kym.RuntimeEnv, userdata: []coral.io.Byte, frame: kym.Frame) kym.RuntimeError!?*kym.RuntimeRef { | ||||
| 	const chunk = @as(*Self, @ptrCast(@alignCast(userdata))); | ||||
| 
 | ||||
| 	if (frame.arg_count < chunk.arity) { | ||||
| 		return env.raise(error.BadOperation, "expected more arguments"); | ||||
| 	} | ||||
| 
 | ||||
| 	return chunk.execute(env, frame); | ||||
| } | ||||
| 
 | ||||
| fn typeinfo_destruct(env: *kym.RuntimeEnv, userdata: []coral.io.Byte) void { | ||||
| 	@as(*Self, @ptrCast(@alignCast(userdata))).free(env); | ||||
| } | ||||
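As a rough illustration of how these pieces compose from a caller's side, a debugging helper might look like the following (the helper itself is hypothetical; Chunk.make, dump, and free plus the RuntimeEnv calls are the ones defined in this file):

	fn dump_chunk(env: *kym.RuntimeEnv, environment: *const tree.Environment) kym.RuntimeError!void {
		var chunk = try Chunk.make(env, "<debug>", environment);

		defer chunk.free(env);

		// dump renders one line per opcode and returns the result as a string ref.
		const disassembly = try chunk.dump(env);

		defer env.discard(disassembly);

		env.print(try env.unbox_string(disassembly));
	}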
							
								
								
									
source/ona/kym/Compiler.zig: 370 changed lines (new file)
							| @ -0,0 +1,370 @@ | ||||
| const Chunk = @import("./Chunk.zig"); | ||||
| 
 | ||||
| const Expr = @import("./Expr.zig"); | ||||
| 
 | ||||
| const Stmt = @import("./Stmt.zig"); | ||||
| 
 | ||||
| const coral = @import("coral"); | ||||
| 
 | ||||
| const kym = @import("../kym.zig"); | ||||
| 
 | ||||
| const tree = @import("./tree.zig"); | ||||
| 
 | ||||
| chunk: *Chunk, | ||||
| env: *kym.RuntimeEnv, | ||||
| 
 | ||||
| const Self = @This(); | ||||
| 
 | ||||
| fn compile_argument(self: Self, environment: *const tree.Environment, initial_argument: ?*const Expr) kym.RuntimeError!u8 { | ||||
| 	// TODO: Exceeding 255 arguments will make the VM crash. | ||||
| 	var maybe_argument = initial_argument; | ||||
| 	var argument_count = @as(u8, 0); | ||||
| 
 | ||||
| 	while (maybe_argument) |argument| { | ||||
| 		try self.compile_expression(environment, argument); | ||||
| 
 | ||||
| 		maybe_argument = argument.next; | ||||
| 		argument_count += 1; | ||||
| 	} | ||||
| 
 | ||||
| 	return argument_count; | ||||
| } | ||||
| 
 | ||||
| fn compile_expression(self: Self, environment: *const tree.Environment, expression: *const Expr) kym.RuntimeError!void { | ||||
| 	const number_format = coral.utf8.DecimalFormat{ | ||||
| 		.delimiter = "_", | ||||
| 		.positive_prefix = .none, | ||||
| 	}; | ||||
| 
 | ||||
| 	switch (expression.kind) { | ||||
| 		.nil_literal => try self.chunk.opcodes.push_one(.push_nil), | ||||
| 		.true_literal => try self.chunk.opcodes.push_one(.push_true), | ||||
| 		.false_literal => try self.chunk.opcodes.push_one(.push_false), | ||||
| 
 | ||||
| 		.number_literal => |literal| { | ||||
| 			for (literal) |codepoint| { | ||||
| 				if (codepoint == '.') { | ||||
| 					return self.chunk.opcodes.push_one(.{ | ||||
| 						.push_const = try self.declare_float(number_format.parse(literal, kym.Float) orelse unreachable), | ||||
| 					}); | ||||
| 				} | ||||
| 			} | ||||
| 
 | ||||
| 			try self.chunk.opcodes.push_one(.{ | ||||
| 				.push_const = try self.declare_fixed(number_format.parse(literal, kym.Fixed) orelse unreachable), | ||||
| 			}); | ||||
| 		}, | ||||
| 
 | ||||
| 		.string_literal => |literal| { | ||||
| 			try self.chunk.opcodes.push_one(.{.push_const = try self.declare_string(literal)}); | ||||
| 		}, | ||||
| 
 | ||||
| 		.symbol_literal => |literal| { | ||||
| 			try self.chunk.opcodes.push_one(.{.push_const = try self.declare_symbol(literal)}); | ||||
| 		}, | ||||
| 
 | ||||
| 		.table_construct => |table_construct| { | ||||
| 			var table_entry = table_construct.entry; | ||||
| 			var field_count = @as(u32, 0); | ||||
| 
 | ||||
| 			while (table_entry) |entry| : (table_entry = entry.next) { | ||||
| 				try self.compile_expression(environment, entry); | ||||
| 
 | ||||
| 				if (entry.kind != .key_value) { | ||||
| 					try self.chunk.opcodes.push_one(.push_top); | ||||
| 				} | ||||
| 
 | ||||
| 				field_count += 1; | ||||
| 			} | ||||
| 
 | ||||
| 			try self.chunk.opcodes.push_one(.{.push_table = field_count}); | ||||
| 		}, | ||||
| 
 | ||||
| 		.key_value => |key_value| { | ||||
| 			try self.compile_expression(environment, key_value.value); | ||||
| 			try self.compile_expression(environment, key_value.key); | ||||
| 		}, | ||||
| 
 | ||||
| 		.lambda_construct => |lambda_construct| { | ||||
| 			var chunk = try Chunk.make(self.env, "<lambda>", lambda_construct.environment); | ||||
| 
 | ||||
| 			errdefer chunk.free(self.env); | ||||
| 
 | ||||
| 			try self.chunk.opcodes.push_one(.{.push_const = try self.declare_chunk(chunk)}); | ||||
| 
 | ||||
| 			if (lambda_construct.environment.capture_count != 0) { | ||||
| 				for (lambda_construct.environment.captures[0 .. lambda_construct.environment.capture_count]) |capture| { | ||||
| 					try self.chunk.opcodes.push_one(.{.push_local = environment.captures[capture]}); | ||||
| 				} | ||||
| 
 | ||||
| 				try self.chunk.opcodes.push_one(.{.push_closure = lambda_construct.environment.capture_count}); | ||||
| 			} | ||||
| 		}, | ||||
| 
 | ||||
| 		.binary_op => |binary_op| { | ||||
| 			try self.compile_expression(environment, binary_op.lhs_operand); | ||||
| 			try self.compile_expression(environment, binary_op.rhs_operand); | ||||
| 
 | ||||
| 			try self.chunk.opcodes.push_one(switch (binary_op.operation) { | ||||
| 				.addition => .add, | ||||
| 				.subtraction => .sub, | ||||
| 				.multiplication => .mul, | ||||
| 				.divsion => .div, | ||||
| 				.greater_equals_comparison => .cge, | ||||
| 				.greater_than_comparison => .cgt, | ||||
| 				.equals_comparison => .eql, | ||||
| 				.less_than_comparison => .clt, | ||||
| 				.less_equals_comparison => .cle, | ||||
| 			}); | ||||
| 		}, | ||||
| 
 | ||||
| 		.unary_op => |unary_op| { | ||||
| 			try self.compile_expression(environment, unary_op.operand); | ||||
| 
 | ||||
| 			try self.chunk.opcodes.push_one(switch (unary_op.operation) { | ||||
| 				.boolean_negation => .not, | ||||
| 				.numeric_negation => .neg, | ||||
| 			}); | ||||
| 		}, | ||||
| 
 | ||||
| 		.invoke => |invoke| { | ||||
| 			const argument_count = try self.compile_argument(environment, invoke.argument); | ||||
| 
 | ||||
| 			try self.compile_expression(environment, invoke.object); | ||||
| 			try self.chunk.opcodes.push_one(.{.call = argument_count}); | ||||
| 		}, | ||||
| 
 | ||||
| 		.group => |group| try self.compile_expression(environment, group), | ||||
| 		.import_builtin => try self.chunk.opcodes.push_one(.{.push_builtin = .import}), | ||||
| 		.print_builtin => try self.chunk.opcodes.push_one(.{.push_builtin = .print}), | ||||
| 		.vec2_builtin => try self.chunk.opcodes.push_one(.{.push_builtin = .vec2}), | ||||
| 		.vec3_builtin => try self.chunk.opcodes.push_one(.{.push_builtin = .vec3}), | ||||
| 
 | ||||
| 		.declaration_get => |declaration_get| { | ||||
| 			if (get_local_index(environment, declaration_get.declaration)) |index| { | ||||
| 				return self.chunk.opcodes.push_one(.{.push_local = index}); | ||||
| 			} | ||||
| 
 | ||||
| 			if (get_capture_index(environment, declaration_get.declaration)) |index| { | ||||
| 				try self.chunk.opcodes.push_one(.push_self); | ||||
| 				try self.chunk.opcodes.push_one(.{.push_const = try self.declare_fixed(index)}); | ||||
| 
 | ||||
| 				return self.chunk.opcodes.push_one(.get_dynamic); | ||||
| 			} | ||||
| 
 | ||||
| 			return self.env.raise(error.IllegalState, "local out of scope"); | ||||
| 		}, | ||||
| 
 | ||||
| 		.declaration_set => |declaration_set| { | ||||
| 			try self.compile_expression(environment, declaration_set.assign); | ||||
| 
 | ||||
| 			if (get_local_index(environment, declaration_set.declaration)) |index| { | ||||
| 				return self.chunk.opcodes.push_one(.{.set_local = index}); | ||||
| 			} | ||||
| 
 | ||||
| 			if (get_capture_index(environment, declaration_set.declaration)) |index| { | ||||
| 				try self.chunk.opcodes.push_one(.push_self); | ||||
| 				try self.chunk.opcodes.push_one(.{.push_const = try self.declare_fixed(index)}); | ||||
| 
 | ||||
| 				return self.chunk.opcodes.push_one(.set_dynamic); | ||||
| 			} | ||||
| 
 | ||||
| 			return self.env.raise(error.IllegalState, "local out of scope"); | ||||
| 		}, | ||||
| 
 | ||||
| 		.field_get => |field_get| { | ||||
| 			try self.compile_expression(environment, field_get.object); | ||||
| 			try self.chunk.opcodes.push_one(.{.push_const = try self.declare_symbol(field_get.identifier)}); | ||||
| 			try self.chunk.opcodes.push_one(.get_dynamic); | ||||
| 		}, | ||||
| 
 | ||||
| 		.field_set => |field_set| { | ||||
| 			try self.compile_expression(environment, field_set.object); | ||||
| 			try self.chunk.opcodes.push_one(.{.push_const = try self.declare_symbol(field_set.identifier)}); | ||||
| 			try self.compile_expression(environment, field_set.assign); | ||||
| 			try self.chunk.opcodes.push_one(.set_dynamic); | ||||
| 		}, | ||||
| 
 | ||||
| 		.subscript_get => |subscript_get| { | ||||
| 			try self.compile_expression(environment, subscript_get.object); | ||||
| 			try self.compile_expression(environment, subscript_get.index); | ||||
| 			try self.chunk.opcodes.push_one(.get_dynamic); | ||||
| 		}, | ||||
| 
 | ||||
| 		.subscript_set => |subscript_set| { | ||||
| 			try self.compile_expression(environment, subscript_set.object); | ||||
| 			try self.compile_expression(environment, subscript_set.index); | ||||
| 			try self.compile_expression(environment, subscript_set.assign); | ||||
| 			try self.chunk.opcodes.push_one(.set_dynamic); | ||||
| 		}, | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| pub fn compile_environment(self: Self, environment: *const tree.Environment) kym.RuntimeError!void { | ||||
| 	if (environment.statement) |statement| { | ||||
| 		const last_statement = try self.compile_statement(environment, statement); | ||||
| 
 | ||||
| 		if (last_statement.kind != .@"return") { | ||||
| 			try self.chunk.opcodes.push_one(.push_nil); | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| fn compile_statement(self: Self, environment: *const tree.Environment, initial_statement: *const Stmt) kym.RuntimeError!*const Stmt { | ||||
| 	var current_statement = initial_statement; | ||||
| 
 | ||||
| 	while (true) { | ||||
| 		switch (current_statement.kind) { | ||||
| 			.@"return" => |@"return"| { | ||||
| 				if (@"return".returned_expression) |expression| { | ||||
| 					try self.compile_expression(environment, expression); | ||||
| 				} else { | ||||
| 					try self.chunk.opcodes.push_one(.push_nil); | ||||
| 				} | ||||
| 			}, | ||||
| 
 | ||||
| 			.@"while" => |@"while"| { | ||||
| 				try self.compile_expression(environment, @"while".loop_expression); | ||||
| 				try self.chunk.opcodes.push_one(.{.jf = 0}); | ||||
| 
 | ||||
| 				const origin_index = @as(u32, @intCast(self.chunk.opcodes.values.len - 1)); | ||||
| 
 | ||||
| 				_ = try self.compile_statement(environment, @"while".loop); | ||||
| 				self.chunk.opcodes.values[origin_index].jf = @intCast(self.chunk.opcodes.values.len - 1); | ||||
| 
 | ||||
| 				try self.compile_expression(environment, @"while".loop_expression); | ||||
| 				try self.chunk.opcodes.push_one(.{.jt = origin_index}); | ||||
| 			}, | ||||
| 
 | ||||
| 			.@"if" => |@"if"| { | ||||
| 				try self.compile_expression(environment, @"if".then_expression); | ||||
| 				try self.chunk.opcodes.push_one(.{.jf = 0}); | ||||
| 
 | ||||
| 				const origin_index = @as(u32, @intCast(self.chunk.opcodes.values.len - 1)); | ||||
| 
 | ||||
| 				_ = try self.compile_statement(environment, @"if".@"then"); | ||||
| 				self.chunk.opcodes.values[origin_index].jf = @intCast(self.chunk.opcodes.values.len - 1); | ||||
| 
 | ||||
| 				if (@"if".@"else") |@"else"| { | ||||
| 					_ = try self.compile_statement(environment, @"else"); | ||||
| 				} | ||||
| 			}, | ||||
| 
 | ||||
| 			.declare => |declare| { | ||||
| 				try self.compile_expression(environment, declare.initial_expression); | ||||
| 
 | ||||
| 				if (declare.declaration.is_captured and !declare.declaration.is_readonly) { | ||||
| 					try self.chunk.opcodes.push_one(.push_boxed); | ||||
| 				} | ||||
| 			}, | ||||
| 
 | ||||
| 			.top_expression => |top_expression| { | ||||
| 				try self.compile_expression(environment, top_expression); | ||||
| 
 | ||||
| 				if (top_expression.kind == .invoke) { | ||||
| 					try self.chunk.opcodes.push_one(.pop); | ||||
| 				} | ||||
| 			}, | ||||
| 		} | ||||
| 
 | ||||
| 		current_statement = current_statement.next orelse return current_statement; | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| fn declare_chunk(self: Self, chunk: Chunk) kym.RuntimeError!u16 { | ||||
| 	if (self.chunk.constants.values.len == coral.math.max_int(@typeInfo(u16).Int)) { | ||||
| 		return self.env.raise(error.BadSyntax, "chunks cannot contain more than 65,535 constants"); | ||||
| 	} | ||||
| 
 | ||||
| 	const constant = try self.env.new_dynamic(coral.io.bytes_of(&chunk).ptr, Chunk.typeinfo); | ||||
| 
 | ||||
| 	errdefer self.env.discard(constant); | ||||
| 
 | ||||
| 	try self.chunk.constants.push_one(constant); | ||||
| 
 | ||||
| 	return @intCast(self.chunk.constants.values.len - 1); | ||||
| } | ||||
| 
 | ||||
| fn declare_fixed(self: Self, fixed: kym.Fixed) kym.RuntimeError!u16 { | ||||
| 	if (self.chunk.constants.values.len == coral.math.max_int(@typeInfo(u16).Int)) { | ||||
| 		return self.env.raise(error.BadSyntax, "chunks cannot contain more than 65,535 constants"); | ||||
| 	} | ||||
| 
 | ||||
| 	const constant = try self.env.new_fixed(fixed); | ||||
| 
 | ||||
| 	errdefer self.env.discard(constant); | ||||
| 
 | ||||
| 	try self.chunk.constants.push_one(constant); | ||||
| 
 | ||||
| 	return @intCast(self.chunk.constants.values.len - 1); | ||||
| } | ||||
| 
 | ||||
| fn declare_float(self: Self, float: kym.Float) kym.RuntimeError!u16 { | ||||
| 	if (self.chunk.constants.values.len == coral.math.max_int(@typeInfo(u16).Int)) { | ||||
| 		return self.env.raise(error.BadSyntax, "chunks cannot contain more than 65,535 constants"); | ||||
| 	} | ||||
| 
 | ||||
| 	const constant = try self.env.new_float(float); | ||||
| 
 | ||||
| 	errdefer self.env.discard(constant); | ||||
| 
 | ||||
| 	try self.chunk.constants.push_one(constant); | ||||
| 
 | ||||
| 	return @intCast(self.chunk.constants.values.len - 1); | ||||
| } | ||||
| 
 | ||||
| fn declare_string(self: Self, string: []const coral.io.Byte) kym.RuntimeError!u16 { | ||||
| 	if (self.chunk.constants.values.len == coral.math.max_int(@typeInfo(u16).Int)) { | ||||
| 		return self.env.raise(error.BadSyntax, "chunks cannot contain more than 65,535 constants"); | ||||
| 	} | ||||
| 
 | ||||
| 	const constant = try self.env.new_string(string); | ||||
| 
 | ||||
| 	errdefer self.env.discard(constant); | ||||
| 
 | ||||
| 	try self.chunk.constants.push_one(constant); | ||||
| 
 | ||||
| 	return @intCast(self.chunk.constants.values.len - 1); | ||||
| } | ||||
| 
 | ||||
| fn declare_symbol(self: Self, symbol: []const coral.io.Byte) kym.RuntimeError!u16 { | ||||
| 	if (self.chunk.constants.values.len == coral.math.max_int(@typeInfo(u16).Int)) { | ||||
| 		return self.env.raise(error.BadSyntax, "chunks cannot contain more than 65,535 constants"); | ||||
| 	} | ||||
| 
 | ||||
| 	const constant = try self.env.new_symbol(symbol); | ||||
| 
 | ||||
| 	errdefer self.env.discard(constant); | ||||
| 
 | ||||
| 	try self.chunk.constants.push_one(constant); | ||||
| 
 | ||||
| 	return @intCast(self.chunk.constants.values.len - 1); | ||||
| } | ||||
| 
 | ||||
| pub fn get_capture_index(self: *const tree.Environment, declaration: *const tree.Declaration) ?u8 { | ||||
| 	if (self.enclosing) |enclosing_environment| { | ||||
| 		var capture_index = @as(u8, 0); | ||||
| 
 | ||||
| 		while (capture_index < self.capture_count) : (capture_index += 1) { | ||||
| 			if (&enclosing_environment.local_declarations[capture_index] == declaration) { | ||||
| 				return capture_index; | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	return null; | ||||
| } | ||||
| 
 | ||||
| pub fn get_local_index(self: *const tree.Environment, declaration: *const tree.Declaration) ?u8 { | ||||
| 	var remaining = self.local_declaration_count; | ||||
| 
 | ||||
| 	while (remaining != 0) { | ||||
| 		remaining -= 1; | ||||
| 
 | ||||
| 		if (&self.local_declarations[remaining] == declaration) { | ||||
| 			return remaining; | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	return null; | ||||
| } | ||||
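For orientation, the compiler above lowers expressions into short linear opcode runs; an expression like x + 1, with x in local slot 0, would come out of compile_expression roughly as the following sequence (rendered in the notation Chunk.dump uses):

	push local <0>
	push const <1>
	add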
							
								
								
									
source/ona/kym/Expr.zig: 620 changed lines (new file)
							| @ -0,0 +1,620 @@ | ||||
| const Stmt = @import("./Stmt.zig"); | ||||
| 
 | ||||
| const coral = @import("coral"); | ||||
| 
 | ||||
| const tokens = @import("./tokens.zig"); | ||||
| 
 | ||||
| const tree = @import("./tree.zig"); | ||||
| 
 | ||||
| next: ?*const Self = null, | ||||
| 
 | ||||
| kind: union (enum) { | ||||
| 	nil_literal, | ||||
| 	true_literal, | ||||
| 	false_literal, | ||||
| 	number_literal: []const coral.io.Byte, | ||||
| 	string_literal: []const coral.io.Byte, | ||||
| 	symbol_literal: []const coral.io.Byte, | ||||
| 	table_construct: TableConstruct, | ||||
| 	key_value: KeyValue, | ||||
| 	group: *Self, | ||||
| 	lambda_construct: LambdaConstruct, | ||||
| 	declaration_get: DeclarationGet, | ||||
| 	declaration_set: DeclarationSet, | ||||
| 	field_get: FieldGet, | ||||
| 	field_set: FieldSet, | ||||
| 	subscript_get: SubscriptGet, | ||||
| 	subscript_set: SubscriptSet, | ||||
| 	binary_op: BinaryOp, | ||||
| 	unary_op: UnaryOp, | ||||
| 	invoke: Invoke, | ||||
| 	import_builtin, | ||||
| 	print_builtin, | ||||
| 	vec2_builtin, | ||||
| 	vec3_builtin, | ||||
| }, | ||||
| 
 | ||||
| pub const BinaryOp = struct { | ||||
| 	rhs_operand: *Self, | ||||
| 	lhs_operand: *Self, | ||||
| 	operation: Operation, | ||||
| 
 | ||||
| 	pub const Operation = enum { | ||||
| 		addition, | ||||
| 		subtraction, | ||||
| 		multiplication, | ||||
| 		divsion, | ||||
| 		equals_comparison, | ||||
| 		greater_than_comparison, | ||||
| 		greater_equals_comparison, | ||||
| 		less_than_comparison, | ||||
| 		less_equals_comparison, | ||||
| 	}; | ||||
| 
 | ||||
| 	fn parser(comptime parse_next: Parser, comptime operations: []const BinaryOp.Operation) Parser { | ||||
| 		const BinaryOpParser = struct { | ||||
| 			fn parse(root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self { | ||||
| 				var expression = try parse_next(root, stream, environment); | ||||
| 
 | ||||
| 				inline for (operations) |operation| { | ||||
| 					const token = comptime @as(tokens.Token, switch (operation) { | ||||
| 						.addition => .symbol_plus, | ||||
| 						.subtraction => .symbol_minus, | ||||
| 						.multiplication => .symbol_asterisk, | ||||
| 						.divsion => .symbol_forward_slash, | ||||
| 						.equals_comparison => .symbol_double_equals, | ||||
| 						.greater_than_comparison => .symbol_greater_than, | ||||
| 						.greater_equals_comparison => .symbol_greater_equals, | ||||
| 						.less_than_comparison => .symbol_less_than, | ||||
| 						.less_equals_comparison => .symbol_less_equals, | ||||
| 					}); | ||||
| 
 | ||||
| 					if (stream.token == coral.io.tag_of(token)) { | ||||
| 						stream.step(); | ||||
| 
 | ||||
| 						if (stream.token == .end) { | ||||
| 							return root.report_error(stream, "expected other half of expression after `" ++ comptime token.text() ++ "`", .{}); | ||||
| 						} | ||||
| 
 | ||||
| 						// TODO: Remove once Zig has fixed struct self-reassignment. | ||||
| 						const unnecessary_temp = expression; | ||||
| 
 | ||||
| 						expression = try root.create_expr(.{ | ||||
| 							.kind = .{ | ||||
| 								.binary_op = .{ | ||||
| 									.rhs_operand = try parse_next(root, stream, environment), | ||||
| 									.operation = operation, | ||||
| 									.lhs_operand = unnecessary_temp, | ||||
| 								}, | ||||
| 							}, | ||||
| 						}); | ||||
| 					} | ||||
| 				} | ||||
| 
 | ||||
| 				return expression; | ||||
| 			} | ||||
| 		}; | ||||
| 
 | ||||
| 		return BinaryOpParser.parse; | ||||
| 	} | ||||
| }; | ||||
| 
 | ||||
| pub const DeclarationGet = struct { | ||||
| 	declaration: *const tree.Declaration, | ||||
| }; | ||||
| 
 | ||||
| pub const DeclarationSet = struct { | ||||
| 	declaration: *const tree.Declaration, | ||||
| 	assign: *const Self, | ||||
| }; | ||||
| 
 | ||||
| pub const FieldGet = struct { | ||||
| 	identifier: []const coral.io.Byte, | ||||
| 	object: *const Self, | ||||
| }; | ||||
| 
 | ||||
| pub const FieldSet = struct { | ||||
| 	identifier: []const coral.io.Byte, | ||||
| 	object: *const Self, | ||||
| 	assign: *const Self, | ||||
| }; | ||||
| 
 | ||||
| pub const Invoke = struct { | ||||
| 	argument: ?*const Self, | ||||
| 	object: *const Self, | ||||
| }; | ||||
| 
 | ||||
| pub const KeyValue = struct { | ||||
| 	key: *const Self, | ||||
| 	value: *const Self, | ||||
| }; | ||||
| 
 | ||||
| pub const LambdaConstruct = struct { | ||||
| 	environment: *const tree.Environment, | ||||
| }; | ||||
| 
 | ||||
| const Parser = fn (root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self; | ||||
| 
 | ||||
| const Self = @This(); | ||||
| 
 | ||||
| pub const SubscriptGet = struct { | ||||
| 	index: *const Self, | ||||
| 	object: *const Self, | ||||
| }; | ||||
| 
 | ||||
| pub const SubscriptSet = struct { | ||||
| 	index: *const Self, | ||||
| 	object: *const Self, | ||||
| 	assign: *const Self, | ||||
| }; | ||||
| 
 | ||||
| pub const TableConstruct = struct { | ||||
| 	entry: ?*const Self, | ||||
| }; | ||||
| 
 | ||||
| pub const UnaryOp = struct { | ||||
| 	operand: *Self, | ||||
| 	operation: Operation, | ||||
| 
 | ||||
| 	pub const Operation = enum { | ||||
| 		numeric_negation, | ||||
| 		boolean_negation, | ||||
| 	}; | ||||
| }; | ||||
| 
 | ||||
| pub fn parse(root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self { | ||||
| 	const expression = try parse_additive(root, stream, environment); | ||||
| 
 | ||||
| 	if (stream.token == .symbol_equals) { | ||||
| 		stream.skip_newlines(); | ||||
| 
 | ||||
| 		if (stream.token == .end) { | ||||
| 			return root.report_error(stream, "expected assignment after `=`", .{}); | ||||
| 		} | ||||
| 
 | ||||
| 		return root.create_expr(.{ | ||||
| 			.kind = switch (expression.kind) { | ||||
| 				.declaration_get => |declaration_get| .{ | ||||
| 					.declaration_set = .{ | ||||
| 						.assign = try parse(root, stream, environment), | ||||
| 						.declaration = declaration_get.declaration, | ||||
| 					}, | ||||
| 				}, | ||||
| 
 | ||||
| 				.field_get => |field_get| .{ | ||||
| 					.field_set = .{ | ||||
| 						.assign = try parse(root, stream, environment), | ||||
| 						.object = field_get.object, | ||||
| 						.identifier = field_get.identifier, | ||||
| 					}, | ||||
| 				}, | ||||
| 
 | ||||
| 				.subscript_get => |subscript_get| .{ | ||||
| 					.subscript_set = .{ | ||||
| 						.assign = try parse(root, stream, environment), | ||||
| 						.object = subscript_get.object, | ||||
| 						.index = subscript_get.index, | ||||
| 					}, | ||||
| 				}, | ||||
| 
 | ||||
| 				else => return root.report_error(stream, "expected local or field on left-hand side of expression", .{}), | ||||
| 			}, | ||||
| 		}); | ||||
| 	} | ||||
| 
 | ||||
| 	return expression; | ||||
| } | ||||
| 
 | ||||
| const parse_additive = BinaryOp.parser(parse_equality, &.{ | ||||
| 	.addition, | ||||
| 	.subtraction, | ||||
| }); | ||||
| 
 | ||||
| const parse_comparison = BinaryOp.parser(parse_term, &.{ | ||||
| 	.greater_than_comparison, | ||||
| 	.greater_equals_comparison, | ||||
| 	.less_than_comparison, | ||||
| 	.less_equals_comparison | ||||
| }); | ||||
| 
 | ||||
| const parse_equality = BinaryOp.parser(parse_comparison, &.{ | ||||
| 	.equals_comparison, | ||||
| }); | ||||
| 
 | ||||
| fn parse_factor(root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self { | ||||
| 	var expression = try parse_operand(root, stream, environment); | ||||
| 
 | ||||
| 	while (true) { | ||||
| 		switch (stream.token) { | ||||
| 			.symbol_period => { | ||||
| 				stream.skip_newlines(); | ||||
| 
 | ||||
| 				// TODO: Remove when Zig fixes miscompilation with in-place struct re-assignment. | ||||
| 				const unnecessary_temp = expression; | ||||
| 
 | ||||
| 				expression = try root.create_expr(.{ | ||||
| 					.next = null, | ||||
| 
 | ||||
| 					.kind = .{ | ||||
| 						.field_get = .{ | ||||
| 							.identifier = switch (stream.token) { | ||||
| 								.identifier => |field_identifier| field_identifier, | ||||
| 								else => return root.report_error(stream, "expected identifier after `.`", .{}), | ||||
| 							}, | ||||
| 
 | ||||
| 							.object = unnecessary_temp, | ||||
| 						}, | ||||
| 					}, | ||||
| 				}); | ||||
| 
 | ||||
| 				stream.skip_newlines(); | ||||
| 			}, | ||||
| 
 | ||||
| 			.symbol_bracket_left => { | ||||
| 				stream.skip_newlines(); | ||||
| 
 | ||||
| 				// TODO: Remove when Zig fixes miscompilation with in-place struct re-assignment. | ||||
| 				const unnecessary_temp = expression; | ||||
| 
 | ||||
| 				expression = try root.create_expr(.{ | ||||
| 					.next = null, | ||||
| 
 | ||||
| 					.kind = .{ | ||||
| 						.subscript_get = .{ | ||||
| 							.index = try parse(root, stream, environment), | ||||
| 							.object = unnecessary_temp, | ||||
| 						}, | ||||
| 					}, | ||||
| 				}); | ||||
| 
 | ||||
| 				if (stream.token != .symbol_bracket_right) { | ||||
| 					return root.report_error(stream, "expected closing `]` on subscript", .{}); | ||||
| 				} | ||||
| 
 | ||||
| 				stream.skip_newlines(); | ||||
| 			}, | ||||
| 
 | ||||
| 			.symbol_paren_left => { | ||||
| 				stream.skip_newlines(); | ||||
| 
 | ||||
| 				var first_argument = @as(?*Self, null); | ||||
| 
 | ||||
| 				if (stream.token != .symbol_paren_right) { | ||||
| 					var argument = try parse(root, stream, environment); | ||||
| 
 | ||||
| 					first_argument = argument; | ||||
| 
 | ||||
| 					while (true) { | ||||
| 						switch (stream.token) { | ||||
| 							.symbol_comma => stream.skip_newlines(), | ||||
| 							.symbol_paren_right => break, | ||||
| 							else => return root.report_error(stream, "expected `,` or `)` after lambda argument", .{}), | ||||
| 						} | ||||
| 
 | ||||
| 						const next_argument = try parse(root, stream, environment); | ||||
| 
 | ||||
| 						argument.next = next_argument; | ||||
| 						argument = next_argument; | ||||
| 					} | ||||
| 				} | ||||
| 
 | ||||
| 				stream.skip_newlines(); | ||||
| 
 | ||||
| 				// TODO: Remove when Zig fixes miscompilation with in-place struct re-assignment. | ||||
| 				const unnecessary_temp = expression; | ||||
| 
 | ||||
| 				expression = try root.create_expr(.{ | ||||
| 					.next = null, | ||||
| 
 | ||||
| 					.kind = .{ | ||||
| 						.invoke = .{ | ||||
| 							.argument = first_argument, | ||||
| 							.object = unnecessary_temp, | ||||
| 						}, | ||||
| 					}, | ||||
| 				}); | ||||
| 			}, | ||||
| 
 | ||||
| 			else => break, | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	return expression; | ||||
| } | ||||
| 
 | ||||
| fn parse_operand(root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self { | ||||
| 	switch (stream.token) { | ||||
| 		.symbol_paren_left => { | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			const expression = try parse(root, stream, environment); | ||||
| 
 | ||||
| 			if (stream.token != .symbol_paren_right) { | ||||
| 				return root.report_error(stream, "expected a closing `)` after expression", .{}); | ||||
| 			} | ||||
| 
 | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			return root.create_expr(.{.kind = .{.group = expression}}); | ||||
| 		}, | ||||
| 
 | ||||
| 		.keyword_nil => { | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			return root.create_expr(.{.kind = .nil_literal}); | ||||
| 		}, | ||||
| 
 | ||||
| 		.keyword_true => { | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			return root.create_expr(.{.kind = .true_literal}); | ||||
| 		}, | ||||
| 
 | ||||
| 		.keyword_false => { | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			return root.create_expr(.{.kind = .false_literal}); | ||||
| 		}, | ||||
| 
 | ||||
| 		.number => |value| { | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			return root.create_expr(.{.kind = .{.number_literal = value}}); | ||||
| 		}, | ||||
| 
 | ||||
| 		.string => |value| { | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			return root.create_expr(.{.kind = .{.string_literal = value}}); | ||||
| 		}, | ||||
| 
 | ||||
| 		.symbol_at => { | ||||
| 			stream.step(); | ||||
| 
 | ||||
| 			const identifier = switch (stream.token) { | ||||
| 				.identifier => |identifier| identifier, | ||||
| 				else => return root.report_error(stream, "expected identifier after `@`", .{}), | ||||
| 			}; | ||||
| 
 | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			if (coral.io.are_equal(identifier, "import")) { | ||||
| 				return root.create_expr(.{.kind = .import_builtin}); | ||||
| 			} | ||||
| 
 | ||||
| 			if (coral.io.are_equal(identifier, "print")) { | ||||
| 				return root.create_expr(.{.kind = .print_builtin}); | ||||
| 			} | ||||
| 
 | ||||
| 			if (coral.io.are_equal(identifier, "vec2")) { | ||||
| 				return root.create_expr(.{.kind = .vec2_builtin}); | ||||
| 			} | ||||
| 
 | ||||
| 			if (coral.io.are_equal(identifier, "vec3")) { | ||||
| 				return root.create_expr(.{.kind = .vec3_builtin}); | ||||
| 			} | ||||
| 
 | ||||
| 			return root.report_error(stream, "unexpected identifier after `@`", .{}); | ||||
| 		}, | ||||
| 
 | ||||
| 		.symbol_period => { | ||||
| 			stream.step(); | ||||
| 
 | ||||
| 			const identifier = switch (stream.token) { | ||||
| 				.identifier => |identifier| identifier, | ||||
| 				else => return root.report_error(stream, "expected identifier after `.`", .{}), | ||||
| 			}; | ||||
| 
 | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			return root.create_expr(.{.kind = .{.symbol_literal = identifier}}); | ||||
| 		}, | ||||
| 
 | ||||
| 		.identifier => |identifier| { | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			return root.create_expr(.{ | ||||
| 				.next = null, | ||||
| 
 | ||||
| 				.kind = .{ | ||||
| 					.declaration_get = .{ | ||||
| 						.declaration = (try environment.resolve_declaration(identifier)) orelse { | ||||
| 							return root.report_error(stream, "undefined identifier `{identifier}`", .{ | ||||
| 								.identifier = identifier, | ||||
| 							}); | ||||
| 						}, | ||||
| 					}, | ||||
| 				}, | ||||
| 			}); | ||||
| 		}, | ||||
| 
 | ||||
| 		.keyword_lambda => { | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			if (stream.token != .symbol_paren_left) { | ||||
| 				return root.report_error(stream, "expected `(` after opening lambda block", .{}); | ||||
| 			} | ||||
| 
 | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			var lambda_environment = try environment.create_enclosed(root); | ||||
| 
 | ||||
| 			while (stream.token != .symbol_paren_right) { | ||||
| 				const identifier = switch (stream.token) { | ||||
| 					.identifier => |identifier| identifier, | ||||
| 					else => return root.report_error(stream, "expected identifier", .{}), | ||||
| 				}; | ||||
| 
 | ||||
| 				if (try lambda_environment.declare(identifier) == null) { | ||||
| 					return root.report_error(stream, "declaration `{identifier}` already exists", .{ | ||||
| 						.identifier = identifier, | ||||
| 					}); | ||||
| 				} | ||||
| 
 | ||||
| 				lambda_environment.argument_count += 1; | ||||
| 
 | ||||
| 				stream.skip_newlines(); | ||||
| 
 | ||||
| 				switch (stream.token) { | ||||
| 					.symbol_comma => stream.skip_newlines(), | ||||
| 					.symbol_paren_right => break, | ||||
| 					else => return root.report_error(stream, "expected `,` or `)` after identifier", .{}), | ||||
| 				} | ||||
| 			} | ||||
| 
 | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			if (stream.token != .symbol_colon) { | ||||
| 				return root.report_error(stream, "expected `:` after closing `)` of lambda identifiers", .{}); | ||||
| 			} | ||||
| 
 | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			if (stream.token != .keyword_end) { | ||||
| 				const first_statement = try Stmt.parse(root, stream, lambda_environment); | ||||
| 				var current_statement = first_statement; | ||||
| 
 | ||||
| 				while (stream.token != .keyword_end) { | ||||
| 					const next_statement = try Stmt.parse(root, stream, lambda_environment); | ||||
| 
 | ||||
| 					current_statement.next = next_statement; | ||||
| 					current_statement = next_statement; | ||||
| 				} | ||||
| 
 | ||||
| 				lambda_environment.statement = first_statement; | ||||
| 			} | ||||
| 
 | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			return root.create_expr(.{.kind = .{.lambda_construct = .{.environment = lambda_environment}}}); | ||||
| 		}, | ||||
| 
 | ||||
| 		.symbol_brace_left => { | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			if (stream.token == .symbol_brace_right) { | ||||
| 				stream.skip_newlines(); | ||||
| 
 | ||||
| 				return root.create_expr(.{.kind = .{.table_construct = .{.entry = null}}}); | ||||
| 			} | ||||
| 
 | ||||
| 			const first_entry = try parse_table_entry(root, stream, environment); | ||||
| 			var entry = first_entry; | ||||
| 
 | ||||
| 			while (stream.token == .symbol_comma) { | ||||
| 				stream.skip_newlines(); | ||||
| 
 | ||||
| 				if (stream.token == .symbol_brace_right) { | ||||
| 					break; | ||||
| 				} | ||||
| 
 | ||||
| 				const expression = try parse_table_entry(root, stream, environment); | ||||
| 
 | ||||
| 				entry.next = expression; | ||||
| 				entry = expression; | ||||
| 			} | ||||
| 
 | ||||
| 			if (stream.token != .symbol_brace_right) { | ||||
| 				return root.report_error(stream, "expected closing `}` on table construct", .{}); | ||||
| 			} | ||||
| 
 | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			return root.create_expr(.{.kind = .{.table_construct = .{.entry = first_entry}}}); | ||||
| 		}, | ||||
| 
 | ||||
| 		.symbol_minus => { | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			return root.create_expr(.{ | ||||
| 				.next = null, | ||||
| 
 | ||||
| 				.kind = .{ | ||||
| 					.unary_op = .{ | ||||
| 						.operand = try parse_factor(root, stream, environment), | ||||
| 						.operation = .numeric_negation, | ||||
| 					}, | ||||
| 				}, | ||||
| 			}); | ||||
| 		}, | ||||
| 
 | ||||
| 		.symbol_bang => { | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			return root.create_expr(.{ | ||||
| 				.next = null, | ||||
| 
 | ||||
| 				.kind = .{ | ||||
| 					.unary_op = .{ | ||||
| 						.operand = try parse_factor(root, stream, environment), | ||||
| 						.operation = .boolean_negation, | ||||
| 					}, | ||||
| 				}, | ||||
| 			}); | ||||
| 		}, | ||||
| 
 | ||||
| 		else => return root.report_error(stream, "unexpected token in expression", .{}), | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| fn parse_table_entry(root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self { | ||||
| 	switch (stream.token) { | ||||
| 		.symbol_period => { | ||||
| 			stream.step(); | ||||
| 
 | ||||
| 			const field = switch (stream.token) { | ||||
| 				.identifier => |identifier| identifier, | ||||
| 				else => return root.report_error(stream, "expected identifier in field symbol literal", .{}), | ||||
| 			}; | ||||
| 
 | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			if (stream.token != .symbol_equals) { | ||||
| 				return root.report_error(stream, "expected `=` after table symbol key", .{}); | ||||
| 			} | ||||
| 
 | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			return root.create_expr(.{ | ||||
| 				.kind = .{ | ||||
| 					.key_value = .{ | ||||
| 						.value = try parse(root, stream, environment), | ||||
| 						.key = try root.create_expr(.{.kind = .{.symbol_literal = field}}), | ||||
| 					}, | ||||
| 				}, | ||||
| 			}); | ||||
| 		}, | ||||
| 
 | ||||
| 		.symbol_bracket_left => { | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			const key = try parse(root, stream, environment); | ||||
| 
 | ||||
| 			if (stream.token != .symbol_bracket_right) { | ||||
| 				return root.report_error(stream, "expected `]` after subscript index expression", .{}); | ||||
| 			} | ||||
| 
 | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			if (stream.token != .symbol_equals) { | ||||
| 				return root.report_error(stream, "expected `=` after table expression key", .{}); | ||||
| 			} | ||||
| 
 | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			return root.create_expr(.{ | ||||
| 				.kind = .{ | ||||
| 					.key_value = .{ | ||||
| 						.value = try parse(root, stream, environment), | ||||
| 						.key = key, | ||||
| 					}, | ||||
| 				}, | ||||
| 			}); | ||||
| 		}, | ||||
| 
 | ||||
| 		else => return parse(root, stream, environment), | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| const parse_term = BinaryOp.parser(parse_factor, &.{ | ||||
| 	.multiplication, | ||||
| 	.divsion, | ||||
| }); | ||||
							
								
								
									
229 lines: source/ona/kym/Stmt.zig (new file)
							| @ -0,0 +1,229 @@ | ||||
| const Expr = @import("./Expr.zig"); | ||||
| 
 | ||||
| const coral = @import("coral"); | ||||
| 
 | ||||
| const tokens = @import("./tokens.zig"); | ||||
| 
 | ||||
| const tree = @import("./tree.zig"); | ||||
| 
 | ||||
| next: ?*const Self = null, | ||||
| 
 | ||||
| kind: union (enum) { | ||||
| 	top_expression: *const Expr, | ||||
| 	@"return": Return, | ||||
| 	declare: Declare, | ||||
| 	@"if": If, | ||||
| 	@"while": While, | ||||
| }, | ||||
| 
 | ||||
| pub const Declare = struct { | ||||
| 	declaration: *const tree.Declaration, | ||||
| 	initial_expression: *const Expr, | ||||
| }; | ||||
| 
 | ||||
| pub const If = struct { | ||||
| 	then_expression: *const Expr, | ||||
| 	@"then": *const Self, | ||||
| 	@"else": ?*const Self, | ||||
| }; | ||||
| 
 | ||||
| pub const Return = struct { | ||||
| 	returned_expression: ?*const Expr, | ||||
| }; | ||||
| 
 | ||||
| const Self = @This(); | ||||
| 
 | ||||
| pub const While = struct { | ||||
| 	loop_expression: *const Expr, | ||||
| 	loop: *const Self, | ||||
| }; | ||||
| 
 | ||||
| pub fn parse(root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self { | ||||
| 	switch (stream.token) { | ||||
| 		.keyword_return => { | ||||
| 			stream.step(); | ||||
| 
 | ||||
| 			if (stream.token != .end and stream.token != .newline) { | ||||
| 				const returned_expression = try Expr.parse(root, stream, environment); | ||||
| 
 | ||||
| 				if (stream.token != .end and stream.token != .newline) { | ||||
| 					return root.report_error(stream, "expected end or newline after return statement", .{}); | ||||
| 				} | ||||
| 
 | ||||
| 				return root.create_stmt(.{ | ||||
| 					.kind = .{.@"return" = .{.returned_expression = returned_expression}}, | ||||
| 				}); | ||||
| 			} | ||||
| 
 | ||||
| 			return root.create_stmt(.{.kind = .{.@"return" = .{.returned_expression = null}}}); | ||||
| 		}, | ||||
| 
 | ||||
| 		.keyword_while => { | ||||
| 			defer stream.skip_newlines(); | ||||
| 
 | ||||
| 			stream.step(); | ||||
| 
 | ||||
| 			const condition_expression = try Expr.parse(root, stream, environment); | ||||
| 
 | ||||
| 			if (stream.token != .symbol_colon) { | ||||
| 				return root.report_error(stream, "expected `:` after `while` statement", .{}); | ||||
| 			} | ||||
| 
 | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			const first_statement = try parse(root, stream, environment); | ||||
| 
 | ||||
| 			{ | ||||
| 				var current_statement = first_statement; | ||||
| 
 | ||||
| 				while (stream.token != .keyword_end) { | ||||
| 					const next_statement = try parse(root, stream, environment); | ||||
| 
 | ||||
| 					current_statement.next = next_statement; | ||||
| 					current_statement = next_statement; | ||||
| 				} | ||||
| 			} | ||||
| 
 | ||||
| 			return root.create_stmt(.{ | ||||
| 				.kind = .{ | ||||
| 					.@"while" = .{ | ||||
| 						.loop = first_statement, | ||||
| 						.loop_expression = condition_expression, | ||||
| 					}, | ||||
| 				}, | ||||
| 			}); | ||||
| 		}, | ||||
| 
 | ||||
| 		.keyword_var, .keyword_let => { | ||||
| 			const storage_token = stream.token; | ||||
| 
 | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			const identifier = switch (stream.token) { | ||||
| 				.identifier => |identifier| identifier, | ||||
| 
 | ||||
| 				else => { | ||||
| 					return root.report_error(stream, "expected identifier after `{storage}` declaration statement", .{ | ||||
| 						.storage = storage_token.text() | ||||
| 					}); | ||||
| 				}, | ||||
| 			}; | ||||
| 
 | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			if (stream.token != .symbol_equals) { | ||||
| 				return root.report_error(stream, "expected `=` after declaration `{identifier}`", .{ | ||||
| 					.identifier = identifier, | ||||
| 				}); | ||||
| 			} | ||||
| 
 | ||||
| 			stream.skip_newlines(); | ||||
| 
 | ||||
| 			return root.create_stmt(.{ | ||||
| 				.kind = .{ | ||||
| 					.declare = .{ | ||||
| 						.initial_expression = try Expr.parse(root, stream, environment), | ||||
| 
 | ||||
| 						.declaration = (try environment.declare(identifier)) orelse { | ||||
| 							return root.report_error(stream, "declaration `{identifier}` already exists", .{ | ||||
| 								.identifier = identifier, | ||||
| 							}); | ||||
| 						}, | ||||
| 					}, | ||||
| 				}, | ||||
| 			}); | ||||
| 		}, | ||||
| 
 | ||||
| 		.keyword_if => return parse_branch(root, stream, environment), | ||||
| 
 | ||||
| 		else => return root.create_stmt(.{.kind = .{.top_expression = try Expr.parse(root, stream, environment)}}), | ||||
| 	} | ||||
| } | ||||
| 
 | ||||
| fn parse_branch(root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self { | ||||
| 	stream.step(); | ||||
| 
 | ||||
| 	const expression = try Expr.parse(root, stream, environment); | ||||
| 
 | ||||
| 	if (stream.token != .symbol_colon) { | ||||
| 		return root.report_error(stream, "expected `:` after `{token}`", .{.token = stream.token.text()}); | ||||
| 	} | ||||
| 
 | ||||
| 	stream.skip_newlines(); | ||||
| 
 | ||||
| 	const first_then_statement = try parse(root, stream, environment); | ||||
| 	var current_then_statement = first_then_statement; | ||||
| 
 | ||||
| 	while (true) { | ||||
| 		switch (stream.token) { | ||||
| 			.keyword_end => { | ||||
| 				stream.skip_newlines(); | ||||
| 
 | ||||
| 				return root.create_stmt(.{ | ||||
| 					.next = null, | ||||
| 
 | ||||
| 					.kind = .{ | ||||
| 						.@"if" = .{ | ||||
| 							.then_expression = expression, | ||||
| 							.@"then" = first_then_statement, | ||||
| 							.@"else" = null, | ||||
| 						}, | ||||
| 					}, | ||||
| 				}); | ||||
| 			}, | ||||
| 
 | ||||
| 			.keyword_else => { | ||||
| 				stream.step(); | ||||
| 
 | ||||
| 				if (stream.token != .symbol_colon) { | ||||
| 					return root.report_error(stream, "expected `:` after `else`", .{}); | ||||
| 				} | ||||
| 
 | ||||
| 				stream.skip_newlines(); | ||||
| 
 | ||||
| 				const first_else_statement = try parse(root, stream, environment); | ||||
| 				var current_else_statement = first_else_statement; | ||||
| 
 | ||||
| 				while (stream.token != .keyword_end) { | ||||
| 					const next_statement = try parse(root, stream, environment); | ||||
| 
 | ||||
| 					current_else_statement.next = next_statement; | ||||
| 					current_else_statement = next_statement; | ||||
| 				} | ||||
| 
 | ||||
| 				stream.skip_newlines(); | ||||
| 
 | ||||
| 				return root.create_stmt(.{ | ||||
| 					.next = null, | ||||
| 
 | ||||
| 					.kind = .{ | ||||
| 						.@"if" = .{ | ||||
| 							.@"else" = first_else_statement, | ||||
| 							.@"then" = first_then_statement, | ||||
| 							.then_expression = expression, | ||||
| 						}, | ||||
| 					} | ||||
| 				}); | ||||
| 			}, | ||||
| 
 | ||||
| 			.keyword_elif => { | ||||
| 				return root.create_stmt(.{ | ||||
| 					.next = null, | ||||
| 
 | ||||
| 					.kind = .{ | ||||
| 						.@"if" = .{ | ||||
| 							.@"else" = try parse_branch(root, stream, environment), | ||||
| 							.@"then" = first_then_statement, | ||||
| 							.then_expression = expression, | ||||
| 						}, | ||||
| 					}, | ||||
| 				}); | ||||
| 			}, | ||||
| 
 | ||||
| 			else => { | ||||
| 				const next_statement = try parse(root, stream, environment); | ||||
| 
 | ||||
| 				current_then_statement.next = next_statement; | ||||
| 				current_then_statement = next_statement; | ||||
| 			}, | ||||
| 		} | ||||
| 	} | ||||
| } | ||||
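For readers following the control-flow data model: statements form a singly linked list through `next`, and an `elif` chain becomes another `If` node nested in the `@"else"` slot of its parent. The following self-contained mock (types and names invented here; only the shape mirrors the diff) builds and walks such a chain.

const std = @import("std");

// Toy mirror of the Stmt layout: statements are linked through `next`, and an
// `elif` chain nests a further `if` node in the `else` slot.
const MockStmt = struct {
	next: ?*const MockStmt = null,

	kind: union (enum) {
		print: []const u8,

		@"if": struct {
			condition: bool,
			then: *const MockStmt,
			@"else": ?*const MockStmt,
		},
	},
};

fn run(first: ?*const MockStmt) void {
	var current = first;

	while (current) |statement| : (current = statement.next) {
		switch (statement.kind) {
			.print => |text| std.debug.print("{s}\n", .{text}),

			.@"if" => |branch| {
				if (branch.condition) {
					run(branch.then);
				} else {
					run(branch.@"else");
				}
			},
		}
	}
}

pub fn main() void {
	// Rough equivalent of: if false: ... elif true: ... else: ... end
	const print_a = MockStmt{.kind = .{.print = "a"}};
	const print_b = MockStmt{.kind = .{.print = "b"}};
	const print_c = MockStmt{.kind = .{.print = "c"}};
	const elif = MockStmt{.kind = .{.@"if" = .{.condition = true, .then = &print_b, .@"else" = &print_c}}};
	const top = MockStmt{.kind = .{.@"if" = .{.condition = false, .then = &print_a, .@"else" = &elif}}};

	run(&top);
}

Running it prints `b`: the outer condition is false, so evaluation falls through to the nested node, whose condition is true.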
							
								
								
									
122 lines: source/ona/kym/Table.zig (new file)
							| @ -0,0 +1,122 @@ | ||||
| const coral = @import("coral"); | ||||
| 
 | ||||
| const kym = @import("../kym.zig"); | ||||
| 
 | ||||
| associative: RefTable, | ||||
| contiguous: RefList, | ||||
| 
 | ||||
| const RefList = coral.list.Stack(?*kym.RuntimeRef); | ||||
| 
 | ||||
| const RefTable = coral.map.Table(*kym.RuntimeRef, *kym.RuntimeRef, struct { | ||||
| 	pub const hash = kym.RuntimeRef.hash; | ||||
| 
 | ||||
| 	pub const equals = kym.RuntimeRef.equals; | ||||
| }); | ||||
| 
 | ||||
| const Self = @This(); | ||||
| 
 | ||||
| pub fn free(self: *Self, env: *kym.RuntimeEnv) void { | ||||
| 	{ | ||||
| 		var field_iterable = self.associative.as_iterable(); | ||||
| 
 | ||||
| 		while (field_iterable.next()) |entry| { | ||||
| 			env.discard(entry.key); | ||||
| 			env.discard(entry.value); | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	self.associative.free(); | ||||
| 
 | ||||
| 	while (self.contiguous.pop()) |value| { | ||||
| 		if (value) |ref| { | ||||
| 			env.discard(ref); | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	self.contiguous.free(); | ||||
| } | ||||
| 
 | ||||
| pub fn make(env: *kym.RuntimeEnv) Self { | ||||
| 	return .{ | ||||
| 		.associative = RefTable.make(env.allocator, .{}), | ||||
| 		.contiguous = RefList.make(env.allocator), | ||||
| 	}; | ||||
| } | ||||
| 
 | ||||
| pub const typeinfo = &kym.Typeinfo{ | ||||
| 	.size = @sizeOf(Self), | ||||
| 	.name = "table", | ||||
| 	.destruct = typeinfo_destruct, | ||||
| 	.get = typeinfo_get, | ||||
| 	.set = typeinfo_set, | ||||
| }; | ||||
| 
 | ||||
| fn typeinfo_destruct(env: *kym.RuntimeEnv, userdata: []coral.io.Byte) void { | ||||
| 	@as(*Self, @ptrCast(@alignCast(userdata))).free(env); | ||||
| } | ||||
| 
 | ||||
| fn typeinfo_get(env: *kym.RuntimeEnv, userdata: []coral.io.Byte, index: *const kym.RuntimeRef) kym.RuntimeError!?*kym.RuntimeRef { | ||||
| 	const table = @as(*Self, @ptrCast(@alignCast(userdata))); | ||||
| 	const acquired_index = index.acquire(); | ||||
| 
 | ||||
| 	defer env.discard(acquired_index); | ||||
| 
 | ||||
| 	if (acquired_index.as_fixed()) |fixed| { | ||||
| 		if (fixed < 0) { | ||||
| 			// TODO: Negative indexing. | ||||
| 			unreachable; | ||||
| 		} | ||||
| 
 | ||||
| 		if (fixed < table.contiguous.values.len) { | ||||
| 			return (table.contiguous.values[@intCast(fixed)] orelse return null).acquire(); | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	if (table.associative.lookup(acquired_index)) |value| { | ||||
| 		return value.acquire(); | ||||
| 	} | ||||
| 
 | ||||
| 	return null; | ||||
| } | ||||
| 
 | ||||
| fn typeinfo_set(env: *kym.RuntimeEnv, userdata: []coral.io.Byte, index: *const kym.RuntimeRef, value: ?*const kym.RuntimeRef) kym.RuntimeError!void { | ||||
| 	const table = @as(*Self, @ptrCast(@alignCast(userdata))); | ||||
| 	const acquired_index = index.acquire(); | ||||
| 
 | ||||
| 	errdefer env.discard(acquired_index); | ||||
| 
 | ||||
| 	if (acquired_index.as_fixed()) |fixed| { | ||||
| 		if (fixed < 0) { | ||||
| 			// TODO: Negative indexing. | ||||
| 			unreachable; | ||||
| 		} | ||||
| 
 | ||||
| 		if (fixed < table.contiguous.values.len) { | ||||
| 			const maybe_replacing = &table.contiguous.values[@intCast(fixed)]; | ||||
| 
 | ||||
| 			if (maybe_replacing.*) |replacing| { | ||||
| 				env.discard(replacing); | ||||
| 			} | ||||
| 
 | ||||
| 			maybe_replacing.* = if (value) |ref| ref.acquire() else null; | ||||
| 
 | ||||
| 			return; | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	const acquired_value = (value orelse { | ||||
| 		if (table.associative.remove(acquired_index)) |removed| { | ||||
| 			env.discard(removed.key); | ||||
| 			env.discard(removed.value); | ||||
| 		} | ||||
| 
 | ||||
| 		return; | ||||
| 	}).acquire(); | ||||
| 
 | ||||
| 	errdefer env.discard(acquired_value); | ||||
| 
 | ||||
| 	if (try table.associative.replace(acquired_index, acquired_value)) |replaced| { | ||||
| 		env.discard(replaced.key); | ||||
| 		env.discard(replaced.value); | ||||
| 	} | ||||
| } | ||||
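The two `// TODO: Negative indexing.` branches in `typeinfo_get` and `typeinfo_set` currently fall into `unreachable`. Purely as an illustration of one way the wrap-around could be computed (not part of this change; assumes Lua-style indexing where -1 names the last contiguous element):

const std = @import("std");

// Illustrative helper: map a possibly negative fixed index onto a contiguous
// array of `len` elements, where -1 refers to the last element. Returns null
// when the index falls outside the contiguous range.
fn wrap_index(fixed: i64, len: usize) ?usize {
	if (fixed >= 0) {
		const index: usize = @intCast(fixed);

		return if (index < len) index else null;
	}

	const from_end: usize = @intCast(-fixed);

	return if (from_end <= len) len - from_end else null;
}

test "negative and positive indices" {
	try std.testing.expectEqual(@as(?usize, 4), wrap_index(-1, 5));
	try std.testing.expectEqual(@as(?usize, 0), wrap_index(-5, 5));
	try std.testing.expectEqual(@as(?usize, null), wrap_index(-6, 5));
	try std.testing.expectEqual(@as(?usize, 2), wrap_index(2, 5));
	try std.testing.expectEqual(@as(?usize, null), wrap_index(5, 5));
}

Whether an out-of-range negative index should fall through to the associative part, as out-of-range positive ones do, is left open here.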
| @ -1,796 +0,0 @@ | ||||
| const coral = @import("coral"); | ||||
| 
 | ||||
| const tokens = @import("./tokens.zig"); | ||||
| 
 | ||||
| pub const BinaryOperator = enum { | ||||
| 	addition, | ||||
| 	subtraction, | ||||
| 	multiplication, | ||||
| 	divsion, | ||||
| 	equals_comparison, | ||||
| 	greater_than_comparison, | ||||
| 	greater_equals_comparison, | ||||
| 	less_than_comparison, | ||||
| 	less_equals_comparison, | ||||
| 
 | ||||
| 	fn builder(comptime build_next: ExpressionBuilder, comptime operators: []const BinaryOperator) ExpressionBuilder { | ||||
| 		const Builder = struct { | ||||
| 			fn build(self: *Tree) ParseError!Expression { | ||||
| 				const allocator = self.arena.as_allocator(); | ||||
| 				var expression = try build_next(self); | ||||
| 
 | ||||
| 				inline for (operators) |operator| { | ||||
| 					const token = @as(tokens.Token, switch (operator) { | ||||
| 						.addition => .symbol_plus, | ||||
| 						.subtraction => .symbol_minus, | ||||
| 						.multiplication => .symbol_asterisk, | ||||
| 						.divsion => .symbol_forward_slash, | ||||
| 						.equals_comparison => .symbol_double_equals, | ||||
| 						.greater_than_comparison => .symbol_greater_than, | ||||
| 						.greater_equals_comparison => .symbol_greater_equals, | ||||
| 						.less_than_comparison => .symbol_less_than, | ||||
| 						.less_equals_comparison => .symbol_less_equals, | ||||
| 					}); | ||||
| 
 | ||||
| 					if (self.tokenizer.token == coral.io.tag_of(token)) { | ||||
| 						self.tokenizer.step(); | ||||
| 
 | ||||
| 						if (self.tokenizer.token == .end) { | ||||
| 							return self.report( | ||||
| 								"expected other half of expression after `" ++ | ||||
| 								comptime token.text() ++ | ||||
| 								"`"); | ||||
| 						} | ||||
| 
 | ||||
| 						// TODO: Remove once Zig has fixed struct self-reassignment. | ||||
| 						const unnecessary_temp = try coral.io.allocate_one(allocator, expression); | ||||
| 
 | ||||
| 						expression = .{ | ||||
| 							.binary_operation = .{ | ||||
| 								.operator = operator, | ||||
| 								.lhs_expression = unnecessary_temp, | ||||
| 								.rhs_expression = try coral.io.allocate_one(allocator, try build_next(self)), | ||||
| 							}, | ||||
| 						}; | ||||
| 					} | ||||
| 				} | ||||
| 
 | ||||
| 				return expression; | ||||
| 			} | ||||
| 		}; | ||||
| 
 | ||||
| 		return Builder.build; | ||||
| 	} | ||||
| }; | ||||
| 
 | ||||
| pub const Expression = union (enum) { | ||||
| 	nil_literal, | ||||
| 	true_literal, | ||||
| 	false_literal, | ||||
| 	builtin: []const coral.io.Byte, | ||||
| 	number_literal: []const coral.io.Byte, | ||||
| 	string_literal: []const coral.io.Byte, | ||||
| 	symbol_literal: []const coral.io.Byte, | ||||
| 	table_literal: TableLiteral, | ||||
| 	grouped_expression: *Expression, | ||||
| 
 | ||||
| 	lambda_literal: struct { | ||||
| 		argument_identifiers: IdentifierList, | ||||
| 		block_statements: Statement.List, | ||||
| 	}, | ||||
| 
 | ||||
| 	local_get: struct { | ||||
| 		identifier: []const coral.io.Byte, | ||||
| 	}, | ||||
| 
 | ||||
| 	local_set: struct { | ||||
| 		identifier: []const coral.io.Byte, | ||||
| 		value_expression: *Expression, | ||||
| 	}, | ||||
| 
 | ||||
| 	field_get: struct { | ||||
| 		object_expression: *Expression, | ||||
| 		identifier: []const coral.io.Byte, | ||||
| 	}, | ||||
| 
 | ||||
| 	field_set: struct { | ||||
| 		object_expression: *Expression, | ||||
| 		identifier: []const coral.io.Byte, | ||||
| 		value_expression: *Expression, | ||||
| 	}, | ||||
| 
 | ||||
| 	subscript_get: struct { | ||||
| 		object_expression: *Expression, | ||||
| 		subscript_expression: *Expression, | ||||
| 	}, | ||||
| 
 | ||||
| 	subscript_set: struct { | ||||
| 		object_expression: *Expression, | ||||
| 		subscript_expression: *Expression, | ||||
| 		value_expression: *Expression, | ||||
| 	}, | ||||
| 
 | ||||
| 	binary_operation: struct { | ||||
| 		operator: BinaryOperator, | ||||
| 		lhs_expression: *Expression, | ||||
| 		rhs_expression: *Expression, | ||||
| 	}, | ||||
| 
 | ||||
| 	unary_operation: struct { | ||||
| 		operator: UnaryOperator, | ||||
| 		expression: *Expression, | ||||
| 	}, | ||||
| 
 | ||||
| 	invoke: struct { | ||||
| 		object_expression: *Expression, | ||||
| 		argument_expressions: List, | ||||
| 	}, | ||||
| 
 | ||||
| 	const List = coral.list.Stack(Expression); | ||||
| 
 | ||||
| 	const TableLiteral = coral.list.Stack(struct { | ||||
| 		key_expression: Expression, | ||||
| 		value_expression: Expression, | ||||
| 	}); | ||||
| }; | ||||
| 
 | ||||
| const ExpressionBuilder = fn (self: *Tree) ParseError!Expression; | ||||
| 
 | ||||
| const IdentifierList = coral.list.Stack([]const coral.io.Byte); | ||||
| 
 | ||||
| pub const ParseError = error { | ||||
| 	OutOfMemory, | ||||
| 	BadSyntax, | ||||
| }; | ||||
| 
 | ||||
| pub const Statement = union (enum) { | ||||
| 	@"return": ?Expression, | ||||
| 
 | ||||
| 	declare: struct { | ||||
| 		storage: DeclarationStorage, | ||||
| 		identifier: []const coral.io.Byte, | ||||
| 		assigned_expression: Expression, | ||||
| 	}, | ||||
| 
 | ||||
| 	@"if": struct { | ||||
| 		condition_expression: Expression, | ||||
| 		block_statements: List, | ||||
| 		else_statement: ?*Statement, | ||||
| 	}, | ||||
| 
 | ||||
| 	@"while": struct { | ||||
| 		condition_expression: Expression, | ||||
| 		block_statements: List, | ||||
| 	}, | ||||
| 
 | ||||
| 	block: List, | ||||
| 	expression: Expression, | ||||
| 
 | ||||
| 	pub const DeclarationStorage = enum { | ||||
| 		@"var", | ||||
| 		let, | ||||
| 	}; | ||||
| 
 | ||||
| 	const List = coral.list.Stack(Statement); | ||||
| }; | ||||
| 
 | ||||
| pub const Tree = struct { | ||||
| 	name: []const coral.io.Byte, | ||||
| 	allocator: coral.io.Allocator, | ||||
| 	arena: coral.arena.Stacking, | ||||
| 	error_buffer: coral.list.ByteStack, | ||||
| 	tokenizer: tokens.Tokenizer, | ||||
| 	parsed_statements: Statement.List, | ||||
| 	has_returned: bool, | ||||
| 
 | ||||
| 	pub fn error_message(self: Tree) []const coral.io.Byte { | ||||
| 		return self.error_buffer.values; | ||||
| 	} | ||||
| 
 | ||||
| 	pub fn free(self: *Tree) void { | ||||
| 		self.parsed_statements.free(); | ||||
| 		self.error_buffer.free(); | ||||
| 		self.arena.free(); | ||||
| 	} | ||||
| 
 | ||||
| 	pub fn make(allocator: coral.io.Allocator, ast_name: []const coral.io.Byte) Tree { | ||||
| 		return .{ | ||||
| 			.arena = coral.arena.Stacking.make(allocator, 4096), | ||||
| 			.error_buffer = coral.list.ByteStack.make(allocator), | ||||
| 			.parsed_statements = Statement.List.make(allocator), | ||||
| 			.tokenizer = .{.source = ""}, | ||||
| 			.allocator = allocator, | ||||
| 			.name = ast_name, | ||||
| 			.has_returned = false, | ||||
| 		}; | ||||
| 	} | ||||
| 
 | ||||
| 	pub fn parse(self: *Tree, data: []const coral.io.Byte) ParseError![]const Statement { | ||||
| 		self.free(); | ||||
| 
 | ||||
| 		self.tokenizer = .{.source = data}; | ||||
| 		self.has_returned = false; | ||||
| 
 | ||||
| 		self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 		while (self.tokenizer.token != .end) { | ||||
| 			try self.parsed_statements.push_one(try self.parse_statement()); | ||||
| 		} | ||||
| 
 | ||||
| 		return self.parsed_statements.values; | ||||
| 	} | ||||
| 
 | ||||
| 	const parse_additive = BinaryOperator.builder(parse_equality, &.{ | ||||
| 		.addition, | ||||
| 		.subtraction, | ||||
| 	}); | ||||
| 
 | ||||
| 	fn parse_branch(self: *Tree) ParseError!Statement { | ||||
| 		const allocator = self.arena.as_allocator(); | ||||
| 
 | ||||
| 		self.tokenizer.step(); | ||||
| 
 | ||||
| 		const condition_expression = try self.parse_expression(); | ||||
| 
 | ||||
| 		if (self.tokenizer.token != .symbol_colon) { | ||||
| 			return self.report("expected `:` after `if` statement condition"); | ||||
| 		} | ||||
| 
 | ||||
| 		var statements = Statement.List.make(allocator); | ||||
| 
 | ||||
| 		self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 		while (true) { | ||||
| 			switch (self.tokenizer.token) { | ||||
| 				.keyword_end => { | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 					return .{ | ||||
| 						.@"if" = .{ | ||||
| 							.condition_expression = condition_expression, | ||||
| 							.block_statements = statements, | ||||
| 							.else_statement = null, | ||||
| 						}, | ||||
| 					}; | ||||
| 				}, | ||||
| 
 | ||||
| 				.keyword_else => { | ||||
| 					self.tokenizer.step(); | ||||
| 
 | ||||
| 					if (self.tokenizer.token != .symbol_colon) { | ||||
| 						return self.report("expected `:` after `if` statement condition"); | ||||
| 					} | ||||
| 
 | ||||
| 					var else_statements = Statement.List.make(allocator); | ||||
| 
 | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 					while (self.tokenizer.token != .keyword_end) { | ||||
| 						try else_statements.push_one(try self.parse_statement()); | ||||
| 					} | ||||
| 
 | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 					return .{ | ||||
| 						.@"if" = .{ | ||||
| 							.else_statement = try coral.io.allocate_one(allocator, Statement{.block = else_statements}), | ||||
| 							.condition_expression = condition_expression, | ||||
| 							.block_statements = statements, | ||||
| 						}, | ||||
| 					}; | ||||
| 				}, | ||||
| 
 | ||||
| 				.keyword_elif => { | ||||
| 					return .{ | ||||
| 						.@"if" = .{ | ||||
| 							.else_statement = try coral.io.allocate_one(allocator, try self.parse_branch()), | ||||
| 							.condition_expression = condition_expression, | ||||
| 							.block_statements = statements, | ||||
| 						}, | ||||
| 					}; | ||||
| 				}, | ||||
| 
 | ||||
| 				else => try statements.push_one(try self.parse_statement()), | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	const parse_comparison = BinaryOperator.builder(parse_term, &.{ | ||||
| 		.greater_than_comparison, | ||||
| 		.greater_equals_comparison, | ||||
| 		.less_than_comparison, | ||||
| 		.less_equals_comparison | ||||
| 	}); | ||||
| 
 | ||||
| 	const parse_equality = BinaryOperator.builder(parse_comparison, &.{ | ||||
| 		.equals_comparison, | ||||
| 	}); | ||||
| 
 | ||||
| 	pub fn parse_expression(self: *Tree) ParseError!Expression { | ||||
| 		const allocator = self.arena.as_allocator(); | ||||
| 		const expression = try self.parse_additive(); | ||||
| 
 | ||||
| 		if (self.tokenizer.token == .symbol_equals) { | ||||
| 			self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 			if (self.tokenizer.token == .end) { | ||||
| 				return self.report("expected assignment after `=`"); | ||||
| 			} | ||||
| 
 | ||||
| 			return switch (expression) { | ||||
| 				.local_get => |local_get| .{ | ||||
| 					.local_set = .{ | ||||
| 						.identifier = local_get.identifier, | ||||
| 						.value_expression = try coral.io.allocate_one(allocator, try self.parse_expression()), | ||||
| 					}, | ||||
| 				}, | ||||
| 
 | ||||
| 				.field_get => |field_get| .{ | ||||
| 					.field_set = .{ | ||||
| 						.object_expression = field_get.object_expression, | ||||
| 						.identifier = field_get.identifier, | ||||
| 						.value_expression = try coral.io.allocate_one(allocator, try self.parse_expression()), | ||||
| 					}, | ||||
| 				}, | ||||
| 
 | ||||
| 				.subscript_get => |subscript_get| .{ | ||||
| 					.subscript_set = .{ | ||||
| 						.object_expression = subscript_get.object_expression, | ||||
| 						.subscript_expression = subscript_get.subscript_expression, | ||||
| 						.value_expression = try coral.io.allocate_one(allocator, try self.parse_expression()), | ||||
| 					}, | ||||
| 				}, | ||||
| 
 | ||||
| 				else => self.report("expected local or field on left-hand side of expression"), | ||||
| 			}; | ||||
| 		} | ||||
| 
 | ||||
| 		return expression; | ||||
| 	} | ||||
| 
 | ||||
| 	fn parse_factor(self: *Tree) ParseError!Expression { | ||||
| 		const allocator = self.arena.as_allocator(); | ||||
| 
 | ||||
| 		var expression = @as(Expression, parse: { | ||||
| 			switch (self.tokenizer.token) { | ||||
| 				.symbol_paren_left => { | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 					if (self.tokenizer.token == .end) { | ||||
| 						return self.report("expected an expression after `(`"); | ||||
| 					} | ||||
| 
 | ||||
| 					const expression = try self.parse_expression(); | ||||
| 
 | ||||
| 					if (self.tokenizer.token != .symbol_paren_right) { | ||||
| 						return self.report("expected a closing `)` after expression"); | ||||
| 					} | ||||
| 
 | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 					break: parse .{.grouped_expression = try coral.io.allocate_one(allocator, expression)}; | ||||
| 				}, | ||||
| 
 | ||||
| 				.keyword_nil => { | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 					break: parse .nil_literal; | ||||
| 				}, | ||||
| 
 | ||||
| 				.keyword_true => { | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 					break: parse .true_literal; | ||||
| 				}, | ||||
| 
 | ||||
| 				.keyword_false => { | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 					break: parse .false_literal; | ||||
| 				}, | ||||
| 
 | ||||
| 				.number => |value| { | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 					break: parse .{.number_literal = value}; | ||||
| 				}, | ||||
| 
 | ||||
| 				.string => |value| { | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 					break: parse .{.string_literal = value}; | ||||
| 				}, | ||||
| 
 | ||||
| 				.identifier => |identifier| { | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 					break: parse .{.local_get = .{.identifier = identifier}}; | ||||
| 				}, | ||||
| 
 | ||||
| 				.builtin => |builtin| { | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 					break: parse .{.builtin = builtin}; | ||||
| 				}, | ||||
| 
 | ||||
| 				.keyword_lambda => { | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 					if (self.tokenizer.token != .symbol_paren_left) { | ||||
| 						return self.report("expected `(` after opening lambda block"); | ||||
| 					} | ||||
| 
 | ||||
| 					var argument_identifiers = IdentifierList.make(allocator); | ||||
| 
 | ||||
| 					while (true) { | ||||
| 						self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 						switch (self.tokenizer.token) { | ||||
| 							.identifier => |identifier| try argument_identifiers.push_one(identifier), | ||||
| 							.symbol_paren_right => break, | ||||
| 							else => return self.report("expected identifier or closing `)` in argument list"), | ||||
| 						} | ||||
| 
 | ||||
| 						self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 						switch (self.tokenizer.token) { | ||||
| 							.symbol_comma => continue, | ||||
| 							.symbol_paren_right => break, | ||||
| 							else => return self.report("expected `,` or closing `)` after identifier in argument list"), | ||||
| 						} | ||||
| 					} | ||||
| 
 | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 					if (self.tokenizer.token != .symbol_colon) { | ||||
| 						return self.report("expected `:` after closing `)` of lambda block argument list"); | ||||
| 					} | ||||
| 
 | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 					var block_statements = Statement.List.make(allocator); | ||||
| 
 | ||||
| 					while (self.tokenizer.token != .keyword_end) { | ||||
| 						try block_statements.push_one(try self.parse_statement()); | ||||
| 					} | ||||
| 
 | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 					break: parse .{ | ||||
| 						.lambda_literal = .{ | ||||
| 							.argument_identifiers = argument_identifiers, | ||||
| 							.block_statements = block_statements, | ||||
| 						}, | ||||
| 					}; | ||||
| 				}, | ||||
| 
 | ||||
| 				.symbol_brace_left => { | ||||
| 					var table_literal = Expression.TableLiteral.make(allocator); | ||||
| 
 | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 					while (true) { | ||||
| 						switch (self.tokenizer.token) { | ||||
| 							.symbol_brace_right => { | ||||
| 								self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 								break: parse .{.table_literal = table_literal}; | ||||
| 							}, | ||||
| 
 | ||||
| 							.symbol_period => { | ||||
| 								self.tokenizer.step(); | ||||
| 
 | ||||
| 								const identifier = switch (self.tokenizer.token) { | ||||
| 									.identifier => |identifier| identifier, | ||||
| 									else => return self.report("expected identifier after `.`"), | ||||
| 								}; | ||||
| 
 | ||||
| 								self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 								if (self.tokenizer.token != .symbol_equals) { | ||||
| 									return self.report("expected `=` after symbol"); | ||||
| 								} | ||||
| 
 | ||||
| 								self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 								try table_literal.push_one(.{ | ||||
| 									.value_expression = try self.parse_expression(), | ||||
| 									.key_expression = .{.symbol_literal = identifier}, | ||||
| 								}); | ||||
| 
 | ||||
| 								switch (self.tokenizer.token) { | ||||
| 									.symbol_comma => self.tokenizer.skip_newlines(), | ||||
| 
 | ||||
| 									.symbol_brace_right => { | ||||
| 										self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 										break: parse .{.table_literal = table_literal}; | ||||
| 									}, | ||||
| 
 | ||||
| 									else => return self.report("expected `,` or `}` after expression"), | ||||
| 								} | ||||
| 							}, | ||||
| 
 | ||||
| 							.symbol_bracket_left => { | ||||
| 								self.tokenizer.step(); | ||||
| 
 | ||||
| 								const subscript_expression = try self.parse_expression(); | ||||
| 
 | ||||
| 								if (self.tokenizer.token != .symbol_bracket_right) { | ||||
| 									return self.report("expected `]` after subscript expression"); | ||||
| 								} | ||||
| 
 | ||||
| 								self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 								if (self.tokenizer.token != .symbol_equals) { | ||||
| 									return self.report("expected `=` after `]`"); | ||||
| 								} | ||||
| 
 | ||||
| 								self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 								try table_literal.push_one(.{ | ||||
| 									.value_expression = try self.parse_expression(), | ||||
| 									.key_expression = subscript_expression, | ||||
| 								}); | ||||
| 
 | ||||
| 								switch (self.tokenizer.token) { | ||||
| 									.symbol_comma => self.tokenizer.skip_newlines(), | ||||
| 
 | ||||
| 									.symbol_brace_right => { | ||||
| 										self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 										break: parse .{.table_literal = table_literal}; | ||||
| 									}, | ||||
| 
 | ||||
| 									else => return self.report("expected `,` or `}` after expression"), | ||||
| 								} | ||||
| 							}, | ||||
| 
 | ||||
| 							else => return self.report("expected `}` or fields in table literal"), | ||||
| 						} | ||||
| 					} | ||||
| 				}, | ||||
| 
 | ||||
| 				.symbol_minus => { | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 					if (self.tokenizer.token == .end) { | ||||
| 						return self.report("expected expression after numeric negation (`-`)"); | ||||
| 					} | ||||
| 
 | ||||
| 					break: parse .{ | ||||
| 						.unary_operation = .{ | ||||
| 							.expression = try coral.io.allocate_one(allocator, try self.parse_factor()), | ||||
| 							.operator = .numeric_negation, | ||||
| 						}, | ||||
| 					}; | ||||
| 				}, | ||||
| 
 | ||||
| 				.symbol_bang => { | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 					if (self.tokenizer.token == .end) { | ||||
| 						return self.report("expected expression after boolean negation (`!`)"); | ||||
| 					} | ||||
| 
 | ||||
| 					break: parse .{ | ||||
| 						.unary_operation = .{ | ||||
| 							.expression = try coral.io.allocate_one(allocator, try self.parse_factor()), | ||||
| 							.operator = .boolean_negation, | ||||
| 						}, | ||||
| 					}; | ||||
| 				}, | ||||
| 
 | ||||
| 				else => return self.report("unexpected token in expression"), | ||||
| 			} | ||||
| 		}); | ||||
| 
 | ||||
| 		while (true) { | ||||
| 			switch (self.tokenizer.token) { | ||||
| 				.symbol_period => { | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 					// TODO: Remove when Zig fixes miscompilation with in-place struct re-assignment. | ||||
| 					const unnecessary_temp = try coral.io.allocate_one(allocator, expression); | ||||
| 
 | ||||
| 					expression = .{ | ||||
| 						.field_get = .{ | ||||
| 							.identifier = switch (self.tokenizer.token) { | ||||
| 								.identifier => |field_identifier| field_identifier, | ||||
| 								else => return self.report("expected identifier after `.`"), | ||||
| 							}, | ||||
| 
 | ||||
| 							.object_expression = unnecessary_temp, | ||||
| 						}, | ||||
| 					}; | ||||
| 
 | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 				}, | ||||
| 
 | ||||
| 				.symbol_bracket_left => { | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 					// TODO: Remove when Zig fixes miscompilation with in-place struct re-assignment. | ||||
| 					const unnecessary_temp = try coral.io.allocate_one(allocator, expression); | ||||
| 
 | ||||
| 					expression = .{ | ||||
| 						.subscript_get = .{ | ||||
| 							.subscript_expression = try coral.io.allocate_one(allocator, try self.parse_expression()), | ||||
| 							.object_expression = unnecessary_temp, | ||||
| 						}, | ||||
| 					}; | ||||
| 
 | ||||
| 					if (self.tokenizer.token != .symbol_bracket_right) { | ||||
| 						return self.report("expected `]` subscript expression"); | ||||
| 					} | ||||
| 
 | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 				}, | ||||
| 
 | ||||
| 				.symbol_paren_left => { | ||||
| 					var argument_expressions = Expression.List.make(allocator); | ||||
| 
 | ||||
| 					while (true) { | ||||
| 						self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 						switch (self.tokenizer.token) { | ||||
| 							.symbol_paren_right => break, | ||||
| 
 | ||||
| 							else => { | ||||
| 								try argument_expressions.push_one(try self.parse_expression()); | ||||
| 
 | ||||
| 								switch (self.tokenizer.token) { | ||||
| 									.symbol_comma => continue, | ||||
| 									.symbol_paren_right => break, | ||||
| 									else => return self.report("expected `,` or `)` after function argument expression"), | ||||
| 								} | ||||
| 							}, | ||||
| 						} | ||||
| 					} | ||||
| 
 | ||||
| 					self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 					// TODO: Remove when Zig fixes miscompilation with in-place struct re-assignment. | ||||
| 					const unnecessary_temp = try coral.io.allocate_one(allocator, expression); | ||||
| 
 | ||||
| 					expression = .{ | ||||
| 						.invoke = .{ | ||||
| 							.argument_expressions = argument_expressions, | ||||
| 							.object_expression = unnecessary_temp, | ||||
| 						}, | ||||
| 					}; | ||||
| 				}, | ||||
| 
 | ||||
| 				else => break, | ||||
| 			} | ||||
| 		} | ||||
| 
 | ||||
| 		return expression; | ||||
| 	} | ||||
| 
 | ||||
| 	fn parse_statement(self: *Tree) ParseError!Statement { | ||||
| 		const allocator = self.arena.as_allocator(); | ||||
| 
 | ||||
| 		switch (self.tokenizer.token) { | ||||
| 			.keyword_return => { | ||||
| 				if (self.has_returned) { | ||||
| 					return self.report("multiple returns in lambda scope but expected only one"); | ||||
| 				} | ||||
| 
 | ||||
| 				self.tokenizer.step(); | ||||
| 
 | ||||
| 				if (self.tokenizer.token != .end and self.tokenizer.token != .newline) { | ||||
| 					return .{.@"return" = try self.parse_expression()}; | ||||
| 				} | ||||
| 
 | ||||
| 				if (self.tokenizer.token != .end and self.tokenizer.token != .newline) { | ||||
| 					return self.report("expected end or newline after return statement"); | ||||
| 				} | ||||
| 
 | ||||
| 				self.has_returned = true; | ||||
| 
 | ||||
| 				return .{.@"return" = null}; | ||||
| 			}, | ||||
| 
 | ||||
| 			.keyword_while => { | ||||
| 				defer self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 				self.tokenizer.step(); | ||||
| 
 | ||||
| 				const condition_expression = try self.parse_expression(); | ||||
| 
 | ||||
| 				if (self.tokenizer.token != .symbol_colon) { | ||||
| 					return self.report("expected `:` after `while` statement"); | ||||
| 				} | ||||
| 
 | ||||
| 				var statements = Statement.List.make(allocator); | ||||
| 
 | ||||
| 				self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 				while (self.tokenizer.token != .keyword_end) { | ||||
| 					try statements.push_one(try self.parse_statement()); | ||||
| 				} | ||||
| 
 | ||||
| 				return .{ | ||||
| 					.@"while" = .{ | ||||
| 						.block_statements = statements, | ||||
| 						.condition_expression = condition_expression, | ||||
| 					}, | ||||
| 				}; | ||||
| 			}, | ||||
| 
 | ||||
| 			.keyword_var => { | ||||
| 				self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 				const identifier = switch (self.tokenizer.token) { | ||||
| 					.identifier => |identifier| identifier, | ||||
| 					else => return self.report("expected identifier after `var` declaration statement"), | ||||
| 				}; | ||||
| 
 | ||||
| 				self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 				if (self.tokenizer.token != .symbol_equals) { | ||||
| 					return self.report("expected `=` after declaration identifier"); | ||||
| 				} | ||||
| 
 | ||||
| 				self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 				return .{ | ||||
| 					.declare = .{ | ||||
| 						.assigned_expression = try self.parse_expression(), | ||||
| 						.storage = .@"var", | ||||
| 						.identifier = identifier, | ||||
| 					}, | ||||
| 				}; | ||||
| 			}, | ||||
| 
 | ||||
| 			.keyword_let => { | ||||
| 				self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 				const identifier = switch (self.tokenizer.token) { | ||||
| 					.identifier => |identifier| identifier, | ||||
| 					else => return self.report("expected identifier after `let` declaration statement"), | ||||
| 				}; | ||||
| 
 | ||||
| 				self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 				if (self.tokenizer.token != .symbol_equals) { | ||||
| 					return self.report("expected `=` after declaration identifier"); | ||||
| 				} | ||||
| 
 | ||||
| 				self.tokenizer.skip_newlines(); | ||||
| 
 | ||||
| 				return .{ | ||||
| 					.declare = .{ | ||||
| 						.assigned_expression = try self.parse_expression(), | ||||
| 						.storage = .let, | ||||
| 						.identifier = identifier, | ||||
| 					}, | ||||
| 				}; | ||||
| 			}, | ||||
| 
 | ||||
| 			.keyword_if => return self.parse_branch(), | ||||
| 			else => return .{.expression = try self.parse_expression()}, | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	const parse_term = BinaryOperator.builder(parse_factor, &.{ | ||||
| 		.multiplication, | ||||
| 		.divsion, | ||||
| 	}); | ||||
| 
 | ||||
| 	fn report(self: *Tree, message: []const coral.io.Byte) ParseError { | ||||
| 		coral.utf8.print_formatted(coral.list.stack_as_writer(&self.error_buffer), "{name}@{line}: {message}", .{ | ||||
| 			.name = self.name, | ||||
| 			.line = self.tokenizer.lines_stepped, | ||||
| 			.message = message, | ||||
| 		}) catch return error.OutOfMemory; | ||||
| 
 | ||||
| 		return error.BadSyntax; | ||||
| 	} | ||||
| }; | ||||
| 
 | ||||
| pub const UnaryOperator = enum { | ||||
| 	boolean_negation, | ||||
| 	numeric_negation, | ||||
| }; | ||||
| @ -101,12 +101,12 @@ pub const Token = union(enum) { | ||||
| 	} | ||||
| }; | ||||
| 
 | ||||
| pub const Tokenizer = struct { | ||||
| pub const Stream = struct { | ||||
| 	source: []const coral.io.Byte, | ||||
| 	lines_stepped: usize = 1, | ||||
| 	token: Token = .newline, | ||||
| 
 | ||||
| 	pub fn skip_newlines(self: *Tokenizer) void { | ||||
| 	pub fn skip_newlines(self: *Stream) void { | ||||
| 		self.step(); | ||||
| 
 | ||||
| 		while (self.token == .newline) { | ||||
| @ -114,7 +114,7 @@ pub const Tokenizer = struct { | ||||
| 		} | ||||
| 	} | ||||
| 
 | ||||
| 	pub fn step(self: *Tokenizer) void { | ||||
| 	pub fn step(self: *Stream) void { | ||||
| 		var cursor = @as(usize, 0); | ||||
| 
 | ||||
| 		defer self.source = self.source[cursor ..]; | ||||
| @ -184,7 +184,7 @@ pub const Tokenizer = struct { | ||||
| 
 | ||||
| 					switch (identifier[0]) { | ||||
| 						'c' => { | ||||
| 							if (coral.io.ends_with(identifier, "onst")) { | ||||
| 							if (coral.io.are_equal(identifier[1 ..], "onst")) { | ||||
| 								self.token = .keyword_const; | ||||
| 
 | ||||
| 								return; | ||||
| @ -192,7 +192,7 @@ pub const Tokenizer = struct { | ||||
| 						}, | ||||
| 
 | ||||
| 						'd' => { | ||||
| 							if (coral.io.ends_with(identifier, "o")) { | ||||
| 							if (coral.io.are_equal(identifier[1 ..], "o")) { | ||||
| 								self.token = .keyword_do; | ||||
| 
 | ||||
| 								return; | ||||
| @ -200,19 +200,19 @@ pub const Tokenizer = struct { | ||||
| 						}, | ||||
| 
 | ||||
| 						'e' => { | ||||
| 							if (coral.io.ends_with(identifier, "lse")) { | ||||
| 							if (coral.io.are_equal(identifier[1 ..], "lse")) { | ||||
| 								self.token = .keyword_else; | ||||
| 
 | ||||
| 								return; | ||||
| 							} | ||||
| 
 | ||||
| 							if (coral.io.ends_with(identifier, "lif")) { | ||||
| 							if (coral.io.are_equal(identifier[1 ..], "lif")) { | ||||
| 								self.token = .keyword_elif; | ||||
| 
 | ||||
| 								return; | ||||
| 							} | ||||
| 
 | ||||
| 							if (coral.io.ends_with(identifier, "nd")) { | ||||
| 							if (coral.io.are_equal(identifier[1 ..], "nd")) { | ||||
| 								self.token = .keyword_end; | ||||
| 
 | ||||
| 								return; | ||||
| @ -220,7 +220,7 @@ pub const Tokenizer = struct { | ||||
| 						}, | ||||
| 
 | ||||
| 						'f' => { | ||||
| 							if (coral.io.ends_with(identifier, "alse")) { | ||||
| 							if (coral.io.are_equal(identifier[1 ..], "alse")) { | ||||
| 								self.token = .keyword_false; | ||||
| 
 | ||||
| 								return; | ||||
| @ -228,7 +228,7 @@ pub const Tokenizer = struct { | ||||
| 						}, | ||||
| 
 | ||||
| 						'i' => { | ||||
| 							if (coral.io.ends_with(identifier, "f")) { | ||||
| 							if (coral.io.are_equal(identifier[1 ..], "f")) { | ||||
| 								self.token = .keyword_if; | ||||
| 
 | ||||
| 								return; | ||||
| @ -236,13 +236,13 @@ pub const Tokenizer = struct { | ||||
| 						}, | ||||
| 
 | ||||
| 						'l' => { | ||||
| 							if (coral.io.ends_with(identifier, "ambda")) { | ||||
| 							if (coral.io.are_equal(identifier[1 ..], "ambda")) { | ||||
| 								self.token = .keyword_lambda; | ||||
| 
 | ||||
| 								return; | ||||
| 							} | ||||
| 
 | ||||
| 							if (coral.io.ends_with(identifier, "et")) { | ||||
| 							if (coral.io.are_equal(identifier[1 ..], "et")) { | ||||
| 								self.token = .keyword_let; | ||||
| 
 | ||||
| 								return; | ||||
| @ -250,7 +250,7 @@ pub const Tokenizer = struct { | ||||
| 						}, | ||||
| 
 | ||||
| 						'n' => { | ||||
| 							if (coral.io.ends_with(identifier, "il")) { | ||||
| 							if (coral.io.are_equal(identifier[1 ..], "il")) { | ||||
| 								self.token = .keyword_nil; | ||||
| 
 | ||||
| 								return; | ||||
| @ -258,7 +258,7 @@ pub const Tokenizer = struct { | ||||
| 						}, | ||||
| 
 | ||||
| 						'r' => { | ||||
| 							if (coral.io.ends_with(identifier, "eturn")) { | ||||
| 							if (coral.io.are_equal(identifier[1 ..], "eturn")) { | ||||
| 								self.token = .keyword_return; | ||||
| 
 | ||||
| 								return; | ||||
| @ -266,7 +266,7 @@ pub const Tokenizer = struct { | ||||
| 						}, | ||||
| 
 | ||||
| 						's' => { | ||||
| 							if (coral.io.ends_with(identifier, "elf")) { | ||||
| 							if (coral.io.are_equal(identifier[1 ..], "elf")) { | ||||
| 								self.token = .keyword_self; | ||||
| 
 | ||||
| 								return; | ||||
| @ -274,7 +274,7 @@ pub const Tokenizer = struct { | ||||
| 						}, | ||||
| 
 | ||||
| 						't' => { | ||||
| 							if (coral.io.ends_with(identifier, "rue")) { | ||||
| 							if (coral.io.are_equal(identifier[1 ..], "rue")) { | ||||
| 								self.token = .keyword_true; | ||||
| 
 | ||||
| 								return; | ||||
| @ -282,7 +282,7 @@ pub const Tokenizer = struct { | ||||
| 						}, | ||||
| 
 | ||||
| 						'v' => { | ||||
| 							if (coral.io.ends_with(identifier, "ar")) { | ||||
| 							if (coral.io.are_equal(identifier[1 ..], "ar")) { | ||||
| 								self.token = .keyword_var; | ||||
| 
 | ||||
| 								return; | ||||
| @ -290,7 +290,7 @@ pub const Tokenizer = struct { | ||||
| 						}, | ||||
| 
 | ||||
| 						'w' => { | ||||
| 							if (coral.io.ends_with(identifier, "hile")) { | ||||
| 							if (coral.io.are_equal(identifier[1 ..], "hile")) { | ||||
| 								self.token = .keyword_while; | ||||
| 
 | ||||
| 								return; | ||||
| @ -305,21 +305,6 @@ pub const Tokenizer = struct { | ||||
| 					return; | ||||
| 				}, | ||||
| 
 | ||||
| 				'@' => { | ||||
| 					cursor += 1; | ||||
| 
 | ||||
| 					const begin = cursor; | ||||
| 
 | ||||
| 					while (cursor < self.source.len) switch (self.source[cursor]) { | ||||
| 						'0'...'9', 'A'...'Z', 'a'...'z', '_' => cursor += 1, | ||||
| 						else => break, | ||||
| 					}; | ||||
| 
 | ||||
| 					self.token = if (begin == cursor) .{.unknown = '@'} else .{.builtin = self.source[begin .. cursor]}; | ||||
| 
 | ||||
| 					return; | ||||
| 				}, | ||||
| 
 | ||||
| 				'"' => { | ||||
| 					cursor += 1; | ||||
| 
 | ||||
| @ -485,6 +470,13 @@ pub const Tokenizer = struct { | ||||
| 					return; | ||||
| 				}, | ||||
| 
 | ||||
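| 				// `@` is emitted as the standalone .symbol_at token. | ||||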
| 				'@' => { | ||||
| 					self.token = .symbol_at; | ||||
| 					cursor += 1; | ||||
| 
 | ||||
| 					return; | ||||
| 				}, | ||||
| 
 | ||||
| 				else => { | ||||
| 					self.token = .{.unknown = self.source[cursor]}; | ||||
| 					cursor += 1; | ||||
|  | ||||
							
								
								
									
										148 source/ona/kym/tree.zig (Normal file)
							| @ -0,0 +1,148 @@ | ||||
| const Expr = @import("./Expr.zig"); | ||||
| 
 | ||||
| const Stmt = @import("./Stmt.zig"); | ||||
| 
 | ||||
| const coral = @import("coral"); | ||||
| 
 | ||||
| const tokens = @import("./tokens.zig"); | ||||
| 
 | ||||
| pub const Declaration = struct { | ||||
| 	identifier: []const coral.io.Byte, | ||||
| 	is_readonly: bool = false, | ||||
| 	is_captured: bool = false, | ||||
| }; | ||||
| 
 | ||||
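| /// Tracks the local declarations and captures of a single lexical scope, along with its enclosing scope. | ||||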
| pub const Environment = struct { | ||||
| 	captures: [capture_max]u8 = [_]u8{0} ** capture_max, | ||||
| 	capture_count: u8 = 0, | ||||
| 	local_declarations: [declaration_max]Declaration = [_]Declaration{.{.identifier = ""}} ** declaration_max, | ||||
| 	local_declaration_count: u8 = 0, | ||||
| 	argument_count: u8 = 0, | ||||
| 	statement: ?*const Stmt = null, | ||||
| 	enclosing: ?*Environment = null, | ||||
| 
 | ||||
| 	const capture_max = coral.math.max_int(@typeInfo(u8).Int); | ||||
| 
 | ||||
| 	const declaration_max = coral.math.max_int(@typeInfo(u8).Int); | ||||
| 
 | ||||
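| 	/// Allocates a child environment on the root's arena whose `enclosing` field points back at this scope. | ||||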
| 	pub fn create_enclosed(self: *Environment, root: *Root) coral.io.AllocationError!*Environment { | ||||
| 		return coral.io.allocate_one(root.arena.as_allocator(), Environment{ | ||||
| 			.enclosing = self, | ||||
| 		}); | ||||
| 	} | ||||
| 
 | ||||
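| 	/// Appends a new local declaration, failing with `error.OutOfMemory` once the fixed-size table is full. | ||||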
| 	pub fn declare(self: *Environment, identifier: []const coral.io.Byte) coral.io.AllocationError!?*const Declaration { | ||||
| 		if (self.local_declaration_count == self.local_declarations.len) { | ||||
| 			return error.OutOfMemory; | ||||
| 		} | ||||
| 
 | ||||
| 		const declaration = &self.local_declarations[self.local_declaration_count]; | ||||
| 
 | ||||
| 		declaration.* = .{.identifier = identifier}; | ||||
| 		self.local_declaration_count += 1; | ||||
| 
 | ||||
| 		return declaration; | ||||
| 	} | ||||
| 
 | ||||
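| 	/// Searches this scope and its enclosing scopes for `identifier`; a match found in an outer scope is | ||||
| 	/// marked as captured and a capture index is recorded in every intermediate scope. | ||||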
| 	pub fn resolve_declaration(self: *Environment, identifier: []const coral.io.Byte) coral.io.AllocationError!?*const Declaration { | ||||
| 		var environment = self; | ||||
| 		var ancestry = @as(usize, 0); | ||||
| 
 | ||||
| 		while (true) : (ancestry += 1) { | ||||
| 			var remaining_count = environment.local_declaration_count; | ||||
| 
 | ||||
| 			while (remaining_count != 0) { | ||||
| 				remaining_count -= 1; | ||||
| 
 | ||||
| 				const declaration = &environment.local_declarations[remaining_count]; | ||||
| 
 | ||||
| 				if (coral.io.are_equal(declaration.identifier, identifier)) { | ||||
| 					if (ancestry != 0) { | ||||
| 						declaration.is_captured = true; | ||||
| 						environment = self; | ||||
| 
 | ||||
| 						while (ancestry != 0) : (ancestry -= 1) { | ||||
| 							if (environment.capture_count == environment.captures.len) { | ||||
| 								return error.OutOfMemory; | ||||
| 							} | ||||
| 
 | ||||
| 							environment.captures[environment.capture_count] = remaining_count; | ||||
| 							environment.capture_count += 1; | ||||
| 
 | ||||
| 							coral.debug.assert(environment.enclosing != null); | ||||
| 
 | ||||
| 							environment = environment.enclosing.?; | ||||
| 						} | ||||
| 					} | ||||
| 
 | ||||
| 					return declaration; | ||||
| 				} | ||||
| 			} | ||||
| 
 | ||||
| 			environment = environment.enclosing orelse return null; | ||||
| 		} | ||||
| 	} | ||||
| }; | ||||
| 
 | ||||
| pub const ParseError = coral.io.AllocationError || error { | ||||
| 	BadSyntax, | ||||
| }; | ||||
| 
 | ||||
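| /// Owns the arena, accumulated error messages, and top-level environment of a single parse. | ||||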
| pub const Root = struct { | ||||
| 	arena: coral.arena.Stacking, | ||||
| 	environment: Environment, | ||||
| 	error_messages: MessageList, | ||||
| 
 | ||||
| 	const MessageList = coral.list.Stack([]coral.io.Byte); | ||||
| 
 | ||||
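| 	/// Records a line-prefixed, formatted error message and returns `error.BadSyntax`. | ||||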
| 	pub fn report_error(self: *Root, stream: *tokens.Stream, comptime format: []const u8, args: anytype) ParseError { | ||||
| 		const allocator = self.arena.as_allocator(); | ||||
| 
 | ||||
| 		try self.error_messages.push_one(try coral.utf8.alloc_formatted(allocator, "{line}: {message}", .{ | ||||
| 			.message = try coral.utf8.alloc_formatted(allocator, format, args), | ||||
| 			.line = stream.lines_stepped, | ||||
| 		})); | ||||
| 
 | ||||
| 		return error.BadSyntax; | ||||
| 	} | ||||
| 
 | ||||
| 	pub fn create_expr(self: *Root, expr: Expr) coral.io.AllocationError!*Expr { | ||||
| 		return coral.io.allocate_one(self.arena.as_allocator(), expr); | ||||
| 	} | ||||
| 
 | ||||
| 	pub fn create_stmt(self: *Root, stmt: Stmt) coral.io.AllocationError!*Stmt { | ||||
| 		return coral.io.allocate_one(self.arena.as_allocator(), stmt); | ||||
| 	} | ||||
| 
 | ||||
| 	pub fn free(self: *Root) void { | ||||
| 		self.error_messages.free(); | ||||
| 		self.arena.free(); | ||||
| 	} | ||||
| 
 | ||||
| 	pub fn make(allocator: coral.io.Allocator) coral.io.AllocationError!Root { | ||||
| 		const arena_page_size = 4096; | ||||
| 
 | ||||
| 		return .{ | ||||
| 			.arena = coral.arena.Stacking.make(allocator, arena_page_size), | ||||
| 			.error_messages = MessageList.make(allocator), | ||||
| 			.environment = .{}, | ||||
| 		}; | ||||
| 	} | ||||
| 
 | ||||
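| 	/// Parses statements until the end of the token stream, chaining them into a linked list rooted at the | ||||
| 	/// top-level environment. | ||||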
| 	pub fn parse(self: *Root, stream: *tokens.Stream) ParseError!void { | ||||
| 		stream.skip_newlines(); | ||||
| 
 | ||||
| 		const first_statement = try Stmt.parse(self, stream, &self.environment); | ||||
| 		var current_statement = first_statement; | ||||
| 
 | ||||
| 		while (stream.token != .end) { | ||||
| 			const next_statement = try Stmt.parse(self, stream, &self.environment); | ||||
| 
 | ||||
| 			current_statement.next = next_statement; | ||||
| 			current_statement = next_statement; | ||||
| 		} | ||||
| 
 | ||||
| 		self.environment.statement = first_statement; | ||||
| 	} | ||||
| }; | ||||
| 
 | ||||