module;

#include <stddef.h>
#include <stdint.h>
#include <type_traits>

export module coral;

// Runtime utilities.
export namespace coral {
	/**
	 * Triggers safety-checked behavior in debug mode.
	 *
	 * In release mode, the compiler can use this function as a marker to optimize out safety-
	 * checked logic branches that should never be executed.
	 */
	[[noreturn]] void unreachable() {
		__builtin_unreachable();
	}
}

// Concrete and interface types.
export namespace coral {
	using usize = size_t;
	using size = __ssize_t;

	using u8 = uint8_t;
	usize const u8_max = 0xff;
	using i8 = int8_t;

	using u16 = uint16_t;
	usize const u16_max = 0xffff;
	using i16 = int16_t;

	using u32 = uint32_t;
	usize const u32_max = 0xffffffff;
	using i32 = int32_t;

	using u64 = uint64_t;
	using i64 = int64_t;

	using f32 = float;
	using f64 = double;

	/**
	 * Base type for runtime-pluggable memory allocation strategies used by the core library.
	 */
	struct allocator {
		virtual ~allocator() {}

		/**
		 * If `allocation` is `nullptr`, the allocator will attempt to allocate a new memory block
		 * of `requested_size` bytes. Otherwise, the allocator will attempt to reallocate
		 * `allocation` to be `requested_size` bytes in size.
		 *
		 * The returned address will point to a dynamically allocated buffer of `requested_size`
		 * bytes if the operation was successful, otherwise `nullptr`.
		 *
		 * *Note*: If the returned address is non-`nullptr`, it should be deallocated prior to
		 * program exit. This may be achieved through either [deallocate] or implementation-
		 * specific allocator functionality.
		 *
		 * *Note*: Attempting to pass a non-`nullptr` `allocation` address not allocated by the
		 * allocator *will* result in erroneous implementation behavior.
		 *
		 * *Note*: After invocation, `allocation` should be considered an invalid memory address.
		 */
		[[nodiscard]] virtual u8 * reallocate(u8 * allocation, usize requested_size) = 0;

		/**
		 * If `allocation` points to a non-`nullptr` address, the allocator will deallocate it.
		 * Otherwise, the function has no side-effects.
		 *
		 * *Note*: Attempting to pass a non-`nullptr` `allocation` address not allocated by the
		 * allocator *will* result in erroneous implementation behavior.
		 */
		virtual void deallocate(void * allocation) = 0;
	};
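	/*
	 * A sketch of how a custom strategy might implement the [allocator] interface from an
	 * importing translation unit. The `arena_allocator` name and its fixed backing storage are
	 * illustrative assumptions rather than part of the module.
	 *
	 *	struct arena_allocator : public coral::allocator {
	 *		coral::u8 storage[1024];
	 *		coral::usize used = 0;
	 *
	 *		coral::u8 * reallocate(coral::u8 * allocation, coral::usize requested_size) override {
	 *			// This arena only bump-allocates fresh blocks; resize requests are refused.
	 *			if ((allocation != nullptr) || ((this->used + requested_size) > sizeof(this->storage))) return nullptr;
	 *
	 *			coral::u8 * const block = this->storage + this->used;
	 *
	 *			this->used += requested_size;
	 *
	 *			return block;
	 *		}
	 *
	 *		void deallocate(void * allocation) override {
	 *			// Individual deallocations are no-ops for an arena.
	 *		}
	 *	};
	 */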
	/**
	 * Length-delimited pointer type that describes how many elements of `type` it references,
	 * providing a type-safe wrapper for passing arrays and zero-terminated strings to functions.
	 */
	template <typename type> struct slice {
		/**
		 * Number of `type` elements referenced.
		 */
		usize length;

		/**
		 * Base element address referenced.
		 */
		type * pointer;

		constexpr slice() {
			this->length = 0;
			this->pointer = nullptr;
		}

		constexpr slice(char const *&& zstring) {
			this->pointer = zstring;
			this->length = 0;

			while (zstring[length] != 0) this->length += 1;
		}

		constexpr slice(type * slice_pointer, usize slice_length) {
			this->pointer = slice_pointer;
			this->length = slice_length;
		}

		constexpr slice(type * slice_begin, type * slice_end) {
			this->pointer = slice_begin;
			this->length = static_cast<usize>(slice_end - slice_begin);
		}

		template <usize array_size> constexpr slice(type(&array)[array_size]) {
			this->pointer = array;
			this->length = array_size;
		}

		/**
		 * Reinterprets the data referenced as a series of bytes.
		 *
		 * The returned view is constant to protect against inadvertent memory corruption.
		 */
		slice<u8 const> as_bytes() const {
			return {reinterpret_cast<u8 const *>(this->pointer), this->length * sizeof(type)};
		}

		/**
		 * Reinterprets the data referenced as a series of chars.
		 *
		 * The returned view is constant to protect against inadvertent memory corruption.
		 *
		 * *Note*: the returned value has no guarantees about the validity of any specific
		 * character encoding.
		 */
		slice<char const> as_chars() const {
			return {reinterpret_cast<char const *>(this->pointer), this->length * sizeof(type)};
		}

		/**
		 * Returns the base pointer of the slice.
		 */
		constexpr type * begin() const {
			return this->pointer;
		}

		/**
		 * Returns the tail pointer of the slice.
		 */
		constexpr type * end() const {
			return this->pointer + this->length;
		}

		/**
		 * Returns a new slice with the base pointer offset by `index` elements and a length of
		 * `range` elements from `index`.
		 *
		 * *Note*: attempting to slice with an `index` or `range` outside of the existing slice
		 * bounds will result in safety-checked behavior.
		 */
		constexpr slice sliced(usize index, usize range) const {
			if ((this->length <= index) || ((range + index) > this->length)) unreachable();

			return {this->pointer + index, range};
		}

		operator slice<type const>() const {
			return (*reinterpret_cast<slice<type const> const *>(this));
		}

		constexpr type & operator[](usize index) const {
			if (this->length <= index) unreachable();

			return this->pointer[index];
		}
	};
}
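/*
 * A usage sketch for [coral::slice], as it might appear in an importing translation unit. The
 * variable names below are illustrative assumptions.
 *
 *	coral::i32 digits[] = {1, 2, 3, 4};
 *	coral::slice<coral::i32> const all{digits};                // Length 4, deduced from the array bound.
 *	coral::slice<coral::i32> const middle = all.sliced(1, 2);  // References {2, 3}.
 *	coral::slice<coral::u8 const> const raw = all.as_bytes();  // 16 bytes with a 4-byte `i32`.
 *	coral::i32 const first = all[0];                           // Out-of-bounds access is safety-checked.
 */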
// Math functions.
export namespace coral {
	/**
	 * Returns the maximum value between `a` and `b`.
	 */
	template <typename scalar> constexpr scalar max(scalar const & a, scalar const & b) {
		return (a > b) ? a : b;
	}

	/**
	 * Returns the minimum value between `a` and `b`.
	 */
	template <typename scalar> constexpr scalar min(scalar const & a, scalar const & b) {
		return (a < b) ? a : b;
	}

	/**
	 * Returns `value` clamped to the range between `min_value` and `max_value` (inclusive).
	 */
	template <typename scalar> constexpr scalar clamp(scalar const & value, scalar const & min_value, scalar const & max_value) {
		return max(min_value, min(max_value, value));
	}

	/**
	 * Returns `value` rounded to the nearest whole number.
	 */
	f32 round32(f32 value) {
		return __builtin_roundf(value);
	}
}

/**
 * Allocates and initializes a type of `requested_size` bytes in `buffer`, returning its base
 * pointer. As a result of accepting a pre-allocated buffer, invocation does not allocate any
 * dynamic memory.
 *
 * *Note*: passing a `buffer` smaller than `requested_size` will result in safety-checked
 * behavior.
 */
export void * operator new(coral::usize requested_size, coral::slice<coral::u8> const & buffer) {
	if (buffer.length < requested_size) coral::unreachable();

	return buffer.pointer;
}

/**
 * Allocates and initializes a series of types totalling `requested_size` bytes in `buffer`,
 * returning the base pointer. As a result of accepting a pre-allocated buffer, invocation does
 * not allocate any dynamic memory.
 *
 * *Note*: passing a `buffer` smaller than `requested_size` will result in safety-checked
 * behavior.
 */
export void * operator new[](coral::usize requested_size, coral::slice<coral::u8> const & buffer) {
	if (buffer.length < requested_size) coral::unreachable();

	return buffer.pointer;
}

/**
 * Attempts to allocate and initialize a type of `requested_size` using `allocator`.
 *
 * *Note*: If the returned address is non-`nullptr`, it should be deallocated prior to program
 * exit. This may be achieved through either [coral::allocator::deallocate] or implementation-
 * specific allocator functionality.
 */
export [[nodiscard]] void * operator new(coral::usize requested_size, coral::allocator & allocator) {
	return allocator.reallocate(nullptr, requested_size);
}

/**
 * Attempts to allocate and initialize a series of types of `requested_size` using `allocator`.
 *
 * *Note*: If the returned address is non-`nullptr`, it should be deallocated prior to program
 * exit. This may be achieved through either [coral::allocator::deallocate] or implementation-
 * specific allocator functionality.
 */
export [[nodiscard]] void * operator new[](coral::usize requested_size, coral::allocator & allocator) {
	return allocator.reallocate(nullptr, requested_size);
}

// Wrapper types.
export namespace coral {
	/**
	 * Monadic container for a single-`element` value or nothing.
	 */
	template <typename element> struct [[nodiscard]] optional {
		optional() : buffer{0} {}

		optional(element const & value) : buffer{0} {
			(*reinterpret_cast<element *>(this->buffer)) = value;

			this->buffer[sizeof(element)] = 1;
		}

		optional(optional const & that) : buffer{0} {
			if (that.has_value()) {
				(*reinterpret_cast<element *>(this->buffer)) = *that;

				this->buffer[sizeof(element)] = 1;
			} else {
				this->buffer[sizeof(element)] = 0;
			}
		}

		/**
		 * Returns `true` if the optional contains a value, otherwise `false`.
		 */
		bool has_value() const {
			return this->buffer[sizeof(element)] == 1;
		}

		/**
		 * Attempts to call `apply` on the contained value, returning a new [optional] of whatever
		 * type `apply` returns.
		 *
		 * If the optional is empty, an empty optional will always be returned.
		 */
		template <typename functor> optional<std::invoke_result_t<functor, element>> map(functor const & apply) const {
			if (this->has_value()) return apply(**this);

			return {};
		}

		/**
		 * Returns the contained value or `fallback` if the optional is empty.
		 */
		element const & value_or(element const & fallback) const {
			return this->has_value() ? *reinterpret_cast<element const *>(this->buffer) : fallback;
		}

		element & operator *() {
			if (!this->has_value()) unreachable();

			return *reinterpret_cast<element *>(this->buffer);
		}

		element const & operator *() const {
			if (!this->has_value()) unreachable();

			return *reinterpret_cast<element const *>(this->buffer);
		}

		private:
		u8 buffer[sizeof(element) + 1];
	};

	/**
	 * Monadic container for a discriminating union of either `value_element` or `error_element`.
	 */
	template <typename value_element, typename error_element> struct [[nodiscard]] expected {
		expected(value_element const & value) : buffer{0} {
			(*reinterpret_cast<value_element *>(this->buffer)) = value;

			this->buffer[buffer_size] = 1;
		}

		expected(error_element const & error) : buffer{0} {
			(*reinterpret_cast<error_element *>(this->buffer)) = error;
		}

		/**
		 * Returns `true` if the expected contains a value, otherwise `false` if it holds an
		 * error.
		 */
		bool is_ok() const {
			return this->buffer[buffer_size];
		}

		/**
		 * Returns a reference to the contained value.
		 *
		 * *Note*: attempting to access the value of an erroneous expected will trigger safety-
		 * checked behavior.
		 */
		value_element & value() {
			if (!this->is_ok()) unreachable();

			return *reinterpret_cast<value_element *>(this->buffer);
		}

		/**
		 * Returns the contained value.
		 *
		 * *Note*: attempting to access the value of an erroneous expected will trigger safety-
		 * checked behavior.
		 */
		value_element const & value() const {
			if (!this->is_ok()) unreachable();

			return *reinterpret_cast<value_element const *>(this->buffer);
		}

		/**
		 * Returns a reference to the contained error.
		 *
		 * *Note*: attempting to access the error of a non-erroneous expected will trigger safety-
		 * checked behavior.
		 */
		error_element & error() {
			if (this->is_ok()) unreachable();

			return *reinterpret_cast<error_element *>(this->buffer);
		}

		/**
		 * Returns the contained error.
		 *
		 * *Note*: attempting to access the error of a non-erroneous expected will trigger safety-
		 * checked behavior.
		 */
		error_element const & error() const {
			if (this->is_ok()) unreachable();

			return *reinterpret_cast<error_element const *>(this->buffer);
		}

		private:
		static constexpr usize buffer_size = max(sizeof(value_element), sizeof(error_element));

		u8 buffer[buffer_size + 1];
	};
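	/*
	 * A usage sketch for [optional] and [expected], as they might appear in an importing
	 * translation unit. The `parse_error` enum and `parse_digit` function are illustrative
	 * assumptions.
	 *
	 *	enum class parse_error { not_a_digit };
	 *
	 *	coral::expected<coral::i32, parse_error> parse_digit(char const c) {
	 *		if ((c < '0') || (c > '9')) return parse_error::not_a_digit;
	 *
	 *		return static_cast<coral::i32>(c - '0');
	 *	}
	 *
	 *	coral::optional<coral::i32> const found = 5;
	 *	coral::i32 const doubled = found.map([](coral::i32 n) { return n * 2; }).value_or(0); // 10.
	 */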
	template <typename> struct callable;

	/**
	 * Type-erasing wrapper for functor types that have a call operator with a return value
	 * matching `return_value` and arguments matching `argument_values`.
	 */
	template <typename return_value, typename... argument_values> struct callable<return_value(argument_values...)> {
		using function = return_value(*)(argument_values...);

		callable(function callable_function) {
			this->dispatcher = [](u8 const * userdata, argument_values... arguments) -> return_value {
				return (*reinterpret_cast<function const *>(userdata))(arguments...);
			};

			new (this->capture) function{callable_function};
		}

		callable(callable const &) = delete;

		template <typename functor> callable(functor const & callable_functor) {
			this->dispatcher = [](u8 const * userdata, argument_values... arguments) -> return_value {
				return (*reinterpret_cast<functor const *>(userdata))(arguments...);
			};

			new (this->capture) functor{callable_functor};
		}

		return_value operator()(argument_values const &... arguments) const {
			return this->dispatcher(this->capture, arguments...);
		}

		private:
		static constexpr usize capture_size = 24;

		return_value(* dispatcher)(u8 const * userdata, argument_values... arguments);

		u8 capture[capture_size];
	};
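	/*
	 * A usage sketch for [callable], as it might appear in an importing translation unit; the
	 * lambda and the `accumulate` name are illustrative assumptions. Captured state is copied
	 * into the wrapper and must fit within its 24-byte capture buffer.
	 *
	 *	coral::i32 total = 0;
	 *
	 *	coral::callable<void(coral::i32)> const accumulate{[&total](coral::i32 n) { total += n; }};
	 *
	 *	accumulate(5);
	 *	accumulate(10); // `total` is now 15.
	 */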
	/**
	 * Errors that may occur while executing an opaque I/O operation via the [reader] and
	 * [writer] interfaces.
	 */
	enum class io_error {
		unavailable,
	};

	/**
	 * Readable resource interface.
	 */
	struct reader {
		virtual ~reader() {}

		/**
		 * Attempts to fill `data` with whatever data the reader has to offer, returning the
		 * number of bytes actually read.
		 *
		 * Should the read operation fail for any reason, an [io_error] is returned instead.
		 */
		virtual expected<usize, io_error> read(slice<u8> const & data) = 0;
	};

	/**
	 * Writable resource interface.
	 */
	struct writer {
		virtual ~writer() {}

		/**
		 * Attempts to write `data` out to the writer, returning the number of bytes actually
		 * written.
		 *
		 * Should the write operation fail for any reason, an [io_error] is returned instead.
		 */
		virtual expected<usize, io_error> write(slice<u8 const> const & data) = 0;
	};
}

// Input/output operations.
export namespace coral {
	/**
	 * Returns the object pointed to by `value` reinterpreted as a sequence of bytes.
	 */
	slice<u8 const> as_bytes(auto const * value) {
		return {reinterpret_cast<u8 const *>(value), sizeof(*value)};
	}

	/**
	 * Compares `a` and `b`, returning the difference between them or `0` if they are identical.
	 */
	constexpr size compare(slice<u8 const> const & a, slice<u8 const> const & b) {
		usize const range = min(a.length, b.length);

		for (usize index = 0; index < range; index += 1) {
			size const difference = static_cast<size>(a[index]) - static_cast<size>(b[index]);

			if (difference != 0) return difference;
		}

		return static_cast<size>(a.length) - static_cast<size>(b.length);
	}

	/**
	 * Copies the contents of `origin` into `target`.
	 *
	 * *Note*: safety-checked behavior is triggered if `target` is smaller than `origin`.
	 */
	void copy(slice<u8> const & target, slice<u8 const> const & origin) {
		if (target.length < origin.length) unreachable();

		for (usize i = 0; i < origin.length; i += 1) target[i] = origin[i];
	}

	/**
	 * Zeroes the contents of `target`.
	 */
	void zero(slice<u8> const & target) {
		for (usize i = 0; i < target.length; i += 1) target[i] = 0;
	}

	/**
	 * Tests the equality of `a` against `b`, returning `true` if they contain identical bytes,
	 * otherwise `false`.
	 */
	constexpr bool equals(slice<u8 const> const & a, slice<u8 const> const & b) {
		if (a.length != b.length) return false;

		for (usize i = 0; i < a.length; i += 1) if (a[i] != b[i]) return false;

		return true;
	}

	/**
	 * Returns a hash code generated from the values in `bytes`.
	 *
	 * *Note*: the returned hash code is not guaranteed to be unique.
	 */
	constexpr usize hash(slice<u8 const> const & bytes) {
		usize hash_code = 5381;

		for (u8 const byte : bytes) hash_code = ((hash_code << 5) + hash_code) + byte;

		return hash_code;
	}

	/**
	 * Swaps the values of `element` in `a` and `b` around using copy semantics.
	 */
	template <typename element> constexpr void swap(element & a, element & b) {
		element const temp = a;

		a = b;
		b = temp;
	}

	/**
	 * Streams the data from `input` to `output`, using `buffer` as temporary transfer space.
	 *
	 * The returned [expected] can be used to introspect if `input` or `output` encountered any
	 * issues during streaming, otherwise it will contain the number of bytes streamed.
	 *
	 * *Note*: if `buffer` has a length of `0`, no data will be streamed as there is nowhere to
	 * temporarily place data during streaming.
	 */
	expected<usize, io_error> stream(writer & output, reader & input, slice<u8> const & buffer) {
		usize total_bytes_written = 0;
		expected<usize, io_error> bytes_read = input.read(buffer);

		if (!bytes_read.is_ok()) return bytes_read.error();

		usize read = bytes_read.value();

		while (read != 0) {
			expected<usize, io_error> const bytes_written = output.write(buffer.sliced(0, read));

			if (!bytes_written.is_ok()) return bytes_written.error();

			total_bytes_written += bytes_written.value();
			bytes_read = input.read(buffer);

			if (!bytes_read.is_ok()) return bytes_read.error();

			read = bytes_read.value();
		}

		return total_bytes_written;
	}

	/**
	 * Attempts to format and print `value` as an unsigned integer out to `output`.
	 *
	 * The returned [expected] can be used to introspect if `output` encountered any issues during
	 * printing, otherwise it will contain the number of characters used to print `value` as text.
	 */
	expected<usize, io_error> print_unsigned(writer & output, u64 value) {
		if (value == 0) return output.write(slice{"0", 1}.as_bytes());

		u8 buffer[20]{0};
		usize buffer_count{0};

		while (value != 0) {
			constexpr usize radix{10};

			buffer[buffer_count] = static_cast<u8>((value % radix) + '0');
			value = (value / radix);
			buffer_count += 1;
		}

		usize const half_buffer_count{buffer_count / 2};

		for (usize i = 0; i < half_buffer_count; i += 1) swap(buffer[i], buffer[buffer_count - i - 1]);

		return output.write({buffer, buffer_count});
	}
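	/*
	 * A sketch of a minimal [writer] implementation driven through [print_unsigned], as it might
	 * appear in an importing translation unit. The `counting_writer` type, which discards bytes
	 * while counting them, is an illustrative assumption.
	 *
	 *	struct counting_writer : public coral::writer {
	 *		coral::usize total = 0;
	 *
	 *		coral::expected<coral::usize, coral::io_error> write(coral::slice<coral::u8 const> const & data) override {
	 *			this->total += data.length;
	 *
	 *			return data.length;
	 *		}
	 *	};
	 *
	 *	counting_writer out;
	 *
	 *	if (coral::print_unsigned(out, 1234).is_ok()) {
	 *		// `out.total` is now 4: one byte per printed character of "1234".
	 *	}
	 */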
	/**
	 * Returns a reference to a shared [allocator] which will always return `nullptr` on calls to
	 * [allocator::reallocate].
	 */
	allocator & null_allocator() {
		static struct : public allocator {
			u8 * reallocate(u8 * maybe_allocation, usize requested_size) override {
				if (maybe_allocation != nullptr) unreachable();

				return nullptr;
			}

			void deallocate(void * allocation) override {
				if (allocation != nullptr) unreachable();
			}
		} a;

		return a;
	}

	/**
	 * Multiplexing byte-based ring buffer of `capacity` bytes that may be used for memory-backed
	 * I/O operations and lightweight data construction.
	 */
	template <usize capacity> struct fixed_buffer : public writer, public reader {
		fixed_buffer(u8 fill_value) {
			for (usize index = 0; index < capacity; index += 1) this->data[index] = fill_value;
		}

		/**
		 * Returns the base pointer of the buffer data.
		 */
		u8 * begin() {
			return this->data;
		}

		/**
		 * Returns the base pointer of the buffer data.
		 */
		u8 const * begin() const {
			return this->data;
		}

		/**
		 * Returns the tail pointer of the buffer data.
		 */
		u8 * end() {
			return this->data + this->filled;
		}

		/**
		 * Returns the tail pointer of the buffer data.
		 */
		u8 const * end() const {
			return this->data + this->filled;
		}

		/**
		 * Returns `true` if the buffer has been completely filled with data.
		 */
		bool is_full() const {
			return this->filled == capacity;
		}

		/**
		 * Reads whatever data is in the buffer into `data`, returning the number of bytes read
		 * from the buffer.
		 */
		expected<usize, io_error> read(slice<u8> const & data) override {
			usize const readable_length = min(this->filled, data.length);

			this->filled -= readable_length;

			for (usize index = 0; index < readable_length; index += 1) {
				data[index] = this->data[this->read_index];
				this->read_index = (this->read_index + 1) % capacity;
			}

			return readable_length;
		}

		/**
		 * Attempts to write `data` to the buffer, returning the number of bytes written or
		 * [io_error::unavailable] if it has been completely filled and no more bytes can be
		 * written.
		 */
		expected<usize, io_error> write(slice<u8 const> const & data) override {
			if (this->is_full()) return io_error::unavailable;

			usize const writable_length = min(data.length, capacity - this->filled);

			this->filled += writable_length;

			for (usize index = 0; index < writable_length; index += 1) {
				this->data[this->write_index] = data[index];
				this->write_index = (this->write_index + 1) % capacity;
			}

			return writable_length;
		}

		private:
		usize filled = 0;

		usize read_index = 0;

		usize write_index = 0;

		u8 data[capacity];
	};
}
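/*
 * A sketch of memory-backed streaming with [coral::fixed_buffer] and [coral::stream], as it might
 * appear in an importing translation unit. The buffer sizes and variable names are illustrative
 * assumptions.
 *
 *	coral::fixed_buffer<64> source{0};
 *	coral::fixed_buffer<64> sink{0};
 *	coral::u8 transfer[16] = {};
 *
 *	if (coral::print_unsigned(source, 42).is_ok()) {
 *		// Drain `source` into `sink` through the 16-byte transfer space.
 *		coral::expected<coral::usize, coral::io_error> const streamed = coral::stream(sink, source, coral::slice<coral::u8>{transfer});
 *
 *		// On success, `streamed.value()` is 2: one byte per character of "42".
 *	}
 */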