// ona/source/coral.cpp

module;
#include <cstdint>
#include <cstddef>
#include <type_traits>
#include <concepts>
export module coral;
// Runtime utilities.
export namespace coral {
/**
* Triggers safety-checked behavior in debug mode.
*
* In release mode, the compiler can use this function as a marker to optimize out safety-checked logic branches
* that should never be executed.
*/
[[noreturn]] void unreachable() {
// Assumes the conventional NDEBUG macro marks release builds: trap in debug so safety-check failures stop the
// program immediately, otherwise leave only the optimizer hint.
#ifndef NDEBUG
__builtin_trap();
#endif

__builtin_unreachable();
}
}
// Concrete and interface types.
export namespace coral {
using usize = size_t;

// Signed counterpart to usize, provided by <cstddef>.
using size = ptrdiff_t;

using u8 = uint8_t;
usize const u8_max = 0xff;

using i8 = int8_t;

using u16 = uint16_t;
usize const u16_max = 0xffff;

using i16 = int16_t;

using u32 = uint32_t;
usize const u32_max = 0xffffffff;

using i32 = int32_t;
usize const i32_max = 0x7fffffff;

using u64 = uint64_t;
using i64 = int64_t;

using f32 = float;
using f64 = double;

/**
* Base type for runtime-pluggable memory allocation strategies used by the core library.
*/
struct allocator {
virtual ~allocator() {};
/**
* If `allocation` is `nullptr`, the allocator will attempt to allocate a new memory block of `requested_size`
* bytes. Otherwise, the allocator will attempt to reallocate `allocation` to be `requested_size` bytes in size.
*
* The returned address will point to a dynamically allocated buffer of `requested_size` bytes if the operation
* was successful, otherwise `nullptr`.
*
* *Note*: If the returned address is a non-`nullptr`, it should be deallocated prior to program exit. This may
* be achieved through either [deallocate] or implementation-specific allocator functionality.
*
* *Note*: Attempting to pass a non-`nullptr` `allocation` address not allocated by the allocator *will* result
* in erroneous, implementation-defined behavior.
*
* *Note*: After invocation, `allocation` should be considered an invalid memory address.
*/
[[nodiscard]] virtual u8 * reallocate(u8 * allocation, usize requested_size) = 0;

/**
* If `allocation` points to a non-`nullptr` address, the allocator will deallocate it. Otherwise, the function
* has no side-effects.
*
* *Note*: Attempting to pass a non-`nullptr` `allocation` address not allocated by the allocator *will* result
* in erroneous, implementation-defined behavior.
*/
virtual void deallocate(void * allocation) = 0;
};
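/*
 * Illustrative sketch (editor-added, not part of the original module): a minimal arena-style [allocator]
 * implementation that hands out storage from a caller-owned byte range. The type and member names below are
 * hypothetical.
 *
 *   struct arena_allocator : allocator {
 *     u8 * memory{nullptr};
 *     usize capacity{0};
 *     usize used{0};
 *
 *     arena_allocator(u8 * arena, usize arena_size) : memory{arena}, capacity{arena_size} {}
 *
 *     [[nodiscard]] u8 * reallocate(u8 * allocation, usize requested_size) override {
 *       // This sketch only serves fresh allocations; resizing an existing block is unsupported.
 *       if (allocation != nullptr) return nullptr;
 *       if ((this->capacity - this->used) < requested_size) return nullptr;
 *
 *       u8 * const block = this->memory + this->used;
 *       this->used += requested_size;
 *       return block;
 *     }
 *
 *     void deallocate(void * allocation) override {
 *       // Arena memory is reclaimed all at once when the arena storage itself is released.
 *     }
 *   };
 */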
2023-02-18 19:40:12 +00:00
/**
2023-02-26 01:19:16 +00:00
* Length-signed pointer type that describes how many elements of `type` it references, providing a type-safe
* wrapper for passing arrays and zero-terminated strings to functions.
*
* **Note**: slices take no ownership of their data, making it the responsibility of the caller to manage the
* lifetime of any data referenced by it.
2023-02-18 19:40:12 +00:00
*/
2023-02-18 03:34:40 +00:00
template<typename type> struct slice {
2023-02-18 19:40:12 +00:00
/**
* Number of `type` elements referenced.
*/
usize length{0};
/**
* Base element address referenced.
*/
type * pointer{nullptr};
constexpr slice() = default;
constexpr slice(char const *&& zstring) requires std::same_as<type, char const> {
this->pointer = zstring;
this->length = 0;

while (zstring[this->length] != 0) this->length += 1;
}

constexpr slice(type * slice_pointer, usize slice_length) {
this->pointer = slice_pointer;
this->length = slice_length;
}

constexpr slice(type * slice_begin, type * slice_end) {
this->pointer = slice_begin;
this->length = static_cast<usize>(slice_end - slice_begin);
}
template<usize array_size> constexpr slice(type(&array)[array_size]) {
this->pointer = array;
this->length = array_size;
}

/**
* Reinterprets the data referenced as a series of bytes.
*
* The returned view is constant to protect against inadvertent memory corruption.
*/
slice<u8 const> as_bytes() const {
return {reinterpret_cast<u8 const *>(this->pointer), this->length * sizeof(type)};
}

/**
* Reinterprets the data referenced as a series of chars.
*
* The returned view is constant to protect against inadvertent memory corruption.
*
* *Note* the returned value makes no guarantees that the data is valid in any specific character encoding.
*/
slice<char const> as_chars() const {
return {reinterpret_cast<char const *>(this->pointer), this->length * sizeof(type)};
}

/**
* Returns the base pointer of the slice.
*/
constexpr type * begin() const {
return this->pointer;
}

/**
* Returns the pointer one past the final element of the slice.
*/
constexpr type * end() const {
return this->pointer + this->length;
}

/**
* Returns a new slice with the base pointer offset by `index` elements and a length of `range` elements.
*
* *Note* that attempting to slice with an `index` or `range` outside of the existing slice bounds will result
* in safety-checked behavior.
*/
constexpr slice sliced(usize index, usize range) const {
if ((this->length <= index) || ((range + index) > this->length)) unreachable();

return {this->pointer + index, range};
}

operator slice<type const>() const {
return slice<type const>{this->pointer, this->length};
}

constexpr type & operator[](usize index) const {
if (this->length <= index) unreachable();

return this->pointer[index];
}
};
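/*
 * Illustrative usage (editor-added sketch, not part of the original module): slices can wrap fixed arrays and
 * string literals without copying, and re-view their contents as raw bytes.
 *
 *   i32 values[4] {1, 2, 3, 4};
 *   slice<i32> const numbers{values};                   // length 4, pointing at the array.
 *   slice<i32> const middle = numbers.sliced(1, 2);     // views elements {2, 3}.
 *   slice<u8 const> const raw = numbers.as_bytes();     // 16 bytes of read-only storage.
 *   slice<char const> const name{"coral", 5};           // explicit pointer-and-length view of a literal.
 */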
2023-02-18 19:40:12 +00:00
}
// Math functions.
export namespace coral {
/**
* Returns the maximum value between `a` and `b`.
*/
template<typename scalar> constexpr scalar max(scalar const & a, scalar const & b) {
return (a > b) ? a : b;
}
/**
* Returns the minimum value between `a` and `b`.
*/
template<typename scalar> constexpr scalar min(scalar const & a, scalar const & b) {
return (a < b) ? a : b;
}
/**
* Returns `value` clamped to the inclusive range of `min_value` to `max_value`.
*/
template<typename scalar> constexpr scalar clamp(scalar const & value,
scalar const & min_value, scalar const & max_value) {
return max(min_value, min(max_value, value));
}
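/*
 * Worked example (editor-added): clamp() resolves to max(min_value, min(max_value, value)), so
 *
 *   clamp(12, 0, 10) == 10  // values above the range collapse to max_value.
 *   clamp(-3, 0, 10) == 0   // values below the range collapse to min_value.
 *   clamp(7, 0, 10) == 7    // values inside the range pass through unchanged.
 */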
/**
* Returns `value` rounded to the nearest whole number.
*/
f32 round32(f32 value) {
return __builtin_roundf(value);
}
}
/**
* Provides storage for a type of `requested_size` bytes from `buffer`, returning its base pointer. As a result of
* accepting a pre-allocated buffer, invocation does not allocate any dynamic memory.
*
* *Note*: passing a `buffer` smaller than `requested_size` will result in safety-checked behavior.
*/
export void * operator new(coral::usize requested_size, coral::slice<coral::u8> const & buffer) {
if (buffer.length < requested_size) coral::unreachable();

return buffer.pointer;
}
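/*
 * Illustrative usage (editor-added sketch): constructing a value inside caller-provided storage. The storage must
 * be at least as large as the constructed type and suitably aligned for it; the names below are hypothetical.
 *
 *   alignas(coral::i32) coral::u8 storage[sizeof(coral::i32)] {0};
 *   coral::i32 * const counter = new (coral::slice<coral::u8>{storage}) coral::i32{42};
 */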
/**
* Provides storage for a series of types totalling `requested_size` bytes from `buffer`, returning the base
* pointer. As a result of accepting a pre-allocated buffer, invocation does not allocate any dynamic memory.
*
* *Note*: passing a `buffer` smaller than `requested_size` will result in safety-checked behavior.
*/
export void * operator new[](coral::usize requested_size, coral::slice<coral::u8> const & buffer) {
if (buffer.length < requested_size) coral::unreachable();

return buffer.pointer;
}
/**
* Attempts to allocate and initialize a type of `requested_size` using `allocator`, returning `nullptr` on failure.
*
* *Note*: If the returned address is a non-`nullptr`, it should be deallocated prior to program exit. This may be
* achieved through either [coral::allocator::deallocate] or implementation-specific allocator functionality.
*/
export [[nodiscard]] void * operator new(coral::usize requested_size, coral::allocator & allocator) noexcept {
return allocator.reallocate(nullptr, requested_size);
}
/**
* Attempts to allocate and initialize a series of types of `requested_size` using `allocator`, returning `nullptr`
* on failure.
*
* *Note*: If the returned address is a non-`nullptr`, it should be deallocated prior to program exit. This may be
* achieved through either [coral::allocator::deallocate] or implementation-specific allocator functionality.
*/
export [[nodiscard]] void * operator new[](coral::usize requested_size, coral::allocator & allocator) noexcept {
return allocator.reallocate(nullptr, requested_size);
}
/**
* If `pointer` is a non-`nullptr` value, the referenced memory will be deallocated using `allocator`. Otherwise, the
* function has no side-effects.
*
* *Note*: passing a `pointer` value that was not allocated by `allocator` will result in erroneous behavior defined by
* the [coral::allocator] implementation.
*/
export void operator delete(void * pointer, coral::allocator & allocator) {
return allocator.deallocate(pointer);
}
/**
* If `pointer` is a non-`nullptr` value, the referenced memory block will be deallocated using `allocator`. Otherwise,
* the function has no side-effects.
*
* *Note*: passing a `pointer` value that was not allocated by `allocator` will result in erroneous behavior defined by
* the [coral::allocator] implementation.
*/
export void operator delete[](void * pointer, coral::allocator & allocator) {
return allocator.deallocate(pointer);
}
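/*
 * Illustrative usage (editor-added sketch): allocating through a [coral::allocator]. `some_allocator` stands in
 * for any concrete implementation and is hypothetical. Because the allocating forms may return `nullptr`, the
 * result is checked before use and later released through the matching placement delete.
 *
 *   coral::i64 * const value = new (some_allocator) coral::i64{0};
 *
 *   if (value != nullptr) {
 *     // ... use *value ...
 *     operator delete(value, some_allocator);
 *   }
 */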
// Wrapper types.
export namespace coral {
template<typename callable, typename... arguments> concept function_pointer =
requires (callable callable_value, arguments... value_arguments) {
{*callable_value};
{callable_value(value_arguments...)};
};
template<typename callable, typename... arguments> concept functor =
requires (callable callable_value, arguments... value_arguments) {
{callable_value.operator()(value_arguments...)};
};
template<typename> struct closure;
/**
* Type-erasing view wrapper for both function and functor types that have a call operator with a return value
* matching `result` and arguments matching `arguments`.
*
* A closure may be constructed from either of the following inputs:
*
* * A function pointer whose arguments and return value are implicitly convertible to `arguments` and
* `result`.
*
* * An L or R-value functor reference.
*
* **Note**: closures take no ownership of their data, making it the responsibility of the caller to manage the
* lifetime of any functor assigned to it.
*/
template<typename result, typename... arguments> struct closure<result(arguments...)> {
template<typename callable> closure(callable call) requires function_pointer<callable, arguments...> {
this->dispatch = [](void * context, arguments... dispatch_arguments) -> result {
return (reinterpret_cast<callable>(context))(dispatch_arguments...);
};

this->context = reinterpret_cast<void *>(call);
}
template<typename callable> closure(callable && call) requires functor<callable, arguments...> {
this->dispatch = [](void * context, arguments... dispatch_arguments) -> result {
return (*reinterpret_cast<callable *>(context))(dispatch_arguments...);
};

this->context = &call;
}
template<typename callable> closure(callable & call) requires functor<callable, arguments...> {
this->dispatch = [](void * context, arguments... dispatch_arguments) -> result {
return (*reinterpret_cast<callable *>(context))(dispatch_arguments...);
};

this->context = &call;
}
closure(closure const &) = delete;
result operator()(arguments const &... call_arguments) const {
return this->dispatch(this->context, call_arguments...);
}
private:
void * context;
result(* dispatch)(void *, arguments...);
};
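/*
 * Illustrative usage (editor-added sketch): a [closure] can view either a plain function or a functor such as a
 * lambda, provided the call signature matches. The viewed functor must outlive the closure.
 *
 *   i32 add_one(i32 value) { return value + 1; }
 *
 *   auto doubler = [](i32 value) -> i32 { return value * 2; };
 *
 *   closure<i32(i32)> const from_function{add_one};
 *   closure<i32(i32)> const from_functor{doubler};
 *
 *   i32 const three = from_function(2);  // 3
 *   i32 const four = from_functor(2);    // 4
 */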
/**
* Monadic container holding either a single `element` value or nothing.
*/
template<typename element> struct [[nodiscard]] optional {
/**
* Constructs an empty [optional].
*/
constexpr optional() = default;
/**
* Constructs an [optional] that contains `value`.
*/
constexpr optional(element const & value) {
(*reinterpret_cast<element *>(this->buffer)) = value;
this->buffer[sizeof(element)] = 1;
}
/**
* Constructs an [optional] from `that`, copying over its data.
*/
constexpr optional(optional const & that) {
if (that.has_value()) {
(*reinterpret_cast<element *>(this->buffer)) = *that;
this->buffer[sizeof(element)] = 1;
} else {
this->buffer[sizeof(element)] = 0;
}
}
/**
* Returns `true` if the optional contains a value, otherwise `false`.
*/
bool has_value() const {
return this->buffer[sizeof(element)] == 1;
}
/**
* Monadically maps `apply` to the contained value if it exists, otherwise doing nothing.
*/
template<typename result> optional<result> map(closure<result(element const &)> const & apply) const {
if (this->has_value()) return apply(**this);
return {};
}
/**
* Returns the contained value or `fallback` if the optional is empty.
*/
element const & or_value(element const & fallback) const {
return this->has_value() ? *reinterpret_cast<element const *>(this->buffer) : fallback;
}
/**
* Returns a reference to the contained value.
*
* *Note*: attempting to access the value of an empty optional will trigger safety-checked behavior.
*/
element & operator *() {
if (!this->has_value()) unreachable();
return *reinterpret_cast<element *>(this->buffer);
}
/**
* Returns a const reference to the contained value.
*
* *Note*: attempting to access the value of an empty optional will trigger safety-checked behavior.
*/
element const & operator *() const {
if (!this->has_value()) unreachable();
return *reinterpret_cast<element const *>(this->buffer);
}
private:
alignas(element) u8 buffer[sizeof(element) + 1] {0};
};
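/*
 * Illustrative usage (editor-added sketch): [optional] expresses a possibly-absent value without resorting to
 * sentinel values.
 *
 *   optional<i32> const some_value{42};
 *   optional<i32> const no_value{};
 *
 *   bool const present = some_value.has_value();  // true
 *   i32 const a = some_value.or_value(0);         // 42
 *   i32 const b = no_value.or_value(0);           // 0
 *   i32 const c = *some_value;                    // 42; dereferencing an empty optional is safety-checked.
 */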
/**
* Monadic container for a discriminating union of either `expects` or `errors`.
*/
template<typename expects, typename errors> struct [[nodiscard]] expected {
template<typename value> using rebound = expected<value, errors>;
/**
* Constructs from `value`, creating an [expected] that contains the expected type.
*/
expected(expects const & value) {
(*reinterpret_cast<expects *>(this->buffer)) = value;
this->buffer[buffer_size] = 1;
}
/**
* Constructs from `error`, creating an [expected] that does not contain the expected type.
*/
expected(errors const & error) {
(*reinterpret_cast<errors *>(this->buffer)) = error;
}
/**
* Returns the contained error as an [optional].
*/
optional<errors> error() const {
if (this->is_error()) return *reinterpret_cast<errors const *>(this->buffer);
return {};
}
/**
* Returns `true` if the expected holds an error, otherwise `false` if it is ok.
*/
bool is_error() const {
return this->buffer[buffer_size] == 0;
}
/**
* Returns `true` if the expected contains the ok value, otherwise `false` if it holds an error.
*/
bool is_ok() const {
return this->buffer[buffer_size] == 1;
}
/**
* Monadically maps `apply` to the contained ok value if it exists, otherwise propagating the contained error.
*/
template<typename result> rebound<result> map(closure<result(expects const &)> const & apply) const {
if (this->is_ok()) return apply(*this->ok());
return *this->error();
}
/**
* Returns the contained ok value as an [optional].
*/
optional<expects> ok() const {
if (this->is_ok()) return *reinterpret_cast<expects const *>(this->buffer);
return {};
}
/**
* Returns the contained value or `value` if it is not ok.
*/
expects ok_or(expects value) const {
if (this->is_ok()) return *this->ok();
return value;
}
private:
static constexpr usize buffer_size = max(sizeof(expects), sizeof(errors));
alignas(max(alignof(expects), alignof(errors))) u8 buffer[buffer_size + 1] {0};
};
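/*
 * Illustrative usage (editor-added sketch): [expected] carries either an ok value or an error. The helper
 * `parse_digit` is hypothetical and re-uses [io_error] purely for illustration.
 *
 *   expected<u8, io_error> parse_digit(char const character) {
 *     if ((character >= '0') && (character <= '9')) return static_cast<u8>(character - '0');
 *     return io_error::unavailable;
 *   }
 *
 *   u8 const seven = parse_digit('7').ok_or(0);       // 7
 *   bool const failed = parse_digit('x').is_error();  // true
 */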
/**
* Errors that may occur while executing an opaque I/O operation via the [reader] and [writer] interfaces.
*/
enum class io_error {
unavailable,
};
/**
* Readable resource interface.
*/
struct reader {
virtual ~reader() {}

/**
* Attempts to fill `data` with whatever the reader has to offer, returning the number of bytes actually read.
*
* Should the read operation fail for any reason, an [io_error] is returned instead.
*/
virtual expected<usize, io_error> read(slice<u8> const & data) = 0;
};
/**
* Writable resource interface.
*/
struct writer {
virtual ~writer() {}

/**
* Attempts to write `data` out to the writer, returning the number of bytes actually written.
*
* Should the write operation fail for any reason, an [io_error] is returned instead.
*/
virtual expected<usize, io_error> write(slice<u8 const> const & data) = 0;
};
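/*
 * Illustrative sketch (editor-added, not part of the original module): a [writer] that appends into a fixed byte
 * buffer, reporting how many bytes it accepted. The name `buffer_writer` is hypothetical.
 *
 *   struct buffer_writer : writer {
 *     slice<u8> buffer;
 *     usize written{0};
 *
 *     buffer_writer(slice<u8> const & target) : buffer{target} {}
 *
 *     expected<usize, io_error> write(slice<u8 const> const & data) override {
 *       usize const writable = min(data.length, this->buffer.length - this->written);
 *
 *       for (usize i = 0; i < writable; i += 1) this->buffer[this->written + i] = data[i];
 *
 *       this->written += writable;
 *       return writable;
 *     }
 *   };
 */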
}
// Input/output operations.
export namespace coral {
/**
* Returns the object pointed to by `value` reinterpreted as a sequence of bytes.
*/
slice<u8 const> as_bytes(auto const * value) {
return {reinterpret_cast<u8 const *>(value), sizeof(*value)};
}

/**
* Compares `a` and `b`, returning the difference between them or `0` if they are identical.
*/
constexpr size compare(slice<u8 const> const & a, slice<u8 const> const & b) {
usize const range = min(a.length, b.length);

for (usize index = 0; index < range; index += 1) {
size const difference = static_cast<size>(a[index]) - static_cast<size>(b[index]);

if (difference != 0) return difference;
}

return static_cast<size>(a.length) - static_cast<size>(b.length);
}

/**
* Copies the contents of `origin` into `target`.
*
* *Note*: safety-checked behavior is triggered if `target` is smaller than `origin`.
*/
void copy(slice<u8> const & target, slice<u8 const> const & origin) {
if (target.length < origin.length) unreachable();

for (usize i = 0; i < origin.length; i += 1) target[i] = origin[i];
}
/**
* Zeroes the contents of `target`.
*/
void zero(slice<u8> const & target) {
for (usize i = 0; i < target.length; i += 1) target[i] = 0;
}

/**
* Tests the equality of `a` against `b`, returning `true` if they contain identical bytes, otherwise `false`.
*/
constexpr bool equals(slice<u8 const> const & a, slice<u8 const> const & b) {
if (a.length != b.length) return false;
for (usize i = 0; i < a.length; i += 1) if (a[i] != b[i]) return false;
return true;
}
/**
* Performs a linear search from the back of `bytes` for `byte`, returning the index of its last occurrence or an
* empty [optional] if it is not present.
*/
constexpr optional<usize> find_last(slice<u8 const> const & bytes, u8 byte) {
for (usize i = bytes.length; i > 0; i -= 1) if (bytes[i - 1] == byte) return i - 1;
return {};
}
/**
* Returns a hash code generated from the values in `bytes`.
*
* *Note:* the returned hash code is not guaranteed to be unique.
*/
constexpr usize hash(slice<u8 const> const & bytes) {
usize hash_code = 5381;
for (u8 const byte : bytes) hash_code = ((hash_code << 5) + hash_code) + byte;
return hash_code;
}
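/*
 * Worked example (editor-added): the byte utilities above compose with [slice::as_bytes] for simple string-like
 * comparisons and lookups.
 *
 *   slice<char const> const path{"source/coral.cpp", 16};
 *   slice<u8 const> const bytes = path.as_bytes();
 *
 *   bool const same = equals(bytes, bytes);             // true
 *   optional<usize> const dot = find_last(bytes, '.');  // index 12
 *   usize const code = hash(bytes);                     // deterministic, but not guaranteed unique
 */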
/**
* Swaps the values of `a` and `b` using copy semantics.
*/
template<typename element> constexpr void swap(element & a, element & b) {
element const temp = a;
a = b;
b = temp;
}
}