Compare commits: 1997c38e97...1cc19d41da

5 Commits

Author | SHA1 | Date
---|---|---
kayomn | 1cc19d41da |
kayomn | 033227b243 |
kayomn | 9f411025a7 |
kayomn | 2a44f5bf11 |
kayomn | 1d2356e942 |
@@ -14,4 +14,5 @@
 
     "git.detectSubmodulesLimit": 0,
     "git.ignoreSubmodules": true,
+    "debug.onTaskErrors": "showErrors",
 }
@@ -1,6 +1,25 @@
 {
     "version": "2.0.0",
 
+    "problemMatcher": {
+        "source": "zig",
+        "owner": "cpptools",
+
+        "fileLocation": [
+            "autoDetect",
+            "${cwd}",
+        ],
+
+        "pattern": {
+            "regexp": "^(.*?):(\\d+):(\\d*):?\\s+(?:fatal\\s+)?(warning|error):\\s+(.*)$",
+            "file": 1,
+            "line": 2,
+            "column": 3,
+            "severity": 4,
+            "message": 5,
+        }
+    },
+
     "tasks": [
         {
             "label": "Build Debug",
@@ -21,25 +40,6 @@
                 "clear": true,
                 "revealProblems": "onProblem",
             },
-
-            "problemMatcher": {
-                "source": "zig",
-                "owner": "cpptools",
-
-                "fileLocation": [
-                    "autoDetect",
-                    "${cwd}",
-                ],
-
-                "pattern": {
-                    "regexp": "^(.*?):(\\d+):(\\d*):?\\s+(?:fatal\\s+)?(warning|error):\\s+(.*)$",
-                    "file": 1,
-                    "line": 2,
-                    "column": 3,
-                    "severity": 4,
-                    "message": 5,
-                }
-            }
         },
         {
             "label": "Build Test",
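The shared `problemMatcher` introduced above parses compiler diagnostics of the general shape `file:line:column: error: message`; an illustrative (invented) line it would match is `src/main.zig:42:13: error: use of undeclared identifier 'foo'`, with capture group 1 feeding `file`, 2 `line`, 3 `column`, 4 `severity`, and 5 `message`.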
@@ -281,8 +281,7 @@ pub const FileSystem = union(enum) {
     ///
     pub const Path = struct {
         file_system: *FileSystem,
-        length: u8,
-        buffer: [max]u8,
+        path: oar.Path,
 
         ///
         /// With files typically being backed by a block device, they can produce a variety of
@@ -430,11 +429,10 @@ pub const FileSystem = union(enum) {
             }
         };
 
-        entry.* = archive.instance.find(path.buffer[0 .. path.length]) catch |err|
-            return switch (err) {
-                error.FileInaccessible => error.FileNotFound,
-                error.EntryNotFound => error.FileNotFound,
-            };
+        entry.* = archive.instance.find(path.path) catch |err| return switch (err) {
+            error.FileInaccessible => error.FileNotFound,
+            error.EntryNotFound => error.FileNotFound,
+        };
 
         return FileAccess{
             .context = entry,
@@ -460,15 +458,15 @@ pub const FileSystem = union(enum) {
         var path_buffer = std.mem.zeroes([4096]u8);
         const seperator_length = @boolToInt(native[native.len - 1] != seperator);
 
-        if ((native.len + seperator_length + path.length) >=
+        if ((native.len + seperator_length + path.path.length) >=
             path_buffer.len) return error.FileNotFound;
 
         std.mem.copy(u8, path_buffer[0 ..], native);
 
         if (seperator_length != 0) path_buffer[native.len] = seperator;
 
-        std.mem.copy(u8, path_buffer[native.len ..
-            path_buffer.len], path.buffer[0 .. path.length]);
+        std.mem.copy(u8, path_buffer[native.len .. path_buffer.
+            len], path.path.buffer[0 .. path.path.length]);
 
         ext.SDL_ClearError();
 
@@ -592,8 +590,7 @@ pub const FileSystem = union(enum) {
     pub fn joinedPath(file_system: *FileSystem, sequences: []const []const u8) PathError!Path {
         var path = Path{
             .file_system = file_system,
-            .buffer = std.mem.zeroes([Path.max]u8),
-            .length = 0,
+            .path = oar.Path.empty,
         };
 
         if (sequences.len != 0) {
@@ -607,25 +604,25 @@ pub const FileSystem = union(enum) {
 
             while (components.next()) |component| if (component.len != 0) {
                 for (component) |byte| {
-                    if (path.length == Path.max) return error.TooLong;
+                    if (path.path.length == Path.max) return error.TooLong;
 
-                    path.buffer[path.length] = byte;
-                    path.length += 1;
+                    path.path.buffer[path.path.length] = byte;
+                    path.path.length += 1;
                 }
 
                 if (components.hasNext()) {
-                    if (path.length == Path.max) return error.TooLong;
+                    if (path.path.length == Path.max) return error.TooLong;
 
-                    path.buffer[path.length] = '/';
-                    path.length += 1;
+                    path.path.buffer[path.path.length] = '/';
+                    path.path.length += 1;
                 }
             };
 
             if (index < last_sequence_index) {
-                if (path.length == Path.max) return error.TooLong;
+                if (path.path.length == Path.max) return error.TooLong;
 
-                path.buffer[path.length] = '/';
-                path.length += 1;
+                path.path.buffer[path.path.length] = '/';
+                path.path.length += 1;
             }
         };
     }
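A minimal, hypothetical caller sketch for the `joinedPath` signature shown above (the variable name and path components are invented; the call can also fail with `error.TooLong`):

// Hypothetical usage sketch, inside some error-returning function;
// `file_system` is assumed to be a valid *FileSystem.
const sprite_path = try file_system.joinedPath(&.{"data", "sprites", "hero.png"});

// Per the joining loop above, the components are written into the wrapped oar.Path
// separated by '/', so sprite_path.path holds the bytes "data/sprites/hero.png".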
@@ -26,7 +26,10 @@ pub const Archive = struct {
     /// As the archive is queried via [find], the cache is lazily assembled with the absolute
     /// offsets of each queried file.
    ///
-    const IndexCache = ona.table.Hashed([]const u8, u64, ona.table.string_context);
+    const IndexCache = ona.table.Hashed(Path, u64, .{
+        .equals = Path.equals,
+        .hash = Path.hash,
+    });
 
     ///
     /// Deinitializes the index cache of `archive`, freeing all associated memory.
@@ -43,12 +46,12 @@ pub const Archive = struct {
     ///
     /// The found [Entry] value is returned or a [FindError] if it failed to be found.
     ///
-    pub fn find(archive: *Archive, entry_path: []const u8) FindError!Entry {
+    pub fn find(archive: *Archive, entry_path: Path) FindError!Entry {
         return Entry{
             .header = find_header: {
                 var header = Entry.Header{
                     .revision = 0,
-                    .path = std.mem.zeroes(Path),
+                    .path = Path.empty,
                     .file_size = 0,
                     .absolute_offset = 0
                 };
@@ -73,7 +76,7 @@ pub const Archive = struct {
 
                 // Read first entry.
                 while ((try archive.file_access.read(mem.asBytes(&header))) == header_size) {
-                    if (mem.eql(u8, entry_path, header.path.buffer[0 .. header.path.length])) {
+                    if (entry_path.equals(header.path)) {
                         // If caching fails... oh well...
                         archive.index_cache.insert(entry_path, header.absolute_offset) catch {};
 
@@ -156,11 +159,33 @@ pub const Entry = struct {
 };
 
 ///
+/// Unique identifier pointing to an entry within an archive.
 ///
+/// A path does not do any verification that the given entry pointed to actually exists.
 ///
 pub const Path = extern struct {
     buffer: [255]u8,
     length: u8,
+
+    ///
+    /// An empty [Path] with a length of `0`.
+    ///
+    pub const empty = std.mem.zeroes(Path);
+
+    ///
+    /// Returns `true` if `this_path` is equal to `that_path, otherwise `false`.
+    ///
+    pub fn equals(this_path: Path, that_path: Path) bool {
+        return ona.io.equalsBytes(this_path.buffer[0 ..this_path.
+            length], that_path.buffer[0 .. that_path.length]);
+    }
+
+    ///
+    /// Returns the hash of the text in `path`.
+    ///
+    pub fn hash(path: Path) usize {
+        return ona.io.hashBytes(path.buffer[0 .. path.length]);
+    }
 };
 
 ///
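A hedged usage sketch of the new `Path` declarations (the test name and file name are invented, and it assumes the same module scope where `Path` and `std` are visible):

test "Path equality and hashing (illustrative)" {
    const name = "config.oar";
    var path = Path.empty;

    // Fill the fixed buffer and record the length, mirroring how headers store paths.
    std.mem.copy(u8, path.buffer[0 ..], name);
    path.length = name.len;

    // A path equals itself, equal paths hash identically, and it differs from the empty path.
    try std.testing.expect(path.equals(path));
    try std.testing.expect(path.hash() == path.hash());
    try std.testing.expect(!path.equals(Path.empty));
}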
@@ -286,6 +286,43 @@ pub const Writer = struct {
     }
 };
 
+///
+/// Returns `true` if `this_bytes` is the same length and contains the same data as `that_bytes`,
+/// otherwise `false`.
+///
+pub fn equalsBytes(this_bytes: []const u8, that_bytes: []const u8) bool {
+    return std.mem.eql(u8, this_bytes, that_bytes);
+}
+
+test "Equivalence of bytes" {
+    const bytes_sequence = &.{69, 42, 0};
+    const testing = std.testing;
+
+    try testing.expect(equalsBytes(bytes_sequence, bytes_sequence));
+    try testing.expect(!equalsBytes(bytes_sequence, &.{69, 42}));
+}
+
+///
+/// Returns a deterministic hash code compiled from each byte in `bytes`.
+///
+/// **Note** that this operation has `O(n)` time complexity.
+///
+pub fn hashBytes(bytes: []const u8) usize {
+    var hash = @as(usize, 5381);
+
+    for (bytes) |byte| hash = ((hash << 5) + hash) + byte;
+
+    return hash;
+}
+
+test "Hashing bytes" {
+    const bytes_sequence = &.{69, 42, 0};
+    const testing = std.testing;
+
+    try testing.expect(hashBytes(bytes_sequence) == hashBytes(bytes_sequence));
+    try testing.expect(hashBytes(bytes_sequence) != hashBytes(&.{69, 42}));
+}
+
 ///
 /// Writer that silently throws consumed data away and never fails.
 ///
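For reference, `hashBytes` is the classic djb2 string hash: the accumulator starts at 5381 and each byte is folded in as `hash * 33 + byte`, which the code writes in the equivalent shift-add form `((hash << 5) + hash) + byte`. For a single byte of 69, for example, that yields ((5381 << 5) + 5381) + 69 = 177642.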
@@ -173,35 +173,14 @@ pub fn KeyContext(comptime Key: type) type {
     };
 }
 
-///
-/// Tests if the contents of `this_string` lexically equals the contents of `that_string`.
-///
-fn equalsString(this_string: []const u8, that_string: []const u8) bool {
-    return std.mem.eql(u8, this_string, that_string);
-}
-
-///
-/// Hashes `string` into a hash value of `usize`.
-///
-fn hashString(string: []const u8) usize {
-    var hash = @as(usize, 5381);
-
-    for (string) |byte| hash = ((hash << 5) + hash) + byte;
-
-    return hash;
-}
-
-///
-/// A [KeyContext] for handling `[]const u8` types.
-///
-pub const string_context = KeyContext([]const u8){
-    .hash = hashString,
-    .equals = equalsString,
-};
-
-test "Hashed table manipulation with string context" {
+test "Hashed table manipulation with bytes context" {
     const testing = std.testing;
-    var table = try Hashed([]const u8, u32, string_context).init(testing.allocator);
+    const io = @import("./io.zig");
+
+    var table = try Hashed([]const u8, u32, .{
+        .equals = io.equalsBytes,
+        .hash = io.hashBytes,
+    }).init(testing.allocator);
 
     defer table.deinit();
 