Mirror of https://github.com/PixelGuys/Cubyz.git, synced 2025-08-03 11:17:05 -04:00
Move all the allocators to a separate file. They can now be imported with main.heap (analogous to std.heap).

I also removed the unused BufferFallbackAllocator, which was from a time when I was still experimenting with different allocators.
This commit is contained in:
parent
3c7231cb43
commit
09fc516828
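Functionally, every hunk below is the same mechanical rename of the import path. A minimal before/after sketch (illustrative, not a file from this diff):

	// Before this commit: allocator types were reached through main.utils.
	const NeverFailingAllocator = main.utils.NeverFailingAllocator;
	var arena = main.utils.NeverFailingArenaAllocator.init(main.globalAllocator);

	// After this commit: they live in the main.heap namespace, mirroring std.heap.
	const NeverFailingAllocator = main.heap.NeverFailingAllocator;
	var arena = main.heap.NeverFailingArenaAllocator.init(main.globalAllocator);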
@@ -7,7 +7,7 @@ const Item = main.items.Item;
 const ItemStack = main.items.ItemStack;
 const Tool = main.items.Tool;
 const utils = main.utils;
-const NeverFailingAllocator = utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;
 const vec = main.vec;
 const Vec3d = vec.Vec3d;
 const Vec3f = vec.Vec3f;
@@ -6,9 +6,9 @@ const migrations_zig = @import("migrations.zig");
 const ZonElement = @import("zon.zig").ZonElement;
 const main = @import("main.zig");
 const biomes_zig = main.server.terrain.biomes;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

-var arena: main.utils.NeverFailingArenaAllocator = undefined;
+var arena: main.heap.NeverFailingArenaAllocator = undefined;
 var arenaAllocator: NeverFailingAllocator = undefined;
 var commonBlocks: std.StringHashMap(ZonElement) = undefined;
 var commonBlockMigrations: std.StringHashMap(ZonElement) = undefined;
@@ -92,7 +92,7 @@ pub fn readAllZonFilesInAddons(
 };
 defer dir.close();

-var defaultsArena: main.utils.NeverFailingArenaAllocator = .init(main.stackAllocator);
+var defaultsArena: main.heap.NeverFailingArenaAllocator = .init(main.stackAllocator);
 defer defaultsArena.deinit();

 const defaultsArenaAllocator = defaultsArena.allocator();
@@ -43,7 +43,7 @@ pub const BlockTag = enum(u32) {
 return result;
 }

-pub fn loadFromZon(_allocator: main.utils.NeverFailingAllocator, zon: ZonElement) []BlockTag {
+pub fn loadFromZon(_allocator: main.heap.NeverFailingAllocator, zon: ZonElement) []BlockTag {
 const result = _allocator.alloc(BlockTag, zon.toSlice().len);
 for(zon.toSlice(), 0..) |tagZon, i| {
 result[i] = BlockTag.find(tagZon.as([]const u8, "incorrect"));
@@ -56,7 +56,7 @@ pub const BlockTag = enum(u32) {
 }
 };

-var arena = main.utils.NeverFailingArenaAllocator.init(main.globalAllocator);
+var arena = main.heap.NeverFailingArenaAllocator.init(main.globalAllocator);
 const allocator = arena.allocator();

 pub const maxBlockCount: usize = 65536; // 16 bit limit
@@ -447,7 +447,7 @@ pub const meshes = struct { // MARK: meshes
 var textureFogData: main.List(FogData) = undefined;
 pub var textureOcclusionData: main.List(bool) = undefined;

-var arenaForWorld: main.utils.NeverFailingArenaAllocator = undefined;
+var arenaForWorld: main.heap.NeverFailingArenaAllocator = undefined;

 pub var blockBreakingTextures: main.List(u16) = undefined;

@@ -569,7 +569,7 @@ pub const meshes = struct { // MARK: meshes
 }
 }

-fn extendedPath(_allocator: main.utils.NeverFailingAllocator, path: []const u8, ending: []const u8) []const u8 {
+fn extendedPath(_allocator: main.heap.NeverFailingAllocator, path: []const u8, ending: []const u8) []const u8 {
 return std.fmt.allocPrint(_allocator.allocator, "{s}{s}", .{path, ending}) catch unreachable;
 }

@@ -14,7 +14,7 @@ const Mat4f = vec.Mat4f;
 const Vec3d = vec.Vec3d;
 const Vec3f = vec.Vec3f;
 const Vec4f = vec.Vec4f;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 const BinaryReader = main.utils.BinaryReader;

@@ -2,7 +2,7 @@ const std = @import("std");
 const builtin = @import("builtin");

 const main = @import("root");
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;
 const ZonElement = main.ZonElement;

 pub fn read(allocator: NeverFailingAllocator, path: []const u8) ![]u8 {
@@ -27,7 +27,7 @@ const Vec3f = vec.Vec3f;
 const main = @import("main.zig");
 const Window = main.Window;

-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 pub const c = @cImport({
 @cInclude("glad/glad.h");
@@ -1698,7 +1698,7 @@ pub const TextureArray = struct { // MARK: TextureArray
 for(0..maxLOD) |i| {
 c.glTexImage3D(c.GL_TEXTURE_2D_ARRAY, @intCast(i), c.GL_RGBA8, @max(0, maxWidth >> @intCast(i)), @max(0, maxHeight >> @intCast(i)), @intCast(images.len), 0, c.GL_RGBA, c.GL_UNSIGNED_BYTE, null);
 }
-var arena = main.utils.NeverFailingArenaAllocator.init(main.stackAllocator);
+var arena = main.heap.NeverFailingArenaAllocator.init(main.stackAllocator);
 defer arena.deinit();
 const lodBuffer: [][]Color = arena.allocator().alloc([]Color, maxLOD);
 for(lodBuffer, 0..) |*buffer, i| {
@@ -9,7 +9,7 @@ const Texture = graphics.Texture;
 const random = main.random;
 const vec = main.vec;
 const Vec2f = vec.Vec2f;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 const gui = @import("../gui.zig");
 const GuiComponent = gui.GuiComponent;
@@ -9,7 +9,7 @@ const vec = main.vec;
 const Vec2f = vec.Vec2f;
 const List = main.List;

-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 const Button = @import("components/Button.zig");
 const CheckBox = @import("components/CheckBox.zig");
@@ -24,7 +24,7 @@ fn delayCallback(newValue: f32) void {
 settings.save();
 }

-fn delayFormatter(allocator: main.utils.NeverFailingAllocator, value: f32) []const u8 {
+fn delayFormatter(allocator: main.heap.NeverFailingAllocator, value: f32) []const u8 {
 return std.fmt.allocPrint(allocator.allocator, "#ffffffPlace/Break Delay: {d:.0} ms", .{value}) catch unreachable;
 }

@@ -33,7 +33,7 @@ fn speedCallback(newValue: f32) void {
 settings.save();
 }

-fn speedFormatter(allocator: main.utils.NeverFailingAllocator, value: f32) []const u8 {
+fn speedFormatter(allocator: main.heap.NeverFailingAllocator, value: f32) []const u8 {
 return std.fmt.allocPrint(allocator.allocator, "#ffffffPlace/Break Speed: {d:.0} ms", .{value}) catch unreachable;
 }

@@ -59,11 +59,11 @@ fn updateDeadzone(deadzone: f32) void {
 main.settings.controllerAxisDeadzone = deadzone;
 }

-fn deadzoneFormatter(allocator: main.utils.NeverFailingAllocator, value: f32) []const u8 {
+fn deadzoneFormatter(allocator: main.heap.NeverFailingAllocator, value: f32) []const u8 {
 return std.fmt.allocPrint(allocator.allocator, "Deadzone: {d:.0}%", .{value*100}) catch unreachable;
 }

-fn sensitivityFormatter(allocator: main.utils.NeverFailingAllocator, value: f32) []const u8 {
+fn sensitivityFormatter(allocator: main.heap.NeverFailingAllocator, value: f32) []const u8 {
 return std.fmt.allocPrint(allocator.allocator, "{s} Sensitivity: {d:.0}%", .{if(editingKeyboard) "Mouse" else "Controller", value*100}) catch unreachable;
 }

@@ -37,7 +37,7 @@ fn fpsCapRound(newValue: f32) ?u32 {
 }
 }

-fn fpsCapFormatter(allocator: main.utils.NeverFailingAllocator, value: f32) []const u8 {
+fn fpsCapFormatter(allocator: main.heap.NeverFailingAllocator, value: f32) []const u8 {
 const cap = fpsCapRound(value);
 if(cap == null)
 return allocator.dupe(u8, "#ffffffFPS: Unlimited");
@@ -70,11 +70,11 @@ fn fovCallback(newValue: f32) void {
 main.Window.GLFWCallbacks.framebufferSize(undefined, main.Window.width, main.Window.height);
 }

-fn fovFormatter(allocator: main.utils.NeverFailingAllocator, value: f32) []const u8 {
+fn fovFormatter(allocator: main.heap.NeverFailingAllocator, value: f32) []const u8 {
 return std.fmt.allocPrint(allocator.allocator, "#ffffffField Of View: {d:.0}°", .{value}) catch unreachable;
 }

-fn lodDistanceFormatter(allocator: main.utils.NeverFailingAllocator, value: f32) []const u8 {
+fn lodDistanceFormatter(allocator: main.heap.NeverFailingAllocator, value: f32) []const u8 {
 return std.fmt.allocPrint(allocator.allocator, "#ffffffOpaque leaves distance: {d:.0}", .{@round(value)}) catch unreachable;
 }

@@ -35,7 +35,7 @@ fn discoverIpAddress() void {
 }

 fn discoverIpAddressFromNewThread() void {
-var sta = main.utils.StackAllocator.init(main.globalAllocator, 1 << 23);
+var sta = main.heap.StackAllocator.init(main.globalAllocator, 1 << 23);
 defer sta.deinit();
 main.stackAllocator = sta.allocator();

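This hunk, and several more below, repeat the project's thread-startup pattern: every new thread builds its own StackAllocator and installs it as the thread-local main.stackAllocator (declared in src/main.zig further down). A hedged sketch of that pattern; threadMain is a hypothetical name:

	fn threadMain() void {
		// 1 << 23 bytes = 8 MiB of scratch space, backed by the global allocator.
		var sta = main.heap.StackAllocator.init(main.globalAllocator, 1 << 23);
		defer sta.deinit();
		main.stackAllocator = sta.allocator(); // thread-local scratch allocator
		// ... thread body allocates temporaries from main.stackAllocator ...
	}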
@@ -39,7 +39,7 @@ fn discoverIpAddress() void {
 }

 fn discoverIpAddressFromNewThread() void {
-var sta = main.utils.StackAllocator.init(main.globalAllocator, 1 << 23);
+var sta = main.heap.StackAllocator.init(main.globalAllocator, 1 << 23);
 defer sta.deinit();
 main.stackAllocator = sta.allocator();

@@ -4,7 +4,7 @@ const main = @import("root");
 const ConnectionManager = main.network.ConnectionManager;
 const settings = main.settings;
 const Vec2f = main.vec.Vec2f;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 const gui = @import("../gui.zig");
 const GuiComponent = gui.GuiComponent;
@@ -4,7 +4,7 @@ const main = @import("root");
 const ConnectionManager = main.network.ConnectionManager;
 const settings = main.settings;
 const Vec2f = main.vec.Vec2f;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;
 const Texture = main.graphics.Texture;

 const gui = @import("../gui.zig");
@@ -22,7 +22,7 @@ pub var window = GuiWindow{

 const padding: f32 = 8;
 const width: f32 = 128;
-var buttonNameArena: main.utils.NeverFailingArenaAllocator = undefined;
+var buttonNameArena: main.heap.NeverFailingArenaAllocator = undefined;

 pub var needsUpdate: bool = false;

@@ -127,7 +127,7 @@ pub fn update() void {
 }

 pub fn onOpen() void {
-buttonNameArena = main.utils.NeverFailingArenaAllocator.init(main.globalAllocator);
+buttonNameArena = main.heap.NeverFailingArenaAllocator.init(main.globalAllocator);
 const list = VerticalList.init(.{padding, 16 + padding}, 300, 8);
 list.add(Label.init(.{0, 0}, width, "**Select World**", .center));
 list.add(Button.initText(.{0, 0}, 128, "Create New World", gui.openWindowCallback("save_creation")));
@@ -3,7 +3,7 @@ const std = @import("std");
 const main = @import("root");
 const settings = main.settings;
 const Vec2f = main.vec.Vec2f;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 const gui = @import("../gui.zig");
 const GuiComponent = gui.GuiComponent;
@@ -20,7 +20,7 @@ const Mat4f = vec.Mat4f;
 const Vec3d = vec.Vec3d;
 const Vec3f = vec.Vec3f;
 const Vec3i = vec.Vec3i;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 const ItemDrop = struct { // MARK: ItemDrop
 pos: Vec3d,
@@ -14,7 +14,7 @@ const Vec2f = vec.Vec2f;
 const Vec2i = vec.Vec2i;
 const Vec3i = vec.Vec3i;
 const Vec3f = vec.Vec3f;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 const modifierList = @import("tool/modifiers/_list.zig");

@@ -747,7 +747,7 @@ pub const Recipe = struct { // MARK: Recipe
 cachedInventory: ?Inventory = null,
 };

-var arena: main.utils.NeverFailingArenaAllocator = undefined;
+var arena: main.heap.NeverFailingArenaAllocator = undefined;
 var toolTypes: std.StringHashMap(ToolType) = undefined;
 var reverseIndices: std.StringHashMap(*BaseItem) = undefined;
 var modifiers: std.StringHashMap(*const Modifier.VTable) = undefined;
@@ -1,7 +1,7 @@
 const std = @import("std");

 const main = @import("main.zig");
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;
 const List = main.List;

 const JsonType = enum(u8) {
@@ -180,7 +180,7 @@ pub const JsonElement = union(JsonType) { // MARK: JsonElement
 },
 .vector => {
 const len = @typeInfo(@TypeOf(value)).vector.len;
-const result = initArray(main.utils.NeverFailingAllocator{.allocator = allocator, .IAssertThatTheProvidedAllocatorCantFail = {}});
+const result = initArray(main.heap.NeverFailingAllocator{.allocator = allocator, .IAssertThatTheProvidedAllocatorCantFail = {}});
 result.JsonArray.ensureCapacity(len);
 inline for(0..len) |i| {
 result.JsonArray.appendAssumeCapacity(createElementFromRandomType(value[i], allocator));
src/main.zig (10 changed lines)
@@ -27,6 +27,8 @@ pub const ZonElement = @import("zon.zig").ZonElement;

 pub const Window = @import("graphics/Window.zig");

+pub const heap = @import("utils/heap.zig");
+
 pub const List = @import("utils/list.zig").List;
 pub const ListUnmanaged = @import("utils/list.zig").ListUnmanaged;
 pub const VirtualList = @import("utils/list.zig").VirtualList;
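This is the hunk that creates the namespace: src/main.zig re-exports the new file utils/heap.zig as main.heap, so every other file can reach the allocators through the root module. A small usage sketch, assembled only from calls that appear elsewhere in this diff:

	const main = @import("main.zig");

	var arena = main.heap.NeverFailingArenaAllocator.init(main.globalAllocator);
	defer arena.deinit();
	const allocator: main.heap.NeverFailingAllocator = arena.allocator();
	// NeverFailingAllocator cannot return error.OutOfMemory, so no `try` is needed.
	const tags = allocator.alloc(u32, 16); // freed in bulk by arena.deinit()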
@@ -36,11 +38,11 @@ const file_monitor = utils.file_monitor;
 const Vec2f = vec.Vec2f;
 const Vec3d = vec.Vec3d;

-pub threadlocal var stackAllocator: utils.NeverFailingAllocator = undefined;
+pub threadlocal var stackAllocator: heap.NeverFailingAllocator = undefined;
 pub threadlocal var seed: u64 = undefined;
 var global_gpa = std.heap.GeneralPurposeAllocator(.{.thread_safe = true}){};
-var handled_gpa = utils.ErrorHandlingAllocator.init(global_gpa.allocator());
+var handled_gpa = heap.ErrorHandlingAllocator.init(global_gpa.allocator());
-pub const globalAllocator: utils.NeverFailingAllocator = handled_gpa.allocator();
+pub const globalAllocator: heap.NeverFailingAllocator = handled_gpa.allocator();
 pub var threadPool: *utils.ThreadPool = undefined;

 fn cacheStringImpl(comptime len: usize, comptime str: [len]u8) []const u8 {
@@ -522,7 +524,7 @@ pub fn main() void { // MARK: main()
 defer if(global_gpa.deinit() == .leak) {
 std.log.err("Memory leak", .{});
 };
-var sta = utils.StackAllocator.init(globalAllocator, 1 << 23);
+var sta = heap.StackAllocator.init(globalAllocator, 1 << 23);
 defer sta.deinit();
 stackAllocator = sta.allocator();

@@ -4,7 +4,7 @@ const main = @import("main.zig");
 const ZonElement = @import("zon.zig").ZonElement;
 const Palette = @import("assets.zig").Palette;

-var arenaAllocator = main.utils.NeverFailingArenaAllocator.init(main.globalAllocator);
+var arenaAllocator = main.heap.NeverFailingArenaAllocator.init(main.globalAllocator);
 const migrationAllocator = arenaAllocator.allocator();

 var blockMigrations: std.StringHashMap([]const u8) = .init(migrationAllocator.allocator);
@@ -10,7 +10,7 @@ const Vec3f = vec.Vec3f;
 const Vec2f = vec.Vec2f;
 const Mat4f = vec.Mat4f;
 const FaceData = main.renderer.chunk_meshing.FaceData;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 var quadSSBO: graphics.SSBO = undefined;

@@ -199,7 +199,7 @@ pub const Model = struct {
 return Model.init(quadInfos);
 }

-pub fn loadRawModelDataFromObj(allocator: main.utils.NeverFailingAllocator, data: []const u8) []QuadInfo {
+pub fn loadRawModelDataFromObj(allocator: main.heap.NeverFailingAllocator, data: []const u8) []QuadInfo {
 var vertices = main.List(Vec3f).init(main.stackAllocator);
 defer vertices.deinit();

@@ -19,7 +19,7 @@ const vec = @import("vec.zig");
 const Vec3d = vec.Vec3d;
 const Vec3f = vec.Vec3f;
 const Vec3i = vec.Vec3i;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 pub const networkEndian: std.builtin.Endian = .big;

@@ -559,7 +559,7 @@ pub const ConnectionManager = struct { // MARK: ConnectionManager

 pub fn run(self: *ConnectionManager) void {
 self.threadId = std.Thread.getCurrentId();
-var sta = utils.StackAllocator.init(main.globalAllocator, 1 << 23);
+var sta = main.heap.StackAllocator.init(main.globalAllocator, 1 << 23);
 defer sta.deinit();
 main.stackAllocator = sta.allocator();

@@ -5,7 +5,7 @@ const ZonElement = main.ZonElement;
 const vec = main.vec;
 const Vec3f = vec.Vec3f;
 const Vec3d = vec.Vec3d;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 pos: Vec3d = .{0, 0, 0},
 vel: Vec3d = .{0, 0, 0},
@@ -293,7 +293,7 @@ fn deinit() void {
 command.deinit();
 }

-pub fn getUserListAndIncreaseRefCount(allocator: utils.NeverFailingAllocator) []*User {
+pub fn getUserListAndIncreaseRefCount(allocator: main.heap.NeverFailingAllocator) []*User {
 userMutex.lock();
 defer userMutex.unlock();
 const result = allocator.dupe(*User, users.items);
@@ -303,14 +303,14 @@ pub fn getUserListAndIncreaseRefCount(allocator: utils.NeverFailingAllocator) []
 return result;
 }

-pub fn freeUserListAndDecreaseRefCount(allocator: utils.NeverFailingAllocator, list: []*User) void {
+pub fn freeUserListAndDecreaseRefCount(allocator: main.heap.NeverFailingAllocator, list: []*User) void {
 for(list) |user| {
 user.decreaseRefCount();
 }
 allocator.free(list);
 }

-fn getInitialEntityList(allocator: utils.NeverFailingAllocator) []const u8 {
+fn getInitialEntityList(allocator: main.heap.NeverFailingAllocator) []const u8 {
 // Send the entity updates:
 var initialList: []const u8 = undefined;
 const list = main.ZonElement.initArray(main.stackAllocator);
@@ -369,7 +369,7 @@ fn update() void { // MARK: update()
 }

 pub fn start(name: []const u8, port: ?u16) void {
-var sta = utils.StackAllocator.init(main.globalAllocator, 1 << 23);
+var sta = main.heap.StackAllocator.init(main.globalAllocator, 1 << 23);
 defer sta.deinit();
 main.stackAllocator = sta.allocator();
 std.debug.assert(!running.load(.monotonic)); // There can only be one server.
@@ -170,7 +170,7 @@ pub const RegionFile = struct { // MARK: RegionFile
 }
 }

-pub fn getChunk(self: *RegionFile, allocator: main.utils.NeverFailingAllocator, relX: usize, relY: usize, relZ: usize) ?[]const u8 {
+pub fn getChunk(self: *RegionFile, allocator: main.heap.NeverFailingAllocator, relX: usize, relY: usize, relZ: usize) ?[]const u8 {
 self.mutex.lock();
 defer self.mutex.unlock();
 const index = getIndex(relX, relY, relZ);
@@ -259,7 +259,7 @@ pub const ChunkCompression = struct { // MARK: ChunkCompression
 deflate_with_8bit_palette = 3,
 _,
 };
-pub fn compressChunk(allocator: main.utils.NeverFailingAllocator, ch: *chunk.Chunk, allowLossy: bool) []const u8 {
+pub fn compressChunk(allocator: main.heap.NeverFailingAllocator, ch: *chunk.Chunk, allowLossy: bool) []const u8 {
 if(ch.data.paletteLength == 1) {
 var writer = BinaryWriter.initCapacity(allocator, .big, @sizeOf(CompressionAlgo) + @sizeOf(u32));

@@ -8,7 +8,7 @@ const ChunkPosition = main.chunk.ChunkPosition;
 const ZonElement = main.ZonElement;
 const vec = main.vec;
 const Vec3i = vec.Vec3i;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 const terrain = @import("terrain.zig");
 const TerrainGenerationProfile = terrain.TerrainGenerationProfile;
@@ -147,7 +147,7 @@ pub const InterpolatableCaveBiomeMapView = struct { // MARK: InterpolatableCaveB
 width: i32,
 allocator: NeverFailingAllocator,

-pub fn init(allocator: main.utils.NeverFailingAllocator, pos: ChunkPosition, width: u31, margin: u31) InterpolatableCaveBiomeMapView {
+pub fn init(allocator: main.heap.NeverFailingAllocator, pos: ChunkPosition, width: u31, margin: u31) InterpolatableCaveBiomeMapView {
 const center = Vec3i{
 pos.wx +% width/2,
 pos.wy +% width/2,
@@ -6,7 +6,7 @@ const ServerChunk = main.chunk.ServerChunk;
 const ChunkPosition = main.chunk.ChunkPosition;
 const Cache = main.utils.Cache;
 const ZonElement = main.ZonElement;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 const terrain = @import("terrain.zig");
 const TerrainGenerationProfile = terrain.TerrainGenerationProfile;
@@ -9,7 +9,7 @@ const terrain = main.server.terrain;
 const TerrainGenerationProfile = terrain.TerrainGenerationProfile;
 const Biome = terrain.biomes.Biome;
 const MapFragment = terrain.SurfaceMap.MapFragment;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 pub const BiomeSample = struct {
 biome: *const Biome,
@@ -6,7 +6,7 @@ const ServerChunk = main.chunk.ServerChunk;
 const ChunkPosition = main.chunk.ChunkPosition;
 const Cache = main.utils.Cache;
 const ZonElement = main.ZonElement;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;
 const vec = main.vec;
 const Vec3i = vec.Vec3i;

@@ -41,8 +41,8 @@ pub const StructureMapFragment = struct {
 pos: ChunkPosition,
 voxelShift: u5,
 refCount: Atomic(u16) = .init(0),
-arena: main.utils.NeverFailingArenaAllocator,
+arena: main.heap.NeverFailingArenaAllocator,
-allocator: main.utils.NeverFailingAllocator,
+allocator: main.heap.NeverFailingAllocator,

 tempData: struct {
 lists: *[chunkedSize*chunkedSize*chunkedSize]main.ListUnmanaged(Structure),
@@ -5,7 +5,7 @@ const blocks = main.blocks;
 const ServerChunk = main.chunk.ServerChunk;
 const ZonElement = main.ZonElement;
 const terrain = main.server.terrain;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;
 const vec = @import("main.vec");
 const Vec3f = main.vec.Vec3f;
 const Vec3d = main.vec.Vec3d;
@@ -51,7 +51,7 @@ pub const SimpleStructureModel = struct { // MARK: SimpleStructureModel
 }

 var modelRegistry: std.StringHashMapUnmanaged(VTable) = .{};
-var arena: main.utils.NeverFailingArenaAllocator = .init(main.globalAllocator);
+var arena: main.heap.NeverFailingArenaAllocator = .init(main.globalAllocator);

 pub fn reset() void {
 std.debug.assert(arena.reset(.free_all));
@@ -13,7 +13,7 @@ const vec = main.vec;
 const Vec2i = vec.Vec2i;
 const Vec2f = vec.Vec2f;

-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 // Generates the climate map using a fluidynamics simulation, with a circular heat distribution.

@@ -3,7 +3,7 @@ const std = @import("std");
 const main = @import("root");
 const random = main.random;
 const Array2D = main.utils.Array2D;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 const sizeShift = 7; // TODO: Increase back to 10 once this is no longer impacting loading time.
 const size = 1 << sizeShift;
@@ -4,7 +4,7 @@ const main = @import("root");
 const Array3D = main.utils.Array3D;
 const ChunkPosition = main.chunk.ChunkPosition;
 const random = main.random;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 const FractalNoise3D = @This();

@@ -3,7 +3,7 @@ const std = @import("std");
 const main = @import("root");
 const Array2D = main.utils.Array2D;
 const random = main.random;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 // TODO: Simplify with Vec2f and Vec2i.

@@ -9,7 +9,7 @@ const vec = main.vec;
 const Vec3d = vec.Vec3d;
 const Vec3f = vec.Vec3f;
 const Vec3i = vec.Vec3i;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 pub const id = "cubyz:boulder";

@@ -9,7 +9,7 @@ const vec = main.vec;
 const Vec3d = vec.Vec3d;
 const Vec3f = vec.Vec3f;
 const Vec3i = vec.Vec3i;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 pub const id = "cubyz:fallen_tree";

@@ -9,7 +9,7 @@ const vec = main.vec;
 const Vec3d = vec.Vec3d;
 const Vec3f = vec.Vec3f;
 const Vec3i = vec.Vec3i;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 pub const id = "cubyz:flower_patch";

@@ -9,7 +9,7 @@ const vec = main.vec;
 const Vec3d = vec.Vec3d;
 const Vec3f = vec.Vec3f;
 const Vec3i = vec.Vec3i;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 pub const id = "cubyz:ground_patch";

@@ -9,7 +9,7 @@ const vec = main.vec;
 const Vec3d = vec.Vec3d;
 const Vec3f = vec.Vec3f;
 const Vec3i = vec.Vec3i;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 pub const id = "cubyz:simple_tree";

@@ -9,7 +9,7 @@ const vec = main.vec;
 const Vec3d = vec.Vec3d;
 const Vec3f = vec.Vec3f;
 const Vec3i = vec.Vec3i;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 pub const id = "cubyz:simple_vegetation";

@@ -9,7 +9,7 @@ const vec = main.vec;
 const Vec3d = vec.Vec3d;
 const Vec3f = vec.Vec3f;
 const Vec3i = vec.Vec3i;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 pub const id = "cubyz:stalagmite";

@@ -2,7 +2,7 @@ const std = @import("std");

 const main = @import("root");
 const ZonElement = main.ZonElement;
-const NeverFailingAllocator = main.utils.NeverFailingAllocator;
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 pub const biomes = @import("biomes.zig");
 pub const noise = @import("noise/noise.zig");
@@ -497,7 +497,7 @@ pub const ServerWorld = struct { // MARK: ServerWorld
 self.itemDropManager.init(main.globalAllocator, self, self.gravity);
 errdefer self.itemDropManager.deinit();

-var loadArena = main.utils.NeverFailingArenaAllocator.init(main.stackAllocator);
+var loadArena = main.heap.NeverFailingArenaAllocator.init(main.stackAllocator);
 defer loadArena.deinit();
 const arenaAllocator = loadArena.allocator();
 var buf: [32768]u8 = undefined;
src/utils.zig (545 changed lines)
@@ -4,6 +4,7 @@ const Atomic = std.atomic.Value;
 const builtin = @import("builtin");

 const main = @import("main.zig");
+const NeverFailingAllocator = main.heap.NeverFailingAllocator;

 pub const file_monitor = @import("utils/file_monitor.zig");

@@ -424,542 +425,6 @@ pub fn ConcurrentQueue(comptime T: type) type { // MARK: ConcurrentQueue
 };
 }

-/// Allows for stack-like allocations in a fast and safe way.
-/// It is safe in the sense that a regular allocator will be used when the buffer is full.
-pub const StackAllocator = struct { // MARK: StackAllocator
-	const AllocationTrailer = packed struct {wasFreed: bool, previousAllocationTrailer: u31};
-	backingAllocator: NeverFailingAllocator,
-	buffer: []align(4096) u8,
-	index: usize,
-
-	pub fn init(backingAllocator: NeverFailingAllocator, size: u31) StackAllocator {
-		return .{
-			.backingAllocator = backingAllocator,
-			.buffer = backingAllocator.alignedAlloc(u8, 4096, size),
-			.index = 0,
-		};
-	}
-
-	pub fn deinit(self: StackAllocator) void {
-		if(self.index != 0) {
-			std.log.err("Memory leak in Stack Allocator", .{});
-		}
-		self.backingAllocator.free(self.buffer);
-	}
-
-	pub fn allocator(self: *StackAllocator) NeverFailingAllocator {
-		return .{
-			.allocator = .{
-				.vtable = &.{
-					.alloc = &alloc,
-					.resize = &resize,
-					.remap = &remap,
-					.free = &free,
-				},
-				.ptr = self,
-			},
-			.IAssertThatTheProvidedAllocatorCantFail = {},
-		};
-	}
-
-	fn isInsideBuffer(self: *StackAllocator, buf: []u8) bool {
-		const bufferStart = @intFromPtr(self.buffer.ptr);
-		const bufferEnd = bufferStart + self.buffer.len;
-		const compare = @intFromPtr(buf.ptr);
-		return compare >= bufferStart and compare < bufferEnd;
-	}
-
-	fn indexInBuffer(self: *StackAllocator, buf: []u8) usize {
-		const bufferStart = @intFromPtr(self.buffer.ptr);
-		const compare = @intFromPtr(buf.ptr);
-		return compare - bufferStart;
-	}
-
-	fn getTrueAllocationEnd(start: usize, len: usize) usize {
-		const trailerStart = std.mem.alignForward(usize, start + len, @alignOf(AllocationTrailer));
-		return trailerStart + @sizeOf(AllocationTrailer);
-	}
-
-	fn getTrailerBefore(self: *StackAllocator, end: usize) *AllocationTrailer {
-		const trailerStart = end - @sizeOf(AllocationTrailer);
-		return @ptrCast(@alignCast(self.buffer[trailerStart..].ptr));
-	}
-
-	fn alloc(ctx: *anyopaque, len: usize, alignment: std.mem.Alignment, ret_addr: usize) ?[*]u8 {
-		const self: *StackAllocator = @ptrCast(@alignCast(ctx));
-		const start = std.mem.alignForward(usize, self.index, @as(usize, 1) << @intCast(@intFromEnum(alignment)));
-		const end = getTrueAllocationEnd(start, len);
-		if(end >= self.buffer.len) return self.backingAllocator.rawAlloc(len, alignment, ret_addr);
-		const trailer = self.getTrailerBefore(end);
-		trailer.* = .{.wasFreed = false, .previousAllocationTrailer = @intCast(self.index)};
-		self.index = end;
-		return self.buffer.ptr + start;
-	}
-
-	fn resize(ctx: *anyopaque, memory: []u8, alignment: std.mem.Alignment, new_len: usize, ret_addr: usize) bool {
-		const self: *StackAllocator = @ptrCast(@alignCast(ctx));
-		if(self.isInsideBuffer(memory)) {
-			const start = self.indexInBuffer(memory);
-			const end = getTrueAllocationEnd(start, memory.len);
-			if(end != self.index) return false;
-			const newEnd = getTrueAllocationEnd(start, new_len);
-			if(newEnd >= self.buffer.len) return false;
-
-			const trailer = self.getTrailerBefore(end);
-			std.debug.assert(!trailer.wasFreed);
-			const newTrailer = self.getTrailerBefore(newEnd);
-
-			newTrailer.* = .{.wasFreed = false, .previousAllocationTrailer = trailer.previousAllocationTrailer};
-			self.index = newEnd;
-			return true;
-		} else {
-			return self.backingAllocator.rawResize(memory, alignment, new_len, ret_addr);
-		}
-	}
-
-	fn remap(ctx: *anyopaque, memory: []u8, alignment: std.mem.Alignment, new_len: usize, ret_addr: usize) ?[*]u8 {
-		if(resize(ctx, memory, alignment, new_len, ret_addr)) return memory.ptr;
-		return null;
-	}
-
-	fn free(ctx: *anyopaque, memory: []u8, alignment: std.mem.Alignment, ret_addr: usize) void {
-		const self: *StackAllocator = @ptrCast(@alignCast(ctx));
-		if(self.isInsideBuffer(memory)) {
-			const start = self.indexInBuffer(memory);
-			const end = getTrueAllocationEnd(start, memory.len);
-			const trailer = self.getTrailerBefore(end);
-			std.debug.assert(!trailer.wasFreed); // Double Free
-
-			if(end == self.index) {
-				self.index = trailer.previousAllocationTrailer;
-				if(self.index != 0) {
-					var previousTrailer = self.getTrailerBefore(trailer.previousAllocationTrailer);
-					while(previousTrailer.wasFreed) {
-						self.index = previousTrailer.previousAllocationTrailer;
-						if(self.index == 0) break;
-						previousTrailer = self.getTrailerBefore(previousTrailer.previousAllocationTrailer);
-					}
-				}
-			} else {
-				trailer.wasFreed = true;
-			}
-		} else {
-			self.backingAllocator.rawFree(memory, alignment, ret_addr);
-		}
-	}
-};
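The StackAllocator removed above is, per the commit message, re-added in the new utils/heap.zig (that file's contents are not shown in this excerpt). Its trick is the AllocationTrailer written after each allocation: freeing the top allocation rewinds the index immediately, while an out-of-order free only sets the wasFreed bit and is reclaimed lazily once everything above it is freed. A hedged usage sketch based on the code above:

	var sta = StackAllocator.init(main.globalAllocator, 1 << 23);
	defer sta.deinit(); // logs "Memory leak in Stack Allocator" if anything is still live
	const allocator = sta.allocator();

	const a = allocator.alloc(u8, 100); // bump-allocated from the fixed buffer
	const b = allocator.alloc(u8, 200); // stacked on top of `a`
	allocator.free(a); // not the top: only the wasFreed trailer bit is set
	allocator.free(b); // top of stack: index rewinds past `b`, then past the freed `a`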
-
-/// An allocator that handles OutOfMemory situations by panicing or freeing memory(TODO), making it safe to ignore errors.
-pub const ErrorHandlingAllocator = struct { // MARK: ErrorHandlingAllocator
-	backingAllocator: Allocator,
-
-	pub fn init(backingAllocator: Allocator) ErrorHandlingAllocator {
-		return .{
-			.backingAllocator = backingAllocator,
-		};
-	}
-
-	pub fn allocator(self: *ErrorHandlingAllocator) NeverFailingAllocator {
-		return .{
-			.allocator = .{
-				.vtable = &.{
-					.alloc = &alloc,
-					.resize = &resize,
-					.remap = &remap,
-					.free = &free,
-				},
-				.ptr = self,
-			},
-			.IAssertThatTheProvidedAllocatorCantFail = {},
-		};
-	}
-
-	fn handleError() noreturn {
-		@panic("Out Of Memory. Please download more RAM, reduce the render distance, or close some of your 100 browser tabs.");
-	}
-
-	/// Return a pointer to `len` bytes with specified `alignment`, or return
-	/// `null` indicating the allocation failed.
-	///
-	/// `ret_addr` is optionally provided as the first return address of the
-	/// allocation call stack. If the value is `0` it means no return address
-	/// has been provided.
-	fn alloc(ctx: *anyopaque, len: usize, alignment: std.mem.Alignment, ret_addr: usize) ?[*]u8 {
-		const self: *ErrorHandlingAllocator = @ptrCast(@alignCast(ctx));
-		return self.backingAllocator.rawAlloc(len, alignment, ret_addr) orelse handleError();
-	}
-
-	/// Attempt to expand or shrink memory in place.
-	///
-	/// `memory.len` must equal the length requested from the most recent
-	/// successful call to `alloc`, `resize`, or `remap`. `alignment` must
-	/// equal the same value that was passed as the `alignment` parameter to
-	/// the original `alloc` call.
-	///
-	/// A result of `true` indicates the resize was successful and the
-	/// allocation now has the same address but a size of `new_len`. `false`
-	/// indicates the resize could not be completed without moving the
-	/// allocation to a different address.
-	///
-	/// `new_len` must be greater than zero.
-	///
-	/// `ret_addr` is optionally provided as the first return address of the
-	/// allocation call stack. If the value is `0` it means no return address
-	/// has been provided.
-	fn resize(ctx: *anyopaque, memory: []u8, alignment: std.mem.Alignment, new_len: usize, ret_addr: usize) bool {
-		const self: *ErrorHandlingAllocator = @ptrCast(@alignCast(ctx));
-		return self.backingAllocator.rawResize(memory, alignment, new_len, ret_addr);
-	}
-
-	/// Attempt to expand or shrink memory, allowing relocation.
-	///
-	/// `memory.len` must equal the length requested from the most recent
-	/// successful call to `alloc`, `resize`, or `remap`. `alignment` must
-	/// equal the same value that was passed as the `alignment` parameter to
-	/// the original `alloc` call.
-	///
-	/// A non-`null` return value indicates the resize was successful. The
-	/// allocation may have same address, or may have been relocated. In either
-	/// case, the allocation now has size of `new_len`. A `null` return value
-	/// indicates that the resize would be equivalent to allocating new memory,
-	/// copying the bytes from the old memory, and then freeing the old memory.
-	/// In such case, it is more efficient for the caller to perform the copy.
-	///
-	/// `new_len` must be greater than zero.
-	///
-	/// `ret_addr` is optionally provided as the first return address of the
-	/// allocation call stack. If the value is `0` it means no return address
-	/// has been provided.
-	fn remap(ctx: *anyopaque, memory: []u8, alignment: std.mem.Alignment, new_len: usize, ret_addr: usize) ?[*]u8 {
-		const self: *ErrorHandlingAllocator = @ptrCast(@alignCast(ctx));
-		return self.backingAllocator.rawRemap(memory, alignment, new_len, ret_addr);
-	}
-
-	/// Free and invalidate a region of memory.
-	///
-	/// `memory.len` must equal the length requested from the most recent
-	/// successful call to `alloc`, `resize`, or `remap`. `alignment` must
-	/// equal the same value that was passed as the `alignment` parameter to
-	/// the original `alloc` call.
-	///
-	/// `ret_addr` is optionally provided as the first return address of the
-	/// allocation call stack. If the value is `0` it means no return address
-	/// has been provided.
-	fn free(ctx: *anyopaque, memory: []u8, alignment: std.mem.Alignment, ret_addr: usize) void {
-		const self: *ErrorHandlingAllocator = @ptrCast(@alignCast(ctx));
-		self.backingAllocator.rawFree(memory, alignment, ret_addr);
-	}
-};
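The ErrorHandlingAllocator removed above (also moved into utils/heap.zig) is what turns a standard std.mem.Allocator into a NeverFailingAllocator: it forwards every call to the backing allocator and panics on OutOfMemory instead of returning the error. A hedged sketch mirroring the global setup shown in the src/main.zig hunk earlier:

	var global_gpa = std.heap.GeneralPurposeAllocator(.{.thread_safe = true}){};
	var handled_gpa = ErrorHandlingAllocator.init(global_gpa.allocator());
	const globalAllocator: NeverFailingAllocator = handled_gpa.allocator();

	const buf = globalAllocator.alloc(u8, 4096); // no `try`: OOM panics inside the wrapper
	defer globalAllocator.free(buf);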
|
|
||||||
/// An allocator interface signaling that you can use
|
|
||||||
pub const NeverFailingAllocator = struct { // MARK: NeverFailingAllocator
	allocator: Allocator,
	IAssertThatTheProvidedAllocatorCantFail: void,

	const Alignment = std.mem.Alignment;
	const math = std.math;

	/// This function is not intended to be called except from within the
	/// implementation of an `Allocator`.
	pub inline fn rawAlloc(a: NeverFailingAllocator, len: usize, alignment: Alignment, ret_addr: usize) ?[*]u8 {
		return a.allocator.vtable.alloc(a.allocator.ptr, len, alignment, ret_addr);
	}

	/// This function is not intended to be called except from within the
	/// implementation of an `Allocator`.
	pub inline fn rawResize(a: NeverFailingAllocator, memory: []u8, alignment: Alignment, new_len: usize, ret_addr: usize) bool {
		return a.allocator.vtable.resize(a.allocator.ptr, memory, alignment, new_len, ret_addr);
	}

	/// This function is not intended to be called except from within the
	/// implementation of an `Allocator`.
	pub inline fn rawRemap(a: NeverFailingAllocator, memory: []u8, alignment: Alignment, new_len: usize, ret_addr: usize) ?[*]u8 {
		return a.allocator.vtable.remap(a.allocator.ptr, memory, alignment, new_len, ret_addr);
	}

	/// This function is not intended to be called except from within the
	/// implementation of an `Allocator`.
	pub inline fn rawFree(a: NeverFailingAllocator, memory: []u8, alignment: Alignment, ret_addr: usize) void {
		return a.allocator.vtable.free(a.allocator.ptr, memory, alignment, ret_addr);
	}

	/// Returns a pointer to undefined memory.
	/// Call `destroy` with the result to free the memory.
	pub fn create(self: NeverFailingAllocator, comptime T: type) *T {
		return self.allocator.create(T) catch unreachable;
	}

	/// `ptr` should be the return value of `create`, or otherwise
	/// have the same address and alignment property.
	pub fn destroy(self: NeverFailingAllocator, ptr: anytype) void {
		self.allocator.destroy(ptr);
	}

	/// Allocates an array of `n` items of type `T` and sets all the
	/// items to `undefined`. Depending on the Allocator
	/// implementation, it may be required to call `free` once the
	/// memory is no longer needed, to avoid a resource leak. If the
	/// `Allocator` implementation is unknown, then correct code will
	/// call `free` when done.
	///
	/// For allocating a single item, see `create`.
	pub fn alloc(self: NeverFailingAllocator, comptime T: type, n: usize) []T {
		return self.allocator.alloc(T, n) catch unreachable;
	}

	pub fn allocWithOptions(
		self: NeverFailingAllocator,
		comptime Elem: type,
		n: usize,
		/// null means naturally aligned
		comptime optional_alignment: ?u29,
		comptime optional_sentinel: ?Elem,
	) AllocWithOptionsPayload(Elem, optional_alignment, optional_sentinel) {
		return self.allocator.allocWithOptions(Elem, n, optional_alignment, optional_sentinel) catch unreachable;
	}

	pub fn allocWithOptionsRetAddr(
		self: NeverFailingAllocator,
		comptime Elem: type,
		n: usize,
		/// null means naturally aligned
		comptime optional_alignment: ?u29,
		comptime optional_sentinel: ?Elem,
		return_address: usize,
	) AllocWithOptionsPayload(Elem, optional_alignment, optional_sentinel) {
		return self.allocator.allocWithOptionsRetAddr(Elem, n, optional_alignment, optional_sentinel, return_address) catch unreachable;
	}

	fn AllocWithOptionsPayload(comptime Elem: type, comptime alignment: ?u29, comptime sentinel: ?Elem) type {
		if(sentinel) |s| {
			return [:s]align(alignment orelse @alignOf(Elem)) Elem;
		} else {
			return []align(alignment orelse @alignOf(Elem)) Elem;
		}
	}

	/// Allocates an array of `n + 1` items of type `T` and sets the first `n`
	/// items to `undefined` and the last item to `sentinel`. Depending on the
	/// Allocator implementation, it may be required to call `free` once the
	/// memory is no longer needed, to avoid a resource leak. If the
	/// `Allocator` implementation is unknown, then correct code will
	/// call `free` when done.
	///
	/// For allocating a single item, see `create`.
	pub fn allocSentinel(
		self: NeverFailingAllocator,
		comptime Elem: type,
		n: usize,
		comptime sentinel: Elem,
	) [:sentinel]Elem {
		return self.allocator.allocSentinel(Elem, n, sentinel) catch unreachable;
	}

	pub fn alignedAlloc(
		self: NeverFailingAllocator,
		comptime T: type,
		/// null means naturally aligned
		comptime alignment: ?u29,
		n: usize,
	) []align(alignment orelse @alignOf(T)) T {
		return self.allocator.alignedAlloc(T, alignment, n) catch unreachable;
	}

	pub inline fn allocAdvancedWithRetAddr(
		self: NeverFailingAllocator,
		comptime T: type,
		/// null means naturally aligned
		comptime alignment: ?u29,
		n: usize,
		return_address: usize,
	) []align(alignment orelse @alignOf(T)) T {
		return self.allocator.allocAdvancedWithRetAddr(T, alignment, n, return_address) catch unreachable;
	}

	fn allocWithSizeAndAlignment(self: NeverFailingAllocator, comptime size: usize, comptime alignment: u29, n: usize, return_address: usize) [*]align(alignment) u8 {
		return self.allocator.allocWithSizeAndAlignment(alignment, size, alignment, n, return_address) catch unreachable;
	}

	fn allocBytesWithAlignment(self: NeverFailingAllocator, comptime alignment: u29, byte_count: usize, return_address: usize) [*]align(alignment) u8 {
		return self.allocator.allocBytesWithAlignment(alignment, byte_count, return_address) catch unreachable;
	}

	/// Request to modify the size of an allocation.
	///
	/// It is guaranteed to not move the pointer, however the allocator
	/// implementation may refuse the resize request by returning `false`.
	///
	/// `allocation` may be an empty slice, in which case a new allocation is made.
	///
	/// `new_len` may be zero, in which case the allocation is freed.
	pub fn resize(self: NeverFailingAllocator, allocation: anytype, new_len: usize) bool {
		return self.allocator.resize(allocation, new_len);
	}

	/// Request to modify the size of an allocation, allowing relocation.
	///
	/// A non-`null` return value indicates the resize was successful. The
	/// allocation may have same address, or may have been relocated. In either
	/// case, the allocation now has size of `new_len`. A `null` return value
	/// indicates that the resize would be equivalent to allocating new memory,
	/// copying the bytes from the old memory, and then freeing the old memory.
	/// In such case, it is more efficient for the caller to perform those
	/// operations.
	///
	/// `allocation` may be an empty slice, in which case a new allocation is made.
	///
	/// `new_len` may be zero, in which case the allocation is freed.
	pub fn remap(self: NeverFailingAllocator, allocation: anytype, new_len: usize) t: {
		const Slice = @typeInfo(@TypeOf(allocation)).pointer;
		break :t ?[]align(Slice.alignment) Slice.child;
	} {
		return self.allocator.remap(allocation, new_len);
	}

	/// This function requests a new byte size for an existing allocation, which
	/// can be larger, smaller, or the same size as the old memory allocation.
	///
	/// If `new_n` is 0, this is the same as `free` and it always succeeds.
	///
	/// `old_mem` may have length zero, which makes a new allocation.
	///
	/// This function only fails on out-of-memory conditions, unlike:
	/// * `remap` which returns `null` when the `Allocator` implementation cannot
	/// do the realloc more efficiently than the caller
	/// * `resize` which returns `false` when the `Allocator` implementation cannot
	/// change the size without relocating the allocation.
	pub fn realloc(self: NeverFailingAllocator, old_mem: anytype, new_n: usize) t: {
		const Slice = @typeInfo(@TypeOf(old_mem)).pointer;
		break :t []align(Slice.alignment) Slice.child;
	} {
		return self.allocator.realloc(old_mem, new_n) catch unreachable;
	}

	pub fn reallocAdvanced(
		self: NeverFailingAllocator,
		old_mem: anytype,
		new_n: usize,
		return_address: usize,
	) t: {
		const Slice = @typeInfo(@TypeOf(old_mem)).pointer;
		break :t []align(Slice.alignment) Slice.child;
	} {
		return self.allocator.reallocAdvanced(old_mem, new_n, return_address) catch unreachable;
	}

	/// Free an array allocated with `alloc`.
	/// If memory has length 0, free is a no-op.
	/// To free a single item, see `destroy`.
	pub fn free(self: NeverFailingAllocator, memory: anytype) void {
		self.allocator.free(memory);
	}

	/// Copies `m` to newly allocated memory. Caller owns the memory.
	pub fn dupe(self: NeverFailingAllocator, comptime T: type, m: []const T) []T {
		return self.allocator.dupe(T, m) catch unreachable;
	}

	/// Copies `m` to newly allocated memory, with a null-terminated element. Caller owns the memory.
	pub fn dupeZ(self: NeverFailingAllocator, comptime T: type, m: []const T) [:0]T {
		return self.allocator.dupeZ(T, m) catch unreachable;
	}
};

pub const NeverFailingArenaAllocator = struct { // MARK: NeverFailingArena
	arena: std.heap.ArenaAllocator,

	pub fn init(child_allocator: NeverFailingAllocator) NeverFailingArenaAllocator {
		return .{
			.arena = .init(child_allocator.allocator),
		};
	}

	pub fn deinit(self: NeverFailingArenaAllocator) void {
		self.arena.deinit();
	}

	pub fn allocator(self: *NeverFailingArenaAllocator) NeverFailingAllocator {
		return .{
			.allocator = self.arena.allocator(),
			.IAssertThatTheProvidedAllocatorCantFail = {},
		};
	}

	/// Resets the arena allocator and frees all allocated memory.
	///
	/// `mode` defines how the currently allocated memory is handled.
	/// See the variant documentation for `ResetMode` for the effects of each mode.
	///
	/// The function will return whether the reset operation was successful or not.
	/// If the reallocation failed `false` is returned. The arena will still be fully
	/// functional in that case, all memory is released. Future allocations just might
	/// be slower.
	///
	/// NOTE: If `mode` is `free_all`, the function will always return `true`.
	pub fn reset(self: *NeverFailingArenaAllocator, mode: std.heap.ArenaAllocator.ResetMode) bool {
		return self.arena.reset(mode);
	}

	pub fn shrinkAndFree(self: *NeverFailingArenaAllocator) void {
		const node = self.arena.state.buffer_list.first orelse return;
		const allocBuf = @as([*]u8, @ptrCast(node))[0..node.data];
		const dataSize = std.mem.alignForward(usize, @sizeOf(std.SinglyLinkedList(usize).Node) + self.arena.state.end_index, @alignOf(std.SinglyLinkedList(usize).Node));
		if(self.arena.child_allocator.rawResize(allocBuf, @enumFromInt(std.math.log2(@alignOf(std.SinglyLinkedList(usize).Node))), dataSize, @returnAddress())) {
			node.data = dataSize;
		}
	}
};

pub const BufferFallbackAllocator = struct { // MARK: BufferFallbackAllocator
	fixedBuffer: std.heap.FixedBufferAllocator,
	fallbackAllocator: NeverFailingAllocator,

	pub fn init(buffer: []u8, fallbackAllocator: NeverFailingAllocator) BufferFallbackAllocator {
		return .{
			.fixedBuffer = .init(buffer),
			.fallbackAllocator = fallbackAllocator,
		};
	}

	pub fn allocator(self: *BufferFallbackAllocator) NeverFailingAllocator {
		return .{
			.allocator = .{
				.vtable = &.{
					.alloc = &alloc,
					.resize = &resize,
					.free = &free,
				},
				.ptr = self,
			},
			.IAssertThatTheProvidedAllocatorCantFail = {},
		};
	}

	fn alloc(ctx: *anyopaque, len: usize, log2_ptr_align: u8, ra: usize) ?[*]u8 {
		const self: *BufferFallbackAllocator = @ptrCast(@alignCast(ctx));
		return self.fixedBuffer.allocator().rawAlloc(len, log2_ptr_align, ra) orelse
			return self.fallbackAllocator.rawAlloc(len, log2_ptr_align, ra);
	}

	fn resize(ctx: *anyopaque, buf: []u8, log2_buf_align: u8, new_len: usize, ra: usize) bool {
		const self: *BufferFallbackAllocator = @ptrCast(@alignCast(ctx));
		if(self.fixedBuffer.ownsPtr(buf.ptr)) {
			return self.fixedBuffer.allocator().rawResize(buf, log2_buf_align, new_len, ra);
		} else {
			return self.fallbackAllocator.rawResize(buf, log2_buf_align, new_len, ra);
		}
	}

	fn free(ctx: *anyopaque, buf: []u8, log2_buf_align: u8, ra: usize) void {
		const self: *BufferFallbackAllocator = @ptrCast(@alignCast(ctx));
		if(self.fixedBuffer.ownsPtr(buf.ptr)) {
			return self.fixedBuffer.allocator().rawFree(buf, log2_buf_align, ra);
		} else {
			return self.fallbackAllocator.rawFree(buf, log2_buf_align, ra);
		}
	}
};

/// A simple binary heap.
/// Thread safe and blocking.
/// Expects T to have a `biggerThan(T) bool` function
@ -1242,7 +707,7 @@ pub const ThreadPool = struct { // MARK: ThreadPool

	fn run(self: *ThreadPool, id: usize) void {
		// In case any of the tasks wants to allocate memory:
		var sta = StackAllocator.init(main.globalAllocator, 1 << 23);
		var sta = main.heap.StackAllocator.init(main.globalAllocator, 1 << 23);
		defer sta.deinit();
		main.stackAllocator = sta.allocator();

@ -1321,7 +786,7 @@ pub fn DynamicPackedIntArray(size: comptime_int) type { // MARK: DynamicPackedIn

	const Self = @This();

	pub fn initCapacity(allocator: main.utils.NeverFailingAllocator, bitSize: u5) Self {
	pub fn initCapacity(allocator: main.heap.NeverFailingAllocator, bitSize: u5) Self {
		std.debug.assert(bitSize == 0 or bitSize & bitSize - 1 == 0); // Must be a power of 2
		return .{
			.data = allocator.alignedAlloc(u32, 64, @as(usize, @divExact(size, @bitSizeOf(u32)))*bitSize),
@ -1329,7 +794,7 @@ pub fn DynamicPackedIntArray(size: comptime_int) type { // MARK: DynamicPackedIn
		};
	}

	pub fn deinit(self: *Self, allocator: main.utils.NeverFailingAllocator) void {
	pub fn deinit(self: *Self, allocator: main.heap.NeverFailingAllocator) void {
		allocator.free(self.data);
		self.* = .{};
	}
@ -1343,7 +808,7 @@ pub fn DynamicPackedIntArray(size: comptime_int) type { // MARK: DynamicPackedIn
		return result;
	}

	pub fn resizeOnce(self: *Self, allocator: main.utils.NeverFailingAllocator) void {
	pub fn resizeOnce(self: *Self, allocator: main.heap.NeverFailingAllocator) void {
		const newBitSize = if(self.bitSize != 0) self.bitSize*2 else 1;
		var newSelf = Self.initCapacity(allocator, newBitSize);
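
A hypothetical call sequence (not part of the diff) for the updated signatures, just to show how the allocator is now threaded through explicitly; the size 4096 and bit size 4 are made up for illustration:

	var array = DynamicPackedIntArray(4096).initCapacity(main.globalAllocator, 4);
	defer array.deinit(main.globalAllocator);
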
490 src/utils/heap.zig Normal file
@ -0,0 +1,490 @@
const std = @import("std");
const Allocator = std.mem.Allocator;

const main = @import("root");

/// Allows for stack-like allocations in a fast and safe way.
/// It is safe in the sense that a regular allocator will be used when the buffer is full.
pub const StackAllocator = struct { // MARK: StackAllocator
	const AllocationTrailer = packed struct {wasFreed: bool, previousAllocationTrailer: u31};
	backingAllocator: NeverFailingAllocator,
	buffer: []align(4096) u8,
	index: usize,

	pub fn init(backingAllocator: NeverFailingAllocator, size: u31) StackAllocator {
		return .{
			.backingAllocator = backingAllocator,
			.buffer = backingAllocator.alignedAlloc(u8, 4096, size),
			.index = 0,
		};
	}

	pub fn deinit(self: StackAllocator) void {
		if(self.index != 0) {
			std.log.err("Memory leak in Stack Allocator", .{});
		}
		self.backingAllocator.free(self.buffer);
	}

	pub fn allocator(self: *StackAllocator) NeverFailingAllocator {
		return .{
			.allocator = .{
				.vtable = &.{
					.alloc = &alloc,
					.resize = &resize,
					.remap = &remap,
					.free = &free,
				},
				.ptr = self,
			},
			.IAssertThatTheProvidedAllocatorCantFail = {},
		};
	}

	fn isInsideBuffer(self: *StackAllocator, buf: []u8) bool {
		const bufferStart = @intFromPtr(self.buffer.ptr);
		const bufferEnd = bufferStart + self.buffer.len;
		const compare = @intFromPtr(buf.ptr);
		return compare >= bufferStart and compare < bufferEnd;
	}

	fn indexInBuffer(self: *StackAllocator, buf: []u8) usize {
		const bufferStart = @intFromPtr(self.buffer.ptr);
		const compare = @intFromPtr(buf.ptr);
		return compare - bufferStart;
	}

	fn getTrueAllocationEnd(start: usize, len: usize) usize {
		const trailerStart = std.mem.alignForward(usize, start + len, @alignOf(AllocationTrailer));
		return trailerStart + @sizeOf(AllocationTrailer);
	}

	fn getTrailerBefore(self: *StackAllocator, end: usize) *AllocationTrailer {
		const trailerStart = end - @sizeOf(AllocationTrailer);
		return @ptrCast(@alignCast(self.buffer[trailerStart..].ptr));
	}

	fn alloc(ctx: *anyopaque, len: usize, alignment: std.mem.Alignment, ret_addr: usize) ?[*]u8 {
		const self: *StackAllocator = @ptrCast(@alignCast(ctx));
		const start = std.mem.alignForward(usize, self.index, @as(usize, 1) << @intCast(@intFromEnum(alignment)));
		const end = getTrueAllocationEnd(start, len);
		if(end >= self.buffer.len) return self.backingAllocator.rawAlloc(len, alignment, ret_addr);
		const trailer = self.getTrailerBefore(end);
		trailer.* = .{.wasFreed = false, .previousAllocationTrailer = @intCast(self.index)};
		self.index = end;
		return self.buffer.ptr + start;
	}

	fn resize(ctx: *anyopaque, memory: []u8, alignment: std.mem.Alignment, new_len: usize, ret_addr: usize) bool {
		const self: *StackAllocator = @ptrCast(@alignCast(ctx));
		if(self.isInsideBuffer(memory)) {
			const start = self.indexInBuffer(memory);
			const end = getTrueAllocationEnd(start, memory.len);
			if(end != self.index) return false;
			const newEnd = getTrueAllocationEnd(start, new_len);
			if(newEnd >= self.buffer.len) return false;

			const trailer = self.getTrailerBefore(end);
			std.debug.assert(!trailer.wasFreed);
			const newTrailer = self.getTrailerBefore(newEnd);

			newTrailer.* = .{.wasFreed = false, .previousAllocationTrailer = trailer.previousAllocationTrailer};
			self.index = newEnd;
			return true;
		} else {
			return self.backingAllocator.rawResize(memory, alignment, new_len, ret_addr);
		}
	}

	fn remap(ctx: *anyopaque, memory: []u8, alignment: std.mem.Alignment, new_len: usize, ret_addr: usize) ?[*]u8 {
		if(resize(ctx, memory, alignment, new_len, ret_addr)) return memory.ptr;
		return null;
	}

	fn free(ctx: *anyopaque, memory: []u8, alignment: std.mem.Alignment, ret_addr: usize) void {
		const self: *StackAllocator = @ptrCast(@alignCast(ctx));
		if(self.isInsideBuffer(memory)) {
			const start = self.indexInBuffer(memory);
			const end = getTrueAllocationEnd(start, memory.len);
			const trailer = self.getTrailerBefore(end);
			std.debug.assert(!trailer.wasFreed); // Double Free

			if(end == self.index) {
				self.index = trailer.previousAllocationTrailer;
				if(self.index != 0) {
					var previousTrailer = self.getTrailerBefore(trailer.previousAllocationTrailer);
					while(previousTrailer.wasFreed) {
						self.index = previousTrailer.previousAllocationTrailer;
						if(self.index == 0) break;
						previousTrailer = self.getTrailerBefore(previousTrailer.previousAllocationTrailer);
					}
				}
			} else {
				trailer.wasFreed = true;
			}
		} else {
			self.backingAllocator.rawFree(memory, alignment, ret_addr);
		}
	}
};
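
To make the control flow above concrete, here is a minimal usage sketch of my own (not part of the diff), assuming `main.globalAllocator` is already initialized, the way the ThreadPool hunk above sets one up per worker thread:

	var sta = main.heap.StackAllocator.init(main.globalAllocator, 1 << 23);
	defer sta.deinit();
	const stack = sta.allocator();

	const a = stack.alloc(u8, 128);
	const b = stack.alloc(u8, 64);
	stack.free(a); // not the top of the stack: the trailer is only marked as freed
	stack.free(b); // top of the stack: rewinds past `b` and the already-freed `a`

The per-allocation trailer is what makes out-of-order frees safe here: a non-top free just sets `wasFreed`, and the space is reclaimed once everything above it is freed too.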

/// An allocator that handles OutOfMemory situations by panicking or freeing memory (TODO), making it safe to ignore errors.
pub const ErrorHandlingAllocator = struct { // MARK: ErrorHandlingAllocator
	backingAllocator: Allocator,

	pub fn init(backingAllocator: Allocator) ErrorHandlingAllocator {
		return .{
			.backingAllocator = backingAllocator,
		};
	}

	pub fn allocator(self: *ErrorHandlingAllocator) NeverFailingAllocator {
		return .{
			.allocator = .{
				.vtable = &.{
					.alloc = &alloc,
					.resize = &resize,
					.remap = &remap,
					.free = &free,
				},
				.ptr = self,
			},
			.IAssertThatTheProvidedAllocatorCantFail = {},
		};
	}

	fn handleError() noreturn {
		@panic("Out Of Memory. Please download more RAM, reduce the render distance, or close some of your 100 browser tabs.");
	}

	/// Return a pointer to `len` bytes with specified `alignment`, or return
	/// `null` indicating the allocation failed.
	///
	/// `ret_addr` is optionally provided as the first return address of the
	/// allocation call stack. If the value is `0` it means no return address
	/// has been provided.
	fn alloc(ctx: *anyopaque, len: usize, alignment: std.mem.Alignment, ret_addr: usize) ?[*]u8 {
		const self: *ErrorHandlingAllocator = @ptrCast(@alignCast(ctx));
		return self.backingAllocator.rawAlloc(len, alignment, ret_addr) orelse handleError();
	}

	/// Attempt to expand or shrink memory in place.
	///
	/// `memory.len` must equal the length requested from the most recent
	/// successful call to `alloc`, `resize`, or `remap`. `alignment` must
	/// equal the same value that was passed as the `alignment` parameter to
	/// the original `alloc` call.
	///
	/// A result of `true` indicates the resize was successful and the
	/// allocation now has the same address but a size of `new_len`. `false`
	/// indicates the resize could not be completed without moving the
	/// allocation to a different address.
	///
	/// `new_len` must be greater than zero.
	///
	/// `ret_addr` is optionally provided as the first return address of the
	/// allocation call stack. If the value is `0` it means no return address
	/// has been provided.
	fn resize(ctx: *anyopaque, memory: []u8, alignment: std.mem.Alignment, new_len: usize, ret_addr: usize) bool {
		const self: *ErrorHandlingAllocator = @ptrCast(@alignCast(ctx));
		return self.backingAllocator.rawResize(memory, alignment, new_len, ret_addr);
	}

	/// Attempt to expand or shrink memory, allowing relocation.
	///
	/// `memory.len` must equal the length requested from the most recent
	/// successful call to `alloc`, `resize`, or `remap`. `alignment` must
	/// equal the same value that was passed as the `alignment` parameter to
	/// the original `alloc` call.
	///
	/// A non-`null` return value indicates the resize was successful. The
	/// allocation may have same address, or may have been relocated. In either
	/// case, the allocation now has size of `new_len`. A `null` return value
	/// indicates that the resize would be equivalent to allocating new memory,
	/// copying the bytes from the old memory, and then freeing the old memory.
	/// In such case, it is more efficient for the caller to perform the copy.
	///
	/// `new_len` must be greater than zero.
	///
	/// `ret_addr` is optionally provided as the first return address of the
	/// allocation call stack. If the value is `0` it means no return address
	/// has been provided.
	fn remap(ctx: *anyopaque, memory: []u8, alignment: std.mem.Alignment, new_len: usize, ret_addr: usize) ?[*]u8 {
		const self: *ErrorHandlingAllocator = @ptrCast(@alignCast(ctx));
		return self.backingAllocator.rawRemap(memory, alignment, new_len, ret_addr);
	}

	/// Free and invalidate a region of memory.
	///
	/// `memory.len` must equal the length requested from the most recent
	/// successful call to `alloc`, `resize`, or `remap`. `alignment` must
	/// equal the same value that was passed as the `alignment` parameter to
	/// the original `alloc` call.
	///
	/// `ret_addr` is optionally provided as the first return address of the
	/// allocation call stack. If the value is `0` it means no return address
	/// has been provided.
	fn free(ctx: *anyopaque, memory: []u8, alignment: std.mem.Alignment, ret_addr: usize) void {
		const self: *ErrorHandlingAllocator = @ptrCast(@alignCast(ctx));
		self.backingAllocator.rawFree(memory, alignment, ret_addr);
	}
};
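
Just for orientation (not part of the commit): the wrapper turns any failing `std.mem.Allocator` into a `NeverFailingAllocator` by funneling OutOfMemory into the panic above. A sketch, assuming a general-purpose backing allocator:

	var gpa = std.heap.GeneralPurposeAllocator(.{}){};
	var handled = main.heap.ErrorHandlingAllocator.init(gpa.allocator());
	const allocator = handled.allocator(); // a main.heap.NeverFailingAllocator

	const buf = allocator.alloc(u64, 1024); // no `try`: an allocation failure panics inside the wrapper
	defer allocator.free(buf);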

/// An allocator interface signaling that you can use it without handling allocation failures.
pub const NeverFailingAllocator = struct { // MARK: NeverFailingAllocator
	allocator: Allocator,
	IAssertThatTheProvidedAllocatorCantFail: void,

	const Alignment = std.mem.Alignment;
	const math = std.math;

	/// This function is not intended to be called except from within the
	/// implementation of an `Allocator`.
	pub inline fn rawAlloc(a: NeverFailingAllocator, len: usize, alignment: Alignment, ret_addr: usize) ?[*]u8 {
		return a.allocator.vtable.alloc(a.allocator.ptr, len, alignment, ret_addr);
	}

	/// This function is not intended to be called except from within the
	/// implementation of an `Allocator`.
	pub inline fn rawResize(a: NeverFailingAllocator, memory: []u8, alignment: Alignment, new_len: usize, ret_addr: usize) bool {
		return a.allocator.vtable.resize(a.allocator.ptr, memory, alignment, new_len, ret_addr);
	}

	/// This function is not intended to be called except from within the
	/// implementation of an `Allocator`.
	pub inline fn rawRemap(a: NeverFailingAllocator, memory: []u8, alignment: Alignment, new_len: usize, ret_addr: usize) ?[*]u8 {
		return a.allocator.vtable.remap(a.allocator.ptr, memory, alignment, new_len, ret_addr);
	}

	/// This function is not intended to be called except from within the
	/// implementation of an `Allocator`.
	pub inline fn rawFree(a: NeverFailingAllocator, memory: []u8, alignment: Alignment, ret_addr: usize) void {
		return a.allocator.vtable.free(a.allocator.ptr, memory, alignment, ret_addr);
	}

	/// Returns a pointer to undefined memory.
	/// Call `destroy` with the result to free the memory.
	pub fn create(self: NeverFailingAllocator, comptime T: type) *T {
		return self.allocator.create(T) catch unreachable;
	}

	/// `ptr` should be the return value of `create`, or otherwise
	/// have the same address and alignment property.
	pub fn destroy(self: NeverFailingAllocator, ptr: anytype) void {
		self.allocator.destroy(ptr);
	}

	/// Allocates an array of `n` items of type `T` and sets all the
	/// items to `undefined`. Depending on the Allocator
	/// implementation, it may be required to call `free` once the
	/// memory is no longer needed, to avoid a resource leak. If the
	/// `Allocator` implementation is unknown, then correct code will
	/// call `free` when done.
	///
	/// For allocating a single item, see `create`.
	pub fn alloc(self: NeverFailingAllocator, comptime T: type, n: usize) []T {
		return self.allocator.alloc(T, n) catch unreachable;
	}

	pub fn allocWithOptions(
		self: NeverFailingAllocator,
		comptime Elem: type,
		n: usize,
		/// null means naturally aligned
		comptime optional_alignment: ?u29,
		comptime optional_sentinel: ?Elem,
	) AllocWithOptionsPayload(Elem, optional_alignment, optional_sentinel) {
		return self.allocator.allocWithOptions(Elem, n, optional_alignment, optional_sentinel) catch unreachable;
	}

	pub fn allocWithOptionsRetAddr(
		self: NeverFailingAllocator,
		comptime Elem: type,
		n: usize,
		/// null means naturally aligned
		comptime optional_alignment: ?u29,
		comptime optional_sentinel: ?Elem,
		return_address: usize,
	) AllocWithOptionsPayload(Elem, optional_alignment, optional_sentinel) {
		return self.allocator.allocWithOptionsRetAddr(Elem, n, optional_alignment, optional_sentinel, return_address) catch unreachable;
	}

	fn AllocWithOptionsPayload(comptime Elem: type, comptime alignment: ?u29, comptime sentinel: ?Elem) type {
		if(sentinel) |s| {
			return [:s]align(alignment orelse @alignOf(Elem)) Elem;
		} else {
			return []align(alignment orelse @alignOf(Elem)) Elem;
		}
	}

	/// Allocates an array of `n + 1` items of type `T` and sets the first `n`
	/// items to `undefined` and the last item to `sentinel`. Depending on the
	/// Allocator implementation, it may be required to call `free` once the
	/// memory is no longer needed, to avoid a resource leak. If the
	/// `Allocator` implementation is unknown, then correct code will
	/// call `free` when done.
	///
	/// For allocating a single item, see `create`.
	pub fn allocSentinel(
		self: NeverFailingAllocator,
		comptime Elem: type,
		n: usize,
		comptime sentinel: Elem,
	) [:sentinel]Elem {
		return self.allocator.allocSentinel(Elem, n, sentinel) catch unreachable;
	}

	pub fn alignedAlloc(
		self: NeverFailingAllocator,
		comptime T: type,
		/// null means naturally aligned
		comptime alignment: ?u29,
		n: usize,
	) []align(alignment orelse @alignOf(T)) T {
		return self.allocator.alignedAlloc(T, alignment, n) catch unreachable;
	}

	pub inline fn allocAdvancedWithRetAddr(
		self: NeverFailingAllocator,
		comptime T: type,
		/// null means naturally aligned
		comptime alignment: ?u29,
		n: usize,
		return_address: usize,
	) []align(alignment orelse @alignOf(T)) T {
		return self.allocator.allocAdvancedWithRetAddr(T, alignment, n, return_address) catch unreachable;
	}

	fn allocWithSizeAndAlignment(self: NeverFailingAllocator, comptime size: usize, comptime alignment: u29, n: usize, return_address: usize) [*]align(alignment) u8 {
		return self.allocator.allocWithSizeAndAlignment(alignment, size, alignment, n, return_address) catch unreachable;
	}

	fn allocBytesWithAlignment(self: NeverFailingAllocator, comptime alignment: u29, byte_count: usize, return_address: usize) [*]align(alignment) u8 {
		return self.allocator.allocBytesWithAlignment(alignment, byte_count, return_address) catch unreachable;
	}

	/// Request to modify the size of an allocation.
	///
	/// It is guaranteed to not move the pointer, however the allocator
	/// implementation may refuse the resize request by returning `false`.
	///
	/// `allocation` may be an empty slice, in which case a new allocation is made.
	///
	/// `new_len` may be zero, in which case the allocation is freed.
	pub fn resize(self: NeverFailingAllocator, allocation: anytype, new_len: usize) bool {
		return self.allocator.resize(allocation, new_len);
	}

	/// Request to modify the size of an allocation, allowing relocation.
	///
	/// A non-`null` return value indicates the resize was successful. The
	/// allocation may have same address, or may have been relocated. In either
	/// case, the allocation now has size of `new_len`. A `null` return value
	/// indicates that the resize would be equivalent to allocating new memory,
	/// copying the bytes from the old memory, and then freeing the old memory.
	/// In such case, it is more efficient for the caller to perform those
	/// operations.
	///
	/// `allocation` may be an empty slice, in which case a new allocation is made.
	///
	/// `new_len` may be zero, in which case the allocation is freed.
	pub fn remap(self: NeverFailingAllocator, allocation: anytype, new_len: usize) t: {
		const Slice = @typeInfo(@TypeOf(allocation)).pointer;
		break :t ?[]align(Slice.alignment) Slice.child;
	} {
		return self.allocator.remap(allocation, new_len);
	}

	/// This function requests a new byte size for an existing allocation, which
	/// can be larger, smaller, or the same size as the old memory allocation.
	///
	/// If `new_n` is 0, this is the same as `free` and it always succeeds.
	///
	/// `old_mem` may have length zero, which makes a new allocation.
	///
	/// This function only fails on out-of-memory conditions, unlike:
	/// * `remap` which returns `null` when the `Allocator` implementation cannot
	/// do the realloc more efficiently than the caller
	/// * `resize` which returns `false` when the `Allocator` implementation cannot
	/// change the size without relocating the allocation.
	pub fn realloc(self: NeverFailingAllocator, old_mem: anytype, new_n: usize) t: {
		const Slice = @typeInfo(@TypeOf(old_mem)).pointer;
		break :t []align(Slice.alignment) Slice.child;
	} {
		return self.allocator.realloc(old_mem, new_n) catch unreachable;
	}

	pub fn reallocAdvanced(
		self: NeverFailingAllocator,
		old_mem: anytype,
		new_n: usize,
		return_address: usize,
	) t: {
		const Slice = @typeInfo(@TypeOf(old_mem)).pointer;
		break :t []align(Slice.alignment) Slice.child;
	} {
		return self.allocator.reallocAdvanced(old_mem, new_n, return_address) catch unreachable;
	}

	/// Free an array allocated with `alloc`.
	/// If memory has length 0, free is a no-op.
	/// To free a single item, see `destroy`.
	pub fn free(self: NeverFailingAllocator, memory: anytype) void {
		self.allocator.free(memory);
	}

	/// Copies `m` to newly allocated memory. Caller owns the memory.
	pub fn dupe(self: NeverFailingAllocator, comptime T: type, m: []const T) []T {
		return self.allocator.dupe(T, m) catch unreachable;
	}

	/// Copies `m` to newly allocated memory, with a null-terminated element. Caller owns the memory.
	pub fn dupeZ(self: NeverFailingAllocator, comptime T: type, m: []const T) [:0]T {
		return self.allocator.dupeZ(T, m) catch unreachable;
	}
};
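
A short call-site sketch of my own (not from the diff) of what this interface buys: every method forwards to the wrapped `std.mem.Allocator` and discharges the error set with `catch unreachable`, so functions taking a `NeverFailingAllocator` need no error union in their signatures:

	fn duplicate(allocator: main.heap.NeverFailingAllocator, tags: []const u32) []u32 {
		// With a plain std.mem.Allocator this would be `try allocator.dupe(...)`
		// and the return type would have to be `![]u32`.
		return allocator.dupe(u32, tags);
	}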

pub const NeverFailingArenaAllocator = struct { // MARK: NeverFailingArena
	arena: std.heap.ArenaAllocator,

	pub fn init(child_allocator: NeverFailingAllocator) NeverFailingArenaAllocator {
		return .{
			.arena = .init(child_allocator.allocator),
		};
	}

	pub fn deinit(self: NeverFailingArenaAllocator) void {
		self.arena.deinit();
	}

	pub fn allocator(self: *NeverFailingArenaAllocator) NeverFailingAllocator {
		return .{
			.allocator = self.arena.allocator(),
			.IAssertThatTheProvidedAllocatorCantFail = {},
		};
	}

	/// Resets the arena allocator and frees all allocated memory.
	///
	/// `mode` defines how the currently allocated memory is handled.
	/// See the variant documentation for `ResetMode` for the effects of each mode.
	///
	/// The function will return whether the reset operation was successful or not.
	/// If the reallocation failed `false` is returned. The arena will still be fully
	/// functional in that case, all memory is released. Future allocations just might
	/// be slower.
	///
	/// NOTE: If `mode` is `free_all`, the function will always return `true`.
	pub fn reset(self: *NeverFailingArenaAllocator, mode: std.heap.ArenaAllocator.ResetMode) bool {
		return self.arena.reset(mode);
	}

	pub fn shrinkAndFree(self: *NeverFailingArenaAllocator) void {
		const node = self.arena.state.buffer_list.first orelse return;
		const allocBuf = @as([*]u8, @ptrCast(node))[0..node.data];
		const dataSize = std.mem.alignForward(usize, @sizeOf(std.SinglyLinkedList(usize).Node) + self.arena.state.end_index, @alignOf(std.SinglyLinkedList(usize).Node));
		if(self.arena.child_allocator.rawResize(allocBuf, @enumFromInt(std.math.log2(@alignOf(std.SinglyLinkedList(usize).Node))), dataSize, @returnAddress())) {
			node.data = dataSize;
		}
	}
};
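
Again a sketch only (not in the commit), showing the intended arena pattern: allocate freely during a phase, then drop everything in one deinit or reset, with no individual frees:

	var arena = main.heap.NeverFailingArenaAllocator.init(main.globalAllocator);
	defer arena.deinit(); // releases every allocation made through the arena
	const temp = arena.allocator();

	const scratch = temp.alloc(u8, 4096); // no individual free needed
	_ = scratch;
	_ = arena.reset(.free_all); // alternatively, reuse the arena between phases
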
@ -2,7 +2,7 @@ const builtin = @import("builtin");
const std = @import("std");

const main = @import("../main.zig");
const NeverFailingAllocator = main.utils.NeverFailingAllocator;
const NeverFailingAllocator = main.heap.NeverFailingAllocator;

fn growCapacity(current: usize, minimum: usize) usize {
	var new = current;
@ -1,7 +1,7 @@
const std = @import("std");

const main = @import("main.zig");
const NeverFailingAllocator = main.utils.NeverFailingAllocator;
const NeverFailingAllocator = main.heap.NeverFailingAllocator;
const List = main.List;

pub const ZonElement = union(enum) { // MARK: Zon
@ -233,7 +233,7 @@ pub const ZonElement = union(enum) { // MARK: Zon
			},
			.vector => {
				const len = @typeInfo(@TypeOf(value)).vector.len;
				const result = initArray(main.utils.NeverFailingAllocator{.allocator = allocator, .IAssertThatTheProvidedAllocatorCantFail = {}});
				const result = initArray(main.heap.NeverFailingAllocator{.allocator = allocator, .IAssertThatTheProvidedAllocatorCantFail = {}});
				result.array.ensureCapacity(len);
				inline for(0..len) |i| {
					result.array.appendAssumeCapacity(createElementFromRandomType(value[i], allocator));
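
One last illustrative note (not part of the diff): this hunk shows the escape hatch for code that only has a raw `std.mem.Allocator` in hand, constructing the wrapper inline and asserting, by spelling out the field name, that the allocator cannot fail. In sketch form:

	fn wrap(allocator: std.mem.Allocator) main.heap.NeverFailingAllocator {
		// Only sound if `allocator` genuinely never returns error.OutOfMemory.
		return .{
			.allocator = allocator,
			.IAssertThatTheProvidedAllocatorCantFail = {},
		};
	}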