Rewrite the biome system (there are still some smaller things left to do)

IntegratedQuantum 2023-06-26 17:33:15 +02:00
parent 0867dd1975
commit 64f5b363a4
40 changed files with 1007 additions and 221 deletions

.gitignore vendored
View File

@ -4,4 +4,6 @@ zig-out/
zig-cache/
serverAssets/
settings.json
gui_layout.json
gui_layout.json
test.png

View File

@ -1,6 +1,9 @@
{
"chance" : 0,
"type" : "warm_ocean",
"properties" : [
"hot",
"ocean"
],
"minHeight" : -4,
"maxHeight" : 6,

View File

@ -1,5 +1,7 @@
{
"type" : "shrubland",
"properties" : [
"hot"
],
"minHeight" : 4,
"maxHeight" : 60,

View File

@ -1,5 +1,7 @@
{
"type" : "mountain_grassland",
"properties" : [
"mountain",
],
"minHeight" : 60,
"maxHeight" : 256,

View File

@ -1,5 +1,5 @@
{
"type" : "cave",
"isCave" : true,
"maxHeight" : 0,
"music" : "heart-of-the-beast",

View File

@ -1,5 +1,5 @@
{
"type" : "cave",
"isCave" : true,
"maxHeight" : -512,
"chance" : 0.2,

View File

@ -1,5 +1,5 @@
{
"type" : "cave",
"isCave" : true,
"maxHeight" : -512,
"chance" : 0.01,

View File

@ -1,5 +1,5 @@
{
"type" : "cave",
"isCave" : true,
"minHeight" : 0,
"music" : "GymnopedieNo1",

View File

@ -1,5 +1,5 @@
{
"type" : "cave",
"isCave" : true,
"chance" : 0.01,

View File

@ -1,7 +1,12 @@
{
"properties" : [
"cold",
"ocean",
],
"type" : "arctic_ocean",
"minHeight" : -100,
"maxHeight" : -2,
"radius" : 500,
"roughness" : 20,
"hills" : 10,

View File

@ -1,5 +1,9 @@
{
"type" : "mountain_grassland",
"properties" : [
"mountain",
"hot",
"dry"
],
"minHeight" : 60,
"maxHeight" : 256,

View File

@ -1,5 +1,8 @@
{
"type" : "desert",
"properties" : [
"hot",
"dry"
],
"minHeight" : 2,
"maxHeight" : 40,

View File

@ -1,6 +1,6 @@
{
"properties" : [],
"chance" : 0,
"type" : "grassland",
"minHeight" : 24,
"maxHeight" : 40,

View File

@ -1,5 +1,7 @@
{
"type" : "forest",
"properties" : [
"wet"
],
"minHeight" : 2,
"maxHeight" : 40,

View File

@ -1,5 +1,7 @@
{
"type" : "glacier",
"properties" : [
"cold",
],
"minHeight" : 60,
"maxHeight" : 256,

View File

@ -1,5 +1,6 @@
{
"type" : "grassland",
"properties" : [
],
"minHeight" : 2,
"maxHeight" : 40,

View File

@ -1,6 +1,8 @@
{
"properties" : [
"ocean"
],
"chance" : 0,
"type" : "ocean",
"minHeight" : -4,
"maxHeight" : 6,

View File

@ -0,0 +1,36 @@
{
"chance" : 0,
"properties" : [
"ocean"
],
"minHeight" : 4,
"maxHeight" : 4,
"music" : "sunrise",
"radius" : 24,
"ground_structure" : [
"cubyz:grass",
"2 to 3 cubyz:soil"
],
"structures" : [
{
"id" : "cubyz:simple_tree",
"leaves" : "cubyz:oak_leaves",
"log" : "cubyz:oak_log",
"top" : "cubyz:oak_top",
"chance" : 0.001,
"type" : "round",
"height" : 12,
"height_variation" : 10
}
],
"parentBiomes" : [
{
"id" : "cubyz:island_shelf",
"chance" : 1,
}
],
}
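The new parentBiomes field registers this biome as a sub-biome of each listed parent: as the biomes.zig changes below show, the entries are collected per parent id during loading and turned into the parent's subBiomes alias table, weighted by the given chance.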

View File

@ -0,0 +1,26 @@
{
"chance" : 0,
"properties" : [
"ocean"
],
"minHeight" : -10,
"maxHeight" : -10,
"music" : "Sincerely",
"radius" : 64,
"ground_structure" : [
"3 to 4 cubyz:stone",
"1 to 2 cubyz:sandstone"
],
"maxSubBiomeCount" : 1,
"parentBiomes" : [
{
"id" : "cubyz:ocean",
"chance" : 1,
}
],
}

View File

@ -1,5 +1,8 @@
{
"type" : "rainforest",
"properties" : [
"wet",
"hot",
],
"minHeight" : 2,
"maxHeight" : 40,

View File

@ -1,5 +1,7 @@
{
"type" : "mountain_forest",
"properties" : [
"mountain",
],
"minHeight" : 20,
"maxHeight" : 256,

View File

@ -1,5 +1,8 @@
{
"type" : "ocean",
"properties" : [
"ocean"
],
"radius" : 500,
"minHeight" : -100,
"maxHeight" : -2,

View File

@ -1,5 +1,7 @@
{
"type" : "peak",
"properties" : [
"mountain"
],
"minHeight" : 120,
"maxHeight" : 256,

View File

@ -1,6 +1,9 @@
{
"properties" : [
"cold",
"ocean",
],
"chance" : 0,
"type" : "arctic_ocean",
"minHeight" : -4,
"maxHeight" : 6,

View File

@ -1,6 +1,7 @@
{
"properties" : [
],
"chance" : 0.04,
"type" : "grassland",
"minHeight" : 10,
"maxHeight" : 40,

View File

@ -1,5 +1,8 @@
{
"type" : "swamp",
"properties" : [
"cold",
"wet"
],
"minHeight" : 2,
"maxHeight" : 40,

View File

@ -1,5 +1,7 @@
{
"type" : "taiga",
"properties" : [
"cold"
],
"minHeight" : 2,
"maxHeight" : 40,

View File

@ -1,5 +1,8 @@
{
"type" : "tundra",
"properties" : [
"cold",
"dry"
],
"minHeight" : 2,
"maxHeight" : 40,

View File

@ -1,5 +1,9 @@
{
"type" : "warm_ocean",
"properties" : [
"hot",
"ocean",
],
"radius" : 500,
"minHeight" : -100,
"maxHeight" : -2,

View File

@ -1,6 +1,8 @@
const std = @import("std");
const main = @import("root");
const Vec2f = main.vec.Vec2f;
const Vec2i = main.vec.Vec2i;
const Vec3i = main.vec.Vec3i;
const multiplier: u64 = 0x5deece66d;
@ -49,14 +51,34 @@ pub fn nextFloat(seed: *u64) f32 {
return @intToFloat(f32, nextInt(u24, seed))/@intToFloat(f32, 1 << 24);
}
pub fn nextFloatSigned(seed: *u64) f32 {
return @intToFloat(f32, @bitCast(i24, nextInt(u24, seed)))/@intToFloat(f32, 1 << 23);
}
pub fn nextDouble(seed: *u64) f64 {
const lower: u52 = nextInt(u32, seed);
const upper: u52 = nextInt(u20, seed);
return @intToFloat(f64, upper<<32 | lower)/@intToFloat(f64, 1 << 52);
}
pub fn nextPointInUnitCircle(seed: *u64) Vec2f {
while(true) {
var x: f32 = nextFloatSigned(seed);
var y: f32 = nextFloatSigned(seed);
if(x*x + y*y < 1) {
return Vec2f{x, y};
}
}
}
pub fn initSeed3D(worldSeed: u64, pos: Vec3i) u64 {
const fac = Vec3i {11248723, 105436839, 45399083};
const seed = @reduce(.Xor, fac *% pos);
return @bitCast(u32, seed) ^ worldSeed;
}
pub fn initSeed2D(worldSeed: u64, pos: Vec2i) u64 {
const fac = Vec2i {11248723, 105436839};
const seed = @reduce(.Xor, fac *% pos);
return @bitCast(u32, seed) ^ worldSeed;
}
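Two of the new helpers are worth a note: nextPointInUnitCircle uses rejection sampling, redrawing a point from the square [-1, 1]² until it lands inside the unit circle (each try succeeds with probability π/4 ≈ 0.785), and initSeed2D/initSeed3D derive a deterministic seed from the world seed and a position by multiplying the coordinates with large odd constants and xor-folding the result. A minimal usage sketch (worldSeed, wx and wz are placeholder variables, not from this commit):

var seed = main.random.initSeed2D(worldSeed, .{wx, wz}); // same position → same seed
const point = main.random.nextPointInUnitCircle(&seed); // Vec2f with length < 1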

View File

@ -45,7 +45,7 @@ const MapFragmentPosition = struct {
/// Generates and stores the height and Biome maps of the planet.
pub const MapFragment = struct {
pub const biomeShift = 7;
pub const biomeShift = 5;
/// The average diameter of a biome.
pub const biomeSize = 1 << biomeShift;
pub const mapShift = 8;
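Since biomeSize = 1 << biomeShift, this change shrinks the average biome diameter from 2⁷ = 128 to 2⁵ = 32 blocks.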

View File

@ -53,70 +53,38 @@ const StructureModel = struct {
/// A climate region with special ground, plants and structures.
pub const Biome = struct {
pub const Type = enum { // TODO: I should make this more general. There should be a way to define custom biome types.
/// hot, wet, lowland
rainforest,
/// hot, medium, lowland
shrubland,
/// hot, dry, lowland
desert,
/// temperate, wet, lowland
swamp,
/// temperate, medium, lowland
forest,
/// temperate, dry, lowland
grassland,
/// cold, wet, lowland
tundra,
/// cold, medium, lowland
taiga,
/// cold, dry, lowland
glacier,
const GenerationProperties = packed struct(u8) {
// pairs of opposite properties. In-between values are allowed.
hot: bool = false,
cold: bool = false,
/// temperate, medium, highland
mountain_forest,
/// temperate, dry, highland
mountain_grassland,
/// cold, dry, highland
peak,
inland: bool = false,
ocean: bool = false,
/// temperate ocean
ocean,
/// tropical ocean(TODO: coral reefs and stuff)
warm_ocean,
/// arctic ocean(ice sheets)
arctic_ocean,
wet: bool = false,
dry: bool = false,
/// underground caves
cave,
mountain: bool = false,
antiMountain: bool = false, //???
fn lowerTypes(typ: Type) []const Type {
return switch(typ) {
.rainforest, .shrubland, .desert => &[_]Type{.warm_ocean},
.swamp, .forest, .grassland => &[_]Type{.ocean},
.tundra, .taiga, .glacier => &[_]Type{.arctic_ocean},
.mountain_forest => &[_]Type{.forest},
.mountain_grassland => &[_]Type{.grassland},
.peak => &[_]Type{.tundra},
else => &[_]Type{},
};
}
fn higherTypes(typ: Type) []const Type {
return switch(typ) {
.swamp, .rainforest, .forest, .taiga => &[_]Type{.mountain_forest},
.shrubland, .grassland => &[_]Type{.mountain_grassland},
.mountain_forest, .mountain_grassland, .desert, .tundra, .glacier => &[_]Type{.peak},
.warm_ocean => &[_]Type{.rainforest, .shrubland, .desert},
.ocean => &[_]Type{.swamp, .forest, .grassland},
.arctic_ocean => &[_]Type{.glacier, .tundra},
else => &[_]Type{},
};
pub fn fromJson(json: JsonElement) GenerationProperties {
var result: GenerationProperties = .{};
for(json.toSlice()) |child| {
const property = child.as([]const u8, "");
inline for(@typeInfo(GenerationProperties).Struct.fields) |field| {
if(std.mem.eql(u8, field.name, property)) {
@field(result, field.name) = true;
}
}
}
return result;
}
};
typ: Type,
minHeight: i32,
properties: GenerationProperties,
isCave: bool,
radius: f32,
minHeight: i32, // TODO: Use only one base height.
maxHeight: i32,
roughness: f32,
hills: f32,
@ -129,20 +97,22 @@ pub const Biome = struct {
/// Whether the starting point of a river can be in this biome. If false, rivers will still be able to flow through this biome anyway.
supportsRivers: bool, // TODO: Reimplement rivers.
/// The first members in this array will get prioritized.
vegetationModels: []StructureModel = &[0]StructureModel{},
upperReplacements: []const *const Biome = &[0]*Biome{},
lowerReplacements: []const *const Biome = &[0]*Biome{},
vegetationModels: []StructureModel = &.{},
subBiomes: main.utils.AliasTable(*const Biome) = undefined,
maxSubBiomeCount: f32,
subBiomeTotalChance: f32 = 0,
upperReplacements: []const *const Biome = &.{}, // TODO: Allow manually adding a list of replacement biomes.
lowerReplacements: []const *const Biome = &.{},
preferredMusic: []const u8, // TODO: Support multiple possibilities that are chosen based on time and danger.
isValidPlayerSpawn: bool,
chance: f64,
chance: f32,
pub fn init(self: *Biome, id: []const u8, json: JsonElement) !void {
self.* = Biome {
.typ = std.meta.stringToEnum(Type, json.get([]const u8, "type", "")) orelse blk: {
std.log.warn("Couldn't find biome type {s}. Replacing it with grassland.", .{json.get([]const u8, "type", "")});
break :blk Type.grassland;
},
.id = try main.globalAllocator.dupe(u8, id),
.properties = GenerationProperties.fromJson(json.getChild("properties")),
.isCave = json.get(bool, "isCave", false),
.radius = json.get(f32, "radius", 64),
.stoneBlockType = blocks.getByID(json.get([]const u8, "stoneBlock", "cubyz:stone")),
.roughness = json.get(f32, "roughness", 0),
.hills = json.get(f32, "hills", 0),
@ -154,11 +124,17 @@ pub const Biome = struct {
.supportsRivers = json.get(bool, "rivers", false),
.preferredMusic = try main.globalAllocator.dupe(u8, json.get([]const u8, "music", "")),
.isValidPlayerSpawn = json.get(bool, "validPlayerSpawn", false),
.chance = json.get(f64, "chance", 1),
.chance = json.get(f32, "chance", 1),
.maxSubBiomeCount = json.get(f32, "maxSubBiomeCount", std.math.floatMax(f32)),
};
if(self.minHeight > self.maxHeight) {
std.log.warn("Biome {s} has invalid height range ({}, {})", .{self.id, self.minHeight, self.maxHeight});
}
const parentBiomeList = json.getChild("parentBiomes");
for(parentBiomeList.toSlice()) |parent| {
const result = try unfinishedSubBiomes.getOrPutValue(main.globalAllocator, parent.get([]const u8, "id", ""), .{});
try result.value_ptr.append(main.globalAllocator, .{.biomeId = self.id, .chance = parent.get(f32, "chance", 1)});
}
self.structure = try BlockStructure.init(main.globalAllocator, json.getChild("ground_structure"));
@ -253,14 +229,138 @@ pub const BlockStructure = struct {
}
};
pub const TreeNode = union(enum) {
leaf: struct {
totalChance: f64 = 0,
aliasTable: main.utils.AliasTable(Biome) = undefined,
},
branch: struct {
amplitude: f32,
lowerBorder: f32,
upperBorder: f32,
children: [3]*TreeNode,
},
pub fn init(allocator: Allocator, currentSlice: []Biome, parameterShift: u5) !*TreeNode {
const self = try allocator.create(TreeNode);
if(currentSlice.len <= 1 or parameterShift >= @bitSizeOf(Biome.GenerationProperties)) {
self.* = .{.leaf = .{}};
for(currentSlice) |biome| {
self.leaf.totalChance += biome.chance;
}
self.leaf.aliasTable = try main.utils.AliasTable(Biome).init(allocator, currentSlice);
return self;
}
var chanceLower: f32 = 0;
var chanceMiddle: f32 = 0;
var chanceUpper: f32 = 0;
for(currentSlice) |*biome| {
var properties: u32 = @bitCast(u8, biome.properties);
properties >>= parameterShift;
properties = properties & 3;
if(properties == 0) {
chanceMiddle += 1; // TODO: += biome.chance
} else if(properties == 1) {
chanceLower += 1; // TODO: += biome.chance
} else if(properties == 2) {
chanceUpper += 1; // TODO: += biome.chance
} else unreachable;
}
const totalChance = chanceLower + chanceMiddle + chanceUpper;
chanceLower /= totalChance;
chanceMiddle /= totalChance;
chanceUpper /= totalChance;
self.* = .{
.branch = .{
.amplitude = 1024, // TODO!
.lowerBorder = terrain.noise.ValueNoise.percentile(chanceLower),
.upperBorder = terrain.noise.ValueNoise.percentile(chanceLower + chanceMiddle),
.children = undefined,
}
};
// Partition the slice:
var lowerIndex: usize = 0;
var upperIndex: usize = currentSlice.len - 1;
var i: usize = 0;
while(i <= upperIndex) {
var properties: u32 = @bitCast(u8, currentSlice[i].properties);
properties >>= parameterShift;
properties = properties & 3;
if(properties == 0 or properties == 3) {
i += 1;
} else if(properties == 1) {
const swap = currentSlice[i];
currentSlice[i] = currentSlice[lowerIndex];
currentSlice[lowerIndex] = swap;
i += 1;
lowerIndex += 1;
} else if(properties == 2) {
const swap = currentSlice[i];
currentSlice[i] = currentSlice[upperIndex];
currentSlice[upperIndex] = swap;
upperIndex -= 1;
} else unreachable;
}
self.branch.children[0] = try TreeNode.init(allocator, currentSlice[0..lowerIndex], parameterShift+2);
self.branch.children[1] = try TreeNode.init(allocator, currentSlice[lowerIndex..upperIndex+1], parameterShift+2);
self.branch.children[2] = try TreeNode.init(allocator, currentSlice[upperIndex+1..], parameterShift+2);
return self;
}
pub fn deinit(self: *TreeNode, allocator: Allocator) void {
if(self.* == .branch) {
for(self.branch.children) |child| {
child.deinit(allocator);
}
}
allocator.destroy(self);
}
pub fn getBiome(self: *const TreeNode, seed: *u64, x: f32, y: f32) *const Biome {
switch(self.*) {
.leaf => |leaf| {
var biomeSeed = seed.* ^ @as(u64, 5624786589461)*%@bitCast(u32, @floatToInt(i32, x)) ^ @as(u64, 897650786185)*%@bitCast(u32, @floatToInt(i32, y));
const result = leaf.aliasTable.sample(&biomeSeed);
return result;
},
.branch => |branch| {
const value = terrain.noise.ValueNoise.samplePoint2D(x/branch.amplitude, y/branch.amplitude, main.random.nextInt(u32, seed));
var index: u2 = 0;
if(value >= branch.lowerBorder) {
if(value >= branch.upperBorder) {
index = 2;
} else {
index = 1;
}
}
return branch.children[index].getBiome(seed, x, y);
}
}
}
};
var finishedLoading: bool = false;
var biomes: std.ArrayList(Biome) = undefined;
var biomesById: std.StringHashMap(*const Biome) = undefined;
var byTypeBiomes: [@typeInfo(Biome.Type).Enum.fields.len]RandomList(*const Biome) = [_]RandomList(*const Biome){.{}} ** @typeInfo(Biome.Type).Enum.fields.len;
var caveBiomes: std.ArrayList(Biome) = undefined;
var biomesById: std.StringHashMap(*Biome) = undefined;
pub var byTypeBiomes: *TreeNode = undefined;
const UnfinishedSubBiomeData = struct {
biomeId: []const u8,
chance: f32,
pub fn getItem(self: UnfinishedSubBiomeData) *const Biome {
return getById(self.biomeId);
}
};
var unfinishedSubBiomes: std.StringHashMapUnmanaged(std.ArrayListUnmanaged(UnfinishedSubBiomeData)) = .{};
pub fn init() !void {
biomes = std.ArrayList(Biome).init(main.globalAllocator);
biomesById = std.StringHashMap(*const Biome).init(main.globalAllocator);
caveBiomes = std.ArrayList(Biome).init(main.globalAllocator);
biomesById = std.StringHashMap(*Biome).init(main.globalAllocator);
const list = @import("structures/_list.zig");
inline for(@typeInfo(list).Struct.decls) |decl| {
try StructureModel.registerGenerator(@field(list, decl.name));
@ -274,10 +374,9 @@ pub fn reset() void {
biome.deinit();
}
biomes.clearRetainingCapacity();
caveBiomes.clearRetainingCapacity();
biomesById.clearRetainingCapacity();
for(&byTypeBiomes) |*list| {
list.reset();
}
byTypeBiomes.deinit(main.globalAllocator);
}
pub fn deinit() void {
@ -285,67 +384,45 @@ pub fn deinit() void {
biome.deinit();
}
biomes.deinit();
caveBiomes.deinit();
biomesById.deinit();
for(&byTypeBiomes) |*list| {
list.deinit(main.globalAllocator);
}
// TODO? byTypeBiomes.deinit(main.globalAllocator);
StructureModel.modelRegistry.clearAndFree(main.globalAllocator);
}
pub fn register(id: []const u8, json: JsonElement) !void {
std.log.debug("Registered biome: {s}", .{id});
std.debug.assert(!finishedLoading);
try (try biomes.addOne()).init(id, json);
var biome: Biome = undefined;
try biome.init(id, json);
if(biome.isCave) {
try caveBiomes.append(biome);
} else {
try biomes.append(biome);
}
}
pub fn finishLoading() !void {
std.debug.assert(!finishedLoading);
finishedLoading = true;
byTypeBiomes = try TreeNode.init(main.globalAllocator, biomes.items, 0);
for(biomes.items) |*biome| {
try biomesById.put(biome.id, biome);
try byTypeBiomes[@enumToInt(biome.typ)].add(main.globalAllocator, biome);
}
// Get a list of replacement biomes for each biome:
for(biomes.items) |*biome| {
var replacements = std.ArrayListUnmanaged(*const Biome){};
// Check lower replacements:
// Check if there are replacement biomes of the same type:
for(byTypeBiomes[@enumToInt(biome.typ)].items()) |replacement| {
if(replacement.maxHeight > biome.minHeight and replacement.minHeight < biome.minHeight) {
try replacements.append(main.globalAllocator, replacement);
}
var subBiomeIterator = unfinishedSubBiomes.iterator();
while(subBiomeIterator.next()) |subBiomeData| {
const parentBiome = biomesById.get(subBiomeData.key_ptr.*) orelse {
std.log.warn("Couldn't find biome with id {s}. Cannot add sub-biomes.", .{subBiomeData.key_ptr.*});
continue;
};
const subBiomeDataList = subBiomeData.value_ptr;
for(subBiomeDataList.items) |item| {
parentBiome.subBiomeTotalChance += item.chance;
}
// If that doesn't work, check for the next lower height region:
if(replacements.items.len == 0) {
for(biome.typ.lowerTypes()) |typ| {
for(byTypeBiomes[@enumToInt(typ)].items()) |replacement| {
if(replacement.maxHeight > biome.minHeight and replacement.minHeight < biome.minHeight) {
try replacements.append(main.globalAllocator, replacement);
}
}
}
}
biome.lowerReplacements = try replacements.toOwnedSlice(main.globalAllocator);
// Check upper replacements:
// Check if there are replacement biomes of the same type:
for(byTypeBiomes[@enumToInt(biome.typ)].items()) |replacement| {
if(replacement.minHeight < biome.maxHeight and replacement.maxHeight > biome.maxHeight) {
try replacements.append(main.globalAllocator, replacement);
}
}
// If that doesn't work, check for the next higher height region:
if(replacements.items.len == 0) {
for(biome.typ.higherTypes()) |typ| {
for(byTypeBiomes[@enumToInt(typ)].items()) |replacement| {
if(replacement.minHeight < biome.maxHeight and replacement.maxHeight > biome.maxHeight) {
try replacements.append(main.globalAllocator, replacement);
}
}
}
}
biome.upperReplacements = try replacements.toOwnedSlice(main.globalAllocator);
parentBiome.subBiomes = try main.utils.AliasTable(*const Biome).initFromContext(main.globalAllocator, subBiomeDataList.items);
subBiomeDataList.deinit(main.globalAllocator);
}
unfinishedSubBiomes.clearAndFree(main.globalAllocator);
}
pub fn getById(id: []const u8) *const Biome {
@ -360,6 +437,6 @@ pub fn getRandomly(typ: Biome.Type, seed: *u64) *const Biome {
return byTypeBiomes[@enumToInt(typ)].getRandomly(seed);
}
pub fn getBiomesOfType(typ: Biome.Type) []*const Biome {
return byTypeBiomes[@enumToInt(typ)].items();
pub fn getCaveBiomes() []const Biome {
return caveBiomes.items;
}
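To summarize the new selection scheme: a biome's JSON "properties" list is parsed into the packed GenerationProperties bit set, TreeNode.init recursively partitions the biome list on one opposite-property pair (two bits) per level, and getBiome walks down the tree by comparing a value-noise sample against the percentile-derived borders, finally sampling a leaf's alias table. A hedged sketch of the bit-pair logic at the first level (not code from this commit):

// ["hot", "ocean"] → GenerationProperties{.hot = true, .ocean = true}
// At parameterShift = 0 the two lowest bits (hot/cold) pick the branch:
// bits == 0b01 (hot) → children[0] (lower)
// bits == 0b10 (cold) → children[2] (upper)
// bits == 0b00 or 0b11 → children[1] (middle, no preference)
// The recursion continues with parameterShift = 2 on the inland/ocean pair.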

View File

@ -22,11 +22,11 @@ pub const priority = 1024;
pub const generatorSeed = 765893678349;
var caveBiomes: []*const Biome = undefined;
var caveBiomes: []const Biome = undefined;
pub fn init(parameters: JsonElement) void {
_ = parameters;
caveBiomes = terrain.biomes.getBiomesOfType(.cave);
caveBiomes = terrain.biomes.getCaveBiomes();
}
pub fn deinit() void {
@ -37,14 +37,14 @@ pub fn generate(map: *CaveBiomeMapFragment, worldSeed: u64) Allocator.Error!void
// Select all the biomes that are within the given height range.
var validBiomes = try std.ArrayListUnmanaged(*const Biome).initCapacity(main.threadAllocator, caveBiomes.len);
defer validBiomes.deinit(main.threadAllocator);
for(caveBiomes) |biome| {
for(caveBiomes) |*biome| {
if(biome.minHeight < map.pos.wy +% CaveBiomeMapFragment.caveBiomeMapSize and biome.maxHeight > map.pos.wy) {
validBiomes.appendAssumeCapacity(biome);
}
}
if(validBiomes.items.len == 0) {
std.log.warn("Couldn't find any cave biome on height {}. Using biome {s} instead.", .{map.pos.wy, caveBiomes[0].id});
validBiomes.appendAssumeCapacity(caveBiomes[0]);
validBiomes.appendAssumeCapacity(&caveBiomes[0]);
}
var seed = random.initSeed3D(worldSeed, .{map.pos.wx, map.pos.wy, map.pos.wz});

View File

@ -0,0 +1,303 @@
const std = @import("std");
const Allocator = std.mem.Allocator;
const main = @import("root");
const Array2D = main.utils.Array2D;
const random = main.random;
const JsonElement = main.JsonElement;
const terrain = main.server.terrain;
const ClimateMapFragment = terrain.ClimateMap.ClimateMapFragment;
const noise = terrain.noise;
const FractalNoise = noise.FractalNoise;
const RandomlyWeightedFractalNoise = noise.RandomlyWeightedFractalNoise;
const PerlinNoise = noise.PerlinNoise;
const Biome = terrain.biomes.Biome;
const TreeNode = terrain.biomes.TreeNode;
const vec = main.vec;
const Vec2i = vec.Vec2i;
const Vec2f = vec.Vec2f;
// Generates the climate map using a fluid-dynamics simulation, with a circular heat distribution.
pub const id = "cubyz:polar_circles"; // TODO
pub fn init(parameters: JsonElement) void {
_ = parameters;
}
pub fn deinit() void {
}
pub fn generateMapFragment(map: *ClimateMapFragment, worldSeed: u64) Allocator.Error!void {
const map2 = try generateMap(main.threadAllocator, map.pos.wx, map.pos.wz, ClimateMapFragment.mapSize, ClimateMapFragment.mapSize, worldSeed);
defer map2.deinit(main.threadAllocator);
var image = try main.graphics.Image.init(main.threadAllocator, @intCast(u31, map2.width), @intCast(u31, map2.height));
defer image.deinit(main.threadAllocator);
for(0..map2.width) |x| {
for(0..map2.height) |z| {
map.map[x][z] = .{ // TODO
.seed = random.initSeed2D(worldSeed, .{@intCast(i32, map.pos.wx +% @intCast(i32, x)*terrain.SurfaceMap.MapFragment.biomeSize), map.pos.wz +% @intCast(i32, z)*terrain.SurfaceMap.MapFragment.biomeSize}),
.biome = map2.get(x, z),
.x = map.pos.wx +% @intCast(i32, x)*terrain.SurfaceMap.MapFragment.biomeSize,
.z = map.pos.wz +% @intCast(i32, z)*terrain.SurfaceMap.MapFragment.biomeSize,
.height = @intToFloat(f32, map2.get(x, z).minHeight),
};
var seed: u64 = std.hash.Adler32.hash(map2.get(x, z).id) ^ 4371741;// @ptrToInt(map2.get(x, z));
image.setRGB(x, z, @bitCast(main.graphics.Color, 0xff000000 | main.random.nextInt(u32, &seed)));
}
}
try image.exportToFile("test.png");
}
const BiomePoint = struct {
biome: *const Biome,
pos: Vec2f = .{0, 0},
weight: f32 = 1,
fn voronoiDistanceFunction(self: @This(), pos: Vec2f) f32 {
const len = vec.lengthSquare(self.pos - pos);
var result = len*self.weight;
if(result > 1.0) {
return result + (result - 1.0)/8192.0*len;
}
return result;
}
pub fn lessThan(lhs: @This(), rhs: @This()) bool {
return lhs.pos[0] < rhs.pos[0];
}
};
const maxBiomeRadius = 2048;
const chunkSize = maxBiomeRadius;
const Chunk = struct {
wx: i32,
wz: i32,
biomesSortedByX: []BiomePoint,
maxBiomeRadius: f32,
fn getStartCoordinate(minX: f32, biomesSortedByX: []BiomePoint) usize {
// TODO: Should this be vectorized by storing the x-coordinate in a separate []u8?
var start: usize = 0;
var end: usize = biomesSortedByX.len;
while(end - start > 16) {
const mid = (start + end)/2 - 1;
if(biomesSortedByX[mid].pos[0] < minX) {
start = mid + 1;
} else {
end = mid + 1;
}
}
return start;
}
fn checkIfBiomeIsValid(x: f32, y: f32, biomeRadius: f32, biomesSortedByX: []BiomePoint, chunkLocalMaxBiomeRadius: f32) bool {
const minX = x - biomeRadius - chunkLocalMaxBiomeRadius;
const maxX = x + biomeRadius + chunkLocalMaxBiomeRadius;
var i: usize = getStartCoordinate(minX, biomesSortedByX);
for(biomesSortedByX[i..]) |other| {
if(other.pos[0] >= maxX) break;
const minDistance = (biomeRadius + other.biome.radius)*0.85;
if(vec.lengthSquare(other.pos - Vec2f{x, y}) < minDistance*minDistance) {
return false;
}
}
return true;
}
pub fn init(allocator: Allocator, tree: *TreeNode, worldSeed: u64, wx: i32, wz: i32) !*Chunk {
var neighborBuffer: [8]*Chunk = undefined;
var neighbors: std.ArrayListUnmanaged(*Chunk) = .{.items = neighborBuffer[0..0], .capacity = neighborBuffer.len};
defer for(neighbors.items) |ch| {
ch.deinit(allocator);
};
// Generate the chunks in an interleaved pattern, to allow seamless infinite generation.
if(wx & chunkSize != 0) {
neighbors.appendAssumeCapacity(try Chunk.init(allocator, tree, worldSeed, wx +% chunkSize, wz));
neighbors.appendAssumeCapacity(try Chunk.init(allocator, tree, worldSeed, wx -% chunkSize, wz));
if(wz & chunkSize != 0) {
neighbors.appendAssumeCapacity(try Chunk.init(allocator, tree, worldSeed, wx +% chunkSize, wz +% chunkSize));
neighbors.appendAssumeCapacity(try Chunk.init(allocator, tree, worldSeed, wx -% chunkSize, wz +% chunkSize));
neighbors.appendAssumeCapacity(try Chunk.init(allocator, tree, worldSeed, wx +% chunkSize, wz -% chunkSize));
neighbors.appendAssumeCapacity(try Chunk.init(allocator, tree, worldSeed, wx -% chunkSize, wz -% chunkSize));
neighbors.appendAssumeCapacity(try Chunk.init(allocator, tree, worldSeed, wx, wz +% chunkSize));
neighbors.appendAssumeCapacity(try Chunk.init(allocator, tree, worldSeed, wx, wz -% chunkSize));
}
} else if(wz & chunkSize != 0) {
neighbors.appendAssumeCapacity(try Chunk.init(allocator, tree, worldSeed, wx, wz +% chunkSize));
neighbors.appendAssumeCapacity(try Chunk.init(allocator, tree, worldSeed, wx, wz -% chunkSize));
}
var chunkLocalMaxBiomeRadius: f32 = 0;
var seed = random.initSeed2D(worldSeed, .{wx, wz});
var selectedBiomes: main.utils.SortedList(BiomePoint) = .{};
var rejections: usize = 0;
outer: while(rejections < 100) {
const x = random.nextFloat(&seed)*chunkSize + @intToFloat(f32, wx);
const y = random.nextFloat(&seed)*chunkSize + @intToFloat(f32, wz);
var biomeSeed: u64 = 562478564;
var drawnBiome = tree.getBiome(&biomeSeed, x, y);
if(!checkIfBiomeIsValid(x, y, drawnBiome.radius, selectedBiomes.items(), chunkLocalMaxBiomeRadius)) {
rejections += 1;
continue :outer;
}
for(neighbors.items) |otherChunk| {
if(!checkIfBiomeIsValid(x, y, drawnBiome.radius, otherChunk.biomesSortedByX, otherChunk.maxBiomeRadius)) {
rejections += 1;
continue :outer;
}
}
rejections = 0;
chunkLocalMaxBiomeRadius = @max(chunkLocalMaxBiomeRadius, drawnBiome.radius);
try selectedBiomes.insertSorted(allocator, .{.biome = drawnBiome, .pos = .{x, y}, .weight = 1.0/(drawnBiome.radius*drawnBiome.radius)});
}
const self = try allocator.create(Chunk);
self.* = .{
.wx = wx,
.wz = wz,
.biomesSortedByX = try selectedBiomes.toOwnedSlice(allocator),
.maxBiomeRadius = chunkLocalMaxBiomeRadius,
};
return self;
}
pub fn deinit(self: *Chunk, allocator: Allocator) void {
allocator.free(self.biomesSortedByX);
allocator.destroy(self);
}
};
const GenerationStructure = struct {
chunks: Array2D(*Chunk) = undefined, // Implemented as slices into the original array!
pub fn init(allocator: Allocator, wx: i32, wz: i32, width: u31, height: u31, tree: *TreeNode, worldSeed: u64) !GenerationStructure {
const self: GenerationStructure = .{
.chunks = try Array2D(*Chunk).init(allocator, 2 + @divExact(width, chunkSize), 2 + @divExact(height, chunkSize)),
};
for(0..self.chunks.width) |x| {
for(0..self.chunks.height) |z| {
self.chunks.ptr(x, z).* = try Chunk.init(allocator, tree, worldSeed, wx +% @intCast(i32, x*chunkSize) -% chunkSize, wz +% @intCast(i32, z*chunkSize) -% chunkSize);
}
}
return self;
}
pub fn deinit(self: GenerationStructure, allocator: Allocator) void {
for(self.chunks.mem) |chunk| {
chunk.deinit(allocator);
}
self.chunks.deinit(allocator);
}
fn findClosestBiomeTo(self: GenerationStructure, wx: i32, wz: i32, x: usize, z: usize) *const Biome {
const xf = @intToFloat(f32, wx +% @intCast(i32, x)*terrain.SurfaceMap.MapFragment.biomeSize);
const zf = @intToFloat(f32, wz +% @intCast(i32, z)*terrain.SurfaceMap.MapFragment.biomeSize);
var closestDist = std.math.floatMax(f32);
var closestBiome: *const Biome = undefined;
const cellX: i32 = @intCast(i32, x/(chunkSize/terrain.SurfaceMap.MapFragment.biomeSize));
const cellZ: i32 = @intCast(i32, z/(chunkSize/terrain.SurfaceMap.MapFragment.biomeSize));
// Note that at a small loss of detail we can assume that all BiomePoints are within ±1 chunk of the current one.
var dx: i32 = 0;
while(dx <= 2) : (dx += 1) {
const totalX = cellX + dx;
if(totalX < 0 or totalX >= self.chunks.width) continue;
var dz: i32 = 0;
while(dz <= 2) : (dz += 1) {
const totalZ = cellZ + dz;
if(totalZ < 0 or totalZ >= self.chunks.height) continue;
const chunk = self.chunks.get(@intCast(usize, totalX), @intCast(usize, totalZ));
const minX = xf - 3*chunk.maxBiomeRadius;
const maxX = xf + 3*chunk.maxBiomeRadius;
const list = chunk.biomesSortedByX[Chunk.getStartCoordinate(minX, chunk.biomesSortedByX)..];
for(list) |biomePoint| {
if(biomePoint.pos[0] >= maxX) break;
const dist = biomePoint.voronoiDistanceFunction(.{xf, zf});
if(dist < closestDist) {
closestDist = dist;
closestBiome = biomePoint.biome;
}
}
}
}
std.debug.assert(closestDist != std.math.floatMax(f32));
return closestBiome;
}
fn drawCircleOnTheMap(map: Array2D(*const Biome), biome: *const Biome, wx: i32, wz: i32, width: u31, height: u31, pos: Vec2f) void {
const relPos = (pos - vec.intToFloat(f32, Vec2i{wx, wz}))/@splat(2, @as(f32, terrain.SurfaceMap.MapFragment.biomeSize));
const relRadius = biome.radius/terrain.SurfaceMap.MapFragment.biomeSize;
const min = @floor(@max(Vec2f{0, 0}, relPos - @splat(2, relRadius)));
const max = @ceil(@min(vec.intToFloat(f32, Vec2i{width, height})/@splat(2, @as(f32, terrain.SurfaceMap.MapFragment.biomeSize)), relPos + @splat(2, relRadius)));
var x: f32 = min[0];
while(x < max[0]) : (x += 1) {
var z: f32 = min[1];
while(z < max[1]) : (z += 1) {
const distSquare = vec.lengthSquare(Vec2f{x, z} - relPos);
if(distSquare < relRadius*relRadius) {
map.set(@floatToInt(usize, x), @floatToInt(usize, z), biome);
}
}
}
}
fn addSubBiomesOf(biome: BiomePoint, map: Array2D(*const Biome), extraBiomes: *std.ArrayList(BiomePoint), wx: i32, wz: i32, width: u31, height: u31, worldSeed: u64) !void {
var seed = random.initSeed2D(worldSeed, @bitCast(vec.Vec2i, biome.pos));
var biomeCount: f32 = biome.biome.subBiomeTotalChance*2*random.nextFloat(&seed);
biomeCount = @min(biomeCount, biome.biome.maxSubBiomeCount);
var i: f32 = 0;
while(i < biomeCount) : (i += 1) {
const subBiome = biome.biome.subBiomes.sample(&seed).*;
var maxCenterOffset: f32 = biome.biome.radius - subBiome.radius - 32;
if(maxCenterOffset < 0) {
std.log.warn("SubBiome {s} of {s} is too big", .{subBiome.id, biome.biome.id});
maxCenterOffset = 0;
}
const point = biome.pos + random.nextPointInUnitCircle(&seed)*@splat(2, maxCenterOffset);
drawCircleOnTheMap(map, subBiome, wx, wz, width, height, point);
try extraBiomes.append(.{
.biome = subBiome,
.pos = point,
.weight = 1.0/(subBiome.radius*subBiome.radius)
});
}
}
pub fn toMap(self: GenerationStructure, allocator: Allocator, wx: i32, wz: i32, width: u31, height: u31, worldSeed: u64) !Array2D(*const Biome) {
var result = try Array2D(*const Biome).init(allocator, width/terrain.SurfaceMap.MapFragment.biomeSize, height/terrain.SurfaceMap.MapFragment.biomeSize);
for(0..width/terrain.SurfaceMap.MapFragment.biomeSize) |x| {
for(0..height/terrain.SurfaceMap.MapFragment.biomeSize) |z| {
result.set(x, z, self.findClosestBiomeTo(wx, wz, x, z));
}
}
// Add some sub-biomes:
var extraBiomes = std.ArrayList(BiomePoint).init(main.threadAllocator);
defer extraBiomes.deinit();
for(self.chunks.mem) |chunk| {
for(chunk.biomesSortedByX) |biome| {
try addSubBiomesOf(biome, result, &extraBiomes, wx, wz, width, height, worldSeed);
}
}
// Add some sub-sub(-sub)*-biomes
while(extraBiomes.popOrNull()) |biomePoint| {
try addSubBiomesOf(biomePoint, result, &extraBiomes, wx, wz, width, height, worldSeed);
}
return result;
}
};
pub fn generateMap(allocator: Allocator, wx: i32, wz: i32, width: u31, height: u31, worldSeed: u64) !Array2D(*const Biome) {
var seed: u64 = worldSeed;
var generator = try GenerationStructure.init(main.threadAllocator, wx, wz, width, height, terrain.biomes.byTypeBiomes, seed);
defer generator.deinit(main.threadAllocator);
return try generator.toMap(allocator, wx, wz, width, height, worldSeed);
}
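The weight set in Chunk.init, 1/radius², makes voronoiDistanceFunction scale-free: the weighted squared distance d²/r² reaches 1 exactly at the biome's radius, so a large biome claims proportionally more ground before the far-field penalty term applies. Worked numbers (illustrative, not from the commit):

// biome A: radius 64, point 32 blocks away → 32²/64² = 0.25
// biome B: radius 512, point 256 blocks away → 256²/512² = 0.25
// Equal scores, so biome borders scale with the biomes' radii.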

View File

@ -1,3 +1,3 @@
pub const PolarCircles = @import("PolarCircles.zig");
pub const RecursiveAttempt = @import("RecursiveAttempt.zig");

View File

@ -22,24 +22,71 @@ pub fn deinit() void {
}
/// Assumes the 4 points are at tᵢ = (-1, 0, 1, 2)
fn cubicInterpolationWeights(t: f32) [4]f32 {
const t2 = t*t;
const t3 = t*t2;
return [4]f32 { // Using the Lagrange polynomials:
-1.0/6.0*(t3 - 3*t2 + 2*t),
1.0/2.0*(t3 - 2*t2 - t + 2),
-1.0/2.0*(t3 - t2 - 2*t),
1.0/6.0*(t3 - t),
};
}
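// Sanity check (hypothetical test, not part of this commit): the four Lagrange
// weights always sum to 1; e.g. cubicInterpolationWeights(0.5) yields
// (-0.0625, 0.5625, 0.5625, -0.0625).
test "cubicInterpolationWeights sum to 1" {
const weights = cubicInterpolationWeights(0.5);
var sum: f32 = 0;
for(weights) |w| sum += w;
try std.testing.expectApproxEqAbs(@as(f32, 1), sum, 1e-5);
}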
pub fn generateMapFragment(map: *MapFragment, worldSeed: u64) Allocator.Error!void {
const scaledSize = MapFragment.mapSize;
const mapSize = scaledSize*map.pos.voxelSize;
const biomeSize = MapFragment.biomeSize;
const biomePositions = try terrain.ClimateMap.getBiomeMap(main.threadAllocator, map.pos.wx - biomeSize, map.pos.wz - biomeSize, mapSize + 3*biomeSize, mapSize + 3*biomeSize);
const offset = 8;
const biomePositions = try terrain.ClimateMap.getBiomeMap(main.threadAllocator, map.pos.wx - offset*biomeSize, map.pos.wz - offset*biomeSize, mapSize + 2*offset*biomeSize, mapSize + 2*offset*biomeSize);
defer biomePositions.deinit(main.threadAllocator);
const TerrainData = struct {
height: f32,
roughness: f32,
hills: f32,
mountains: f32,
};
const terrainData = try Array2D(TerrainData).init(main.threadAllocator, biomePositions.width, biomePositions.height);
defer terrainData.deinit(main.threadAllocator);
for(biomePositions.mem, terrainData.mem) |biomePoint, *terrainPoint| {
//var seed: u64 = biomePoint.seed ^ 54738964378901;
terrainPoint.* = .{
.height = @intToFloat(f32, biomePoint.biome.minHeight) + 0.5*@intToFloat(f32, biomePoint.biome.maxHeight - biomePoint.biome.minHeight), // TODO: Randomize
.roughness = biomePoint.biome.roughness,
.hills = biomePoint.biome.hills,
.mountains = biomePoint.biome.mountains,
};
}
for(0..0) |_| { // Smooth the biome heights.
for(1..biomePositions.width-1) |x| {
for(1..biomePositions.height-1) |z| {
var minHeight: f32 = std.math.floatMax(f32);
var maxHeight: f32 = -std.math.floatMax(f32);
for(0..3) |dx| {
for(0..3) |dz| {
minHeight = @min(minHeight, terrainData.get(x - 1 + dx, z - 1 + dz).height);
maxHeight = @max(maxHeight, terrainData.get(x - 1 + dx, z - 1 + dz).height);
}
}
var newHeight = (minHeight + maxHeight)/2;
newHeight = @min(newHeight, @intToFloat(f32, biomePositions.get(x, z).biome.maxHeight));
newHeight = @max(newHeight, @intToFloat(f32, biomePositions.get(x, z).biome.minHeight));
terrainData.ptr(x, z).height = newHeight;
}
}
}
var seed = worldSeed;
random.scrambleSeed(&seed);
seed = @bitCast(u32, (random.nextInt(i32, &seed) | 1)*%map.pos.wx ^ (random.nextInt(i32, &seed) | 1)*%map.pos.wz);
random.scrambleSeed(&seed);
const scaledBiomeSize = biomeSize/map.pos.voxelSize;
const xOffsetMap = try Array2D(f32).init(main.threadAllocator, scaledSize, scaledSize);
defer xOffsetMap.deinit(main.threadAllocator);
const zOffsetMap = try Array2D(f32).init(main.threadAllocator, scaledSize, scaledSize);
defer zOffsetMap.deinit(main.threadAllocator);
try FractalNoise.generateSparseFractalTerrain(map.pos.wx, map.pos.wz, biomeSize/2, worldSeed ^ 675396758496549, xOffsetMap, map.pos.voxelSize);
try FractalNoise.generateSparseFractalTerrain(map.pos.wx, map.pos.wz, biomeSize/2, worldSeed ^ 543864367373859, zOffsetMap, map.pos.voxelSize);
try FractalNoise.generateSparseFractalTerrain(map.pos.wx, map.pos.wz, biomeSize*4, worldSeed ^ 675396758496549, xOffsetMap, map.pos.voxelSize);
try FractalNoise.generateSparseFractalTerrain(map.pos.wx, map.pos.wz, biomeSize*4, worldSeed ^ 543864367373859, zOffsetMap, map.pos.voxelSize);
// A ridged noise map to generate interesting mountains.
const mountainMap = try Array2D(f32).init(main.threadAllocator, scaledSize, scaledSize);
@ -58,66 +105,34 @@ pub fn generateMapFragment(map: *MapFragment, worldSeed: u64) Allocator.Error!vo
for(0..map.heightMap.len) |x| {
for(0..map.heightMap.len) |z| {
// Do the biome interpolation:
var totalWeight: f32 = 0;
var height: f32 = 0;
var roughness: f32 = 0;
var hills: f32 = 0;
var mountains: f32 = 0;
var xBiome = (x + scaledBiomeSize/2)/scaledBiomeSize;
var zBiome = (z + scaledBiomeSize/2)/scaledBiomeSize;
const wx = @intCast(i32, x)*map.pos.voxelSize + map.pos.wx;
const wz = @intCast(i32, z)*map.pos.voxelSize + map.pos.wz;
var hasOneWithMaxNormLT1 = false;
var x0 = xBiome;
while(x0 <= xBiome + 2) : (x0 += 1) {
var z0 = zBiome;
while(z0 <= zBiome + 2) : (z0 += 1) {
const biomePoint = biomePositions.get(x0, z0);
var dist = @sqrt(biomePoint.distSquare(@intToFloat(f32, wx), @intToFloat(f32, wz)));
dist /= @intToFloat(f32, biomeSize);
const maxNorm = biomePoint.maxNorm(@intToFloat(f32, wx), @intToFloat(f32, wz))/@intToFloat(f32, biomeSize);
if(maxNorm < 1) hasOneWithMaxNormLT1 = true;
// There are cases where this point is further away than 1 unit from all nearby biomes. For that case the Euclidean distance function is interpolated to the max-norm for higher distances.
if(dist > 0.9 and maxNorm < 1) {
if(dist < 1) { // interpolate to the maxNorm:
dist = (1 - dist)/(1 - 0.9)*dist + (dist - 0.9)/(1 - 0.9)*maxNorm;
} else {
dist = maxNorm;
}
std.debug.assert(dist < 1);
}
if(dist <= 1) {
var weight = 1 - dist;
// smooth the interpolation with the s-curve:
weight = weight*weight*(3 - 2*weight);
height += biomePoint.height*weight;
roughness += biomePoint.biome.roughness*weight;
hills += biomePoint.biome.hills*weight;
mountains += biomePoint.biome.mountains*weight;
totalWeight += weight;
}
var updatedX = @intToFloat(f32, wx) + (xOffsetMap.get(x, z) - 0.5)*biomeSize*4;
var updatedZ = @intToFloat(f32, wz) + (zOffsetMap.get(x, z) - 0.5)*biomeSize*4;
var xBiome = @floatToInt(i32, @floor((updatedX - @intToFloat(f32, map.pos.wx))/@intToFloat(f32, biomeSize)));
var zBiome = @floatToInt(i32, @floor((updatedZ - @intToFloat(f32, map.pos.wz))/@intToFloat(f32, biomeSize)));
var relXBiome = (0.5 + updatedX - @intToFloat(f32, map.pos.wx +% xBiome*biomeSize))/@intToFloat(f32, biomeSize);
xBiome += offset;
var relZBiome = (0.5 + updatedZ - @intToFloat(f32, map.pos.wz +% zBiome*biomeSize))/@intToFloat(f32, biomeSize);
zBiome += offset;
const coefficientsX = cubicInterpolationWeights(relXBiome);
const coefficientsZ = cubicInterpolationWeights(relZBiome);
for(0..4) |dx| {
for(0..4) |dz| {
const biomeMapX = @intCast(usize, xBiome) + dx - 1;
const biomeMapZ = @intCast(usize, zBiome) + dz - 1;
const weight = coefficientsX[dx]*coefficientsZ[dz];
const terrainPoint = terrainData.get(biomeMapX, biomeMapZ);
height += terrainPoint.height*weight;
roughness += terrainPoint.roughness*weight;
hills += terrainPoint.hills*weight;
mountains += terrainPoint.mountains*weight;
}
}
if(!hasOneWithMaxNormLT1) {
x0 = xBiome;
while(x0 <= xBiome + 2) : (x0 += 1) {
var z0 = zBiome;
while(z0 <= zBiome + 2) : (z0 += 1) {
const biomePoint = biomePositions.get(x0, z0);
var dist = @sqrt(biomePoint.distSquare(@intToFloat(f32, wx), @intToFloat(f32, wz)));
dist /= @intToFloat(f32, biomeSize);
const maxNorm = biomePoint.maxNorm(@intToFloat(f32, wx), @intToFloat(f32, wz))/@intToFloat(f32, biomeSize);
std.log.info("{}, {} | {}, {} : {} {}", .{biomePoint.x, biomePoint.z, wx, wz, dist, maxNorm});
}
}
}
// Norm the result:
std.debug.assert(hasOneWithMaxNormLT1);
std.debug.assert(totalWeight != 0);
height /= totalWeight;
roughness /= totalWeight;
hills /= totalWeight;
mountains /= totalWeight;
height += (roughMap.get(x, z) - 0.5)*2*roughness;
height += (hillMap.get(x, z) - 0.5)*2*hills;
height += (mountainMap.get(x, z) - 0.5)*2*mountains;
@ -127,21 +142,23 @@ pub fn generateMapFragment(map: *MapFragment, worldSeed: u64) Allocator.Error!vo
map.maxHeight = @max(map.maxHeight, @floatToInt(i32, height));
// Select a biome. The shape of the biome is randomized by applying noise (fractal noise and white noise) to the coordinates.
const updatedX = @intToFloat(f32, wx) + (@intToFloat(f32, random.nextInt(u3, &seed)) - 3.5)*@intToFloat(f32, biomeSize)/128 + (xOffsetMap.get(x, z) - 0.5)*biomeSize/2;
const updatedZ = @intToFloat(f32, wz) + (@intToFloat(f32, random.nextInt(u3, &seed)) - 3.5)*@intToFloat(f32, biomeSize)/128 + (zOffsetMap.get(x, z) - 0.5)*biomeSize/2;
xBiome = @floatToInt(usize, ((updatedX - @intToFloat(f32, map.pos.wx))/@intToFloat(f32, map.pos.voxelSize) + @intToFloat(f32, scaledBiomeSize/2))/@intToFloat(f32, scaledBiomeSize));
zBiome = @floatToInt(usize, ((updatedZ - @intToFloat(f32, map.pos.wz))/@intToFloat(f32, map.pos.voxelSize) + @intToFloat(f32, scaledBiomeSize/2))/@intToFloat(f32, scaledBiomeSize));
// Select a biome. Also adding some white noise to make a smoother transition.
updatedX += (@intToFloat(f32, random.nextInt(u3, &seed)) - 3.5)*@intToFloat(f32, biomeSize)/128;
updatedZ += (@intToFloat(f32, random.nextInt(u3, &seed)) - 3.5)*@intToFloat(f32, biomeSize)/128;
xBiome = @floatToInt(i32, @round((updatedX - @intToFloat(f32, map.pos.wx))/@intToFloat(f32, biomeSize)));
xBiome += offset;
zBiome = @floatToInt(i32, @round((updatedZ - @intToFloat(f32, map.pos.wz))/@intToFloat(f32, biomeSize)));
zBiome += offset;
var shortestDist: f32 = std.math.floatMax(f32);
var shortestBiomePoint: terrain.ClimateMap.BiomePoint = undefined;
x0 = xBiome;
var x0 = xBiome;
while(x0 <= xBiome + 2) : (x0 += 1) {
var z0 = zBiome;
while(z0 <= zBiome + 2) : (z0 += 1) {
const distSquare = biomePositions.get(x0, z0).distSquare(updatedX, updatedZ);
const distSquare = biomePositions.get(@intCast(usize, xBiome), @intCast(usize, zBiome)).distSquare(updatedX, updatedZ);
if(distSquare < shortestDist) {
shortestDist = distSquare;
shortestBiomePoint = biomePositions.get(x0, z0);
shortestBiomePoint = biomePositions.get(@intCast(usize, xBiome), @intCast(usize, zBiome));
}
}
}
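A quick worked example of the new grid mapping (illustrative numbers): with biomeSize = 32 and offset = 8, a point whose distorted x coordinate lies 40 blocks east of map.pos.wx lands in cell floor(40/32) + 8 = 9 of the padded biome map; the 8-cell padding requested from getBiomeMap leaves room for the up to ±2·biomeSize coordinate distortion plus the 4-cell bicubic stencil without going out of bounds.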

View File

@ -0,0 +1,100 @@
const std = @import("std");
const main = @import("root");
const random = main.random;
fn getSeedX(x: f32, worldSeed: u64) u64 {
var seed: u64 = worldSeed ^ @as(u64, 54275629861)*%@bitCast(u32, @floatToInt(i32, x));
return seed;
}
fn getSeedY(x: f32, worldSeed: u64) u64 {
var seed: u64 = worldSeed ^ @as(u64, 5478938690717)*%@bitCast(u32, @floatToInt(i32, x));
return seed;
}
fn getGridValue1D(x: f32, worldSeed: u64) f32 {
var seed: u64 = getSeedX(x, worldSeed);
return random.nextFloat(&seed);
}
fn samplePoint1D(_x: f32, lineSeed: u64) f32 {
var seed = lineSeed;
const x = _x + 0.0001*random.nextFloat(&seed); // random offset
const start = @floor(x);
const interp = x - start;
return (1 - interp)*getGridValue1D(start, lineSeed) + interp*getGridValue1D(start + 1, lineSeed);
}
/// The result will be between 0 and 1.
pub fn samplePoint2D(x: f32, _y: f32, worldSeed: u64) f32 {
var seed = worldSeed;
const y = _y + random.nextFloat(&seed); // random offset
const lineSeed = random.nextInt(u64, &seed);
const start = @floor(y);
const interp = y - start;
const lower = samplePoint1D(x, getSeedY(start, lineSeed));
const upper = samplePoint1D(x, getSeedY(start+1, lineSeed));
return (1 - interp)*lower + interp*upper;
}
const percentileTable = [_]f32 {0.0e+00, 9.15669277e-02, 1.18274688e-01, 1.37655034e-01, 1.53483346e-01, 1.67139247e-01, 1.79302796e-01, 1.90366283e-01, 2.00579166e-01, 2.10111454e-01, 2.19084709e-01, 2.27589413e-01, 2.35694572e-01, 2.43454873e-01, 2.50914007e-01, 2.58107364e-01, 2.65064746e-01, 2.71810621e-01, 2.78366297e-01, 2.84749507e-01, 2.90976017e-01, 2.97059237e-01, 3.03011208e-01, 3.08842420e-01, 3.14562231e-01, 3.20178955e-01, 3.25700223e-01, 3.31132620e-01, 3.36482465e-01, 3.41755270e-01, 3.46956104e-01, 3.52089852e-01, 3.57160568e-01, 3.62172454e-01, 3.67129117e-01, 3.72033983e-01, 3.76890212e-01, 3.81700843e-01, 3.86468648e-01, 3.91196310e-01, 3.95886212e-01, 4.00540769e-01, 4.05162155e-01, 4.09752458e-01, 4.14313703e-01, 4.18847769e-01, 4.23356503e-01, 4.27841603e-01, 4.32304769e-01, 4.36747610e-01, 4.41171675e-01, 4.45578455e-01, 4.49969410e-01, 4.54345911e-01, 4.58709388e-01, 4.63061153e-01, 4.67402517e-01, 4.71734791e-01, 4.76059168e-01, 4.80376929e-01, 4.84689295e-01, 4.88997489e-01, 4.93302702e-01, 4.97606158e-01, 5.01908957e-01, 5.06212413e-01, 5.10517597e-01, 5.14825820e-01, 5.19138216e-01, 5.23455977e-01, 5.27780354e-01, 5.32112598e-01, 5.36453962e-01, 5.40805697e-01, 5.45169174e-01, 5.49545705e-01, 5.53936660e-01, 5.58343470e-01, 5.62767505e-01, 5.67210376e-01, 5.71673512e-01, 5.76158583e-01, 5.80667376e-01, 5.85201442e-01, 5.89762687e-01, 5.94352960e-01, 5.98974347e-01, 6.03628933e-01, 6.08318805e-01, 6.13046467e-01, 6.17814302e-01, 6.22624933e-01, 6.27481162e-01, 6.32386028e-01, 6.37342691e-01, 6.42354607e-01, 6.47425293e-01, 6.52559041e-01, 6.57759904e-01, 6.63032710e-01, 6.68382585e-01, 6.73814952e-01, 6.79336249e-01, 6.84952974e-01, 6.90672814e-01, 6.96504056e-01, 7.02455997e-01, 7.08539247e-01, 7.14765727e-01, 7.21148967e-01, 7.27704644e-01, 7.34450578e-01, 7.41407930e-01, 7.48601317e-01, 7.56060481e-01, 7.63820827e-01, 7.71925985e-01, 7.80430734e-01, 7.89404034e-01, 7.98936367e-01, 8.09149265e-01, 8.20212841e-01, 8.32376480e-01, 8.46032440e-01, 8.61860930e-01, 8.81241500e-01, 9.07949805e-01, 1.0};
fn preGeneratePercentileTable() !void {
const randomNumbers = 2048;
const positions = 2048;
const totalValues = randomNumbers*positions;
const values = randomNumbers;
var amount1D: [values+1] u128 = undefined;
@memset(&amount1D, 0);
for(0..randomNumbers+1) |a| {
for(0..randomNumbers+1) |b| {
for(0..positions+1) |x| {
const val = x*a + (positions - x)*b;
amount1D[(val*values)/totalValues] += 1;
}
}
}
var amount2D: [values+1] u128 = undefined;
@memset(&amount2D, 0);
for(0..randomNumbers+1) |a| {
for(0..randomNumbers+1) |b| {
for(0..positions+1) |x| {
const val = x*a + (positions - x)*b;
amount2D[(val*values)/totalValues] += amount1D[a]*amount1D[b];
}
}
}
var samples: u128 = 0;
for(&amount2D) |val| {
samples = try std.math.add(u128, samples, val);
}
std.log.info("{}", .{samples});
var percentiles: [128] f32 = undefined;
var current: u128 = 0;
var i: usize = 0;
for(&percentiles, 0..) |*_percentile, j| {
const goal = j*samples/(percentiles.len-1);
while(current + amount2D[i] < goal) {
current += amount2D[i];
i += 1;
}
const diff = goal - current;
_percentile.* = (@intToFloat(f32, i) + @intToFloat(f32, diff)/@intToFloat(f32, amount2D[i]))/2048;
}
for(&percentiles) |_percentile| {
std.log.info("{}", .{_percentile});
}
}
pub fn percentile(ratio: f32) f32 {
std.debug.assert(ratio >= 0);
const scaledToList = ratio*@intToFloat(f32, percentileTable.len);
const index = @floatToInt(u32, scaledToList);
if(index >= percentileTable.len-1) return 1;
const offset = (scaledToList - @intToFloat(f32, index));
return (1 - offset)*percentileTable[index] + offset*percentileTable[index + 1];
}
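percentile acts as the inverse CDF of this noise: percentile(p) returns the threshold that a fraction p of all samplePoint2D values fall below, which is how TreeNode.init converts branch probabilities into noise borders. A hedged sketch (not code from this commit):

// const lowerBorder = percentile(0.3);
// samplePoint2D(x, y, seed) < lowerBorder for roughly 30% of sample points,
// so about 30% of the map area is routed to the corresponding child branch.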

View File

@ -12,10 +12,10 @@ pub const FractalNoise = @import("FractalNoise.zig");
/// This generates a rough terrain with some cliffs.
pub const RandomlyWeightedFractalNoise = @import("RandomlyWeightedFractalNoise.zig");
/// The same as fractal noise, but uses random weights during the interpolation phase.
/// This generates a rough terrain with some cliffs.
pub const PerlinNoise = @import("PerlinNoise.zig");
/// Blue noise (aka Poisson Disk Sampling) is a pattern that ensures that all points have a minimum distance to their neighbors.
/// This contains a static blue noise pattern that is calculated once and then used everywhere around the world. Because it is so big, the player will never notice issues.
pub const BlueNoise = @import("BlueNoise.zig");
pub const BlueNoise = @import("BlueNoise.zig");
pub const PerlinNoise = @import("PerlinNoise.zig");
pub const ValueNoise = @import("ValueNoise.zig");

View File

@ -175,7 +175,7 @@ const ChunkManager = struct {
// TODO: Store chunk.
}
/// Generates a normal chunk at a given location, or if possible gets it from the cache.
fn getOrGenerateChunk(pos: ChunkPosition) !*Chunk {
pub fn getOrGenerateChunk(pos: ChunkPosition) !*Chunk {
return try chunkCache.findOrCreate(pos, chunkInitFunctionForCache);
}

View File

@ -84,8 +84,110 @@ pub const Compression = struct {
}
};
/// Implementation of https://en.wikipedia.org/wiki/Alias_method
pub fn AliasTable(comptime T: type) type {
return struct {
const AliasData = struct {
chance: u16,
alias: u16,
};
items: []T,
aliasData: []AliasData,
ownsSlice: bool = false,
fn initAliasData(self: *@This(), totalChance: f32, currentChances: []f32) void {
const desiredChance = totalChance/@intToFloat(f32, self.aliasData.len);
var lastOverfullIndex: u16 = 0;
var lastUnderfullIndex: u16 = 0;
outer: while(true) {
while(currentChances[lastOverfullIndex] <= desiredChance) {
lastOverfullIndex += 1;
if(lastOverfullIndex == self.items.len)
break :outer;
}
while(currentChances[lastUnderfullIndex] >= desiredChance) {
lastUnderfullIndex += 1;
if(lastUnderfullIndex == self.items.len)
break :outer;
}
const delta = desiredChance - currentChances[lastUnderfullIndex];
currentChances[lastUnderfullIndex] = desiredChance;
currentChances[lastOverfullIndex] -= delta;
self.aliasData[lastUnderfullIndex] = .{
.alias = lastOverfullIndex,
.chance = @floatToInt(u16, delta/desiredChance*std.math.maxInt(u16)),
};
if (currentChances[lastOverfullIndex] < desiredChance) {
lastUnderfullIndex = @min(lastUnderfullIndex, lastOverfullIndex);
}
}
}
pub fn init(allocator: Allocator, items: []T) !@This() {
var self: @This() = .{
.items = items,
.aliasData = try allocator.alloc(AliasData, items.len),
};
if(items.len == 0) return self;
@memset(self.aliasData, AliasData{.chance = 0, .alias = 0});
const currentChances = try main.threadAllocator.alloc(f32, items.len);
defer main.threadAllocator.free(currentChances);
var totalChance: f32 = 0;
for(items, 0..) |*item, i| {
totalChance += item.chance;
currentChances[i] = item.chance;
}
self.initAliasData(totalChance, currentChances);
return self;
}
pub fn initFromContext(allocator: Allocator, slice: anytype) !@This() {
var items = try allocator.alloc(T, slice.len);
for(slice, items) |context, *result| {
result.* = context.getItem();
}
var self: @This() = .{
.items = items,
.aliasData = try allocator.alloc(AliasData, items.len),
.ownsSlice = true,
};
if(items.len == 0) return self;
@memset(self.aliasData, AliasData{.chance = 0, .alias = 0});
const currentChances = try main.threadAllocator.alloc(f32, items.len);
defer main.threadAllocator.free(currentChances);
var totalChance: f32 = 0;
for(slice, 0..) |context, i| {
totalChance += context.chance;
currentChances[i] = context.chance;
}
self.initAliasData(totalChance, currentChances);
return self;
}
pub fn deinit(self: *const @This(), allocator: Allocator) void {
allocator.free(self.aliasData);
if(self.ownsSlice) {
allocator.free(self.items);
}
}
pub fn sample(self: *const @This(), seed: *u64) *T {
const initialIndex = main.random.nextIntBounded(u16, seed, @intCast(u16, self.items.len));
if(main.random.nextInt(u16, seed) < self.aliasData[initialIndex].chance) {
return &self.items[self.aliasData[initialIndex].alias];
}
return &self.items[initialIndex];
}
};
}
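// Hypothetical usage sketch (not part of this commit): after construction,
// sample() draws each item with probability proportional to its `chance`
// field, in O(1) per draw:
// var table = try AliasTable(Biome).init(allocator, biomeSlice);
// defer table.deinit(allocator);
// var seed: u64 = 42;
// const picked = table.sample(&seed); // *Biome, weighted by .chance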
/// A list that allows choosing randomly from the contained objects, if they have a chance assigned to them.
/// TODO: Use O(1) sampling: https://en.wikipedia.org/wiki/Alias_method
/// TODO: Is this still needed, now that the alias table exists?
pub fn RandomList(comptime T: type) type {
return struct {
const Self = @This();
@ -138,6 +240,55 @@ pub fn RandomList(comptime T: type) type {
};
}
/// A list that is always sorted in ascending order based on T.lessThan(lhs, rhs).
pub fn SortedList(comptime T: type) type {
return struct {
const Self = @This();
ptr: [*]T = undefined,
len: u32 = 0,
capacity: u32 = 0,
pub fn deinit(self: Self, allocator: Allocator) void {
allocator.free(self.ptr[0..self.capacity]);
}
pub fn items(self: Self) []T {
return self.ptr[0..self.len];
}
fn increaseCapacity(self: *Self, allocator: Allocator) !void {
const newSize = 8 + self.capacity*3/2;
const newSlice = try allocator.realloc(self.ptr[0..self.capacity], newSize);
self.capacity = @intCast(u32, newSlice.len);
self.ptr = newSlice.ptr;
}
pub fn insertSorted(self: *Self, allocator: Allocator, object: T) !void {
if(self.len == self.capacity) {
try self.increaseCapacity(allocator);
}
var i = self.len;
while(i != 0) { // Find the point to insert and move the rest out of the way.
if(object.lessThan(self.ptr[i - 1])) {
self.ptr[i] = self.ptr[i - 1];
} else {
break;
}
i -= 1;
}
self.len += 1;
self.ptr[i] = object;
}
pub fn toOwnedSlice(self: *Self, allocator: Allocator) ![]T {
const output = try allocator.realloc(self.ptr[0..self.capacity], self.len);
self.* = .{};
return output;
}
};
}
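// Hypothetical usage sketch (not part of this commit): insertSorted keeps the
// list ordered by T.lessThan, which is what lets Chunk.getStartCoordinate
// binary-search BiomePoints by their x coordinate:
// var list: SortedList(BiomePoint) = .{};
// try list.insertSorted(allocator, point); // larger elements shift right
// const sorted = try list.toOwnedSlice(allocator); // ascending order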
pub fn Array2D(comptime T: type) type {
return struct {
const Self = @This();