mirror of https://github.com/PolyhedralDev/Terra.git
synced 2026-02-16 02:20:57 +00:00
@@ -74,5 +74,6 @@ object Versions {
    object CLI {
        const val nbt = "6.1"
        const val logback = "1.5.8"
+       const val picocli = "4.7.6"
    }
}
@@ -1,5 +1,7 @@
package com.dfsek.terra.addons.biome.pipeline.v2;

+import com.dfsek.terra.api.util.cache.SeededVector2Key;
+
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.LoadingCache;

@@ -11,7 +13,6 @@ import java.util.stream.StreamSupport;

import com.dfsek.terra.addons.biome.pipeline.v2.api.BiomeChunk;
import com.dfsek.terra.addons.biome.pipeline.v2.api.Pipeline;
-import com.dfsek.terra.addons.biome.pipeline.v2.api.SeededVector;
import com.dfsek.terra.addons.biome.pipeline.v2.api.Stage;
import com.dfsek.terra.addons.biome.pipeline.v2.api.biome.PipelineBiome;
import com.dfsek.terra.api.noise.NoiseSampler;

@@ -23,7 +24,7 @@ import com.dfsek.terra.api.world.biome.generation.BiomeProvider;

public class PipelineBiomeProvider implements BiomeProvider {

-    private final LoadingCache<SeededVector, BiomeChunk> biomeChunkCache;
+    private final LoadingCache<SeededVector2Key, BiomeChunk> biomeChunkCache;
    private final int chunkSize;
    private final int resolution;
    private final NoiseSampler mutator;

@@ -90,7 +91,7 @@ public class PipelineBiomeProvider implements BiomeProvider {
        int xInChunk = x - chunkWorldX;
        int zInChunk = z - chunkWorldZ;

-        return biomeChunkCache.get(new SeededVector(seed, chunkWorldX, chunkWorldZ)).get(xInChunk, zInChunk).getBiome();
+        return biomeChunkCache.get(new SeededVector2Key(chunkWorldX, chunkWorldZ, seed)).get(xInChunk, zInChunk).getBiome();
    }

    @Override
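The provider above snaps a world coordinate to its chunk origin and then looks the whole chunk up in a Caffeine LoadingCache keyed by origin plus seed. A minimal, self-contained sketch of that lookup pattern follows; ChunkKey, chunkSize and the int[] payload are stand-ins invented for the sketch, not Terra types.

import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.LoadingCache;

public class ChunkCacheSketch {
    // immutable key: one cached entry per (chunk origin, seed)
    record ChunkKey(int chunkX, int chunkZ, long seed) { }

    private final int chunkSize = 16; // assumption made for the sketch

    private final LoadingCache<ChunkKey, int[]> chunkCache = Caffeine.newBuilder()
        .maximumSize(256)
        .build(key -> new int[chunkSize * chunkSize]); // stand-in for generating a BiomeChunk

    public int valueAt(int x, int z, long seed) {
        int chunkWorldX = Math.floorDiv(x, chunkSize) * chunkSize; // chunk origin in world space
        int chunkWorldZ = Math.floorDiv(z, chunkSize) * chunkSize;
        int[] chunk = chunkCache.get(new ChunkKey(chunkWorldX, chunkWorldZ, seed));
        return chunk[(x - chunkWorldX) * chunkSize + (z - chunkWorldZ)]; // index inside the chunk
    }
}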
@@ -1,10 +1,12 @@
package com.dfsek.terra.addons.biome.pipeline.v2.api;

+import com.dfsek.terra.api.util.cache.SeededVector2Key;
+
import java.util.List;


public interface Pipeline {
-    BiomeChunk generateChunk(SeededVector worldCoordinates);
+    BiomeChunk generateChunk(SeededVector2Key worldCoordinates);

    int getChunkSize();
@@ -1,19 +0,0 @@
-package com.dfsek.terra.addons.biome.pipeline.v2.api;
-
-public record SeededVector(long seed, int x, int z) {
-
-    @Override
-    public boolean equals(Object obj) {
-        if(obj instanceof SeededVector that) {
-            return this.z == that.z && this.x == that.x && this.seed == that.seed;
-        }
-        return false;
-    }
-
-    @Override
-    public int hashCode() {
-        int code = x;
-        code = 31 * code + z;
-        return 31 * code + ((int) (seed ^ (seed >>> 32)));
-    }
-}
@@ -4,20 +4,20 @@ import java.util.List;

import com.dfsek.terra.addons.biome.pipeline.v2.api.BiomeChunk;
import com.dfsek.terra.addons.biome.pipeline.v2.api.Expander;
-import com.dfsek.terra.addons.biome.pipeline.v2.api.SeededVector;
import com.dfsek.terra.addons.biome.pipeline.v2.api.Stage;
import com.dfsek.terra.addons.biome.pipeline.v2.api.biome.PipelineBiome;
+import com.dfsek.terra.api.util.cache.SeededVector2Key;


public class BiomeChunkImpl implements BiomeChunk {

-    private final SeededVector worldOrigin;
+    private final SeededVector2Key worldOrigin;
    private final int chunkOriginArrayIndex;
    private final int worldCoordinateScale;
    private final int size;
    private PipelineBiome[] biomes;

-    public BiomeChunkImpl(SeededVector worldOrigin, PipelineImpl pipeline) {
+    public BiomeChunkImpl(SeededVector2Key worldOrigin, PipelineImpl pipeline) {
        this.worldOrigin = worldOrigin;
        this.chunkOriginArrayIndex = pipeline.getChunkOriginArrayIndex();

@@ -44,7 +44,7 @@ public class BiomeChunkImpl implements BiomeChunk {
            for(int gridZ = 0; gridZ < gridSize; gridZ++) {
                int xIndex = gridOrigin + gridX * gridInterval;
                int zIndex = gridOrigin + gridZ * gridInterval;
-                biomes[(xIndex * size) + zIndex] = pipeline.getSource().get(worldOrigin.seed(), xIndexToWorldCoordinate(xIndex),
+                biomes[(xIndex * size) + zIndex] = pipeline.getSource().get(worldOrigin.seed, xIndexToWorldCoordinate(xIndex),
                                                                             zIndexToWorldCoordinate(zIndex));
            }
        }

@@ -139,14 +139,14 @@ public class BiomeChunkImpl implements BiomeChunk {
    }

    private int xIndexToWorldCoordinate(int xIndex) {
-        return (worldOrigin.x() + xIndex - chunkOriginArrayIndex) * worldCoordinateScale;
+        return (worldOrigin.x + xIndex - chunkOriginArrayIndex) * worldCoordinateScale;
    }

    private int zIndexToWorldCoordinate(int zIndex) {
-        return (worldOrigin.z() + zIndex - chunkOriginArrayIndex) * worldCoordinateScale;
+        return (worldOrigin.z + zIndex - chunkOriginArrayIndex) * worldCoordinateScale;
    }

-    private SeededVector getOrigin() {
+    private SeededVector2Key getOrigin() {
        return worldOrigin;
    }

@@ -216,7 +216,7 @@ public class BiomeChunkImpl implements BiomeChunk {
        }

        public long worldSeed() {
-            return chunk.getOrigin().seed();
+            return chunk.getOrigin().seed;
        }
    }
}
@@ -1,5 +1,7 @@
package com.dfsek.terra.addons.biome.pipeline.v2.pipeline;

+import com.dfsek.terra.api.util.cache.SeededVector2Key;
+
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -8,7 +10,6 @@ import java.util.List;
import com.dfsek.terra.addons.biome.pipeline.v2.api.BiomeChunk;
import com.dfsek.terra.addons.biome.pipeline.v2.api.Expander;
import com.dfsek.terra.addons.biome.pipeline.v2.api.Pipeline;
-import com.dfsek.terra.addons.biome.pipeline.v2.api.SeededVector;
import com.dfsek.terra.addons.biome.pipeline.v2.api.Source;
import com.dfsek.terra.addons.biome.pipeline.v2.api.Stage;

@@ -55,7 +56,7 @@ public class PipelineImpl implements Pipeline {
    }

    @Override
-    public BiomeChunk generateChunk(SeededVector worldCoordinates) {
+    public BiomeChunk generateChunk(SeededVector2Key worldCoordinates) {
        return new BiomeChunkImpl(worldCoordinates, this);
    }
@@ -151,7 +151,7 @@ public class NoiseAddon implements AddonInitializer {
        noiseRegistry.register(addon.key("MAX"), () -> new BinaryArithmeticTemplate<>(MaxSampler::new));
        noiseRegistry.register(addon.key("MIN"), () -> new BinaryArithmeticTemplate<>(MinSampler::new));

-        noiseRegistry.register(addon.key("CACHE"), () -> new CacheSamplerTemplate(plugin.getGenerationThreads()));
+        noiseRegistry.register(addon.key("CACHE"), CacheSamplerTemplate::new);

        Map<String, DimensionApplicableNoiseSampler> packSamplers = new LinkedHashMap<>();
@@ -8,19 +8,17 @@ import com.dfsek.terra.addons.noise.samplers.LinearHeightmapSampler;
import com.dfsek.terra.api.noise.NoiseSampler;


-public class CacheSamplerTemplate extends SamplerTemplate<LinearHeightmapSampler> {
+public class CacheSamplerTemplate extends SamplerTemplate<CacheSampler> {
    @Value("sampler")
    @Default
    private NoiseSampler sampler;

-    private final int generationThreads;
+    public CacheSamplerTemplate() {

-    public CacheSamplerTemplate(int generationThreads) {
-        this.generationThreads = generationThreads;
    }

    @Override
    public NoiseSampler get() {
-        return new CacheSampler(sampler, getDimensions(), generationThreads);
+        return new CacheSampler(sampler, getDimensions());
    }
}
@@ -1,125 +1,74 @@
package com.dfsek.terra.addons.noise.samplers;

import com.dfsek.terra.api.noise.DerivativeNoiseSampler;
import com.dfsek.terra.api.noise.NoiseSampler;

+import com.dfsek.terra.api.util.cache.DoubleSeededVector2Key;
+import com.dfsek.terra.api.util.cache.DoubleSeededVector3Key;
+import com.dfsek.terra.api.util.generic.pair.Pair;
+
+import com.dfsek.terra.api.util.generic.pair.Pair.Mutable;
+
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.LoadingCache;
+import com.github.benmanes.caffeine.cache.Scheduler;
+
+import static com.dfsek.terra.api.util.cache.CacheUtils.CACHE_EXECUTOR;


-public class CacheSampler implements DerivativeNoiseSampler {
+public class CacheSampler implements NoiseSampler {

    private final NoiseSampler sampler;
-    private final LoadingCache<DoubleSeededVector2, Double> cache2D;
-    private final LoadingCache<DoubleSeededVector3, Double> cache3D;
-    private final LoadingCache<DoubleSeededVector2, double[]> cache2DDirv;
-    private final LoadingCache<DoubleSeededVector3, double[]> cache3DDirv;
+    private final ThreadLocal<Mutable<DoubleSeededVector2Key, LoadingCache<DoubleSeededVector2Key, Double>>> cache2D;
+    private final ThreadLocal<Mutable<DoubleSeededVector3Key, LoadingCache<DoubleSeededVector3Key, Double>>> cache3D;

-    public CacheSampler(NoiseSampler sampler, int dimensions, int generationThreads) {
+    public CacheSampler(NoiseSampler sampler, int dimensions) {
        this.sampler = sampler;
        if (dimensions == 2) {
-            this.cache2D = Caffeine
+            LoadingCache<DoubleSeededVector2Key, Double> cache = Caffeine
                .newBuilder()
-                .initialCapacity(0)
-                .maximumSize(256L * generationThreads) // 1 full chunk (high res)
-                .build(vec -> sampler.noise(vec.seed, vec.x, vec.z));
-            cache3D = null;
-            cache3DDirv = null;
-            if (DerivativeNoiseSampler.isDifferentiable(sampler)) {
-                this.cache2DDirv = Caffeine
-                    .newBuilder()
-                    .initialCapacity(0)
-                    .maximumSize(256L * generationThreads) // 1 full chunk (high res)
-                    .build(vec -> ((DerivativeNoiseSampler) sampler).noised(vec.seed, vec.x, vec.z));
-            } else {
-                cache2DDirv = null;
-            }
+                .executor(CACHE_EXECUTOR)
+                .scheduler(Scheduler.systemScheduler())
+                .initialCapacity(256)
+                .maximumSize(256)
+                .build(this::sampleNoise);
+            this.cache2D = ThreadLocal.withInitial(() -> Pair.of(new DoubleSeededVector2Key(0, 0, 0), cache).mutable());
+            this.cache3D = null;
        } else {
-            this.cache3D = Caffeine
+            LoadingCache<DoubleSeededVector3Key, Double> cache = Caffeine
                .newBuilder()
-                .initialCapacity(0)
-                .maximumSize(256L * generationThreads) // 1 full chunk (high res)
-                .build(vec -> sampler.noise(vec.seed, vec.x, vec.y, vec.z));
-            cache2D = null;
-            cache2DDirv = null;
-            if (DerivativeNoiseSampler.isDifferentiable(sampler)) {
-                this.cache3DDirv = Caffeine
-                    .newBuilder()
-                    .initialCapacity(0)
-                    .maximumSize(256L * generationThreads) // 1 full chunk (high res)
-                    .build(vec -> ((DerivativeNoiseSampler) sampler).noised(vec.seed, vec.x, vec.y, vec.z));
-            } else {
-                cache3DDirv = null;
-            }
+                .executor(CACHE_EXECUTOR)
+                .scheduler(Scheduler.systemScheduler())
+                .initialCapacity(981504)
+                .maximumSize(981504)
+                .build(this::sampleNoise);
+            this.cache3D = ThreadLocal.withInitial(() -> Pair.of(new DoubleSeededVector3Key(0, 0, 0, 0), cache).mutable());
+            this.cache2D = null;
        }
    }

-    @Override
-    public boolean isDifferentiable() {
-        return DerivativeNoiseSampler.isDifferentiable(sampler);
+    private Double sampleNoise(DoubleSeededVector2Key vec) {
+        this.cache2D.get().setLeft(new DoubleSeededVector2Key(0, 0, 0));
+        return this.sampler.noise(vec.seed, vec.x, vec.z);
    }

-    @Override
-    public double[] noised(long seed, double x, double y) {
-        return cache2DDirv.get(new DoubleSeededVector2(x, y, seed));
-    }
-
-    @Override
-    public double[] noised(long seed, double x, double y, double z) {
-        return cache3DDirv.get(new DoubleSeededVector3(x, y, z, seed));
+    private Double sampleNoise(DoubleSeededVector3Key vec) {
+        this.cache3D.get().setLeft(new DoubleSeededVector3Key(0, 0, 0, 0));
+        return this.sampler.noise(vec.seed, vec.x, vec.y, vec.z);
    }

    @Override
    public double noise(long seed, double x, double y) {
-        DoubleSeededVector2 vec = new DoubleSeededVector2(x, y, seed);
-        if (cache2DDirv != null && cache2DDirv.estimatedSize() != 0) {
-            return cache2DDirv.get(vec)[0];
-        }
-        return cache2D.get(vec);
+        Mutable<DoubleSeededVector2Key, LoadingCache<DoubleSeededVector2Key, Double>> cachePair = cache2D.get();
+        DoubleSeededVector2Key mutableKey = cachePair.getLeft();
+        mutableKey.set(x, y, seed);
+        return cachePair.getRight().get(mutableKey);
    }

    @Override
    public double noise(long seed, double x, double y, double z) {
-        DoubleSeededVector3 vec = new DoubleSeededVector3(x, y, z, seed);
-        if (cache3DDirv != null && cache3DDirv.estimatedSize() != 0) {
-            return cache3DDirv.get(vec)[0];
-        }
-        return cache3D.get(vec);
-    }
-
-    private record DoubleSeededVector3(double x, double y, double z, long seed) {
-        @Override
-        public boolean equals(Object obj) {
-            if(obj instanceof DoubleSeededVector3 that) {
-                return this.y == that.y && this.z == that.z && this.x == that.x && this.seed == that.seed;
-            }
-            return false;
-        }
-
-        @Override
-        public int hashCode() {
-            int code = (int) Double.doubleToLongBits(x);
-            code = 31 * code + (int) Double.doubleToLongBits(y);
-            code = 31 * code + (int) Double.doubleToLongBits(z);
-            return 31 * code + (Long.hashCode(seed));
-        }
-    }
-
-    private record DoubleSeededVector2(double x, double z, long seed) {
-        @Override
-        public boolean equals(Object obj) {
-            if(obj instanceof DoubleSeededVector2 that) {
-                return this.z == that.z && this.x == that.x && this.seed == that.seed;
-            }
-            return false;
-        }
-
-        @Override
-        public int hashCode() {
-            int code = (int) Double.doubleToLongBits(x);
-            code = 31 * code + (int) Double.doubleToLongBits(z);
-            return 31 * code + (Long.hashCode(seed));
-        }
+        Mutable<DoubleSeededVector3Key, LoadingCache<DoubleSeededVector3Key, Double>> cachePair = cache3D.get();
+        DoubleSeededVector3Key mutableKey = cachePair.getLeft();
+        mutableKey.set(x, y, z, seed);
+        return cachePair.getRight().get(mutableKey);
    }
}
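The rewritten CacheSampler avoids allocating a key object per lookup: each thread keeps one mutable scratch key next to the shared cache, mutates it for the lookup, and the loader hands the thread a fresh scratch key whenever a miss causes Caffeine to retain the key it was called with. A hedged, self-contained sketch of that trick follows; Key and Holder are simplified stand-ins for Terra's DoubleSeededVector2Key and Pair.Mutable, and the loader just adds the coordinates instead of calling a wrapped sampler.

import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.LoadingCache;

public class MutableKeyCacheSketch {

    static final class Key { // mutable, value-based key
        double x, z;
        long seed;

        Key(double x, double z, long seed) { set(x, z, seed); }

        void set(double x, double z, long seed) {
            this.x = x;
            this.z = z;
            this.seed = seed;
        }

        @Override
        public boolean equals(Object o) {
            return o instanceof Key that && that.x == x && that.z == z && that.seed == seed;
        }

        @Override
        public int hashCode() {
            int code = (int) Double.doubleToLongBits(x);
            code = 31 * code + (int) Double.doubleToLongBits(z);
            return 31 * code + Long.hashCode(seed);
        }
    }

    // per-thread (scratch key, shared cache) pair, mirroring ThreadLocal<Pair.Mutable<...>> above
    static final class Holder {
        Key scratchKey;
        final LoadingCache<Key, Double> cache;

        Holder(Key scratchKey, LoadingCache<Key, Double> cache) {
            this.scratchKey = scratchKey;
            this.cache = cache;
        }
    }

    private final ThreadLocal<Holder> local;

    public MutableKeyCacheSketch() {
        LoadingCache<Key, Double> shared = Caffeine.newBuilder()
            .maximumSize(256)
            .build(this::compute);
        local = ThreadLocal.withInitial(() -> new Holder(new Key(0, 0, 0), shared));
    }

    private Double compute(Key key) {
        // a miss means the cache now retains `key`, so give this thread a fresh scratch key
        // that can safely be mutated on later lookups
        local.get().scratchKey = new Key(0, 0, 0);
        return key.x + key.z + key.seed; // stand-in for the expensive computation
    }

    public double get(double x, double z, long seed) {
        Holder holder = local.get();
        holder.scratchKey.set(x, z, seed);          // no allocation when the value is already cached
        return holder.cache.get(holder.scratchKey); // on a miss, compute() swaps in a new scratch key
    }
}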
@@ -17,6 +17,10 @@ import com.dfsek.terra.api.config.Loader;
import com.dfsek.terra.api.properties.Properties;
import com.dfsek.terra.api.util.generic.Lazy;

+import com.github.benmanes.caffeine.cache.Scheduler;
+
+import static com.dfsek.terra.api.util.cache.CacheUtils.CACHE_EXECUTOR;
+

/*
 * Cache prevents configs from loading the same image multiple times into memory

@@ -26,8 +30,9 @@ record ImageCache(LoadingCache<String, Image> cache) implements Properties {
        ImageLibraryPackConfigTemplate config = pack.getContext().get(ImageLibraryPackConfigTemplate.class);
        ImageCache images;
        if(!pack.getContext().has(ImageCache.class)) {
-            var cacheBuilder = Caffeine.newBuilder();
-            if(config.unloadOnTimeout()) cacheBuilder.expireAfterAccess(config.getCacheTimeout(), TimeUnit.SECONDS);
+            var cacheBuilder = Caffeine.newBuilder().executor(CACHE_EXECUTOR).scheduler(Scheduler.systemScheduler());
+            if(config.unloadOnTimeout()) cacheBuilder.expireAfterAccess(config.getCacheTimeout(), TimeUnit.SECONDS).executor(CACHE_EXECUTOR)
+                .scheduler(Scheduler.systemScheduler());
            images = new ImageCache(cacheBuilder.build(s -> loadImage(s, files)));
            pack.getContext().put(images);
        } else images = pack.getContext().get(ImageCache.class);
common/api/src/main/java/com/dfsek/terra/api/util/cache/CacheUtils.java (new file, 9 lines)
@@ -0,0 +1,9 @@
package com.dfsek.terra.api.util.cache;

import java.util.concurrent.Executor;
import java.util.concurrent.Executors;


public class CacheUtils {
    public static final Executor CACHE_EXECUTOR = Executors.newSingleThreadExecutor();
}
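CacheUtils centralizes a single-threaded Executor so the Caffeine caches above can hand their maintenance work (eviction, removal notifications) to one background thread instead of running it on the calling generation threads. A small hedged sketch of how such a cache would be wired up, using an arbitrary String-to-Integer loader that is not part of Terra:

import com.dfsek.terra.api.util.cache.CacheUtils;

import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.LoadingCache;
import com.github.benmanes.caffeine.cache.Scheduler;

public class SharedExecutorCacheSketch {
    // maintenance runs on the shared CACHE_EXECUTOR, and the system scheduler
    // triggers expiration promptly instead of waiting for the next cache access
    public static final LoadingCache<String, Integer> EXAMPLE = Caffeine.newBuilder()
        .executor(CacheUtils.CACHE_EXECUTOR)
        .scheduler(Scheduler.systemScheduler())
        .maximumSize(256)
        .build(String::length); // stand-in loader for the sketch
}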
common/api/src/main/java/com/dfsek/terra/api/util/cache/DoubleSeededVector2Key.java (new file, 35 lines)
@@ -0,0 +1,35 @@
package com.dfsek.terra.api.util.cache;


public class DoubleSeededVector2Key {

    public double x;
    public double z;
    public long seed;

    public DoubleSeededVector2Key(double x, double z, long seed) {
        this.x = x;
        this.z = z;
        this.seed = seed;
    }

    public void set(double x, double z, long seed) {
        this.x = x;
        this.z = z;
        this.seed = seed;
    }

    @Override
    public boolean equals(Object obj) {
        if(obj instanceof DoubleSeededVector2Key that) {
            return this.z == that.z && this.x == that.x && this.seed == that.seed;
        }
        return false;
    }

    @Override
    public int hashCode() {
        int code = (int) Double.doubleToLongBits(x);
        code = 31 * code + (int) Double.doubleToLongBits(z);
        return 31 * code + (Long.hashCode(seed));
    }
}
common/api/src/main/java/com/dfsek/terra/api/util/cache/DoubleSeededVector3Key.java (new file, 38 lines)
@@ -0,0 +1,38 @@
package com.dfsek.terra.api.util.cache;


public class DoubleSeededVector3Key {
    public double x;
    public double y;
    public double z;
    public long seed;

    public DoubleSeededVector3Key(double x, double y, double z, long seed) {
        this.x = x;
        this.y = y;
        this.z = z;
        this.seed = seed;
    }

    public void set(double x, double y, double z, long seed) {
        this.x = x;
        this.y = y;
        this.z = z;
        this.seed = seed;
    }

    @Override
    public boolean equals(Object obj) {
        if(obj instanceof DoubleSeededVector3Key that) {
            return this.y == that.y && this.z == that.z && this.x == that.x && this.seed == that.seed;
        }
        return false;
    }

    @Override
    public int hashCode() {
        int code = (int) Double.doubleToLongBits(x);
        code = 31 * code + (int) Double.doubleToLongBits(y);
        code = 31 * code + (int) Double.doubleToLongBits(z);
        return 31 * code + (Long.hashCode(seed));
    }
}
common/api/src/main/java/com/dfsek/terra/api/util/cache/SeededVector2Key.java (new file, 37 lines)
@@ -0,0 +1,37 @@
package com.dfsek.terra.api.util.cache;


public class SeededVector2Key {
    public int x;
    public int z;
    public long seed;

    public SeededVector2Key(int x, int z, long seed) {
        this.x = x;
        this.z = z;
        this.seed = seed;
    }

    public void set(int x, int z, long seed) {
        this.x = x;
        this.z = z;
        this.seed = seed;
    }

    @Override
    public boolean equals(Object obj) {
        if(obj instanceof SeededVector2Key that) {
            return this.z == that.z && this.x == that.x && this.seed == that.seed;
        }
        return false;
    }

    @Override
    public int hashCode() {
        int code = x;
        code = 31 * code + z;
        return 31 * code + (Long.hashCode(seed));
    }
}
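SeededVector2Key is deliberately mutable: equality and hashing are by value, so one scratch instance can be reused for repeated lookups without allocating, as long as an instance that is actually stored in a map or cache is never mutated afterwards. A hedged usage sketch, with the map contents invented for illustration:

import com.dfsek.terra.api.util.cache.SeededVector2Key;

import java.util.HashMap;
import java.util.Map;

public class SeededKeyUsageSketch {
    public static void main(String[] args) {
        Map<SeededVector2Key, String> biomes = new HashMap<>();
        biomes.put(new SeededVector2Key(16, 32, 1234L), "plains"); // a stored key must never be mutated

        SeededVector2Key scratch = new SeededVector2Key(0, 0, 0);  // reused for every query
        scratch.set(16, 32, 1234L);
        System.out.println(biomes.get(scratch)); // prints "plains": equal by value, not identity
    }
}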
common/api/src/main/java/com/dfsek/terra/api/util/cache/SeededVector3Key.java (new file, 40 lines)
@@ -0,0 +1,40 @@
package com.dfsek.terra.api.util.cache;


public class SeededVector3Key {
    public int x;
    public int y;
    public int z;
    public long seed;

    public SeededVector3Key(int x, int y, int z, long seed) {
        this.x = x;
        this.y = y;
        this.z = z;
        this.seed = seed;
    }

    public void set(int x, int y, int z, long seed) {
        this.x = x;
        this.y = y;
        this.z = z;
        this.seed = seed;
    }

    @Override
    public boolean equals(Object obj) {
        if(obj instanceof SeededVector3Key that) {
            return this.y == that.y && this.z == that.z && this.x == that.x && this.seed == that.seed;
        }
        return false;
    }

    @Override
    public int hashCode() {
        int code = x;
        code = 31 * code + y;
        code = 31 * code + z;
        return 31 * code + (Long.hashCode(seed));
    }
}
@@ -97,7 +97,7 @@ public interface BiomeProvider {
        if(this instanceof CachingBiomeProvider cachingBiomeProvider) {
            return cachingBiomeProvider;
        }
-        return new CachingBiomeProvider(this, platform.getGenerationThreads());
+        return new CachingBiomeProvider(this);
    }
@@ -1,5 +1,11 @@
package com.dfsek.terra.api.world.biome.generation;

+import com.dfsek.terra.api.util.cache.SeededVector2Key;
+import com.dfsek.terra.api.util.cache.SeededVector3Key;
+import com.dfsek.terra.api.util.generic.pair.Pair;
+
+import com.dfsek.terra.api.util.generic.pair.Pair.Mutable;
+
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.LoadingCache;
import com.github.benmanes.caffeine.cache.Scheduler;

@@ -9,6 +15,8 @@ import java.util.Optional;
import com.dfsek.terra.api.Handle;
import com.dfsek.terra.api.world.biome.Biome;

+import static com.dfsek.terra.api.util.cache.CacheUtils.CACHE_EXECUTOR;
+

/**
 * A biome provider implementation that lazily evaluates biomes, and caches them.
@@ -18,25 +26,43 @@ import com.dfsek.terra.api.world.biome.Biome;
public class CachingBiomeProvider implements BiomeProvider, Handle {
    protected final BiomeProvider delegate;
    private final int res;
-    private final LoadingCache<SeededVector3, Biome> cache;
-    private final LoadingCache<SeededVector2, Optional<Biome>> baseCache;
+    private final ThreadLocal<Pair.Mutable<SeededVector3Key, LoadingCache<SeededVector3Key, Biome>>> cache;
+    private final ThreadLocal<Pair.Mutable<SeededVector2Key, LoadingCache<SeededVector2Key, Optional<Biome>>>> baseCache;

-    protected CachingBiomeProvider(BiomeProvider delegate, int generationThreads) {
+    protected CachingBiomeProvider(BiomeProvider delegate) {
        this.delegate = delegate;
        this.res = delegate.resolution();
-        int size = generationThreads * 98304;
-        this.cache = Caffeine
-            .newBuilder()
-            .scheduler(Scheduler.disabledScheduler())
-            .initialCapacity(size)
-            .maximumSize(size) // 1 full chunk (high res)
-            .build(vec -> delegate.getBiome(vec.x * res, vec.y * res, vec.z * res, vec.seed));
-
-        this.baseCache = Caffeine
+        LoadingCache<SeededVector2Key, Optional<Biome>> cache = Caffeine
            .newBuilder()
-            .maximumSize(256L * generationThreads) // 1 full chunk (high res)
-            .build(vec -> delegate.getBaseBiome(vec.x * res, vec.z * res, vec.seed));
+            .executor(CACHE_EXECUTOR)
+            .scheduler(Scheduler.systemScheduler())
+            .initialCapacity(256)
+            .maximumSize(256)
+            .build(this::sampleBiome);
+        this.baseCache = ThreadLocal.withInitial(() -> Pair.of(new SeededVector2Key(0, 0, 0), cache).mutable());
+
+        LoadingCache<SeededVector3Key, Biome> cache3D = Caffeine
+            .newBuilder()
+            .executor(CACHE_EXECUTOR)
+            .scheduler(Scheduler.systemScheduler())
+            .initialCapacity(981504)
+            .maximumSize(981504)
+            .build(this::sampleBiome);
+        this.cache = ThreadLocal.withInitial(() -> Pair.of(new SeededVector3Key(0, 0, 0, 0), cache3D).mutable());
    }

+    private Optional<Biome> sampleBiome(SeededVector2Key vec) {
+        this.baseCache.get().setLeft(new SeededVector2Key(0, 0, 0));
+        return this.delegate.getBaseBiome(vec.x * res, vec.z * res, vec.seed);
+    }
+
+    private Biome sampleBiome(SeededVector3Key vec) {
+        this.cache.get().setLeft(new SeededVector3Key(0, 0, 0, 0));
+        return this.delegate.getBiome(vec.x * res, vec.y * res, vec.z * res, vec.seed);
+    }
+
    @Override
@@ -46,12 +72,18 @@ public class CachingBiomeProvider implements BiomeProvider, Handle {

    @Override
    public Biome getBiome(int x, int y, int z, long seed) {
-        return cache.get(new SeededVector3(x / res, y / res, z / res, seed));
+        Mutable<SeededVector3Key, LoadingCache<SeededVector3Key, Biome>> cachePair = cache.get();
+        SeededVector3Key mutableKey = cachePair.getLeft();
+        mutableKey.set(x, y, z, seed);
+        return cachePair.getRight().get(mutableKey);
    }

    @Override
    public Optional<Biome> getBaseBiome(int x, int z, long seed) {
-        return baseCache.get(new SeededVector2(x / res, z / res, seed));
+        Mutable<SeededVector2Key, LoadingCache<SeededVector2Key, Optional<Biome>>> cachePair = baseCache.get();
+        SeededVector2Key mutableKey = cachePair.getLeft();
+        mutableKey.set(x, z, seed);
+        return cachePair.getRight().get(mutableKey);
    }

    @Override
@@ -63,40 +95,4 @@ public class CachingBiomeProvider implements BiomeProvider, Handle {
    public int resolution() {
        return delegate.resolution();
    }
-
-    private record SeededVector3(int x, int y, int z, long seed) {
-        @Override
-        public boolean equals(Object obj) {
-            if(obj instanceof SeededVector3 that) {
-                return this.y == that.y && this.z == that.z && this.x == that.x && this.seed == that.seed;
-            }
-            return false;
-        }
-
-        @Override
-        public int hashCode() {
-            int code = x;
-            code = 31 * code + y;
-            code = 31 * code + z;
-            return 31 * code + (Long.hashCode(seed));
-        }
-    }
-
-    private record SeededVector2(int x, int z, long seed) {
-        @Override
-        public boolean equals(Object obj) {
-            if(obj instanceof SeededVector2 that) {
-                return this.z == that.z && this.x == that.x && this.seed == that.seed;
-            }
-            return false;
-        }
-
-        @Override
-        public int hashCode() {
-            int code = x;
-            code = 31 * code + z;
-            return 31 * code + (Long.hashCode(seed));
-        }
-    }
}
@@ -10,6 +10,9 @@ dependencies {
    shadedApi("commons-io", "commons-io", Versions.Libraries.Internal.apacheIO)
    shadedApi("com.github.Querz", "NBT", Versions.CLI.nbt)

+    shadedImplementation("info.picocli", "picocli", Versions.CLI.picocli)
+    annotationProcessor("info.picocli", "picocli-codegen", Versions.CLI.picocli)
+
    shadedImplementation("com.google.guava", "guava", Versions.Libraries.Internal.guava)

    shadedImplementation("ch.qos.logback", "logback-classic", Versions.CLI.logback)

@@ -26,6 +29,12 @@ tasks.withType<Jar> {
    }
}

+tasks.withType<JavaCompile> {
+    doFirst {
+        options.compilerArgs.add("-Aproject=${project.group}/${project.name}")
+    }
+}
+
application {
    mainClass.set(javaMainClass)
}
@@ -5,41 +5,74 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
+import java.util.concurrent.Callable;

import com.dfsek.terra.api.config.ConfigPack;
import com.dfsek.terra.api.event.events.platform.PlatformInitializationEvent;
import com.dfsek.terra.api.util.vector.Vector2Int;
import com.dfsek.terra.cli.world.CLIWorld;

+import picocli.CommandLine;
+import picocli.CommandLine.Command;
+import picocli.CommandLine.Option;

-public final class TerraCLI {
-    private static final Logger LOGGER = LoggerFactory.getLogger(TerraCLI.class);
-
-    public static void main(String... args) {
+//TODO auto pull in version
+@Command(name = "TerraCLI", mixinStandardHelpOptions = true, version = "6.6.0",
+         description = "Generates a Terra World and saves it in minecraft region format.")
+public final class TerraCLI implements Callable<Integer> {
+    @Option(names = { "-s", "--size"}, description = "Number of regions to generate.")
+    private int size = 2;
+
+    @Option(names = { "-p", "--pack"}, description = "Config pack to use.")
+    private String pack = "OVERWORLD";
+
+    @Option(names = { "--seed"}, description = "Seed for world generation.")
+    private long seed = 0;
+
+    @Option(names = { "--max-height"}, description = "Maximum height of the world.")
+    private int maxHeight = 320;
+
+    @Option(names = { "--min-height"}, description = "Minimum height of the world.")
+    private int minHeight = -64;
+
+    @Option(names = { "--no-save"}, description = "Don't save the world to disk.")
+    private boolean noSave = false;
+
+    @Override
+    public Integer call() {
+        Logger LOGGER = LoggerFactory.getLogger(TerraCLI.class);
        LOGGER.info("Starting Terra CLI...");

        CLIPlatform platform = new CLIPlatform();
        platform.getEventManager().callEvent(new PlatformInitializationEvent());

-        ConfigPack generate = platform.getConfigRegistry().getByID("OVERWORLD").orElseThrow(); // TODO: make this a cli argument
+        ConfigPack generate = platform.getConfigRegistry().getByID(pack).orElseThrow();

-        CLIWorld world = new CLIWorld(2, 2, 384, -64, generate);
+        CLIWorld world = new CLIWorld(size, seed, maxHeight, minHeight, generate, noSave);

        world.generate();

-        world.serialize().parallel().forEach(mcaFile -> {
-            Vector2Int pos = mcaFile.getLeft();
-            String name = MCAUtil.createNameFromRegionLocation(pos.getX(), pos.getZ());
-            LOGGER.info("Writing region ({}, {}) to {}", pos.getX(), pos.getZ(), name);
+        if(!noSave) {
+            world.serialize().parallel().forEach(mcaFile -> {
+                Vector2Int pos = mcaFile.getLeft();
+                String name = MCAUtil.createNameFromRegionLocation(pos.getX(), pos.getZ());
+                LOGGER.info("Writing region ({}, {}) to {}", pos.getX(), pos.getZ(), name);

-            try {
-                MCAUtil.write(mcaFile.getRight(), name);
-            } catch(IOException e) {
-                e.printStackTrace();
-            }
-            LOGGER.info("Wrote region to file.");
-        });
+                try {
+                    MCAUtil.write(mcaFile.getRight(), name);
+                } catch(IOException e) {
+                    e.printStackTrace();
+                }
+                LOGGER.info("Wrote region to file.");
+            });
+        }
        LOGGER.info("Done.");
-        System.exit(0);
+        return 0;
    }

+    public static void main(String... args) {
+        int exitCode = new CommandLine(new TerraCLI()).execute(args);
+        System.exit(exitCode);
+    }
}
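With picocli wired in, the same command can also be invoked programmatically, for example from a test, by passing the options that the @Option fields above declare. A hedged sketch, assuming TerraCLI is visible from the calling package; the class name and option values here are invented for illustration:

import picocli.CommandLine;

public class TerraCliInvocationSketch {
    public static void main(String[] args) {
        // equivalent to: terra-cli --size 4 --pack OVERWORLD --seed 42 --no-save
        int exitCode = new CommandLine(new TerraCLI())
            .execute("--size", "4", "--pack", "OVERWORLD", "--seed", "42", "--no-save");
        System.exit(exitCode);
    }
}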
@@ -43,6 +43,7 @@ public class CLIWorld implements ServerWorld, NBTSerializable<Stream<Pair<Vector
    private final ChunkGenerator chunkGenerator;
    private final BiomeProvider biomeProvider;
    private final ConfigPack pack;
+    private final boolean noSave;
    private final AtomicInteger amount = new AtomicInteger(0);

    private final ExecutorService executor = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() - 1);

@@ -51,7 +52,7 @@ public class CLIWorld implements ServerWorld, NBTSerializable<Stream<Pair<Vector
                    long seed,
                    int maxHeight,
                    int minHeight,
-                    ConfigPack pack) {
+                    ConfigPack pack, boolean noSave) {
        this.size = size;
        this.maxHeight = maxHeight;
        this.minHeight = minHeight;

@@ -59,6 +60,7 @@ public class CLIWorld implements ServerWorld, NBTSerializable<Stream<Pair<Vector
        this.chunkGenerator = pack.getGeneratorProvider().newInstance(pack);
        this.biomeProvider = pack.getBiomeProvider();
        this.pack = pack;
+        this.noSave = noSave;

        size += 1;

@@ -73,6 +75,7 @@ public class CLIWorld implements ServerWorld, NBTSerializable<Stream<Pair<Vector
    }

    public void generate() {
+        ArrayList<Double> CPSHistory = new ArrayList<>();
        int sizeChunks = size * 32;
        List<Future<?>> futures = new ArrayList<>();
        final AtomicLong start = new AtomicLong(System.nanoTime());

@@ -83,7 +86,13 @@ public class CLIWorld implements ServerWorld, NBTSerializable<Stream<Pair<Vector
                futures.add(executor.submit(() -> {
                    try {
                        int num = amount.getAndIncrement();
-                        CLIChunk chunk = getChunkAt(finalX, finalZ);
+                        CLIChunk chunk;
+                        if (!noSave) {
+                            chunk = getChunkAt(finalX, finalZ);
+                        } else {
+                            chunk = new CLIChunk(Math.floorMod(finalX, 32), Math.floorMod(finalZ, 32), this);
+                        }
+
                        BiomeProvider cachingBiomeProvider = pack.getBiomeProvider();
                        chunkGenerator.generateChunkData(chunk, this, cachingBiomeProvider, finalX, finalZ);
                        CLIProtoWorld protoWorld = new CLIProtoWorld(this, cachingBiomeProvider, finalX, finalZ);

@@ -91,6 +100,7 @@ public class CLIWorld implements ServerWorld, NBTSerializable<Stream<Pair<Vector
                        if(num % 240 == 239) {
                            long time = System.nanoTime();
                            double cps = num / ((double) (time - start.get()) / 1000000000);
+                            CPSHistory.add(cps);
                            LOGGER.info("Generating chunk at ({}, {}), generated {} chunks at {}cps", finalX, finalZ, num, cps);
                            amount.set(0);
                            start.set(System.nanoTime());

@@ -109,6 +119,8 @@ public class CLIWorld implements ServerWorld, NBTSerializable<Stream<Pair<Vector
                e.printStackTrace();
            }
        }
+
+        LOGGER.info("Average CPS: {}", CPSHistory.stream().mapToDouble(d -> d).average().orElse(0));
    }

    @Override
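For reference, the chunks-per-second bookkeeping added to generate() boils down to sampling a rate every fixed number of chunks and averaging the samples at the end. A standalone, hedged sketch of that arithmetic follows; class and method names are invented here, and unlike the real code it is single-threaded rather than shared across worker tasks.

import java.util.ArrayList;
import java.util.List;

public class CpsTrackerSketch {
    private final List<Double> history = new ArrayList<>();
    private long windowStart = System.nanoTime();
    private int generated = 0;

    public void chunkGenerated() {
        int num = ++generated;
        if(num % 240 == 0) { // sample roughly every 240 chunks, as in the diff
            double cps = num / ((double) (System.nanoTime() - windowStart) / 1_000_000_000);
            history.add(cps);
            generated = 0;
            windowStart = System.nanoTime();
        }
    }

    public double averageCps() {
        return history.stream().mapToDouble(d -> d).average().orElse(0);
    }
}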