Mirror of https://github.com/PolyhedralDev/Terra.git, synced 2025-07-03 16:35:50 +00:00

Merge pull request #292 from PolyhedralDev/dev/cli

CLI generator & performance improvements

Commit: f353d1686c
@@ -41,7 +41,7 @@ public class BiomePipelineProvider implements BiomeProvider {
         this.mutator = mutator;
         this.noiseAmp = noiseAmp;
         holderCache = CacheBuilder.newBuilder()
-            .maximumSize(128)
+            .maximumSize(1024)
             .build(
                 new CacheLoader<>() {
                     @Override
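For context, this is the Guava loading-cache pattern the hunk tunes. The sketch below is illustrative only, assuming Guava's CacheBuilder/CacheLoader API; the key and value types and the loader body are placeholders, not Terra's.

```java
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;

public class HolderCacheSketch {
    // A size-bounded loading cache evicts (approximately) least-recently-used entries
    // once maximumSize is reached, so raising the bound keeps more entries resident
    // at the cost of extra memory.
    private final LoadingCache<Long, String> holderCache = CacheBuilder.newBuilder()
        .maximumSize(1024)
        .build(new CacheLoader<>() {
            @Override
            public String load(Long key) {
                return expensiveLookup(key); // stands in for the real holder computation
            }
        });

    private String expensiveLookup(long key) {
        return "holder-" + key;
    }

    public String get(long key) {
        return holderCache.getUnchecked(key); // computes on miss, returns the cached value on hit
    }
}
```

Bumping maximumSize from 128 to 1024 trades memory for fewer evictions, presumably so repeated biome-pipeline lookups miss less often.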
@@ -101,31 +101,6 @@ public class BiomePipelineProvider implements BiomeProvider {
         return biomes;
     }
 
-    private static final class SeededVector {
-        private final int x;
-        private final int z;
-        private final long seed;
-
-        private SeededVector(int x, int z, long seed) {
-            this.x = x;
-            this.z = z;
-            this.seed = seed;
-        }
-
-        @Override
-        public int hashCode() {
-            int result = 0;
-            result = 31 * result + ((int) (seed ^ (seed >>> 32)));
-            result = 31 * result + x;
-            result = 31 * result + z;
-            return result;
-        }
-
-        @Override
-        public boolean equals(Object obj) {
-            if(!(obj instanceof SeededVector that)) return false;
-
-            return this.seed == that.seed && this.x == that.x && this.z == that.z;
-        }
-    }
+    private record SeededVector(int x, int z, long seed) {
+    }
 }
 
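The hunk above replaces the hand-written value class with a record, which derives the canonical constructor, accessors, equals, hashCode and toString from its components. A minimal, self-contained illustration; SeededVectorSketch is a stand-in name, not the class in the diff.

```java
public class RecordEqualityDemo {
    // Same components as the class the diff removes; the compiler now supplies
    // the constructor, accessors, equals and hashCode that were previously hand-written.
    record SeededVectorSketch(int x, int z, long seed) {}

    public static void main(String[] args) {
        var a = new SeededVectorSketch(3, -7, 42L);
        var b = new SeededVectorSketch(3, -7, 42L);

        // Value-based equality makes instances with equal components interchangeable,
        // presumably as cache keys in the holder cache above.
        System.out.println(a.equals(b));                  // true
        System.out.println(a.hashCode() == b.hashCode()); // true
    }
}
```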
@@ -8,6 +8,8 @@
 
 package com.dfsek.terra.addons.chunkgenerator.generation;
 
+import com.dfsek.terra.api.world.info.WorldProperties;
+
 import net.jafama.FastMath;
 import org.jetbrains.annotations.NotNull;
 
@@ -20,7 +22,6 @@ import com.dfsek.terra.api.Platform;
 import com.dfsek.terra.api.block.state.BlockState;
 import com.dfsek.terra.api.config.ConfigPack;
 import com.dfsek.terra.api.profiler.ProfileFrame;
-import com.dfsek.terra.api.world.ServerWorld;
 import com.dfsek.terra.api.world.WritableWorld;
 import com.dfsek.terra.api.world.biome.Biome;
 import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
@@ -38,9 +39,11 @@ public class NoiseChunkGenerator3D implements ChunkGenerator {
 
     private final int carverHorizontalResolution;
     private final int carverVerticalResolution;
+    private final ConfigPack configPack;
 
     public NoiseChunkGenerator3D(ConfigPack c, Platform platform, int elevationBlend, int carverHorizontalResolution,
                                  int carverVerticalResolution) {
+        this.configPack = c;
         this.platform = platform;
         this.air = platform.getWorldHandle().air();
         this.carverHorizontalResolution = carverHorizontalResolution;
@@ -107,8 +110,8 @@ public class NoiseChunkGenerator3D implements ChunkGenerator {
     }
 
     @Override
-    public BlockState getBlock(ServerWorld world, int x, int y, int z) {
-        BiomeProvider provider = world.getBiomeProvider();
+    public BlockState getBlock(WorldProperties world, int x, int y, int z) {
+        BiomeProvider provider = configPack.getBiomeProvider();
         Biome biome = provider.getBiome(x, z, world.getSeed());
         Sampler3D sampler = samplerCache.get(x, z, world);
 
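The signature change above narrows what getBlock needs: instead of a full ServerWorld it takes read-only WorldProperties and resolves the BiomeProvider from the ConfigPack stored at construction. A rough sketch of that dependency-narrowing shape, using hypothetical interface names rather than Terra's:

```java
public class DependencyNarrowingSketch {
    // Hypothetical stand-ins for the real interfaces.
    interface WorldInfo { long seed(); }                              // read-only view of a world
    interface BiomeLookup { String biomeAt(int x, int z, long seed); }
    interface Pack { BiomeLookup biomeLookup(); }

    private final Pack pack;

    public DependencyNarrowingSketch(Pack pack) {
        this.pack = pack; // keep the pack so lookups no longer go through a live server world
    }

    public String blockAt(WorldInfo world, int x, int z) {
        // Only the seed is read from the world, so callers without a ServerWorld
        // (for example a CLI generator) can still request blocks.
        return pack.biomeLookup().biomeAt(x, z, world.seed());
    }
}
```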
@@ -21,7 +21,7 @@ public final class PaletteUtil {
 
     public static Palette getPalette(int x, int y, int z, Sampler3D sampler, PaletteInfo paletteInfo) {
         SlantHolder slant = paletteInfo.slantHolder();
-        if(slant != null) {
+        if(!slant.isEmpty()) {
             double slope = derivative(sampler, x, y, z);
             if(slope > slant.getMinSlope()) {
                 return slant.getPalette(slope).getPalette(y);
@@ -7,6 +7,9 @@
 
 package com.dfsek.terra.addons.chunkgenerator.generation.math.interpolation;
 
+import com.dfsek.terra.api.util.vector.Vector2Int;
+import com.dfsek.terra.api.world.biome.Biome;
+
 import net.jafama.FastMath;
 
 import java.util.HashMap;
@@ -53,11 +56,14 @@ public class ChunkInterpolator {
 
         double[][][] noiseStorage = new double[5][5][size + 1];
 
+        BiomeCache cache = new BiomeCache(provider);
+
 
         for(int x = 0; x < 5; x++) {
             for(int z = 0; z < 5; z++) {
-                BiomeNoiseProperties generationSettings = provider.getBiome(xOrigin + (x << 2), zOrigin + (z << 2), seed).getContext().get(
-                    BiomeNoiseProperties.class);
+                BiomeNoiseProperties generationSettings = cache.get(xOrigin + (x << 2), zOrigin + (z << 2), seed)
+                                                               .getContext()
+                                                               .get(BiomeNoiseProperties.class);
                 Map<BiomeNoiseProperties, MutableInteger> genMap = new HashMap<>();
 
                 int step = generationSettings.blendStep();
@@ -66,7 +72,7 @@
                 for(int xi = -blend; xi <= blend; xi++) {
                     for(int zi = -blend; zi <= blend; zi++) {
                         genMap.computeIfAbsent(
-                            provider.getBiome(xOrigin + (x << 2) + (xi * step), zOrigin + (z << 2) + (zi * step), seed)
+                            cache.get(xOrigin + (x << 2) + (xi * step), zOrigin + (z << 2) + (zi * step), seed)
                                 .getContext()
                                 .get(BiomeNoiseProperties.class),
                             g -> new MutableInteger(0)).increment(); // Increment by 1
@@ -96,6 +102,20 @@
         }
     }
 
+    private static final class BiomeCache {
+
+        private final BiomeProvider provider;
+        private final Map<Vector2Int, Biome> cache = new HashMap<>();
+
+        private BiomeCache(BiomeProvider provider) {
+            this.provider = provider;
+        }
+
+        public Biome get(int x, int z, long seed) {
+            return cache.computeIfAbsent(Vector2Int.of(x, z), vec -> provider.getBiome(x, z, seed));
+        }
+    }
+
     private static int reRange(int value, int high) {
         return FastMath.max(FastMath.min(value, high), 0);
     }
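The new BiomeCache above memoizes biome lookups per column with Map.computeIfAbsent, so the blending loops that revisit the same coordinates hit a HashMap instead of re-running the provider. A minimal sketch of the same pattern with placeholder types: a record key instead of Vector2Int and a functional interface instead of BiomeProvider.

```java
import java.util.HashMap;
import java.util.Map;

public class ColumnCacheSketch {
    record Column(int x, int z) {}                                    // stand-in for Vector2Int

    interface Lookup { String biomeAt(int x, int z, long seed); }     // stand-in for BiomeProvider

    private final Map<Column, String> cache = new HashMap<>();
    private final Lookup lookup;

    public ColumnCacheSketch(Lookup lookup) {
        this.lookup = lookup;
    }

    // computeIfAbsent runs the expensive lookup once per (x, z); repeated visits
    // during blending are plain map hits.
    public String get(int x, int z, long seed) {
        return cache.computeIfAbsent(new Column(x, z), c -> lookup.biomeAt(x, z, seed));
    }
}
```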
@@ -17,6 +17,8 @@
 
 package com.dfsek.terra.addons.chunkgenerator.generation.math.samplers;
 
+import com.dfsek.terra.api.world.info.WorldProperties;
+
 import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.CacheLoader;
 import com.google.common.cache.LoadingCache;
@@ -45,13 +47,13 @@ public class SamplerProvider {
             });
     }
 
-    public Sampler3D get(int x, int z, World world) {
+    public Sampler3D get(int x, int z, WorldProperties world) {
         int cx = FastMath.floorDiv(x, 16);
         int cz = FastMath.floorDiv(z, 16);
         return getChunk(cx, cz, world);
     }
 
-    public Sampler3D getChunk(int cx, int cz, World world) {
+    public Sampler3D getChunk(int cx, int cz, WorldProperties world) {
         return cache.getUnchecked(new WorldContext(cx, cz, world.getSeed(), world.getMinHeight(), world.getMaxHeight()));
     }
 
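In the hunk above, get() maps block coordinates to chunk coordinates with floorDiv before hitting the per-chunk sampler cache. The sketch below shows why floor division matters for negative coordinates; the cache key and value types are placeholders, not Terra's WorldContext or Sampler3D.

```java
import java.util.HashMap;
import java.util.Map;

public class ChunkKeySketch {
    record ChunkPos(int cx, int cz, long seed) {}          // placeholder composite key

    private final Map<ChunkPos, double[]> samplers = new HashMap<>();

    public double[] forBlock(int x, int z, long seed) {
        // floorDiv rounds toward negative infinity, so block -1 lands in chunk -1,
        // not chunk 0 as plain integer division would give.
        int cx = Math.floorDiv(x, 16);
        int cz = Math.floorDiv(z, 16);
        return samplers.computeIfAbsent(new ChunkPos(cx, cz, seed), p -> new double[16 * 16]);
    }

    public static void main(String[] args) {
        System.out.println(Math.floorDiv(-1, 16)); // -1
        System.out.println(-1 / 16);               // 0
    }
}
```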
@@ -20,6 +20,10 @@ public class SlantHolder {
         this.minSlope = minSlope;
     }
 
+    public boolean isEmpty() {
+        return layers.isEmpty();
+    }
+
     public PaletteHolder getPalette(double slope) {
         return layers.floorEntry(slope).getValue();
     }
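This isEmpty() accessor pairs with the PaletteUtil change earlier in the diff, which swaps a null check for !slant.isEmpty(). The point, sketched below with a plain TreeMap (a guess at layers' concrete type, since only floorEntry and isEmpty are visible here), is that floorEntry on an empty map returns null, so chaining getValue() would throw unless emptiness is ruled out first.

```java
import java.util.TreeMap;

public class FloorEntrySketch {
    public static void main(String[] args) {
        TreeMap<Double, String> layers = new TreeMap<>();

        // On an empty map, floorEntry returns null; chaining .getValue() would NPE.
        System.out.println(layers.floorEntry(0.5));                // null

        layers.put(0.0, "gentle");
        layers.put(1.0, "steep");

        // With entries present and a slope at or above the smallest key,
        // floorEntry picks the greatest key <= the queried slope.
        if(!layers.isEmpty()) {
            System.out.println(layers.floorEntry(0.5).getValue()); // gentle
            System.out.println(layers.floorEntry(2.0).getValue()); // steep
        }
    }
}
```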
@@ -11,6 +11,7 @@ import com.dfsek.paralithic.eval.tokenizer.ParseException;
 import com.dfsek.paralithic.functions.Function;
 import com.dfsek.tectonic.api.config.template.annotations.Default;
 import com.dfsek.tectonic.api.config.template.annotations.Value;
+import com.dfsek.tectonic.api.exception.LoadException;
 import com.dfsek.tectonic.api.exception.ValidationException;
 
 import java.util.HashMap;
@@ -55,21 +56,10 @@ public class ExpressionFunctionTemplate extends SamplerTemplate<ExpressionFuncti
             Map<String, Function> noiseFunctionMap = generateFunctions();
             return new ExpressionFunction(noiseFunctionMap, equation, vars);
         } catch(ParseException e) {
-            throw new IllegalStateException(e);
+            throw new LoadException("Failed to parse expression.", e);
         }
     }
 
-    @Override
-    public boolean validate() throws ValidationException {
-        try {
-            Map<String, Function> noiseFunctionMap = generateFunctions();
-            new ExpressionFunction(noiseFunctionMap, equation, vars);
-        } catch(ParseException e) {
-            throw new ValidationException("Errors occurred while parsing noise equation: ", e);
-        }
-        return super.validate();
-    }
-
     private Map<String, Function> generateFunctions() throws ParseException {
         Map<String, Function> noiseFunctionMap = new HashMap<>();
 
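The template now reports parse failures as a config-loading error with a message instead of an IllegalStateException, and drops the separate validate() pass. A generic sketch of that exception-translation pattern; ConfigLoadException and the "parser" here are placeholders, not Tectonic's LoadException or Paralithic's tokenizer.

```java
public class ExceptionTranslationSketch {
    static class ConfigLoadException extends Exception {    // placeholder for the real LoadException
        ConfigLoadException(String message, Throwable cause) {
            super(message, cause);
        }
    }

    // Stands in for building the real ExpressionFunction from an equation string.
    static double build(String equation) throws ConfigLoadException {
        try {
            return Double.parseDouble(equation);             // placeholder "parser"
        } catch(NumberFormatException e) {
            // Re-throw as a domain-level loading error, keeping the original cause
            // so the config author sees both the context and the parser's message.
            throw new ConfigLoadException("Failed to parse expression.", e);
        }
    }
}
```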
@@ -44,24 +44,27 @@ public class TerraScriptAddon implements AddonInitializer {
             .then(event -> {
                 CheckedRegistry<Structure> structureRegistry = event.getPack().getOrCreateRegistry(Structure.class);
                 CheckedRegistry<LootTable> lootRegistry = event.getPack().getOrCreateRegistry(LootTable.class);
-                event.getPack().getLoader().open("", ".tesf").thenEntries(entries -> {
-                    for(Map.Entry<String, InputStream> entry : entries) {
+                event.getPack().getLoader().open("", ".tesf").thenEntries(
+                    entries ->
+                        entries.stream()
+                               .parallel()
+                               .map(entry -> {
                         try {
                             String id = StringUtil.fileName(entry.getKey());
-                            StructureScript structureScript = new StructureScript(entry.getValue(),
+                            return new StructureScript(entry.getValue(),
                                 id,
                                 platform,
                                 structureRegistry,
                                 lootRegistry,
-                                event
-                                    .getPack()
-                                    .getOrCreateRegistry(FunctionBuilder.class));
-                            structureRegistry.register(structureScript.getID(), structureScript);
+                                event.getPack().getOrCreateRegistry(FunctionBuilder.class));
                         } catch(ParseException e) {
                             throw new LoadException("Failed to load script \"" + entry.getKey() + "\"", e);
                         }
-                    }
-                }).close();
+                    })
+                               .toList()
+                               .forEach(structureScript -> structureRegistry.register(structureScript.getID(),
+                                                                                      structureScript)))
+                    .close();
                 })
                 .priority(2)
                 .failThrough();
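The rewritten loader above parses scripts on a parallel stream, collects the results, and only then registers them, so the registry is touched from a single thread after all parsing has finished. A compact sketch of that parallel-parse/sequential-register pattern with placeholder types; Script, compile and the source map are illustrative, not Terra's API.

```java
import java.util.HashMap;
import java.util.Map;

public class ParallelLoadSketch {
    record Script(String id, String compiled) {}

    private final Map<String, Script> registry = new HashMap<>(); // not thread-safe; written sequentially below

    // Placeholder for the expensive, independent per-entry work (script parsing).
    private static Script compile(Map.Entry<String, String> entry) {
        return new Script(entry.getKey(), entry.getValue().strip());
    }

    public void loadAll(Map<String, String> sources) {
        sources.entrySet().stream()
               .parallel()                                  // parse scripts concurrently
               .map(ParallelLoadSketch::compile)
               .toList()                                    // barrier: wait for every parse to finish
               .forEach(s -> registry.put(s.id(), s));      // register sequentially on the calling thread
    }
}
```

Collecting with toList() before the forEach keeps registry writes off the common fork-join pool, which matters if the registry is not built for concurrent mutation.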
@@ -1,4 +0,0 @@
-dependencies {
-    shadedApi(project(":common:api:util"))
-    shadedApi("ca.solo-studios", "strata", Versions.Libraries.strata)
-}
@@ -1,10 +1,11 @@
afterEvaluate {
    subprojects.forEach {
        if (it != project) {
            println("Project: ${it.name}")
            dependencies {
                shadedApi(it)
            }
        }
    }
dependencies {
    shadedApi("ca.solo-studios", "strata", Versions.Libraries.strata)
    shadedApi("org.slf4j", "slf4j-api", Versions.Libraries.slf4j)
    shadedApi("cloud.commandframework", "cloud-core", Versions.Libraries.cloud)

    shadedApi("com.dfsek", "paralithic", Versions.Libraries.paralithic)
    shadedApi("com.dfsek.tectonic", "common", Versions.Libraries.tectonic)

    shadedImplementation("net.jafama", "jafama", Versions.Libraries.Internal.jafama)
}
@@ -1,13 +0,0 @@
-dependencies {
-    shadedApi(project(":common:api:util"))
-    shadedApi(project(":common:api:noise"))
-    shadedApi(project(":common:api:registry"))
-    shadedApi(project(":common:api:addons"))
-
-    shadedApi("org.slf4j", "slf4j-api", Versions.Libraries.slf4j)
-    shadedApi("cloud.commandframework", "cloud-core", Versions.Libraries.cloud)
-
-
-    shadedImplementation("net.jafama", "jafama", Versions.Libraries.Internal.jafama)
-}
-
@@ -1,3 +0,0 @@
-# Dependency Injection
-
-A simple annotation-based dependency injection framework
@@ -1,4 +0,0 @@
-dependencies {
-    shadedApi(project(":common:api:util"))
-}
-
@@ -1,8 +0,0 @@
-dependencies {
-    shadedApi(project(":common:api:util"))
-
-    shadedApi("com.dfsek", "paralithic", Versions.Libraries.paralithic)
-
-    shadedImplementation("net.jafama", "jafama", Versions.Libraries.Internal.jafama)
-}
-
@@ -1,4 +0,0 @@
-dependencies {
-    shadedApi(project(":common:api:util"))
-}
-
Some files were not shown because too many files have changed in this diff.