speed pass

This commit is contained in:
Brian Neumann-Fopiano
2026-02-23 19:04:19 -05:00
parent 18d4dce1db
commit bf207b7062
34 changed files with 2895 additions and 450 deletions

1
core/plugins/Iris/cache/instance vendored Normal file
View File

@@ -0,0 +1 @@
2117487583

View File

@@ -97,6 +97,7 @@ public class Iris extends VolmitPlugin implements Listener {
private static Thread shutdownHook; private static Thread shutdownHook;
private static File settingsFile; private static File settingsFile;
private static final String PENDING_WORLD_DELETE_FILE = "pending-world-deletes.txt"; private static final String PENDING_WORLD_DELETE_FILE = "pending-world-deletes.txt";
private static final StackWalker DEBUG_STACK_WALKER = StackWalker.getInstance();
private static final Map<String, ChunkGenerator> stagedRuntimeGenerators = new ConcurrentHashMap<>(); private static final Map<String, ChunkGenerator> stagedRuntimeGenerators = new ConcurrentHashMap<>();
private static final Map<String, BiomeProvider> stagedRuntimeBiomeProviders = new ConcurrentHashMap<>(); private static final Map<String, BiomeProvider> stagedRuntimeBiomeProviders = new ConcurrentHashMap<>();
@@ -303,21 +304,37 @@ public class Iris extends VolmitPlugin implements Listener {
return; return;
} }
StackWalker.StackFrame frame = null;
try { try {
throw new RuntimeException(); frame = DEBUG_STACK_WALKER.walk(stream -> stream.skip(1).findFirst().orElse(null));
} catch (Throwable e) { } catch (Throwable ignored) {
try {
String[] cc = e.getStackTrace()[1].getClassName().split("\\Q.\\E");
if (cc.length > 5) {
debug(cc[3] + "/" + cc[4] + "/" + cc[cc.length - 1], e.getStackTrace()[1].getLineNumber(), string);
} else {
debug(cc[3] + "/" + cc[4], e.getStackTrace()[1].getLineNumber(), string);
}
} catch (Throwable ex) {
debug("Origin", -1, string);
}
} }
if (frame == null) {
debug("Origin", -1, string);
return;
}
String className = frame.getClassName();
String[] cc = className == null ? new String[0] : className.split("\\Q.\\E");
int line = frame.getLineNumber();
if (cc.length > 5) {
debug(cc[3] + "/" + cc[4] + "/" + cc[cc.length - 1], line, string);
return;
}
if (cc.length > 4) {
debug(cc[3] + "/" + cc[4], line, string);
return;
}
if (cc.length > 0) {
debug(cc[cc.length - 1], line, string);
return;
}
debug("Origin", line, string);
} }
public static void debug(String category, int line, String string) { public static void debug(String category, int line, String string) {

View File

@@ -1324,6 +1324,9 @@ public final class ExternalDataPackPipeline {
writtenPaths.add(outputRelativePath); writtenPaths.add(outputRelativePath);
byte[] outputBytes = inputAsset.bytes(); byte[] outputBytes = inputAsset.bytes();
if (projectedEntry.type() == ProjectedEntryType.STRUCTURE_NBT && !remappedKeys.isEmpty()) {
outputBytes = StructureNbtJigsawPoolRewriter.rewrite(outputBytes, remappedKeys);
}
if (projectedEntry.type() == ProjectedEntryType.STRUCTURE if (projectedEntry.type() == ProjectedEntryType.STRUCTURE
|| projectedEntry.type() == ProjectedEntryType.STRUCTURE_SET || projectedEntry.type() == ProjectedEntryType.STRUCTURE_SET
|| projectedEntry.type() == ProjectedEntryType.CONFIGURED_FEATURE || projectedEntry.type() == ProjectedEntryType.CONFIGURED_FEATURE

View File

@@ -1,7 +0,0 @@
package art.arcane.iris.core;
/**
 * Collection mode for hot-path generation timing metrics.
 * Consumed by IrisComplex to decide whether/how often to sample nanoTime
 * around interpolation and generator work.
 */
public enum IrisHotPathMetricsMode {
// Sample a subset of calls, gated by a power-of-two stride mask.
SAMPLED,
// Time every call (shouldSample always returns true in this mode).
EXACT,
// Collect no metrics at all; the metrics state is never touched.
DISABLED
}

View File

@@ -153,8 +153,6 @@ public class IrisSettings {
public boolean useTicketQueue = true; public boolean useTicketQueue = true;
public IrisRuntimeSchedulerMode runtimeSchedulerMode = IrisRuntimeSchedulerMode.AUTO; public IrisRuntimeSchedulerMode runtimeSchedulerMode = IrisRuntimeSchedulerMode.AUTO;
public IrisPaperLikeBackendMode paperLikeBackendMode = IrisPaperLikeBackendMode.AUTO; public IrisPaperLikeBackendMode paperLikeBackendMode = IrisPaperLikeBackendMode.AUTO;
public IrisHotPathMetricsMode hotPathMetricsMode = IrisHotPathMetricsMode.SAMPLED;
public int hotPathMetricsSampleStride = 1024;
public int maxConcurrency = 256; public int maxConcurrency = 256;
public int paperLikeMaxConcurrency = 96; public int paperLikeMaxConcurrency = 96;
public int foliaMaxConcurrency = 32; public int foliaMaxConcurrency = 32;
@@ -191,20 +189,6 @@ public class IrisSettings {
return paperLikeBackendMode; return paperLikeBackendMode;
} }
/**
 * Returns the configured hot-path metrics mode, defaulting to
 * {@link IrisHotPathMetricsMode#SAMPLED} when the field deserialized as null
 * (e.g. from an older settings file).
 */
public IrisHotPathMetricsMode getHotPathMetricsMode() {
    return hotPathMetricsMode == null ? IrisHotPathMetricsMode.SAMPLED : hotPathMetricsMode;
}
/**
 * Returns the metrics sample stride clamped to [1, 65536] and rounded down
 * to a power of two, so that a (stride - 1) bitmask is a valid sampling gate.
 */
public int getHotPathMetricsSampleStride() {
    int clamped = Math.min(Math.max(hotPathMetricsSampleStride, 1), 65_536);
    int powerOfTwo = Integer.highestOneBit(clamped);
    return powerOfTwo <= 0 ? 1 : powerOfTwo;
}
public int getSaveIntervalMs() { public int getSaveIntervalMs() {
return Math.max(5_000, Math.min(saveIntervalMs, 900_000)); return Math.max(5_000, Math.min(saveIntervalMs, 900_000));
} }
@@ -315,6 +299,7 @@ public class IrisSettings {
public boolean studio = true; public boolean studio = true;
public boolean openVSCode = true; public boolean openVSCode = true;
public boolean disableTimeAndWeather = true; public boolean disableTimeAndWeather = true;
public boolean enableEntitySpawning = false;
public boolean autoStartDefaultStudio = false; public boolean autoStartDefaultStudio = false;
} }

View File

@@ -0,0 +1,198 @@
package art.arcane.iris.core;
import art.arcane.volmlib.util.nbt.io.NBTDeserializer;
import art.arcane.volmlib.util.nbt.io.NBTSerializer;
import art.arcane.volmlib.util.nbt.io.NamedTag;
import art.arcane.volmlib.util.nbt.tag.ByteTag;
import art.arcane.volmlib.util.nbt.tag.CompoundTag;
import art.arcane.volmlib.util.nbt.tag.IntTag;
import art.arcane.volmlib.util.nbt.tag.ListTag;
import art.arcane.volmlib.util.nbt.tag.NumberTag;
import art.arcane.volmlib.util.nbt.tag.ShortTag;
import art.arcane.volmlib.util.nbt.tag.Tag;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.util.HashSet;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
/**
 * Rewrites {@code minecraft:jigsaw} block entity {@code pool} references inside
 * a structure NBT payload so they follow template-pool keys that were remapped
 * by the datapack pipeline.
 *
 * <p>Failures are intentionally non-fatal: if the payload cannot be parsed or
 * rewritten, the original bytes are returned untouched rather than failing the
 * whole pipeline.
 */
final class StructureNbtJigsawPoolRewriter {
    private StructureNbtJigsawPoolRewriter() {
        // Static utility class; not instantiable.
    }

    /**
     * Rewrites jigsaw pool references in the given structure NBT bytes.
     *
     * @param bytes        raw structure NBT, compressed or uncompressed; may be null/empty
     * @param remappedKeys map of normalized original pool key -> replacement key
     * @return rewritten bytes, or the original array when nothing changed or on any failure
     */
    static byte[] rewrite(byte[] bytes, Map<String, String> remappedKeys) {
        if (bytes == null || bytes.length == 0 || remappedKeys == null || remappedKeys.isEmpty()) {
            return bytes;
        }
        try {
            NbtReadResult readResult = readNamedTagWithCompression(bytes);
            Tag<?> rootTag = readResult.namedTag().getTag();
            if (!(rootTag instanceof CompoundTag compoundTag)) {
                return bytes;
            }
            if (!rewriteJigsawPoolReferences(compoundTag, remappedKeys)) {
                return bytes;
            }
            // Write back with the same compression mode the input used.
            return writeNamedTag(readResult.namedTag(), readResult.compressed());
        } catch (Throwable ignored) {
            // Best-effort rewrite: malformed NBT passes through unchanged.
            return bytes;
        }
    }

    /**
     * Walks the structure's "palette"/"blocks" lists and rewrites the
     * {@code pool} field of every jigsaw block entity whose normalized key
     * appears in {@code remappedKeys}.
     *
     * @return true if at least one pool reference was changed
     */
    private static boolean rewriteJigsawPoolReferences(CompoundTag root, Map<String, String> remappedKeys) {
        ListTag<?> palette = root.getListTag("palette");
        ListTag<?> blocks = root.getListTag("blocks");
        if (palette == null || blocks == null || palette.size() <= 0 || blocks.size() <= 0) {
            return false;
        }
        // Palette indices whose block state is minecraft:jigsaw.
        Set<Integer> jigsawStates = new HashSet<>();
        for (int paletteIndex = 0; paletteIndex < palette.size(); paletteIndex++) {
            Object paletteRaw = palette.get(paletteIndex);
            if (!(paletteRaw instanceof CompoundTag paletteEntry)) {
                continue;
            }
            if ("minecraft:jigsaw".equalsIgnoreCase(paletteEntry.getString("Name"))) {
                jigsawStates.add(paletteIndex);
            }
        }
        if (jigsawStates.isEmpty()) {
            return false;
        }
        boolean rewritten = false;
        for (Object blockRaw : blocks.getValue()) {
            if (!(blockRaw instanceof CompoundTag blockTag)) {
                continue;
            }
            Integer stateIndex = tagToInt(blockTag.get("state"));
            if (stateIndex == null || !jigsawStates.contains(stateIndex)) {
                continue;
            }
            CompoundTag blockNbt = blockTag.getCompoundTag("nbt");
            if (blockNbt == null || blockNbt.size() <= 0) {
                continue;
            }
            String poolValue = blockNbt.getString("pool");
            if (poolValue == null || poolValue.isBlank()) {
                continue;
            }
            String normalizedPool = normalizeResourceKey(poolValue);
            if (normalizedPool == null || normalizedPool.isBlank()) {
                continue;
            }
            String remappedPool = remappedKeys.get(normalizedPool);
            if (remappedPool == null || remappedPool.isBlank()) {
                continue;
            }
            blockNbt.putString("pool", remappedPool);
            rewritten = true;
        }
        return rewritten;
    }

    /**
     * Extracts an int from any numeric NBT tag; returns null when the tag is
     * absent or carries no numeric value.
     */
    private static Integer tagToInt(Tag<?> tag) {
        if (tag == null) {
            return null;
        }
        if (tag instanceof IntTag intTag) {
            return intTag.asInt();
        }
        if (tag instanceof ShortTag shortTag) {
            return (int) shortTag.asShort();
        }
        if (tag instanceof ByteTag byteTag) {
            return (int) byteTag.asByte();
        }
        if (tag instanceof NumberTag<?> numberTag) {
            Number value = numberTag.getValue();
            if (value != null) {
                return value.intValue();
            }
        }
        Object value = tag.getValue();
        if (value instanceof Number number) {
            return number.intValue();
        }
        return null;
    }

    /**
     * Normalizes a resource key to "namespace:path" form: strips a leading '#'
     * (tag marker), defaults the namespace to "minecraft", removes a
     * "worldgen/template_pool/" prefix, normalizes separators and trims
     * leading/trailing slashes.
     *
     * @return normalized key, "" when the path collapses to nothing, or null for null input
     */
    private static String normalizeResourceKey(String value) {
        if (value == null) {
            return null;
        }
        String normalized = value.trim();
        if (normalized.isEmpty()) {
            return "";
        }
        if (normalized.charAt(0) == '#') {
            normalized = normalized.substring(1);
        }
        String namespace = "minecraft";
        String path = normalized;
        int separator = normalized.indexOf(':');
        if (separator >= 0) {
            // Locale.ROOT: resource keys are machine identifiers, so casing must
            // not depend on the default locale (e.g. Turkish dotless-i).
            namespace = normalized.substring(0, separator).trim().toLowerCase(Locale.ROOT);
            path = normalized.substring(separator + 1).trim();
        }
        if (path.startsWith("worldgen/template_pool/")) {
            path = path.substring("worldgen/template_pool/".length());
        }
        path = path.replace('\\', '/');
        while (path.startsWith("/")) {
            path = path.substring(1);
        }
        while (path.endsWith("/")) {
            path = path.substring(0, path.length() - 1);
        }
        if (path.isEmpty()) {
            return "";
        }
        return namespace + ":" + path;
    }

    /**
     * Reads a named NBT tag, trying uncompressed first and falling back to
     * compressed, remembering which mode succeeded so the rewrite can be
     * written back in the same format.
     */
    private static NbtReadResult readNamedTagWithCompression(byte[] bytes) throws IOException {
        IOException primary = null;
        try {
            NamedTag uncompressed = new NBTDeserializer(false).fromStream(new ByteArrayInputStream(bytes));
            return new NbtReadResult(uncompressed, false);
        } catch (IOException e) {
            primary = e;
        }
        try {
            NamedTag compressed = new NBTDeserializer(true).fromStream(new ByteArrayInputStream(bytes));
            return new NbtReadResult(compressed, true);
        } catch (IOException e) {
            // Surface the uncompressed failure as a suppressed cause for debugging.
            if (primary != null) {
                e.addSuppressed(primary);
            }
            throw e;
        }
    }

    /** Serializes the named tag, compressing when the source was compressed. */
    private static byte[] writeNamedTag(NamedTag namedTag, boolean compressed) throws IOException {
        return new NBTSerializer(compressed).toBytes(namedTag);
    }

    /** Parsed NBT plus whether the source bytes were compressed. */
    private record NbtReadResult(NamedTag namedTag, boolean compressed) {
    }
}

View File

@@ -173,9 +173,13 @@ public class IrisPregenerator {
init(); init();
ticker.start(); ticker.start();
checkRegions(); checkRegions();
var p = PrecisionStopwatch.start(); PrecisionStopwatch p = PrecisionStopwatch.start();
task.iterateRegions((x, z) -> visitRegion(x, z, true)); task.iterateRegions((x, z) -> visitRegion(x, z, true));
task.iterateRegions((x, z) -> visitRegion(x, z, false)); if (generator.isAsyncChunkMode()) {
visitChunksInterleaved();
} else {
task.iterateRegions((x, z) -> visitRegion(x, z, false));
}
Iris.info("Pregen took " + Form.duration((long) p.getMilliseconds())); Iris.info("Pregen took " + Form.duration((long) p.getMilliseconds()));
shutdown(); shutdown();
if (benchmarking == null) { if (benchmarking == null) {
@@ -260,6 +264,46 @@ public class IrisPregenerator {
generator.supportsRegions(x, z, listener); generator.supportsRegions(x, z, listener);
} }
// Interleaved (round-robin across regions) chunk visitation, used when the
// generator backend supports fully async chunk generation. Region lifecycle
// callbacks (generating/generated/skipped) are synthesized from the
// firstChunkInRegion / lastChunkInRegion flags supplied by PregenTask.
private void visitChunksInterleaved() {
task.iterateAllChunksInterleaved((regionX, regionZ, chunkX, chunkZ, firstChunkInRegion, lastChunkInRegion) -> {
// Hold here while paused; a shutdown breaks the wait so we can bail out.
while (paused.get() && !shutdown.get()) {
J.sleep(50);
}
Position2 regionPos = new Position2(regionX, regionZ);
if (shutdown.get()) {
// Mark the current region skipped exactly once, then abort the whole
// iteration by returning false.
if (!generatedRegions.contains(regionPos)) {
listener.onRegionSkipped(regionX, regionZ);
generatedRegions.add(regionPos);
}
return false;
}
if (generatedRegions.contains(regionPos)) {
// Region already completed (or skipped) earlier; keep iterating others.
return true;
}
if (firstChunkInRegion) {
currentGeneratorMethod.set(generator.getMethod(regionX, regionZ));
listener.onRegionGenerating(regionX, regionZ);
}
generator.generateChunk(chunkX, chunkZ, listener);
if (lastChunkInRegion) {
listener.onRegionGenerated(regionX, regionZ);
// saveLatch rate-limits saves; flip() returns true when a save is due.
if (saveLatch.flip()) {
listener.onSaving();
generator.save();
}
generatedRegions.add(regionPos);
checkRegions();
}
return true;
});
}
public void pause() { public void pause() {
paused.set(true); paused.set(true);
} }

View File

@@ -97,6 +97,54 @@ public class PregenTask {
iterateRegions(((rX, rZ) -> iterateChunks(rX, rZ, s))); iterateRegions(((rX, rZ) -> iterateChunks(rX, rZ, s)));
} }
/**
 * Iterates every chunk of every region in a round-robin fashion: one chunk
 * from each region per pass, so generation is interleaved across regions
 * instead of draining one region at a time. Stops early when the callback
 * returns false.
 */
public void iterateAllChunksInterleaved(InterleavedChunkSpiraled spiraled) {
    if (spiraled == null) {
        return;
    }
    // Snapshot each region's chunk order behind a cursor.
    KList<RegionChunkCursor> pending = new KList<>();
    iterateRegions((rX, rZ) -> {
        KList<Position2> regionChunks = new KList<>();
        iterateChunks(rX, rZ, (cX, cZ) -> regionChunks.add(new Position2(cX, cZ)));
        if (!regionChunks.isEmpty()) {
            pending.add(new RegionChunkCursor(rX, rZ, regionChunks));
        }
    });
    // Keep sweeping the cursors until no cursor produced a chunk in a pass.
    boolean advanced = true;
    while (advanced) {
        advanced = false;
        for (RegionChunkCursor cursor : pending) {
            if (!cursor.hasNext()) {
                continue;
            }
            advanced = true;
            Position2 next = cursor.next();
            if (next == null) {
                continue;
            }
            // Index is post-increment: 1 means this was the region's first chunk.
            boolean first = cursor.getIndex() == 1;
            boolean last = !cursor.hasNext();
            if (!spiraled.on(cursor.getRegionX(), cursor.getRegionZ(), next.getX(), next.getZ(), first, last)) {
                return;
            }
        }
    }
}
@FunctionalInterface
public interface InterleavedChunkSpiraled {
/**
 * Visits one chunk during interleaved iteration.
 *
 * @param regionX            region X of the chunk's owning region
 * @param regionZ            region Z of the chunk's owning region
 * @param chunkX             chunk X coordinate
 * @param chunkZ             chunk Z coordinate
 * @param firstChunkInRegion true when this is the first chunk visited for the region
 * @param lastChunkInRegion  true when this is the last chunk remaining for the region
 * @return true to continue iterating, false to abort the whole iteration
 */
boolean on(int regionX, int regionZ, int chunkX, int chunkZ, boolean firstChunkInRegion, boolean lastChunkInRegion);
}
private class Bounds { private class Bounds {
private Bound chunk = null; private Bound chunk = null;
private Bound region = null; private Bound region = null;
@@ -147,4 +195,44 @@ public class PregenTask {
throw new IllegalStateException("This Position2 may not be modified"); throw new IllegalStateException("This Position2 may not be modified");
} }
} }
/**
 * Tracks interleaved iteration progress through one region's chunk list.
 * The index is the count of chunks already handed out.
 */
private static final class RegionChunkCursor {
    private final int regionX;
    private final int regionZ;
    private final KList<Position2> chunks;
    private int index;

    private RegionChunkCursor(int regionX, int regionZ, KList<Position2> chunks) {
        this.regionX = regionX;
        this.regionZ = regionZ;
        this.chunks = chunks;
        this.index = 0;
    }

    /** True while chunks remain to be handed out. */
    private boolean hasNext() {
        return index < chunks.size();
    }

    /** Returns the next chunk and advances, or null when exhausted. */
    private Position2 next() {
        return hasNext() ? chunks.get(index++) : null;
    }

    private int getRegionX() {
        return regionX;
    }

    private int getRegionZ() {
        return regionZ;
    }

    /** Number of chunks already consumed (1 right after the first next()). */
    private int getIndex() {
        return index;
    }
}
} }

View File

@@ -59,6 +59,10 @@ public interface PregeneratorMethod {
*/ */
String getMethod(int x, int z); String getMethod(int x, int z);
/**
 * Whether this method can generate individual chunks asynchronously, which
 * lets the pregenerator interleave chunks across regions instead of
 * processing region-by-region. Defaults to false.
 */
default boolean isAsyncChunkMode() {
return false;
}
/** /**
* Called to generate a region. Execute sync, if multicore internally, wait * Called to generate a region. Execute sync, if multicore internally, wait
* for the task to complete * for the task to complete

View File

@@ -55,6 +55,10 @@ public class AsyncPregenMethod implements PregeneratorMethod {
private final boolean foliaRuntime; private final boolean foliaRuntime;
private final String backendMode; private final String backendMode;
private final int workerPoolThreads; private final int workerPoolThreads;
private final int runtimeCpuThreads;
private final int effectiveWorkerThreads;
private final int recommendedRuntimeConcurrencyCap;
private final int configuredMaxConcurrency;
private final Executor executor; private final Executor executor;
private final Semaphore semaphore; private final Semaphore semaphore;
private final int threads; private final int threads;
@@ -86,6 +90,10 @@ public class AsyncPregenMethod implements PregeneratorMethod {
IrisSettings.IrisSettingsPregen pregen = IrisSettings.get().getPregen(); IrisSettings.IrisSettingsPregen pregen = IrisSettings.get().getPregen();
this.runtimeSchedulerMode = IrisRuntimeSchedulerMode.resolve(pregen); this.runtimeSchedulerMode = IrisRuntimeSchedulerMode.resolve(pregen);
this.foliaRuntime = runtimeSchedulerMode == IrisRuntimeSchedulerMode.FOLIA; this.foliaRuntime = runtimeSchedulerMode == IrisRuntimeSchedulerMode.FOLIA;
int detectedWorkerPoolThreads = resolveWorkerPoolThreads();
int detectedCpuThreads = Math.max(1, Runtime.getRuntime().availableProcessors());
int configuredWorldGenThreads = Math.max(1, IrisSettings.get().getConcurrency().getWorldGenThreads());
int workerThreadsForCap = Math.max(detectedCpuThreads, Math.max(configuredWorldGenThreads, Math.max(1, detectedWorkerPoolThreads)));
if (foliaRuntime) { if (foliaRuntime) {
this.paperLikeBackendMode = IrisPaperLikeBackendMode.AUTO; this.paperLikeBackendMode = IrisPaperLikeBackendMode.AUTO;
this.backendMode = "folia-region"; this.backendMode = "folia-region";
@@ -100,14 +108,19 @@ public class AsyncPregenMethod implements PregeneratorMethod {
this.backendMode = "paper-ticket"; this.backendMode = "paper-ticket";
} }
} }
int configuredThreads = pregen.getMaxConcurrency(); int configuredThreads = applyRuntimeConcurrencyCap(
if (foliaRuntime) { pregen.getMaxConcurrency(),
configuredThreads = Math.min(configuredThreads, pregen.getFoliaMaxConcurrency()); foliaRuntime,
} else { workerThreadsForCap
configuredThreads = Math.min(configuredThreads, resolvePaperLikeConcurrencyCap(pregen.getPaperLikeMaxConcurrency())); );
} this.configuredMaxConcurrency = Math.max(1, pregen.getMaxConcurrency());
this.threads = Math.max(1, configuredThreads); this.threads = Math.max(1, configuredThreads);
this.workerPoolThreads = resolveWorkerPoolThreads(); this.workerPoolThreads = detectedWorkerPoolThreads;
this.runtimeCpuThreads = detectedCpuThreads;
this.effectiveWorkerThreads = workerThreadsForCap;
this.recommendedRuntimeConcurrencyCap = foliaRuntime
? computeFoliaRecommendedCap(workerThreadsForCap)
: computePaperLikeRecommendedCap(workerThreadsForCap);
this.semaphore = new Semaphore(this.threads, true); this.semaphore = new Semaphore(this.threads, true);
this.timeoutSeconds = pregen.getChunkLoadTimeoutSeconds(); this.timeoutSeconds = pregen.getChunkLoadTimeoutSeconds();
this.timeoutWarnIntervalMs = pregen.getTimeoutWarnIntervalMs(); this.timeoutWarnIntervalMs = pregen.getTimeoutWarnIntervalMs();
@@ -267,8 +280,40 @@ public class AsyncPregenMethod implements PregeneratorMethod {
} }
} }
private int resolvePaperLikeConcurrencyCap(int configuredCap) { static int computePaperLikeRecommendedCap(int workerThreads) {
return Math.max(8, configuredCap); int normalizedWorkers = Math.max(1, workerThreads);
int recommendedCap = normalizedWorkers * 2;
if (recommendedCap < 8) {
return 8;
}
if (recommendedCap > 96) {
return 96;
}
return recommendedCap;
}
/**
 * Recommended in-flight chunk concurrency for a Folia runtime: four tasks
 * per worker thread, clamped to the [64, 192] band.
 */
static int computeFoliaRecommendedCap(int workerThreads) {
    int workers = Math.max(1, workerThreads);
    return Math.min(192, Math.max(64, workers * 4));
}
static int applyRuntimeConcurrencyCap(int maxConcurrency, boolean foliaRuntime, int workerThreads) {
int normalizedMaxConcurrency = Math.max(1, maxConcurrency);
int recommendedCap = foliaRuntime
? computeFoliaRecommendedCap(workerThreads)
: computePaperLikeRecommendedCap(workerThreads);
return Math.min(normalizedMaxConcurrency, recommendedCap);
} }
private String metricsSnapshot() { private String metricsSnapshot() {
@@ -365,6 +410,10 @@ public class AsyncPregenMethod implements PregeneratorMethod {
+ ", threads=" + threads + ", threads=" + threads
+ ", adaptiveLimit=" + adaptiveInFlightLimit.get() + ", adaptiveLimit=" + adaptiveInFlightLimit.get()
+ ", workerPoolThreads=" + workerPoolThreads + ", workerPoolThreads=" + workerPoolThreads
+ ", cpuThreads=" + runtimeCpuThreads
+ ", effectiveWorkerThreads=" + effectiveWorkerThreads
+ ", maxConcurrency=" + configuredMaxConcurrency
+ ", recommendedCap=" + recommendedRuntimeConcurrencyCap
+ ", urgent=" + urgent + ", urgent=" + urgent
+ ", timeout=" + timeoutSeconds + "s"); + ", timeout=" + timeoutSeconds + "s");
unloadAndSaveAllChunks(); unloadAndSaveAllChunks();
@@ -376,6 +425,11 @@ public class AsyncPregenMethod implements PregeneratorMethod {
return "Async"; return "Async";
} }
// This backend loads chunks through async futures, so the pregenerator may
// interleave chunk requests across regions (see PregeneratorMethod default).
@Override
public boolean isAsyncChunkMode() {
return true;
}
@Override @Override
public void close() { public void close() {
semaphore.acquireUninterruptibly(threads); semaphore.acquireUninterruptibly(threads);
@@ -492,35 +546,47 @@ public class AsyncPregenMethod implements PregeneratorMethod {
private class FoliaRegionExecutor implements Executor { private class FoliaRegionExecutor implements Executor {
@Override @Override
public void generate(int x, int z, PregenListener listener) { public void generate(int x, int z, PregenListener listener) {
try {
PaperLib.getChunkAtAsync(world, x, z, true, urgent)
.orTimeout(timeoutSeconds, TimeUnit.SECONDS)
.whenComplete((chunk, throwable) -> completeFoliaChunk(x, z, listener, chunk, throwable));
return;
} catch (Throwable ignored) {
}
if (!J.runRegion(world, x, z, () -> PaperLib.getChunkAtAsync(world, x, z, true, urgent) if (!J.runRegion(world, x, z, () -> PaperLib.getChunkAtAsync(world, x, z, true, urgent)
.orTimeout(timeoutSeconds, TimeUnit.SECONDS) .orTimeout(timeoutSeconds, TimeUnit.SECONDS)
.whenComplete((chunk, throwable) -> { .whenComplete((chunk, throwable) -> completeFoliaChunk(x, z, listener, chunk, throwable)))) {
boolean success = false;
try {
if (throwable != null) {
onChunkFutureFailure(x, z, throwable);
return;
}
listener.onChunkGenerated(x, z);
listener.onChunkCleaned(x, z);
if (chunk != null) {
lastUse.put(chunk, M.ms());
}
success = true;
} catch (Throwable e) {
Iris.reportError(e);
e.printStackTrace();
} finally {
markFinished(success);
semaphore.release();
}
}))) {
markFinished(false); markFinished(false);
semaphore.release(); semaphore.release();
Iris.warn("Failed to schedule Folia region pregen task at " + x + "," + z + ". " + metricsSnapshot()); Iris.warn("Failed to schedule Folia region pregen task at " + x + "," + z + ". " + metricsSnapshot());
} }
} }
// Completion handler for a Folia async chunk load. The finally block
// guarantees the semaphore permit is released and the task is reported as
// finished exactly once, regardless of the outcome.
private void completeFoliaChunk(int x, int z, PregenListener listener, Chunk chunk, Throwable throwable) {
boolean success = false;
try {
if (throwable != null) {
onChunkFutureFailure(x, z, throwable);
return;
}
if (chunk == null) {
// NOTE(review): a null chunk without a throwable is counted as a silent
// failure (success stays false, no log) — confirm this is intended.
return;
}
listener.onChunkGenerated(x, z);
listener.onChunkCleaned(x, z);
// Track last-use time so the unload sweep can reclaim this chunk later.
lastUse.put(chunk, M.ms());
success = true;
} catch (Throwable e) {
Iris.reportError(e);
e.printStackTrace();
} finally {
markFinished(success);
semaphore.release();
}
}
} }
private class ServiceExecutor implements Executor { private class ServiceExecutor implements Executor {

View File

@@ -56,8 +56,13 @@ import org.bukkit.entity.Player;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.lang.reflect.Field; import java.lang.reflect.Field;
import java.time.Duration;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet; import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale; import java.util.Locale;
import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeUnit;
@@ -74,6 +79,9 @@ import static art.arcane.iris.util.common.misc.ServerProperties.BUKKIT_YML;
@Data @Data
@Accessors(fluent = true, chain = true) @Accessors(fluent = true, chain = true)
public class IrisCreator { public class IrisCreator {
private static final int STUDIO_PREWARM_RADIUS_CHUNKS = 1;
private static final Duration STUDIO_PREWARM_TIMEOUT = Duration.ofSeconds(45L);
/** /**
* Specify an area to pregenerate during creation * Specify an area to pregenerate during creation
*/ */
@@ -254,6 +262,7 @@ public class IrisCreator {
if (studioEntryLocation == null) { if (studioEntryLocation == null) {
sender.sendMessage(C.YELLOW + "Studio opened, but entry location could not be resolved safely."); sender.sendMessage(C.YELLOW + "Studio opened, but entry location could not be resolved safely.");
} else { } else {
prewarmStudioEntryChunks(world, studioEntryLocation, STUDIO_PREWARM_RADIUS_CHUNKS, STUDIO_PREWARM_TIMEOUT);
CompletableFuture<Boolean> teleportFuture = PaperLib.teleportAsync(senderPlayer, studioEntryLocation); CompletableFuture<Boolean> teleportFuture = PaperLib.teleportAsync(senderPlayer, studioEntryLocation);
if (teleportFuture != null) { if (teleportFuture != null) {
teleportFuture.thenAccept(success -> { teleportFuture.thenAccept(success -> {
@@ -497,6 +506,197 @@ public class IrisCreator {
return true; return true;
} }
/**
 * Synchronously loads the chunks around the studio entry location before the
 * player is teleported, polling async chunk futures until they all complete
 * or the timeout elapses.
 *
 * @param world        target world (must not be null)
 * @param entry        entry location whose surrounding chunks are prewarmed
 * @param radiusChunks chunk radius around the entry chunk (clamped to >= 0 downstream)
 * @param timeout      total time budget for all chunk loads
 * @throws IrisException on null inputs, an unschedulable chunk load, a chunk
 *                       resolving to null, a load failure, or timeout (the
 *                       message carries diagnostics)
 */
private void prewarmStudioEntryChunks(World world, Location entry, int radiusChunks, Duration timeout) throws IrisException {
if (world == null || entry == null) {
throw new IrisException("Studio prewarm failed: world or entry location is null.");
}
// Block coordinates -> chunk coordinates.
int centerChunkX = entry.getBlockX() >> 4;
int centerChunkZ = entry.getBlockZ() >> 4;
List<StudioChunkCoordinate> chunkTargets = resolveStudioPrewarmTargets(centerChunkX, centerChunkZ, radiusChunks);
if (chunkTargets.isEmpty()) {
throw new IrisException("Studio prewarm failed: no target chunks were resolved.");
}
// Snapshot how many targets were already loaded, for later diagnostics.
int loadedBefore = 0;
Map<StudioChunkCoordinate, CompletableFuture<Chunk>> futures = new LinkedHashMap<>();
for (StudioChunkCoordinate coordinate : chunkTargets) {
if (world.isChunkLoaded(coordinate.getX(), coordinate.getZ())) {
loadedBefore++;
}
CompletableFuture<Chunk> chunkFuture = PaperLib.getChunkAtAsync(world, coordinate.getX(), coordinate.getZ(), true);
if (chunkFuture == null) {
throw new IrisException("Studio prewarm failed: async chunk future was null for " + coordinate + ".");
}
futures.put(coordinate, chunkFuture);
}
int total = chunkTargets.size();
int completed = 0;
Set<StudioChunkCoordinate> remaining = new LinkedHashSet<>(chunkTargets);
long startNanos = System.nanoTime();
long timeoutNanos = Math.max(1L, timeout.toNanos());
// Progress for this phase is mapped onto the 0.88..0.92 band.
reportStudioProgress(0.88D, "Prewarming entry chunks (0/" + total + ")");
while (!remaining.isEmpty()) {
long elapsedNanos = System.nanoTime() - startNanos;
if (elapsedNanos >= timeoutNanos) {
StudioPrewarmDiagnostics diagnostics = buildStudioPrewarmDiagnostics(world, chunkTargets, remaining, loadedBefore, elapsedNanos);
throw new IrisException("Studio prewarm timed out: " + diagnostics.toMessage());
}
boolean progressed = false;
List<StudioChunkCoordinate> completedCoordinates = new ArrayList<>();
for (StudioChunkCoordinate coordinate : remaining) {
CompletableFuture<Chunk> chunkFuture = futures.get(coordinate);
if (chunkFuture == null || !chunkFuture.isDone()) {
continue;
}
try {
// Future is done, so get() returns (or throws) immediately.
// NOTE(review): an InterruptedException from get() is wrapped below
// without restoring the interrupt flag — confirm that is acceptable.
Chunk loadedChunk = chunkFuture.get();
if (loadedChunk == null) {
throw new IrisException("Studio prewarm failed: chunk " + coordinate + " resolved to null.");
}
} catch (IrisException e) {
throw e;
} catch (Throwable e) {
throw new IrisException("Studio prewarm failed while loading chunk " + coordinate + ".", e);
}
completedCoordinates.add(coordinate);
progressed = true;
}
if (!completedCoordinates.isEmpty()) {
// Remove outside the scan loop to avoid mutating the set mid-iteration.
for (StudioChunkCoordinate completedCoordinate : completedCoordinates) {
remaining.remove(completedCoordinate);
}
completed += completedCoordinates.size();
double ratio = (double) completed / (double) total;
reportStudioProgress(0.88D + (0.04D * ratio), "Prewarming entry chunks (" + completed + "/" + total + ")");
}
if (!progressed) {
// Nothing finished this pass; back off briefly before polling again.
J.sleep(20);
}
}
long elapsedNanos = System.nanoTime() - startNanos;
StudioPrewarmDiagnostics diagnostics = buildStudioPrewarmDiagnostics(world, chunkTargets, new LinkedHashSet<>(), loadedBefore, elapsedNanos);
Iris.info("Studio prewarm complete: " + diagnostics.toMessage());
}
/**
 * Builds a diagnostics snapshot for a finished (or timed-out) prewarm pass:
 * elapsed time, loaded-chunk counts before/after, and any timed-out chunks.
 */
private StudioPrewarmDiagnostics buildStudioPrewarmDiagnostics(
        World world,
        List<StudioChunkCoordinate> chunkTargets,
        Set<StudioChunkCoordinate> timedOutChunks,
        int loadedBefore,
        long elapsedNanos
) {
    // Count how many target chunks are resident now that the pass ended.
    int loadedAfter = 0;
    for (StudioChunkCoordinate target : chunkTargets) {
        if (world.isChunkLoaded(target.getX(), target.getZ())) {
            loadedAfter++;
        }
    }
    List<String> timedOut = new ArrayList<>();
    for (StudioChunkCoordinate stuck : timedOutChunks) {
        timedOut.add(stuck.toString());
    }
    long elapsedMs = TimeUnit.NANOSECONDS.toMillis(Math.max(0L, elapsedNanos));
    int generatedDuring = Math.max(0, loadedAfter - loadedBefore);
    return new StudioPrewarmDiagnostics(elapsedMs, loadedBefore, loadedAfter, generatedDuring, timedOut);
}
/**
 * Resolves the chunk coordinates to prewarm: the center chunk first, then the
 * surrounding square ring of the given radius (radius clamped to >= 0).
 */
private List<StudioChunkCoordinate> resolveStudioPrewarmTargets(int centerChunkX, int centerChunkZ, int radiusChunks) {
    int radius = Math.max(0, radiusChunks);
    List<StudioChunkCoordinate> targets = new ArrayList<>();
    // Center goes first so it is requested before the surrounding ring.
    targets.add(new StudioChunkCoordinate(centerChunkX, centerChunkZ));
    for (int dx = -radius; dx <= radius; dx++) {
        for (int dz = -radius; dz <= radius; dz++) {
            if (dx != 0 || dz != 0) {
                targets.add(new StudioChunkCoordinate(centerChunkX + dx, centerChunkZ + dz));
            }
        }
    }
    return targets;
}
/**
 * Immutable chunk-grid coordinate, usable as a map/set key during prewarm.
 * toString() renders as "x,z" and is used in diagnostics messages.
 */
private static final class StudioChunkCoordinate {
    private final int x;
    private final int z;

    private StudioChunkCoordinate(int x, int z) {
        this.x = x;
        this.z = z;
    }

    private int getX() {
        return x;
    }

    private int getZ() {
        return z;
    }

    @Override
    public boolean equals(Object other) {
        if (this == other) {
            return true;
        }
        return other instanceof StudioChunkCoordinate coordinate
                && x == coordinate.x
                && z == coordinate.z;
    }

    @Override
    public int hashCode() {
        return 31 * x + z;
    }

    @Override
    public String toString() {
        return x + "," + z;
    }
}
/**
 * Immutable summary of a studio entry-chunk prewarm pass, rendered into log
 * and error messages via toMessage().
 */
private static final class StudioPrewarmDiagnostics {
    private final long elapsedMs;
    private final int loadedBefore;
    private final int loadedAfter;
    private final int generatedDuring;
    private final List<String> timedOutChunks;

    private StudioPrewarmDiagnostics(long elapsedMs, int loadedBefore, int loadedAfter, int generatedDuring, List<String> timedOutChunks) {
        this.elapsedMs = elapsedMs;
        this.loadedBefore = loadedBefore;
        this.loadedAfter = loadedAfter;
        this.generatedDuring = generatedDuring;
        // Defensive copy keeps the diagnostics immutable after construction.
        this.timedOutChunks = new ArrayList<>(timedOutChunks);
    }

    /** Renders the diagnostics as a single key=value message fragment. */
    private String toMessage() {
        StringBuilder message = new StringBuilder();
        message.append("elapsedMs=").append(elapsedMs)
                .append(", loadedBefore=").append(loadedBefore)
                .append(", loadedAfter=").append(loadedAfter)
                .append(", generatedDuring=").append(generatedDuring)
                .append(", timedOut=").append(timedOutChunks);
        return message.toString();
    }
}
private static boolean containsCreateWorldUnsupportedOperation(Throwable throwable) { private static boolean containsCreateWorldUnsupportedOperation(Throwable throwable) {
Throwable cursor = throwable; Throwable cursor = throwable;
while (cursor != null) { while (cursor != null) {

View File

@@ -19,7 +19,6 @@
package art.arcane.iris.engine; package art.arcane.iris.engine;
import art.arcane.iris.Iris; import art.arcane.iris.Iris;
import art.arcane.iris.core.IrisHotPathMetricsMode;
import art.arcane.iris.core.IrisSettings; import art.arcane.iris.core.IrisSettings;
import art.arcane.iris.core.loader.IrisData; import art.arcane.iris.core.loader.IrisData;
import art.arcane.iris.engine.data.cache.Cache; import art.arcane.iris.engine.data.cache.Cache;
@@ -50,8 +49,6 @@ import java.util.*;
public class IrisComplex implements DataProvider { public class IrisComplex implements DataProvider {
private static final BlockData AIR = Material.AIR.createBlockData(); private static final BlockData AIR = Material.AIR.createBlockData();
private static final NoiseBounds ZERO_NOISE_BOUNDS = new NoiseBounds(0D, 0D); private static final NoiseBounds ZERO_NOISE_BOUNDS = new NoiseBounds(0D, 0D);
private static final int HOT_PATH_METRICS_FLUSH_SIZE = 64;
private static final ThreadLocal<HotPathMetricsState> HOT_PATH_METRICS = ThreadLocal.withInitial(HotPathMetricsState::new);
private RNG rng; private RNG rng;
private double fluidHeight; private double fluidHeight;
private IrisData data; private IrisData data;
@@ -324,11 +321,6 @@ public class IrisComplex implements DataProvider {
return 0; return 0;
} }
IrisSettings.IrisSettingsPregen pregen = IrisSettings.get().getPregen();
IrisHotPathMetricsMode metricsMode = pregen.getHotPathMetricsMode();
HotPathMetricsState metricsState = metricsMode == IrisHotPathMetricsMode.DISABLED ? null : HOT_PATH_METRICS.get();
boolean sampleMetrics = metricsState != null && metricsState.shouldSample(metricsMode, pregen.getHotPathMetricsSampleStride());
long interpolateStartNanos = sampleMetrics ? System.nanoTime() : 0L;
CoordinateBiomeCache sampleCache = new CoordinateBiomeCache(64); CoordinateBiomeCache sampleCache = new CoordinateBiomeCache(64);
IdentityHashMap<IrisBiome, GeneratorBounds> cachedBounds = generatorBounds.get(interpolator); IdentityHashMap<IrisBiome, GeneratorBounds> cachedBounds = generatorBounds.get(interpolator);
IdentityHashMap<IrisBiome, GeneratorBounds> localBounds = new IdentityHashMap<>(8); IdentityHashMap<IrisBiome, GeneratorBounds> localBounds = new IdentityHashMap<>(8);
@@ -350,22 +342,15 @@ public class IrisComplex implements DataProvider {
return ZERO_NOISE_BOUNDS; return ZERO_NOISE_BOUNDS;
}); });
if (sampleMetrics) {
metricsState.recordInterpolate(engine, System.nanoTime() - interpolateStartNanos);
}
double hi = sampledBounds.max(); double hi = sampledBounds.max();
double lo = sampledBounds.min(); double lo = sampledBounds.min();
long generatorStartNanos = sampleMetrics ? System.nanoTime() : 0L;
double d = 0; double d = 0;
for (IrisGenerator i : generators) { for (IrisGenerator i : generators) {
d += M.lerp(lo, hi, i.getHeight(x, z, seed + 239945)); d += M.lerp(lo, hi, i.getHeight(x, z, seed + 239945));
} }
if (sampleMetrics) {
metricsState.recordGenerator(engine, System.nanoTime() - generatorStartNanos);
}
return d / generators.size(); return d / generators.size();
} }
@@ -636,69 +621,6 @@ public class IrisComplex implements DataProvider {
} }
} }
private static class HotPathMetricsState {
private long callCounter;
private long interpolateNanos;
private int interpolateSamples;
private long generatorNanos;
private int generatorSamples;
private boolean shouldSample(IrisHotPathMetricsMode mode, int sampleStride) {
if (mode == IrisHotPathMetricsMode.EXACT) {
return true;
}
long current = callCounter++;
return (current & (sampleStride - 1L)) == 0L;
}
private void recordInterpolate(Engine engine, long nanos) {
if (nanos < 0L) {
return;
}
interpolateNanos += nanos;
interpolateSamples++;
if (interpolateSamples >= HOT_PATH_METRICS_FLUSH_SIZE) {
flushInterpolate(engine);
}
}
private void recordGenerator(Engine engine, long nanos) {
if (nanos < 0L) {
return;
}
generatorNanos += nanos;
generatorSamples++;
if (generatorSamples >= HOT_PATH_METRICS_FLUSH_SIZE) {
flushGenerator(engine);
}
}
private void flushInterpolate(Engine engine) {
if (interpolateSamples <= 0) {
return;
}
double averageMs = (interpolateNanos / (double) interpolateSamples) / 1_000_000D;
engine.getMetrics().getNoiseHeightInterpolate().put(averageMs);
interpolateNanos = 0L;
interpolateSamples = 0;
}
private void flushGenerator(Engine engine) {
if (generatorSamples <= 0) {
return;
}
double averageMs = (generatorNanos / (double) generatorSamples) / 1_000_000D;
engine.getMetrics().getNoiseHeightGenerator().put(averageMs);
generatorNanos = 0L;
generatorSamples = 0;
}
}
public void close() { public void close() {
} }

View File

@@ -708,4 +708,5 @@ public class IrisEngine implements Engine {
} }
return true; return true;
} }
} }

View File

@@ -301,13 +301,17 @@ public class IrisWorldManager extends EngineAssignedWorldManager {
Chunk chunk = world.getChunkAt(chunkX, chunkZ); Chunk chunk = world.getChunkAt(chunkX, chunkZ);
if (IrisSettings.get().getWorld().isPostLoadBlockUpdates()) { if (IrisSettings.get().getWorld().isPostLoadBlockUpdates()) {
if (J.isFolia() && !getMantle().isChunkLoaded(chunkX, chunkZ)) { if (!getMantle().isChunkLoaded(chunkX, chunkZ)) {
warmupMantleChunkAsync(chunkX, chunkZ); warmupMantleChunkAsync(chunkX, chunkZ);
return; return;
} }
getEngine().updateChunk(chunk); getEngine().updateChunk(chunk);
} }
if (!isEntitySpawningEnabledForCurrentWorld()) {
return;
}
if (!IrisSettings.get().getWorld().isMarkerEntitySpawningSystem()) { if (!IrisSettings.get().getWorld().isMarkerEntitySpawningSystem()) {
return; return;
} }
@@ -585,6 +589,10 @@ public class IrisWorldManager extends EngineAssignedWorldManager {
return; return;
} }
if (!isEntitySpawningEnabledForCurrentWorld()) {
return;
}
IrisComplex complex = getEngine().getComplex(); IrisComplex complex = getEngine().getComplex();
if (complex == null) { if (complex == null) {
return; return;
@@ -680,6 +688,14 @@ public class IrisWorldManager extends EngineAssignedWorldManager {
return (initial ? s.getInitialSpawns() : s.getSpawns()).stream(); return (initial ? s.getInitialSpawns() : s.getSpawns()).stream();
} }
private boolean isEntitySpawningEnabledForCurrentWorld() {
if (!getEngine().isStudio()) {
return true;
}
return IrisSettings.get().getStudio().isEnableEntitySpawning();
}
private KList<IrisEntitySpawn> spawnRandomly(List<IrisEntitySpawn> types) { private KList<IrisEntitySpawn> spawnRandomly(List<IrisEntitySpawn> types) {
KList<IrisEntitySpawn> rarityTypes = new KList<>(); KList<IrisEntitySpawn> rarityTypes = new KList<>();
int totalRarity = 0; int totalRarity = 0;

View File

@@ -23,6 +23,7 @@ import art.arcane.iris.engine.framework.EngineAssignedActuator;
import art.arcane.iris.engine.object.IrisBiome; import art.arcane.iris.engine.object.IrisBiome;
import art.arcane.iris.engine.object.IrisRegion; import art.arcane.iris.engine.object.IrisRegion;
import art.arcane.volmlib.util.collection.KList; import art.arcane.volmlib.util.collection.KList;
import art.arcane.iris.util.project.context.ChunkedDataCache;
import art.arcane.iris.util.project.context.ChunkContext; import art.arcane.iris.util.project.context.ChunkContext;
import art.arcane.volmlib.util.documentation.BlockCoordinates; import art.arcane.volmlib.util.documentation.BlockCoordinates;
import art.arcane.iris.util.project.hunk.Hunk; import art.arcane.iris.util.project.hunk.Hunk;
@@ -74,6 +75,11 @@ public class IrisTerrainNormalActuator extends EngineAssignedActuator<BlockData>
*/ */
@BlockCoordinates @BlockCoordinates
public void terrainSliver(int x, int z, int xf, Hunk<BlockData> h, ChunkContext context) { public void terrainSliver(int x, int z, int xf, Hunk<BlockData> h, ChunkContext context) {
terrainSliverOptimized(x, z, xf, h, context);
}
@BlockCoordinates
private void terrainSliverLegacy(int x, int z, int xf, Hunk<BlockData> h, ChunkContext context) {
int zf, realX, realZ, hf, he; int zf, realX, realZ, hf, he;
IrisBiome biome; IrisBiome biome;
IrisRegion region; IrisRegion region;
@@ -159,4 +165,87 @@ public class IrisTerrainNormalActuator extends EngineAssignedActuator<BlockData>
} }
} }
} }
@BlockCoordinates
private void terrainSliverOptimized(int x, int z, int xf, Hunk<BlockData> h, ChunkContext context) {
int chunkHeight = h.getHeight();
int chunkDepth = h.getDepth();
int fluidHeight = getDimension().getFluidHeight();
boolean bedrockEnabled = getDimension().isBedrock();
ChunkedDataCache<IrisBiome> biomeCache = context.getBiome();
ChunkedDataCache<IrisRegion> regionCache = context.getRegion();
ChunkedDataCache<Double> heightCache = context.getHeight();
ChunkedDataCache<BlockData> fluidCache = context.getFluid();
ChunkedDataCache<BlockData> rockCache = context.getRock();
int realX = xf + x;
for (int zf = 0; zf < chunkDepth; zf++) {
int realZ = zf + z;
IrisBiome biome = biomeCache.get(xf, zf);
IrisRegion region = regionCache.get(xf, zf);
int he = (int) Math.round(Math.min(chunkHeight, heightCache.get(xf, zf)));
int hf = Math.round(Math.max(Math.min(chunkHeight, fluidHeight), he));
if (hf < 0) {
continue;
}
int topY = Math.min(hf, chunkHeight - 1);
BlockData fluid = fluidCache.get(xf, zf);
BlockData rock = rockCache.get(xf, zf);
KList<BlockData> blocks = null;
KList<BlockData> fblocks = null;
for (int i = topY; i >= 0; i--) {
if (i == 0 && bedrockEnabled) {
h.set(xf, i, zf, BEDROCK);
lastBedrock = i;
continue;
}
BlockData ore = biome.generateOres(realX, i, realZ, rng, getData(), true);
ore = ore == null ? region.generateOres(realX, i, realZ, rng, getData(), true) : ore;
ore = ore == null ? getDimension().generateOres(realX, i, realZ, rng, getData(), true) : ore;
if (ore != null) {
h.set(xf, i, zf, ore);
continue;
}
if (i > he && i <= hf) {
int fdepth = hf - i;
if (fblocks == null) {
fblocks = biome.generateSeaLayers(realX, realZ, rng, hf - he, getData());
}
if (fblocks.hasIndex(fdepth)) {
h.set(xf, i, zf, fblocks.get(fdepth));
} else {
h.set(xf, i, zf, fluid);
}
continue;
}
if (i <= he) {
int depth = he - i;
if (blocks == null) {
blocks = biome.generateLayers(getDimension(), realX, realZ, rng, he, he, getData(), getComplex());
}
if (blocks.hasIndex(depth)) {
h.set(xf, i, zf, blocks.get(depth));
continue;
}
ore = biome.generateOres(realX, i, realZ, rng, getData(), false);
ore = ore == null ? region.generateOres(realX, i, realZ, rng, getData(), false) : ore;
ore = ore == null ? getDimension().generateOres(realX, i, realZ, rng, getData(), false) : ore;
if (ore != null) {
h.set(xf, i, zf, ore);
} else {
h.set(xf, i, zf, rock);
}
}
}
}
}
} }

View File

@@ -318,7 +318,7 @@ public interface Engine extends DataProvider, Fallible, LootProvider, BlockUpdat
} }
var mantle = getMantle().getMantle(); var mantle = getMantle().getMantle();
if (!mantle.isLoaded(c)) { if (!mantle.isLoaded(c)) {
var msg = "Mantle Chunk " + c.getX() + c.getX() + " is not loaded"; var msg = "Mantle Chunk " + c.getX() + "," + c.getZ() + " is not loaded";
if (W.getStack().getCallerClass().equals(ChunkUpdater.class)) Iris.warn(msg); if (W.getStack().getCallerClass().equals(ChunkUpdater.class)) Iris.warn(msg);
else Iris.debug(msg); else Iris.debug(msg);
return; return;

View File

@@ -170,6 +170,33 @@ public class MantleWriter implements IObjectPlacer, AutoCloseable {
matter.slice(matter.getClass(t)).set(x & 15, y & 15, z & 15, t); matter.slice(matter.getClass(t)).set(x & 15, y & 15, z & 15, t);
} }
public boolean setDataIfAbsent(int x, int y, int z, MatterCavern value) {
if (value == null) {
return false;
}
int cx = x >> 4;
int cz = z >> 4;
if (y < 0 || y >= mantle.getWorldHeight()) {
return false;
}
MantleChunk<Matter> chunk = acquireChunk(cx, cz);
if (chunk == null) {
return false;
}
Matter matter = chunk.getOrCreate(y >> 4);
MatterCavern existing = matter.<MatterCavern>slice(MatterCavern.class).get(x & 15, y & 15, z & 15);
if (existing != null) {
return false;
}
matter.<MatterCavern>slice(MatterCavern.class).set(x & 15, y & 15, z & 15, value);
return true;
}
public <T> T getData(int x, int y, int z, Class<T> type) { public <T> T getData(int x, int y, int z, Class<T> type) {
int cx = x >> 4; int cx = x >> 4;
int cz = z >> 4; int cz = z >> 4;

View File

@@ -25,8 +25,11 @@ import art.arcane.iris.engine.object.IrisCaveFieldModule;
import art.arcane.iris.engine.object.IrisCaveProfile; import art.arcane.iris.engine.object.IrisCaveProfile;
import art.arcane.iris.engine.object.IrisRange; import art.arcane.iris.engine.object.IrisRange;
import art.arcane.iris.util.project.noise.CNG; import art.arcane.iris.util.project.noise.CNG;
import art.arcane.volmlib.util.mantle.runtime.MantleChunk;
import art.arcane.volmlib.util.math.RNG; import art.arcane.volmlib.util.math.RNG;
import art.arcane.volmlib.util.matter.Matter;
import art.arcane.volmlib.util.matter.MatterCavern; import art.arcane.volmlib.util.matter.MatterCavern;
import art.arcane.volmlib.util.matter.MatterSlice;
import art.arcane.volmlib.util.scheduling.PrecisionStopwatch; import art.arcane.volmlib.util.scheduling.PrecisionStopwatch;
import java.util.ArrayList; import java.util.ArrayList;
@@ -48,7 +51,7 @@ public class IrisCaveCarver3D {
private final CNG surfaceBreakDensity; private final CNG surfaceBreakDensity;
private final RNG thresholdRng; private final RNG thresholdRng;
private final ModuleState[] modules; private final ModuleState[] modules;
private final double normalization; private final double inverseNormalization;
private final MatterCavern carveAir; private final MatterCavern carveAir;
private final MatterCavern carveLava; private final MatterCavern carveLava;
private final MatterCavern carveForcedAir; private final MatterCavern carveForcedAir;
@@ -89,7 +92,8 @@ public class IrisCaveCarver3D {
} }
this.modules = moduleStates.toArray(new ModuleState[0]); this.modules = moduleStates.toArray(new ModuleState[0]);
normalization = weight <= 0 ? 1 : weight; double normalization = weight <= 0 ? 1 : weight;
inverseNormalization = 1D / normalization;
hasModules = modules.length > 0; hasModules = modules.length > 0;
} }
@@ -99,7 +103,7 @@ public class IrisCaveCarver3D {
Arrays.fill(scratch.fullWeights, 1D); Arrays.fill(scratch.fullWeights, 1D);
scratch.fullWeightsInitialized = true; scratch.fullWeightsInitialized = true;
} }
return carve(writer, chunkX, chunkZ, scratch.fullWeights, 0D, 0D, null); return carve(writer, chunkX, chunkZ, scratch.fullWeights, 0D, 0D, null, null);
} }
public int carve( public int carve(
@@ -110,7 +114,7 @@ public class IrisCaveCarver3D {
double minWeight, double minWeight,
double thresholdPenalty double thresholdPenalty
) { ) {
return carve(writer, chunkX, chunkZ, columnWeights, minWeight, thresholdPenalty, null); return carve(writer, chunkX, chunkZ, columnWeights, minWeight, thresholdPenalty, null, null);
} }
public int carve( public int carve(
@@ -121,6 +125,19 @@ public class IrisCaveCarver3D {
double minWeight, double minWeight,
double thresholdPenalty, double thresholdPenalty,
IrisRange worldYRange IrisRange worldYRange
) {
return carve(writer, chunkX, chunkZ, columnWeights, minWeight, thresholdPenalty, worldYRange, null);
}
public int carve(
MantleWriter writer,
int chunkX,
int chunkZ,
double[] columnWeights,
double minWeight,
double thresholdPenalty,
IrisRange worldYRange,
int[] precomputedSurfaceHeights
) { ) {
PrecisionStopwatch applyStopwatch = PrecisionStopwatch.start(); PrecisionStopwatch applyStopwatch = PrecisionStopwatch.start();
try { try {
@@ -150,27 +167,38 @@ public class IrisCaveCarver3D {
int surfaceBreakDepth = Math.max(0, profile.getSurfaceBreakDepth()); int surfaceBreakDepth = Math.max(0, profile.getSurfaceBreakDepth());
double surfaceBreakNoiseThreshold = profile.getSurfaceBreakNoiseThreshold(); double surfaceBreakNoiseThreshold = profile.getSurfaceBreakNoiseThreshold();
double surfaceBreakThresholdBoost = Math.max(0, profile.getSurfaceBreakThresholdBoost()); double surfaceBreakThresholdBoost = Math.max(0, profile.getSurfaceBreakThresholdBoost());
int waterMinDepthBelowSurface = Math.max(0, profile.getWaterMinDepthBelowSurface());
boolean waterRequiresFloor = profile.isWaterRequiresFloor();
boolean allowSurfaceBreak = profile.isAllowSurfaceBreak(); boolean allowSurfaceBreak = profile.isAllowSurfaceBreak();
if (maxY < minY) { if (maxY < minY) {
return 0; return 0;
} }
MantleChunk<Matter> chunk = writer.acquireChunk(chunkX, chunkZ);
if (chunk == null) {
return 0;
}
int x0 = chunkX << 4; int x0 = chunkX << 4;
int z0 = chunkZ << 4; int z0 = chunkZ << 4;
int[] columnSurface = scratch.columnSurface;
int[] columnMaxY = scratch.columnMaxY; int[] columnMaxY = scratch.columnMaxY;
int[] surfaceBreakFloorY = scratch.surfaceBreakFloorY; int[] surfaceBreakFloorY = scratch.surfaceBreakFloorY;
boolean[] surfaceBreakColumn = scratch.surfaceBreakColumn; boolean[] surfaceBreakColumn = scratch.surfaceBreakColumn;
double[] columnThreshold = scratch.columnThreshold; double[] columnThreshold = scratch.columnThreshold;
double[] clampedWeights = scratch.clampedColumnWeights;
double[] verticalEdgeFade = prepareVerticalEdgeFadeTable(scratch, minY, maxY);
MatterCavern[] matterByY = prepareMatterByYTable(scratch, minY, maxY);
prepareSectionCaches(scratch, minY, maxY);
for (int lx = 0; lx < 16; lx++) { for (int lx = 0; lx < 16; lx++) {
int x = x0 + lx; int x = x0 + lx;
for (int lz = 0; lz < 16; lz++) { for (int lz = 0; lz < 16; lz++) {
int z = z0 + lz; int z = z0 + lz;
int index = (lx << 4) | lz; int index = (lx << 4) | lz;
int columnSurfaceY = engine.getHeight(x, z); int columnSurfaceY;
if (precomputedSurfaceHeights != null && precomputedSurfaceHeights.length > index) {
columnSurfaceY = precomputedSurfaceHeights[index];
} else {
columnSurfaceY = engine.getHeight(x, z);
}
int clearanceTopY = Math.min(maxY, Math.max(minY, columnSurfaceY - surfaceClearance)); int clearanceTopY = Math.min(maxY, Math.max(minY, columnSurfaceY - surfaceClearance));
boolean breakColumn = allowSurfaceBreak boolean breakColumn = allowSurfaceBreak
&& signed(surfaceBreakDensity.noiseFast2D(x, z)) >= surfaceBreakNoiseThreshold; && signed(surfaceBreakDensity.noiseFast2D(x, z)) >= surfaceBreakNoiseThreshold;
@@ -178,30 +206,30 @@ public class IrisCaveCarver3D {
? Math.min(maxY, Math.max(minY, columnSurfaceY)) ? Math.min(maxY, Math.max(minY, columnSurfaceY))
: clearanceTopY; : clearanceTopY;
columnSurface[index] = columnSurfaceY;
columnMaxY[index] = columnTopY; columnMaxY[index] = columnTopY;
surfaceBreakFloorY[index] = Math.max(minY, columnSurfaceY - surfaceBreakDepth); surfaceBreakFloorY[index] = Math.max(minY, columnSurfaceY - surfaceBreakDepth);
surfaceBreakColumn[index] = breakColumn; surfaceBreakColumn[index] = breakColumn;
columnThreshold[index] = profile.getDensityThreshold().get(thresholdRng, x, z, data) - profile.getThresholdBias(); columnThreshold[index] = profile.getDensityThreshold().get(thresholdRng, x, z, data) - profile.getThresholdBias();
clampedWeights[index] = clampColumnWeight(columnWeights[index]);
} }
} }
int carved = carvePass( int latticeStep = Math.max(2, sampleStep);
writer, int carved = carvePassLattice(
chunk,
x0, x0,
z0, z0,
minY, minY,
maxY, maxY,
sampleStep, latticeStep,
surfaceBreakThresholdBoost, surfaceBreakThresholdBoost,
waterMinDepthBelowSurface,
waterRequiresFloor,
columnSurface,
columnMaxY, columnMaxY,
surfaceBreakFloorY, surfaceBreakFloorY,
surfaceBreakColumn, surfaceBreakColumn,
columnThreshold, columnThreshold,
columnWeights, clampedWeights,
verticalEdgeFade,
matterByY,
resolvedMinWeight, resolvedMinWeight,
resolvedThresholdPenalty, resolvedThresholdPenalty,
0D, 0D,
@@ -211,27 +239,71 @@ public class IrisCaveCarver3D {
int minCarveCells = Math.max(0, profile.getMinCarveCells()); int minCarveCells = Math.max(0, profile.getMinCarveCells());
double recoveryThresholdBoost = Math.max(0, profile.getRecoveryThresholdBoost()); double recoveryThresholdBoost = Math.max(0, profile.getRecoveryThresholdBoost());
if (carved < minCarveCells && recoveryThresholdBoost > 0D) { if (carved < minCarveCells && recoveryThresholdBoost > 0D) {
carved += carvePass( carved += carvePassLattice(
writer, chunk,
x0,
z0,
minY,
maxY,
latticeStep,
surfaceBreakThresholdBoost,
columnMaxY,
surfaceBreakFloorY,
surfaceBreakColumn,
columnThreshold,
clampedWeights,
verticalEdgeFade,
matterByY,
resolvedMinWeight,
resolvedThresholdPenalty,
recoveryThresholdBoost,
true
);
}
if (carved == 0 && hasFallbackCandidates(columnMaxY, clampedWeights, minY, resolvedMinWeight)) {
carved += carvePassFallback(
chunk,
x0, x0,
z0, z0,
minY, minY,
maxY, maxY,
sampleStep, sampleStep,
surfaceBreakThresholdBoost, surfaceBreakThresholdBoost,
waterMinDepthBelowSurface,
waterRequiresFloor,
columnSurface,
columnMaxY, columnMaxY,
surfaceBreakFloorY, surfaceBreakFloorY,
surfaceBreakColumn, surfaceBreakColumn,
columnThreshold, columnThreshold,
columnWeights, clampedWeights,
verticalEdgeFade,
matterByY,
resolvedMinWeight, resolvedMinWeight,
resolvedThresholdPenalty, resolvedThresholdPenalty,
recoveryThresholdBoost, 0D,
true false
); );
if (carved < minCarveCells && recoveryThresholdBoost > 0D) {
carved += carvePassFallback(
chunk,
x0,
z0,
minY,
maxY,
sampleStep,
surfaceBreakThresholdBoost,
columnMaxY,
surfaceBreakFloorY,
surfaceBreakColumn,
columnThreshold,
clampedWeights,
verticalEdgeFade,
matterByY,
resolvedMinWeight,
resolvedThresholdPenalty,
recoveryThresholdBoost,
true
);
}
} }
return carved; return carved;
@@ -240,35 +312,174 @@ public class IrisCaveCarver3D {
} }
} }
private int carvePass( private int carvePassLattice(
MantleWriter writer, MantleChunk<Matter> chunk,
int x0, int x0,
int z0, int z0,
int minY, int minY,
int maxY, int maxY,
int sampleStep, int latticeStep,
double surfaceBreakThresholdBoost, double surfaceBreakThresholdBoost,
int waterMinDepthBelowSurface,
boolean waterRequiresFloor,
int[] columnSurface,
int[] columnMaxY, int[] columnMaxY,
int[] surfaceBreakFloorY, int[] surfaceBreakFloorY,
boolean[] surfaceBreakColumn, boolean[] surfaceBreakColumn,
double[] columnThreshold, double[] columnThreshold,
double[] columnWeights, double[] clampedWeights,
double[] verticalEdgeFade,
MatterCavern[] matterByY,
double minWeight, double minWeight,
double thresholdPenalty, double thresholdPenalty,
double thresholdBoost, double thresholdBoost,
boolean skipExistingCarved boolean skipExistingCarved
) { ) {
int carved = 0; int carved = 0;
Scratch scratch = SCRATCH.get();
double[] passThreshold = scratch.passThreshold;
int[] tileIndices = scratch.tileIndices;
int[] tileLocalX = scratch.tileLocalX;
int[] tileLocalZ = scratch.tileLocalZ;
int[] tileTopY = scratch.tileTopY;
for (int index = 0; index < 256; index++) {
double columnWeight = clampedWeights[index];
if (columnWeight <= minWeight || columnMaxY[index] < minY) {
passThreshold[index] = Double.NaN;
continue;
}
passThreshold[index] = columnThreshold[index] + thresholdBoost - ((1D - columnWeight) * thresholdPenalty);
}
for (int lx = 0; lx < 16; lx += 2) {
int x = x0 + lx;
int lx1 = lx + 1;
for (int lz = 0; lz < 16; lz += 2) {
int z = z0 + lz;
int lz1 = lz + 1;
int activeColumns = 0;
int index00 = (lx << 4) | lz;
if (!Double.isNaN(passThreshold[index00])) {
tileIndices[activeColumns] = index00;
tileLocalX[activeColumns] = lx;
tileLocalZ[activeColumns] = lz;
tileTopY[activeColumns] = columnMaxY[index00];
activeColumns++;
}
int index01 = (lx << 4) | lz1;
if (!Double.isNaN(passThreshold[index01])) {
tileIndices[activeColumns] = index01;
tileLocalX[activeColumns] = lx;
tileLocalZ[activeColumns] = lz1;
tileTopY[activeColumns] = columnMaxY[index01];
activeColumns++;
}
int index10 = (lx1 << 4) | lz;
if (!Double.isNaN(passThreshold[index10])) {
tileIndices[activeColumns] = index10;
tileLocalX[activeColumns] = lx1;
tileLocalZ[activeColumns] = lz;
tileTopY[activeColumns] = columnMaxY[index10];
activeColumns++;
}
int index11 = (lx1 << 4) | lz1;
if (!Double.isNaN(passThreshold[index11])) {
tileIndices[activeColumns] = index11;
tileLocalX[activeColumns] = lx1;
tileLocalZ[activeColumns] = lz1;
tileTopY[activeColumns] = columnMaxY[index11];
activeColumns++;
}
if (activeColumns == 0) {
continue;
}
int tileMaxY = minY;
for (int columnIndex = 0; columnIndex < activeColumns; columnIndex++) {
if (tileTopY[columnIndex] > tileMaxY) {
tileMaxY = tileTopY[columnIndex];
}
}
if (tileMaxY < minY) {
continue;
}
for (int y = minY; y <= tileMaxY; y += latticeStep) {
double density = sampleDensityOptimized(x, y, z);
int stampMaxY = Math.min(maxY, y + 1);
for (int yy = y; yy <= stampMaxY; yy++) {
MatterCavern matter = matterByY[yy - minY];
MatterSlice<MatterCavern> cavernSlice = resolveCavernSlice(scratch, chunk, yy >> 4);
int localY = yy & 15;
int fadeIndex = yy - minY;
for (int columnIndex = 0; columnIndex < activeColumns; columnIndex++) {
if (yy > tileTopY[columnIndex]) {
continue;
}
int index = tileIndices[columnIndex];
double localThreshold = passThreshold[index];
if (surfaceBreakColumn[index] && yy >= surfaceBreakFloorY[index]) {
localThreshold += surfaceBreakThresholdBoost;
}
localThreshold -= verticalEdgeFade[fadeIndex];
if (density > localThreshold) {
continue;
}
int localX = tileLocalX[columnIndex];
int localZ = tileLocalZ[columnIndex];
if (skipExistingCarved) {
if (cavernSlice.get(localX, localY, localZ) == null) {
cavernSlice.set(localX, localY, localZ, matter);
carved++;
}
continue;
}
cavernSlice.set(localX, localY, localZ, matter);
carved++;
}
}
}
}
}
return carved;
}
private int carvePassFallback(
MantleChunk<Matter> chunk,
int x0,
int z0,
int minY,
int maxY,
int sampleStep,
double surfaceBreakThresholdBoost,
int[] columnMaxY,
int[] surfaceBreakFloorY,
boolean[] surfaceBreakColumn,
double[] columnThreshold,
double[] clampedWeights,
double[] verticalEdgeFade,
MatterCavern[] matterByY,
double minWeight,
double thresholdPenalty,
double thresholdBoost,
boolean skipExistingCarved
) {
int carved = 0;
Scratch scratch = SCRATCH.get();
for (int lx = 0; lx < 16; lx++) { for (int lx = 0; lx < 16; lx++) {
int x = x0 + lx; int x = x0 + lx;
for (int lz = 0; lz < 16; lz++) { for (int lz = 0; lz < 16; lz++) {
int z = z0 + lz; int z = z0 + lz;
int index = (lx << 4) | lz; int index = (lx << 4) | lz;
double columnWeight = clampColumnWeight(columnWeights[index]); double columnWeight = clampedWeights[index];
if (columnWeight <= minWeight) { if (columnWeight <= minWeight) {
continue; continue;
} }
@@ -280,7 +491,6 @@ public class IrisCaveCarver3D {
boolean breakColumn = surfaceBreakColumn[index]; boolean breakColumn = surfaceBreakColumn[index];
int breakFloorY = surfaceBreakFloorY[index]; int breakFloorY = surfaceBreakFloorY[index];
int surfaceY = columnSurface[index];
double threshold = columnThreshold[index] + thresholdBoost - ((1D - columnWeight) * thresholdPenalty); double threshold = columnThreshold[index] + thresholdBoost - ((1D - columnWeight) * thresholdPenalty);
for (int y = minY; y <= columnTopY; y += sampleStep) { for (int y = minY; y <= columnTopY; y += sampleStep) {
@@ -289,18 +499,25 @@ public class IrisCaveCarver3D {
localThreshold += surfaceBreakThresholdBoost; localThreshold += surfaceBreakThresholdBoost;
} }
localThreshold = applyVerticalEdgeFade(localThreshold, y, minY, maxY); localThreshold -= verticalEdgeFade[y - minY];
if (sampleDensity(x, y, z) > localThreshold) { if (sampleDensityOptimized(x, y, z) > localThreshold) {
continue; continue;
} }
int carveMaxY = Math.min(columnTopY, y + sampleStep - 1); int carveMaxY = Math.min(columnTopY, y + sampleStep - 1);
for (int yy = y; yy <= carveMaxY; yy++) { for (int yy = y; yy <= carveMaxY; yy++) {
if (skipExistingCarved && writer.isCarved(x, yy, z)) { MatterCavern matter = matterByY[yy - minY];
MatterSlice<MatterCavern> cavernSlice = resolveCavernSlice(scratch, chunk, yy >> 4);
int localY = yy & 15;
if (skipExistingCarved) {
if (cavernSlice.get(lx, localY, lz) == null) {
cavernSlice.set(lx, localY, lz, matter);
carved++;
}
continue; continue;
} }
writer.setData(x, yy, z, resolveMatter(x, yy, z, surfaceY, localThreshold, waterMinDepthBelowSurface, waterRequiresFloor)); cavernSlice.set(lx, localY, lz, matter);
carved++; carved++;
} }
} }
@@ -310,85 +527,163 @@ public class IrisCaveCarver3D {
return carved; return carved;
} }
private double applyVerticalEdgeFade(double threshold, int y, int minY, int maxY) { private boolean hasFallbackCandidates(int[] columnMaxY, double[] clampedWeights, int minY, double minWeight) {
int fadeRange = Math.max(0, profile.getVerticalEdgeFade()); for (int index = 0; index < 256; index++) {
if (fadeRange <= 0 || maxY <= minY) { if (clampedWeights[index] <= minWeight) {
return threshold; continue;
} }
int floorDistance = y - minY; if (columnMaxY[index] >= minY) {
int ceilingDistance = maxY - y; return true;
int edgeDistance = Math.min(floorDistance, ceilingDistance);
if (edgeDistance >= fadeRange) {
return threshold;
}
double t = Math.max(0D, Math.min(1D, edgeDistance / (double) fadeRange));
double smooth = t * t * (3D - (2D * t));
double fadeStrength = Math.max(0D, profile.getVerticalEdgeFadeStrength());
return threshold - ((1D - smooth) * fadeStrength);
}
private double sampleDensity(int x, int y, int z) {
if (!hasWarp && !hasModules) {
double density = signed(baseDensity.noiseFast3D(x, y, z)) * baseWeight;
density += signed(detailDensity.noiseFast3D(x, y, z)) * detailWeight;
return density / normalization;
}
double warpedX = x;
double warpedY = y;
double warpedZ = z;
if (hasWarp) {
double warpA = signed(warpDensity.noiseFast3D(x, y, z));
double warpB = signed(warpDensity.noiseFast3D(x + 31.37D, y - 17.21D, z + 23.91D));
double offsetX = warpA * warpStrength;
double offsetY = warpB * warpStrength;
double offsetZ = (warpA - warpB) * 0.5D * warpStrength;
warpedX += offsetX;
warpedY += offsetY;
warpedZ += offsetZ;
}
double density = signed(baseDensity.noiseFast3D(warpedX, warpedY, warpedZ)) * baseWeight;
density += signed(detailDensity.noiseFast3D(warpedX, warpedY, warpedZ)) * detailWeight;
if (hasModules) {
for (int moduleIndex = 0; moduleIndex < modules.length; moduleIndex++) {
ModuleState module = modules[moduleIndex];
if (y < module.minY || y > module.maxY) {
continue;
}
double moduleDensity = signed(module.density.noiseFast3D(warpedX, warpedY, warpedZ)) - module.threshold;
if (module.invert) {
moduleDensity = -moduleDensity;
}
density += moduleDensity * module.weight;
} }
} }
return density / normalization; return false;
} }
private MatterCavern resolveMatter(int x, int y, int z, int surfaceY, double localThreshold, int waterMinDepthBelowSurface, boolean waterRequiresFloor) { private double sampleDensityOptimized(int x, int y, int z) {
if (!hasWarp) {
if (!hasModules) {
return sampleDensityNoWarpNoModules(x, y, z);
}
return sampleDensityNoWarpModules(x, y, z);
}
if (!hasModules) {
return sampleDensityWarpOnly(x, y, z);
}
return sampleDensityWarpModules(x, y, z);
}
private double sampleDensityNoWarpNoModules(int x, int y, int z) {
double density = signed(baseDensity.noiseFast3D(x, y, z)) * baseWeight;
density += signed(detailDensity.noiseFast3D(x, y, z)) * detailWeight;
return density * inverseNormalization;
}
private double sampleDensityNoWarpModules(int x, int y, int z) {
double density = signed(baseDensity.noiseFast3D(x, y, z)) * baseWeight;
density += signed(detailDensity.noiseFast3D(x, y, z)) * detailWeight;
for (int moduleIndex = 0; moduleIndex < modules.length; moduleIndex++) {
ModuleState module = modules[moduleIndex];
if (y < module.minY || y > module.maxY) {
continue;
}
double moduleDensity = signed(module.density.noiseFast3D(x, y, z)) - module.threshold;
if (module.invert) {
moduleDensity = -moduleDensity;
}
density += moduleDensity * module.weight;
}
return density * inverseNormalization;
}
private double sampleDensityWarpOnly(int x, int y, int z) {
double warpA = signed(warpDensity.noiseFast3D(x, y, z));
double warpB = signed(warpDensity.noiseFast3D(x + 31.37D, y - 17.21D, z + 23.91D));
double warpedX = x + (warpA * warpStrength);
double warpedY = y + (warpB * warpStrength);
double warpedZ = z + ((warpA - warpB) * 0.5D * warpStrength);
double density = signed(baseDensity.noiseFast3D(warpedX, warpedY, warpedZ)) * baseWeight;
density += signed(detailDensity.noiseFast3D(warpedX, warpedY, warpedZ)) * detailWeight;
return density * inverseNormalization;
}
private double sampleDensityWarpModules(int x, int y, int z) {
double warpA = signed(warpDensity.noiseFast3D(x, y, z));
double warpB = signed(warpDensity.noiseFast3D(x + 31.37D, y - 17.21D, z + 23.91D));
double warpedX = x + (warpA * warpStrength);
double warpedY = y + (warpB * warpStrength);
double warpedZ = z + ((warpA - warpB) * 0.5D * warpStrength);
double density = signed(baseDensity.noiseFast3D(warpedX, warpedY, warpedZ)) * baseWeight;
density += signed(detailDensity.noiseFast3D(warpedX, warpedY, warpedZ)) * detailWeight;
for (int moduleIndex = 0; moduleIndex < modules.length; moduleIndex++) {
ModuleState module = modules[moduleIndex];
if (y < module.minY || y > module.maxY) {
continue;
}
double moduleDensity = signed(module.density.noiseFast3D(warpedX, warpedY, warpedZ)) - module.threshold;
if (module.invert) {
moduleDensity = -moduleDensity;
}
density += moduleDensity * module.weight;
}
return density * inverseNormalization;
}
private MatterSlice<MatterCavern> resolveCavernSlice(Scratch scratch, MantleChunk<Matter> chunk, int sectionIndex) {
@SuppressWarnings("unchecked")
MatterSlice<MatterCavern> cachedSlice = (MatterSlice<MatterCavern>) scratch.sectionSlices[sectionIndex];
if (cachedSlice != null) {
return cachedSlice;
}
Matter sectionMatter = scratch.sectionMatter[sectionIndex];
if (sectionMatter == null) {
sectionMatter = chunk.getOrCreate(sectionIndex);
scratch.sectionMatter[sectionIndex] = sectionMatter;
}
MatterSlice<MatterCavern> resolvedSlice = sectionMatter.slice(MatterCavern.class);
scratch.sectionSlices[sectionIndex] = resolvedSlice;
return resolvedSlice;
}
private MatterCavern[] prepareMatterByYTable(Scratch scratch, int minY, int maxY) {
int size = Math.max(0, maxY - minY + 1);
if (scratch.matterByY.length < size) {
scratch.matterByY = new MatterCavern[size];
}
MatterCavern[] matterByY = scratch.matterByY;
boolean allowLava = profile.isAllowLava();
boolean allowWater = profile.isAllowWater();
int lavaHeight = engine.getDimension().getCaveLavaHeight(); int lavaHeight = engine.getDimension().getCaveLavaHeight();
int fluidHeight = engine.getDimension().getFluidHeight(); int fluidHeight = engine.getDimension().getFluidHeight();
if (profile.isAllowLava() && y <= lavaHeight) { for (int y = minY; y <= maxY; y++) {
return carveLava; int offset = y - minY;
if (allowLava && y <= lavaHeight) {
matterByY[offset] = carveLava;
continue;
}
if (allowWater && y <= fluidHeight) {
matterByY[offset] = carveAir;
continue;
}
if (!allowLava && y <= lavaHeight) {
matterByY[offset] = carveForcedAir;
continue;
}
matterByY[offset] = carveAir;
} }
if (profile.isAllowWater() && y <= fluidHeight) { return matterByY;
return carveAir; }
private void prepareSectionCaches(Scratch scratch, int minY, int maxY) {
int minSection = Math.max(0, minY >> 4);
int maxSection = Math.max(minSection, maxY >> 4);
int requiredSections = maxSection + 1;
if (scratch.sectionMatter.length < requiredSections) {
scratch.sectionMatter = new Matter[requiredSections];
scratch.sectionSlices = new MatterSlice<?>[requiredSections];
return;
} }
if (!profile.isAllowLava() && y <= lavaHeight) { for (int section = minSection; section <= maxSection; section++) {
return carveForcedAir; scratch.sectionMatter[section] = null;
scratch.sectionSlices[section] = null;
} }
return carveAir;
} }
private double clampColumnWeight(double weight) { private double clampColumnWeight(double weight) {
@@ -411,6 +706,38 @@ public class IrisCaveCarver3D {
return (value * 2D) - 1D; return (value * 2D) - 1D;
} }
/**
 * Fills (and returns) the scratch table of per-Y edge-fade penalties for the
 * span [minY, maxY]. Entries within fadeRange blocks of either end of the span
 * ramp from fadeStrength (at the edge) down to zero via a smoothstep curve;
 * every other entry is zero. A disabled fade (zero range/strength, or a
 * degenerate span) zeroes the active portion of the table.
 */
private double[] prepareVerticalEdgeFadeTable(Scratch scratch, int minY, int maxY) {
    int size = Math.max(0, maxY - minY + 1);
    if (scratch.verticalEdgeFade.length < size) {
        scratch.verticalEdgeFade = new double[size];
    }
    double[] table = scratch.verticalEdgeFade;
    int fadeRange = Math.max(0, profile.getVerticalEdgeFade());
    double fadeStrength = Math.max(0D, profile.getVerticalEdgeFadeStrength());
    if (size <= 0 || fadeRange <= 0 || maxY <= minY || fadeStrength <= 0D) {
        // Fade disabled or span degenerate: clear only the active slice of the
        // (possibly oversized, reused) scratch array.
        Arrays.fill(table, 0, size, 0D);
        return table;
    }
    for (int offset = 0; offset < size; offset++) {
        // Distance to the nearer of the floor (offset 0) and ceiling (size - 1).
        int edgeDistance = Math.min(offset, (size - 1) - offset);
        if (edgeDistance >= fadeRange) {
            table[offset] = 0D;
            continue;
        }
        double t = Math.max(0D, Math.min(1D, edgeDistance / (double) fadeRange));
        double smooth = t * t * (3D - (2D * t));
        table[offset] = (1D - smooth) * fadeStrength;
    }
    return table;
}
private static final class ModuleState { private static final class ModuleState {
private final CNG density; private final CNG density;
private final int minY; private final int minY;
@@ -431,12 +758,21 @@ public class IrisCaveCarver3D {
} }
private static final class Scratch { private static final class Scratch {
private final int[] columnSurface = new int[256];
private final int[] columnMaxY = new int[256]; private final int[] columnMaxY = new int[256];
private final int[] surfaceBreakFloorY = new int[256]; private final int[] surfaceBreakFloorY = new int[256];
private final boolean[] surfaceBreakColumn = new boolean[256]; private final boolean[] surfaceBreakColumn = new boolean[256];
private final double[] columnThreshold = new double[256]; private final double[] columnThreshold = new double[256];
private final double[] passThreshold = new double[256];
private final double[] fullWeights = new double[256]; private final double[] fullWeights = new double[256];
private final double[] clampedColumnWeights = new double[256];
private final int[] tileIndices = new int[4];
private final int[] tileLocalX = new int[4];
private final int[] tileLocalZ = new int[4];
private final int[] tileTopY = new int[4];
private double[] verticalEdgeFade = new double[0];
private MatterCavern[] matterByY = new MatterCavern[0];
private Matter[] sectionMatter = new Matter[0];
private MatterSlice<?>[] sectionSlices = new MatterSlice<?>[0];
private boolean fullWeightsInitialized; private boolean fullWeightsInitialized;
} }
} }

View File

@@ -36,7 +36,7 @@ import art.arcane.volmlib.util.scheduling.PrecisionStopwatch;
import it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap; import it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Comparator; import java.util.Arrays;
import java.util.IdentityHashMap; import java.util.IdentityHashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@@ -45,15 +45,20 @@ import java.util.Map;
public class MantleCarvingComponent extends IrisMantleComponent { public class MantleCarvingComponent extends IrisMantleComponent {
private static final int CHUNK_SIZE = 16; private static final int CHUNK_SIZE = 16;
private static final int CHUNK_AREA = CHUNK_SIZE * CHUNK_SIZE; private static final int CHUNK_AREA = CHUNK_SIZE * CHUNK_SIZE;
private static final int TILE_SIZE = 2;
private static final int TILE_COUNT = CHUNK_SIZE / TILE_SIZE;
private static final int TILE_AREA = TILE_COUNT * TILE_COUNT;
private static final int BLEND_RADIUS = 3; private static final int BLEND_RADIUS = 3;
private static final int FIELD_SIZE = CHUNK_SIZE + (BLEND_RADIUS * 2); private static final int FIELD_SIZE = CHUNK_SIZE + (BLEND_RADIUS * 2);
private static final double MIN_WEIGHT = 0.08D; private static final double MIN_WEIGHT = 0.08D;
private static final double THRESHOLD_PENALTY = 0.24D; private static final double THRESHOLD_PENALTY = 0.24D;
private static final int MAX_BLENDED_PROFILE_PASSES = 2;
private static final int KERNEL_WIDTH = (BLEND_RADIUS * 2) + 1; private static final int KERNEL_WIDTH = (BLEND_RADIUS * 2) + 1;
private static final int KERNEL_SIZE = KERNEL_WIDTH * KERNEL_WIDTH; private static final int KERNEL_SIZE = KERNEL_WIDTH * KERNEL_WIDTH;
private static final int[] KERNEL_DX = new int[KERNEL_SIZE]; private static final int[] KERNEL_DX = new int[KERNEL_SIZE];
private static final int[] KERNEL_DZ = new int[KERNEL_SIZE]; private static final int[] KERNEL_DZ = new int[KERNEL_SIZE];
private static final double[] KERNEL_WEIGHT = new double[KERNEL_SIZE]; private static final double[] KERNEL_WEIGHT = new double[KERNEL_SIZE];
private static final ThreadLocal<BlendScratch> BLEND_SCRATCH = ThreadLocal.withInitial(BlendScratch::new);
private final Map<IrisCaveProfile, IrisCaveCarver3D> profileCarvers = new IdentityHashMap<>(); private final Map<IrisCaveProfile, IrisCaveCarver3D> profileCarvers = new IdentityHashMap<>();
@@ -78,32 +83,39 @@ public class MantleCarvingComponent extends IrisMantleComponent {
public void generateLayer(MantleWriter writer, int x, int z, ChunkContext context) { public void generateLayer(MantleWriter writer, int x, int z, ChunkContext context) {
IrisDimensionCarvingResolver.State resolverState = new IrisDimensionCarvingResolver.State(); IrisDimensionCarvingResolver.State resolverState = new IrisDimensionCarvingResolver.State();
Long2ObjectOpenHashMap<IrisBiome> caveBiomeCache = new Long2ObjectOpenHashMap<>(FIELD_SIZE * FIELD_SIZE); Long2ObjectOpenHashMap<IrisBiome> caveBiomeCache = new Long2ObjectOpenHashMap<>(FIELD_SIZE * FIELD_SIZE);
BlendScratch blendScratch = BLEND_SCRATCH.get();
int[] chunkSurfaceHeights = prepareChunkSurfaceHeights(x, z, context, blendScratch.chunkSurfaceHeights);
PrecisionStopwatch resolveStopwatch = PrecisionStopwatch.start(); PrecisionStopwatch resolveStopwatch = PrecisionStopwatch.start();
List<WeightedProfile> weightedProfiles = resolveWeightedProfiles(x, z, resolverState, caveBiomeCache); List<WeightedProfile> weightedProfiles = resolveWeightedProfiles(x, z, resolverState, caveBiomeCache);
getEngineMantle().getEngine().getMetrics().getCarveResolve().put(resolveStopwatch.getMilliseconds()); getEngineMantle().getEngine().getMetrics().getCarveResolve().put(resolveStopwatch.getMilliseconds());
for (WeightedProfile weightedProfile : weightedProfiles) { for (WeightedProfile weightedProfile : weightedProfiles) {
carveProfile(weightedProfile, writer, x, z); carveProfile(weightedProfile, writer, x, z, chunkSurfaceHeights);
} }
} }
@ChunkCoordinates @ChunkCoordinates
private void carveProfile(WeightedProfile weightedProfile, MantleWriter writer, int cx, int cz) { private void carveProfile(WeightedProfile weightedProfile, MantleWriter writer, int cx, int cz, int[] chunkSurfaceHeights) {
IrisCaveCarver3D carver = getCarver(weightedProfile.profile); IrisCaveCarver3D carver = getCarver(weightedProfile.profile);
carver.carve(writer, cx, cz, weightedProfile.columnWeights, MIN_WEIGHT, THRESHOLD_PENALTY, weightedProfile.worldYRange); carver.carve(writer, cx, cz, weightedProfile.columnWeights, MIN_WEIGHT, THRESHOLD_PENALTY, weightedProfile.worldYRange, chunkSurfaceHeights);
} }
private List<WeightedProfile> resolveWeightedProfiles(int chunkX, int chunkZ, IrisDimensionCarvingResolver.State resolverState, Long2ObjectOpenHashMap<IrisBiome> caveBiomeCache) { private List<WeightedProfile> resolveWeightedProfiles(int chunkX, int chunkZ, IrisDimensionCarvingResolver.State resolverState, Long2ObjectOpenHashMap<IrisBiome> caveBiomeCache) {
IrisCaveProfile[] profileField = buildProfileField(chunkX, chunkZ, resolverState, caveBiomeCache); BlendScratch blendScratch = BLEND_SCRATCH.get();
Map<IrisCaveProfile, double[]> profileWeights = new IdentityHashMap<>(); IrisCaveProfile[] profileField = blendScratch.profileField;
IrisCaveProfile[] columnProfiles = new IrisCaveProfile[KERNEL_SIZE]; Map<IrisCaveProfile, double[]> tileProfileWeights = blendScratch.tileProfileWeights;
double[] columnProfileWeights = new double[KERNEL_SIZE]; IdentityHashMap<IrisCaveProfile, Boolean> activeProfiles = blendScratch.activeProfiles;
IrisCaveProfile[] kernelProfiles = blendScratch.kernelProfiles;
double[] kernelProfileWeights = blendScratch.kernelProfileWeights;
activeProfiles.clear();
fillProfileField(profileField, chunkX, chunkZ, resolverState, caveBiomeCache);
for (int localX = 0; localX < CHUNK_SIZE; localX++) { for (int tileX = 0; tileX < TILE_COUNT; tileX++) {
for (int localZ = 0; localZ < CHUNK_SIZE; localZ++) { for (int tileZ = 0; tileZ < TILE_COUNT; tileZ++) {
int profileCount = 0; int profileCount = 0;
int columnIndex = (localX << 4) | localZ; int sampleLocalX = (tileX * TILE_SIZE) + 1;
int centerX = localX + BLEND_RADIUS; int sampleLocalZ = (tileZ * TILE_SIZE) + 1;
int centerZ = localZ + BLEND_RADIUS; int centerX = sampleLocalX + BLEND_RADIUS;
int centerZ = sampleLocalZ + BLEND_RADIUS;
double totalKernelWeight = 0D; double totalKernelWeight = 0D;
for (int kernelIndex = 0; kernelIndex < KERNEL_SIZE; kernelIndex++) { for (int kernelIndex = 0; kernelIndex < KERNEL_SIZE; kernelIndex++) {
@@ -115,12 +127,12 @@ public class MantleCarvingComponent extends IrisMantleComponent {
} }
double kernelWeight = KERNEL_WEIGHT[kernelIndex]; double kernelWeight = KERNEL_WEIGHT[kernelIndex];
int existingIndex = findProfileIndex(columnProfiles, profileCount, profile); int existingIndex = findProfileIndex(kernelProfiles, profileCount, profile);
if (existingIndex >= 0) { if (existingIndex >= 0) {
columnProfileWeights[existingIndex] += kernelWeight; kernelProfileWeights[existingIndex] += kernelWeight;
} else { } else {
columnProfiles[profileCount] = profile; kernelProfiles[profileCount] = profile;
columnProfileWeights[profileCount] = kernelWeight; kernelProfileWeights[profileCount] = kernelWeight;
profileCount++; profileCount++;
} }
totalKernelWeight += kernelWeight; totalKernelWeight += kernelWeight;
@@ -130,25 +142,50 @@ public class MantleCarvingComponent extends IrisMantleComponent {
continue; continue;
} }
IrisCaveProfile dominantProfile = null;
double dominantKernelWeight = Double.NEGATIVE_INFINITY;
for (int profileIndex = 0; profileIndex < profileCount; profileIndex++) { for (int profileIndex = 0; profileIndex < profileCount; profileIndex++) {
IrisCaveProfile profile = columnProfiles[profileIndex]; IrisCaveProfile profile = kernelProfiles[profileIndex];
double normalizedWeight = columnProfileWeights[profileIndex] / totalKernelWeight; double kernelWeight = kernelProfileWeights[profileIndex];
double[] weights = profileWeights.computeIfAbsent(profile, key -> new double[CHUNK_AREA]); if (kernelWeight > dominantKernelWeight) {
weights[columnIndex] = normalizedWeight; dominantProfile = profile;
columnProfiles[profileIndex] = null; dominantKernelWeight = kernelWeight;
columnProfileWeights[profileIndex] = 0D; } else if (kernelWeight == dominantKernelWeight
&& profileSortKey(profile) < profileSortKey(dominantProfile)) {
dominantProfile = profile;
}
kernelProfiles[profileIndex] = null;
kernelProfileWeights[profileIndex] = 0D;
} }
if (dominantProfile == null) {
continue;
}
int tileIndex = tileIndex(tileX, tileZ);
double dominantWeight = clampWeight(dominantKernelWeight / totalKernelWeight);
double[] tileWeights = tileProfileWeights.get(dominantProfile);
if (tileWeights == null) {
tileWeights = new double[TILE_AREA];
tileProfileWeights.put(dominantProfile, tileWeights);
} else if (!activeProfiles.containsKey(dominantProfile)) {
Arrays.fill(tileWeights, 0D);
}
activeProfiles.put(dominantProfile, Boolean.TRUE);
tileWeights[tileIndex] = dominantWeight;
} }
} }
List<WeightedProfile> weightedProfiles = new ArrayList<>(); List<WeightedProfile> tileWeightedProfiles = new ArrayList<>();
for (Map.Entry<IrisCaveProfile, double[]> entry : profileWeights.entrySet()) { for (IrisCaveProfile profile : activeProfiles.keySet()) {
IrisCaveProfile profile = entry.getKey(); double[] tileWeights = tileProfileWeights.get(profile);
double[] weights = entry.getValue(); if (tileWeights == null) {
continue;
}
double totalWeight = 0D; double totalWeight = 0D;
double maxWeight = 0D; double maxWeight = 0D;
for (double weight : tileWeights) {
for (double weight : weights) {
totalWeight += weight; totalWeight += weight;
if (weight > maxWeight) { if (weight > maxWeight) {
maxWeight = weight; maxWeight = weight;
@@ -159,22 +196,27 @@ public class MantleCarvingComponent extends IrisMantleComponent {
continue; continue;
} }
double averageWeight = totalWeight / CHUNK_AREA; double averageWeight = totalWeight / TILE_AREA;
weightedProfiles.add(new WeightedProfile(profile, weights, averageWeight, null)); tileWeightedProfiles.add(new WeightedProfile(profile, tileWeights, averageWeight, null));
} }
weightedProfiles.sort(Comparator.comparingDouble(WeightedProfile::averageWeight)); List<WeightedProfile> boundedTileProfiles = limitAndMergeBlendedProfiles(tileWeightedProfiles, MAX_BLENDED_PROFILE_PASSES, TILE_AREA);
weightedProfiles.addAll(0, resolveDimensionCarvingProfiles(chunkX, chunkZ, resolverState)); List<WeightedProfile> blendedProfiles = expandTileWeightedProfiles(boundedTileProfiles);
return weightedProfiles; List<WeightedProfile> resolvedProfiles = resolveDimensionCarvingProfiles(chunkX, chunkZ, resolverState, blendScratch);
resolvedProfiles.addAll(blendedProfiles);
return resolvedProfiles;
} }
private List<WeightedProfile> resolveDimensionCarvingProfiles(int chunkX, int chunkZ, IrisDimensionCarvingResolver.State resolverState) { private List<WeightedProfile> resolveDimensionCarvingProfiles(int chunkX, int chunkZ, IrisDimensionCarvingResolver.State resolverState, BlendScratch blendScratch) {
List<WeightedProfile> weightedProfiles = new ArrayList<>(); List<WeightedProfile> weightedProfiles = new ArrayList<>();
List<IrisDimensionCarvingEntry> entries = getDimension().getCarving(); List<IrisDimensionCarvingEntry> entries = getDimension().getCarving();
if (entries == null || entries.isEmpty()) { if (entries == null || entries.isEmpty()) {
return weightedProfiles; return weightedProfiles;
} }
Map<IrisDimensionCarvingEntry, IrisDimensionCarvingEntry[]> dimensionTilePlans = blendScratch.dimensionTilePlans;
dimensionTilePlans.clear();
for (IrisDimensionCarvingEntry entry : entries) { for (IrisDimensionCarvingEntry entry : entries) {
if (entry == null || !entry.isEnabled()) { if (entry == null || !entry.isEnabled()) {
continue; continue;
@@ -185,41 +227,93 @@ public class MantleCarvingComponent extends IrisMantleComponent {
continue; continue;
} }
Map<IrisCaveProfile, double[]> rootProfileWeights = new IdentityHashMap<>(); IrisDimensionCarvingEntry[] tilePlan = dimensionTilePlans.computeIfAbsent(entry, key -> new IrisDimensionCarvingEntry[TILE_AREA]);
buildDimensionTilePlan(tilePlan, chunkX, chunkZ, entry, resolverState);
Map<IrisCaveProfile, double[]> rootProfileTileWeights = new IdentityHashMap<>();
IrisRange worldYRange = entry.getWorldYRange(); IrisRange worldYRange = entry.getWorldYRange();
for (int localX = 0; localX < CHUNK_SIZE; localX++) { for (int tileIndex = 0; tileIndex < TILE_AREA; tileIndex++) {
for (int localZ = 0; localZ < CHUNK_SIZE; localZ++) { IrisDimensionCarvingEntry resolvedEntry = tilePlan[tileIndex];
int worldX = (chunkX << 4) + localX; IrisBiome resolvedBiome = IrisDimensionCarvingResolver.resolveEntryBiome(getEngineMantle().getEngine(), resolvedEntry, resolverState);
int worldZ = (chunkZ << 4) + localZ; if (resolvedBiome == null) {
int columnIndex = (localX << 4) | localZ; continue;
IrisDimensionCarvingEntry resolvedEntry = IrisDimensionCarvingResolver.resolveFromRoot(getEngineMantle().getEngine(), entry, worldX, worldZ, resolverState);
IrisBiome resolvedBiome = IrisDimensionCarvingResolver.resolveEntryBiome(getEngineMantle().getEngine(), resolvedEntry, resolverState);
if (resolvedBiome == null) {
continue;
}
IrisCaveProfile profile = resolvedBiome.getCaveProfile();
if (!isProfileEnabled(profile)) {
continue;
}
double[] weights = rootProfileWeights.computeIfAbsent(profile, key -> new double[CHUNK_AREA]);
weights[columnIndex] = 1D;
} }
IrisCaveProfile profile = resolvedBiome.getCaveProfile();
if (!isProfileEnabled(profile)) {
continue;
}
double[] tileWeights = rootProfileTileWeights.computeIfAbsent(profile, key -> new double[TILE_AREA]);
tileWeights[tileIndex] = 1D;
} }
List<Map.Entry<IrisCaveProfile, double[]>> profileEntries = new ArrayList<>(rootProfileWeights.entrySet()); List<Map.Entry<IrisCaveProfile, double[]>> profileEntries = new ArrayList<>(rootProfileTileWeights.entrySet());
profileEntries.sort((a, b) -> Integer.compare(a.getKey().hashCode(), b.getKey().hashCode())); profileEntries.sort((a, b) -> Integer.compare(a.getKey().hashCode(), b.getKey().hashCode()));
for (Map.Entry<IrisCaveProfile, double[]> profileEntry : profileEntries) { for (Map.Entry<IrisCaveProfile, double[]> profileEntry : profileEntries) {
weightedProfiles.add(new WeightedProfile(profileEntry.getKey(), profileEntry.getValue(), -1D, worldYRange)); double[] columnWeights = expandTileWeightsToColumns(profileEntry.getValue());
weightedProfiles.add(new WeightedProfile(profileEntry.getKey(), columnWeights, -1D, worldYRange));
} }
} }
return weightedProfiles; return weightedProfiles;
} }
private IrisCaveProfile[] buildProfileField(int chunkX, int chunkZ, IrisDimensionCarvingResolver.State resolverState, Long2ObjectOpenHashMap<IrisBiome> caveBiomeCache) { private void buildDimensionTilePlan(IrisDimensionCarvingEntry[] tilePlan, int chunkX, int chunkZ, IrisDimensionCarvingEntry entry, IrisDimensionCarvingResolver.State resolverState) {
IrisCaveProfile[] profileField = new IrisCaveProfile[FIELD_SIZE * FIELD_SIZE]; for (int tileX = 0; tileX < TILE_COUNT; tileX++) {
int worldX = (chunkX << 4) + (tileX * TILE_SIZE);
for (int tileZ = 0; tileZ < TILE_COUNT; tileZ++) {
int worldZ = (chunkZ << 4) + (tileZ * TILE_SIZE);
int tileIndex = tileIndex(tileX, tileZ);
tilePlan[tileIndex] = IrisDimensionCarvingResolver.resolveFromRoot(getEngineMantle().getEngine(), entry, worldX, worldZ, resolverState);
}
}
}
/**
 * Converts tile-resolution weighted profiles into column-resolution ones:
 * each tile weight grid is expanded to a full 16x16 column grid, the average
 * weight is recomputed over the chunk area, and the results are sorted into
 * carve order.
 */
private List<WeightedProfile> expandTileWeightedProfiles(List<WeightedProfile> tileWeightedProfiles) {
    List<WeightedProfile> result = new ArrayList<>(tileWeightedProfiles.size());
    for (WeightedProfile tile : tileWeightedProfiles) {
        double[] weights = expandTileWeightsToColumns(tile.columnWeights);
        result.add(new WeightedProfile(tile.profile, weights, computeAverageWeight(weights, CHUNK_AREA), tile.worldYRange));
    }
    result.sort(MantleCarvingComponent::compareByCarveOrder);
    return result;
}
/**
 * Expands a TILE_COUNT x TILE_COUNT tile weight grid into a full 16x16
 * per-column weight array (indexed (columnX << 4) | columnZ). Every column
 * covered by a tile receives that tile's weight; tiles with non-positive
 * weight are skipped, leaving their columns at the fresh array's zero.
 *
 * <p>Generalized to honor TILE_SIZE instead of the previous hard-coded 2x2
 * footprint (explicit columnX/columnX+1 writes), so a change to the tile
 * resolution cannot silently desynchronize this expansion. Behavior is
 * identical for the current TILE_SIZE of 2.</p>
 */
private static double[] expandTileWeightsToColumns(double[] tileWeights) {
    double[] columnWeights = new double[CHUNK_AREA];
    if (tileWeights == null || tileWeights.length == 0) {
        return columnWeights;
    }
    for (int tileX = 0; tileX < TILE_COUNT; tileX++) {
        int baseX = tileX * TILE_SIZE;
        for (int tileZ = 0; tileZ < TILE_COUNT; tileZ++) {
            double weight = tileWeights[tileIndex(tileX, tileZ)];
            if (weight <= 0D) {
                continue;
            }
            int baseZ = tileZ * TILE_SIZE;
            for (int dx = 0; dx < TILE_SIZE; dx++) {
                int columnXBits = (baseX + dx) << 4;
                for (int dz = 0; dz < TILE_SIZE; dz++) {
                    columnWeights[columnXBits | (baseZ + dz)] = weight;
                }
            }
        }
    }
    return columnWeights;
}
/** Flattens 2D tile coordinates into a row-major index into a TILE_AREA-sized array. */
private static int tileIndex(int tileX, int tileZ) {
    return tileZ + (TILE_COUNT * tileX);
}
private void fillProfileField(IrisCaveProfile[] profileField, int chunkX, int chunkZ, IrisDimensionCarvingResolver.State resolverState, Long2ObjectOpenHashMap<IrisBiome> caveBiomeCache) {
int startX = (chunkX << 4) - BLEND_RADIUS; int startX = (chunkX << 4) - BLEND_RADIUS;
int startZ = (chunkZ << 4) - BLEND_RADIUS; int startZ = (chunkZ << 4) - BLEND_RADIUS;
@@ -230,8 +324,6 @@ public class MantleCarvingComponent extends IrisMantleComponent {
profileField[(fieldX * FIELD_SIZE) + fieldZ] = resolveColumnProfile(worldX, worldZ, resolverState, caveBiomeCache); profileField[(fieldX * FIELD_SIZE) + fieldZ] = resolveColumnProfile(worldX, worldZ, resolverState, caveBiomeCache);
} }
} }
return profileField;
} }
private int findProfileIndex(IrisCaveProfile[] profiles, int size, IrisCaveProfile profile) { private int findProfileIndex(IrisCaveProfile[] profiles, int size, IrisCaveProfile profile) {
@@ -308,6 +400,136 @@ public class MantleCarvingComponent extends IrisMantleComponent {
return 0; return 0;
} }
/**
 * Populates (and returns) the scratch array of surface heights for every
 * column of chunk (chunkX, chunkZ), indexed as (localX << 4) | localZ.
 * Heights cached on the ChunkContext are preferred when the context is
 * present and anchored at this chunk's world origin; any column the context
 * cannot answer falls back to a direct engine height query.
 */
private int[] prepareChunkSurfaceHeights(int chunkX, int chunkZ, ChunkContext context, int[] scratch) {
    int[] heights = scratch;
    int baseX = chunkX << 4;
    int baseZ = chunkZ << 4;
    // Only trust the context's height map when it was built for this exact chunk.
    boolean contextMatches = context != null
            && context.getHeight() != null
            && context.getX() == baseX
            && context.getZ() == baseZ;
    for (int localX = 0; localX < CHUNK_SIZE; localX++) {
        int worldX = baseX + localX;
        for (int localZ = 0; localZ < CHUNK_SIZE; localZ++) {
            int worldZ = baseZ + localZ;
            int columnIndex = (localX << 4) | localZ;
            Double cached = contextMatches ? context.getHeight().get(localX, localZ) : null;
            if (cached != null) {
                heights[columnIndex] = (int) Math.round(cached);
            } else {
                heights[columnIndex] = getEngineMantle().getEngine().getHeight(worldX, worldZ);
            }
        }
    }
    return heights;
}
// Convenience overload: limit/merge over the full 16x16 chunk column area.
private static List<WeightedProfile> limitAndMergeBlendedProfiles(List<WeightedProfile> blendedProfiles, int maxProfiles) {
    return limitAndMergeBlendedProfiles(blendedProfiles, maxProfiles, CHUNK_AREA);
}
/**
 * Caps the number of blended cave profiles at {@code maxProfiles} (at least 1)
 * and folds the weight of every dropped profile into the kept profiles so
 * total carve coverage per column is preserved.
 *
 * <p>Selection: profiles are ranked by {@code compareBySelectionRank}
 * (heaviest average weight first) and the top {@code maxProfiles} survive.
 * For each of the {@code areaSize} columns, the summed weight of all dropped
 * profiles is added onto whichever kept profile already dominates that column,
 * clamped to [0, 1]. NOTE: this mutates the kept profiles' columnWeights
 * arrays in place. The returned list is rebuilt with recomputed averages and
 * sorted into carve order (lightest first).</p>
 */
private static List<WeightedProfile> limitAndMergeBlendedProfiles(List<WeightedProfile> blendedProfiles, int maxProfiles, int areaSize) {
    if (blendedProfiles == null || blendedProfiles.isEmpty()) {
        return new ArrayList<>();
    }
    int clampedLimit = Math.max(1, maxProfiles);
    List<WeightedProfile> rankedProfiles = new ArrayList<>(blendedProfiles);
    rankedProfiles.sort(MantleCarvingComponent::compareBySelectionRank);
    List<WeightedProfile> keptProfiles = new ArrayList<>();
    int keptCount = Math.min(clampedLimit, rankedProfiles.size());
    for (int index = 0; index < keptCount; index++) {
        keptProfiles.add(rankedProfiles.get(index));
    }
    // Only redistribute when something was actually dropped.
    if (rankedProfiles.size() > keptCount) {
        for (int columnIndex = 0; columnIndex < areaSize; columnIndex++) {
            // Find the kept profile with the highest weight at this column;
            // ties resolve to the earlier (better-ranked) profile.
            int dominantIndex = 0;
            double dominantWeight = Double.NEGATIVE_INFINITY;
            for (int keptIndex = 0; keptIndex < keptProfiles.size(); keptIndex++) {
                double keptWeight = keptProfiles.get(keptIndex).columnWeights[columnIndex];
                if (keptWeight > dominantWeight) {
                    dominantWeight = keptWeight;
                    dominantIndex = keptIndex;
                }
            }
            // Sum this column's weight across every dropped profile.
            double droppedWeight = 0D;
            for (int droppedIndex = keptCount; droppedIndex < rankedProfiles.size(); droppedIndex++) {
                droppedWeight += rankedProfiles.get(droppedIndex).columnWeights[columnIndex];
            }
            if (droppedWeight <= 0D) {
                continue;
            }
            // Fold dropped coverage into the dominant kept profile (in-place mutation).
            WeightedProfile dominantProfile = keptProfiles.get(dominantIndex);
            double mergedWeight = dominantProfile.columnWeights[columnIndex] + droppedWeight;
            dominantProfile.columnWeights[columnIndex] = clampWeight(mergedWeight);
        }
    }
    // Rebuild with refreshed averages (weights may have changed above).
    List<WeightedProfile> mergedProfiles = new ArrayList<>();
    for (WeightedProfile keptProfile : keptProfiles) {
        double averageWeight = computeAverageWeight(keptProfile.columnWeights, areaSize);
        mergedProfiles.add(new WeightedProfile(keptProfile.profile, keptProfile.columnWeights, averageWeight, keptProfile.worldYRange));
    }
    mergedProfiles.sort(MantleCarvingComponent::compareByCarveOrder);
    return mergedProfiles;
}
/** Selection order: heaviest average weight first; ties broken by ascending profile sort key. */
private static int compareBySelectionRank(WeightedProfile a, WeightedProfile b) {
    int byWeight = Double.compare(b.averageWeight, a.averageWeight);
    return byWeight != 0
            ? byWeight
            : Integer.compare(profileSortKey(a.profile), profileSortKey(b.profile));
}
/** Carve order: lightest average weight first; ties broken by ascending profile sort key. */
private static int compareByCarveOrder(WeightedProfile a, WeightedProfile b) {
    int byWeight = Double.compare(a.averageWeight, b.averageWeight);
    return byWeight != 0
            ? byWeight
            : Integer.compare(profileSortKey(a.profile), profileSortKey(b.profile));
}
/**
 * Tie-break key used when two profiles have equal weight; null maps to zero.
 * NOTE(review): if IrisCaveProfile does not override hashCode, this is an
 * identity hash that varies between JVM runs, making tie-break order
 * non-deterministic across runs — confirm whether cross-run determinism
 * matters for world generation.
 */
private static int profileSortKey(IrisCaveProfile profile) {
    return profile == null ? 0 : profile.hashCode();
}
// Convenience overload: averages over the full 16x16 chunk column area.
private static double computeAverageWeight(double[] weights) {
    return computeAverageWeight(weights, CHUNK_AREA);
}
/**
 * Sums every entry of the weight table and divides by areaSize (clamped to at
 * least 1 to avoid division by zero). A null or empty table yields zero.
 */
private static double computeAverageWeight(double[] weights, int areaSize) {
    if (weights == null || weights.length == 0) {
        return 0D;
    }
    double total = 0D;
    for (int i = 0; i < weights.length; i++) {
        total += weights[i];
    }
    return total / Math.max(1, areaSize);
}
/**
 * Clamps a blend weight into [0, 1]. Non-finite inputs (NaN and both
 * infinities) are treated as invalid and collapse to zero — note that even
 * +Infinity maps to 0, not 1, matching the original contract.
 */
private static double clampWeight(double value) {
    if (Double.isNaN(value) || Double.isInfinite(value) || value <= 0D) {
        return 0D;
    }
    return Math.min(value, 1D);
}
private static final class WeightedProfile { private static final class WeightedProfile {
private final IrisCaveProfile profile; private final IrisCaveProfile profile;
private final double[] columnWeights; private final double[] columnWeights;
@@ -325,4 +547,14 @@ public class MantleCarvingComponent extends IrisMantleComponent {
return averageWeight; return averageWeight;
} }
} }
// Per-thread reusable buffers for profile blending (obtained via the
// BLEND_SCRATCH ThreadLocal) so generateLayer avoids per-chunk allocation.
private static final class BlendScratch {
    // Cave profile for every column of the padded (FIELD_SIZE x FIELD_SIZE) blend field.
    private final IrisCaveProfile[] profileField = new IrisCaveProfile[FIELD_SIZE * FIELD_SIZE];
    // Distinct profiles (and their accumulated weights) seen within one kernel window.
    private final IrisCaveProfile[] kernelProfiles = new IrisCaveProfile[KERNEL_SIZE];
    private final double[] kernelProfileWeights = new double[KERNEL_SIZE];
    // Per-profile tile weight grids, keyed by identity; arrays are reused across chunks.
    private final IdentityHashMap<IrisCaveProfile, double[]> tileProfileWeights = new IdentityHashMap<>();
    // Per-entry resolved dimension-carving tile plans, reused across chunks.
    private final IdentityHashMap<IrisDimensionCarvingEntry, IrisDimensionCarvingEntry[]> dimensionTilePlans = new IdentityHashMap<>();
    // Marks which cached tile weight grids are live for the current chunk
    // (stale grids are zeroed on first touch).
    private final IdentityHashMap<IrisCaveProfile, Boolean> activeProfiles = new IdentityHashMap<>();
    // Surface height per chunk column, indexed (localX << 4) | localZ.
    private final int[] chunkSurfaceHeights = new int[CHUNK_AREA];
}
} }

View File

@@ -46,8 +46,11 @@ import org.bukkit.Material;
import org.bukkit.block.data.BlockData; import org.bukkit.block.data.BlockData;
import java.util.Arrays; import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
public class IrisCarveModifier extends EngineAssignedModifier<BlockData> { public class IrisCarveModifier extends EngineAssignedModifier<BlockData> {
private static final ThreadLocal<CarveScratch> SCRATCH = ThreadLocal.withInitial(CarveScratch::new);
private final RNG rng; private final RNG rng;
private final BlockData AIR = Material.CAVE_AIR.createBlockData(); private final BlockData AIR = Material.CAVE_AIR.createBlockData();
private final BlockData LAVA = Material.LAVA.createBlockData(); private final BlockData LAVA = Material.LAVA.createBlockData();
@@ -67,9 +70,12 @@ public class IrisCarveModifier extends EngineAssignedModifier<BlockData> {
MantleChunk<Matter> mc = mantle.getChunk(x, z).use(); MantleChunk<Matter> mc = mantle.getChunk(x, z).use();
IrisDimensionCarvingResolver.State resolverState = new IrisDimensionCarvingResolver.State(); IrisDimensionCarvingResolver.State resolverState = new IrisDimensionCarvingResolver.State();
Long2ObjectOpenHashMap<IrisBiome> caveBiomeCache = new Long2ObjectOpenHashMap<>(2048); Long2ObjectOpenHashMap<IrisBiome> caveBiomeCache = new Long2ObjectOpenHashMap<>(2048);
int[][] columnHeights = new int[256][]; CarveScratch scratch = SCRATCH.get();
int[] columnHeightSizes = new int[256]; scratch.reset();
PackedWallBuffer walls = new PackedWallBuffer(512); PackedWallBuffer walls = scratch.walls;
ColumnMask[] columnMasks = scratch.columnMasks;
Map<String, IrisBiome> customBiomeCache = scratch.customBiomeCache;
try { try {
PrecisionStopwatch resolveStopwatch = PrecisionStopwatch.start(); PrecisionStopwatch resolveStopwatch = PrecisionStopwatch.start();
mc.iterate(MatterCavern.class, (xx, yy, zz, c) -> { mc.iterate(MatterCavern.class, (xx, yy, zz, c) -> {
@@ -90,7 +96,7 @@ public class IrisCarveModifier extends EngineAssignedModifier<BlockData> {
return; return;
} }
appendColumnHeight(columnHeights, columnHeightSizes, columnIndex, yy); columnMasks[columnIndex].add(yy);
if (rz < 15 && mc.get(xx, yy, zz + 1, MatterCavern.class) == null) { if (rz < 15 && mc.get(xx, yy, zz + 1, MatterCavern.class) == null) {
walls.put(rx, yy, rz + 1, c); walls.put(rx, yy, rz + 1, c);
@@ -131,9 +137,10 @@ public class IrisCarveModifier extends EngineAssignedModifier<BlockData> {
walls.forEach((rx, yy, rz, cavern) -> { walls.forEach((rx, yy, rz, cavern) -> {
int worldX = rx + (x << 4); int worldX = rx + (x << 4);
int worldZ = rz + (z << 4); int worldZ = rz + (z << 4);
IrisBiome biome = cavern.getCustomBiome().isEmpty() String customBiome = cavern.getCustomBiome();
IrisBiome biome = customBiome.isEmpty()
? resolveCaveBiome(caveBiomeCache, worldX, yy, worldZ, resolverState) ? resolveCaveBiome(caveBiomeCache, worldX, yy, worldZ, resolverState)
: getEngine().getData().getBiomeLoader().load(cavern.getCustomBiome()); : resolveCustomBiome(customBiomeCache, customBiome);
if (biome != null) { if (biome != null) {
biome.setInferredType(InferredType.CAVE); biome.setInferredType(InferredType.CAVE);
@@ -146,43 +153,7 @@ public class IrisCarveModifier extends EngineAssignedModifier<BlockData> {
}); });
for (int columnIndex = 0; columnIndex < 256; columnIndex++) { for (int columnIndex = 0; columnIndex < 256; columnIndex++) {
int size = columnHeightSizes[columnIndex]; processColumnFromMask(output, mc, mantle, columnMasks[columnIndex], columnIndex, x, z, resolverState, caveBiomeCache);
if (size <= 0) {
continue;
}
int[] heights = columnHeights[columnIndex];
Arrays.sort(heights, 0, size);
int rx = columnIndex >> 4;
int rz = columnIndex & 15;
CaveZone zone = new CaveZone();
zone.setFloor(heights[0]);
int buf = heights[0] - 1;
for (int heightIndex = 0; heightIndex < size; heightIndex++) {
int y = heights[heightIndex];
if (y < 0 || y > getEngine().getHeight()) {
continue;
}
if (y == buf + 1) {
buf = y;
zone.ceiling = buf;
} else if (zone.isValid(getEngine())) {
processZone(output, mc, mantle, zone, rx, rz, rx + (x << 4), rz + (z << 4), resolverState, caveBiomeCache);
zone = new CaveZone();
zone.setFloor(y);
buf = y;
} else {
zone = new CaveZone();
zone.setFloor(y);
buf = y;
}
}
if (zone.isValid(getEngine())) {
processZone(output, mc, mantle, zone, rx, rz, rx + (x << 4), rz + (z << 4), resolverState, caveBiomeCache);
}
} }
} finally { } finally {
getEngine().getMetrics().getCarveApply().put(applyStopwatch.getMilliseconds()); getEngine().getMetrics().getCarveApply().put(applyStopwatch.getMilliseconds());
@@ -193,6 +164,60 @@ public class IrisCarveModifier extends EngineAssignedModifier<BlockData> {
} }
} }
/**
 * Converts one column's carved-height bitmask into contiguous vertical cave
 * zones and processes each zone.
 * <p>
 * Consecutive set bits (heights) are grouped into a single {@link CaveZone};
 * a gap in the bitmask closes the current zone (processing it if valid per
 * {@code CaveZone.isValid}) and opens a new one at the next height. Heights
 * above {@code getEngine().getHeight()} are skipped without breaking a zone,
 * matching the previous sorted-array implementation.
 *
 * @param output          block output hunk for the chunk being carved
 * @param mc              mantle chunk the zones are read from
 * @param mantle          backing mantle
 * @param columnMask      set of carved Y levels for this column; may be null/empty
 * @param columnIndex     packed column index (0..255); high nibble = local x, low nibble = local z
 * @param chunkX          chunk X coordinate
 * @param chunkZ          chunk Z coordinate
 * @param resolverState   carving-resolver state threaded through to zone processing
 * @param caveBiomeCache  per-chunk cave biome lookup cache
 */
private void processColumnFromMask(
        Hunk<BlockData> output,
        MantleChunk<Matter> mc,
        Mantle<Matter> mantle,
        ColumnMask columnMask,
        int columnIndex,
        int chunkX,
        int chunkZ,
        IrisDimensionCarvingResolver.State resolverState,
        Long2ObjectOpenHashMap<IrisBiome> caveBiomeCache
) {
    if (columnMask == null || columnMask.isEmpty()) {
        return;
    }
    int firstHeight = columnMask.nextSetBit(0);
    if (firstHeight < 0) {
        return;
    }
    int rx = columnIndex >> 4;
    int rz = columnIndex & 15;
    int worldX = rx + (chunkX << 4);
    int worldZ = rz + (chunkZ << 4);
    CaveZone zone = new CaveZone();
    zone.setFloor(firstHeight);
    // buf tracks the last height absorbed into the current zone; seeding it one
    // below firstHeight makes the first iteration extend the zone's ceiling.
    int buf = firstHeight - 1;
    int y = firstHeight;
    while (y >= 0) {
        // Out-of-range heights are ignored entirely (no zone break), mirroring
        // the old sorted-heights loop's `continue`.
        if (y <= getEngine().getHeight()) {
            if (y == buf + 1) {
                // Contiguous with the current zone: raise its ceiling.
                buf = y;
                zone.ceiling = buf;
            } else {
                // Gap: close out the current zone (if it formed a valid cave)
                // and start a fresh zone at this height.
                if (zone.isValid(getEngine())) {
                    processZone(output, mc, mantle, zone, rx, rz, worldX, worldZ, resolverState, caveBiomeCache);
                }
                zone = new CaveZone();
                zone.setFloor(y);
                buf = y;
            }
        }
        y = columnMask.nextSetBit(y + 1);
    }
    // Flush the trailing zone.
    if (zone.isValid(getEngine())) {
        processZone(output, mc, mantle, zone, rx, rz, worldX, worldZ, resolverState, caveBiomeCache);
    }
}
private void processZone(Hunk<BlockData> output, MantleChunk<Matter> mc, Mantle<Matter> mantle, CaveZone zone, int rx, int rz, int xx, int zz, IrisDimensionCarvingResolver.State resolverState, Long2ObjectOpenHashMap<IrisBiome> caveBiomeCache) { private void processZone(Hunk<BlockData> output, MantleChunk<Matter> mc, Mantle<Matter> mantle, CaveZone zone, int rx, int rz, int xx, int zz, IrisDimensionCarvingResolver.State resolverState, Long2ObjectOpenHashMap<IrisBiome> caveBiomeCache) {
int center = (zone.floor + zone.ceiling) / 2; int center = (zone.floor + zone.ceiling) / 2;
String customBiome = ""; String customBiome = "";
@@ -303,20 +328,14 @@ public class IrisCarveModifier extends EngineAssignedModifier<BlockData> {
return resolvedBiome; return resolvedBiome;
} }
private void appendColumnHeight(int[][] heights, int[] sizes, int columnIndex, int y) { private IrisBiome resolveCustomBiome(Map<String, IrisBiome> customBiomeCache, String customBiome) {
int[] column = heights[columnIndex]; if (customBiomeCache.containsKey(customBiome)) {
int size = sizes[columnIndex]; return customBiomeCache.get(customBiome);
if (column == null) {
column = new int[8];
heights[columnIndex] = column;
} else if (size >= column.length) {
int nextSize = column.length << 1;
column = Arrays.copyOf(column, nextSize);
heights[columnIndex] = column;
} }
column[size] = y; IrisBiome loaded = getEngine().getData().getBiomeLoader().load(customBiome);
sizes[columnIndex] = size + 1; customBiomeCache.put(customBiome, loaded);
return loaded;
} }
private static final class PackedWallBuffer { private static final class PackedWallBuffer {
@@ -384,6 +403,12 @@ public class IrisCarveModifier extends EngineAssignedModifier<BlockData> {
} }
} }
/** Resets the buffer to empty without shrinking its backing arrays. */
private void clear() {
    size = 0;
    // Drop cavern references so they can be garbage-collected between uses.
    Arrays.fill(values, null);
    Arrays.fill(keys, EMPTY_KEY);
}
private void resize() { private void resize() {
int[] oldKeys = keys; int[] oldKeys = keys;
MatterCavern[] oldValues = values; MatterCavern[] oldValues = values;
@@ -443,6 +468,87 @@ public class IrisCarveModifier extends EngineAssignedModifier<BlockData> {
} }
} }
/**
 * Reusable scratch state for one carve pass: a carved-height bitmask per
 * column of the 16x16 chunk footprint, a packed wall buffer, and a cache of
 * custom-biome lookups. Reset between chunks instead of reallocating.
 */
private static final class CarveScratch {
    private final ColumnMask[] columnMasks = new ColumnMask[256];
    private final PackedWallBuffer walls = new PackedWallBuffer(512);
    private final Map<String, IrisBiome> customBiomeCache = new HashMap<>();

    private CarveScratch() {
        // One mask per column (16 * 16 = 256 columns).
        for (int column = 0; column < columnMasks.length; column++) {
            columnMasks[column] = new ColumnMask();
        }
    }

    /** Clears all cached state so this scratch can serve another chunk. */
    private void reset() {
        for (ColumnMask mask : columnMasks) {
            mask.clear();
        }
        walls.clear();
        customBiomeCache.clear();
    }
}
/**
 * Compact bitset of carved Y levels for a single column. Bit {@code y} is set
 * once {@link #add(int)} records that height; iteration follows the
 * {@code java.util.BitSet#nextSetBit} convention. The word array grows on
 * demand and {@link #clear()} only zeroes the words actually used.
 */
private static final class ColumnMask {
    private long[] bits = new long[8];
    // Index of the highest word containing any set bit; -1 means empty.
    private int highestWordIndex = -1;

    /** Records height {@code y}; negative heights are silently ignored. */
    private void add(int y) {
        if (y < 0) {
            return;
        }
        int word = y >> 6;
        if (word >= bits.length) {
            // Grow geometrically, but always far enough to hold the new word.
            bits = Arrays.copyOf(bits, Math.max(bits.length << 1, word + 1));
        }
        bits[word] |= 1L << (y & 63);
        highestWordIndex = Math.max(highestWordIndex, word);
    }

    /** Returns the first set bit at or above {@code fromBit}, or -1 if none. */
    private int nextSetBit(int fromBit) {
        if (highestWordIndex < 0) {
            return -1;
        }
        int startBit = Math.max(0, fromBit);
        int startWord = startBit >> 6;
        for (int word = startWord; word <= highestWordIndex; word++) {
            long value = bits[word];
            if (word == startWord) {
                // Mask off bits below the starting position in the first word.
                value &= -1L << (startBit & 63);
            }
            if (value != 0L) {
                return (word << 6) + Long.numberOfTrailingZeros(value);
            }
        }
        return -1;
    }

    /** True when no height has been recorded since the last clear. */
    private boolean isEmpty() {
        return highestWordIndex < 0;
    }

    /** Zeroes only the used words, then resets the high-water mark. */
    private void clear() {
        for (int word = 0; word <= highestWordIndex; word++) {
            bits[word] = 0L;
        }
        highestWordIndex = -1;
    }
}
@FunctionalInterface @FunctionalInterface
private interface PackedWallConsumer { private interface PackedWallConsumer {
void accept(int x, int y, int z, MatterCavern cavern); void accept(int x, int y, int z, MatterCavern cavern);

View File

@@ -579,10 +579,6 @@ public class BukkitChunkGenerator extends ChunkGenerator implements PlatformChun
public void generateNoise(@NotNull WorldInfo world, @NotNull Random random, int x, int z, @NotNull ChunkGenerator.ChunkData d) { public void generateNoise(@NotNull WorldInfo world, @NotNull Random random, int x, int z, @NotNull ChunkGenerator.ChunkData d) {
try { try {
Engine engine = getEngine(world); Engine engine = getEngine(world);
World realWorld = engine.getWorld().realWorld();
if (realWorld != null && IrisToolbelt.isWorldMaintenanceActive(realWorld)) {
return;
}
computeStudioGenerator(); computeStudioGenerator();
TerrainChunk tc = TerrainChunk.create(d); TerrainChunk tc = TerrainChunk.create(d);
this.world.bind(world); this.world.bind(world);

View File

@@ -321,12 +321,23 @@ public class J {
return; return;
} }
if (!runGlobalImmediate(r)) { if (isFolia()) {
try { if (runGlobalImmediate(r)) {
Bukkit.getScheduler().scheduleSyncDelayedTask(Iris.instance, r); return;
} catch (UnsupportedOperationException e) {
throw new IllegalStateException("Failed to schedule sync task (Folia scheduler unavailable, BukkitScheduler unsupported).", e);
} }
throw new IllegalStateException("Failed to schedule sync task on Folia runtime.");
}
try {
Bukkit.getScheduler().scheduleSyncDelayedTask(Iris.instance, r);
} catch (UnsupportedOperationException e) {
FoliaScheduler.forceFoliaThreading(Bukkit.getServer());
if (runGlobalImmediate(r)) {
return;
}
throw new IllegalStateException("Failed to schedule sync task (Folia scheduler unavailable, BukkitScheduler unsupported).", e);
} }
} }
@@ -397,10 +408,28 @@ public class J {
return; return;
} }
try { if (isFolia()) {
if (!runGlobalDelayed(r, delay)) { if (runGlobalDelayed(r, delay)) {
Bukkit.getScheduler().scheduleSyncDelayedTask(Iris.instance, r, delay); return;
} }
a(() -> {
if (sleep(ticksToMilliseconds(delay))) {
s(r);
}
});
return;
}
try {
Bukkit.getScheduler().scheduleSyncDelayedTask(Iris.instance, r, delay);
} catch (UnsupportedOperationException e) {
FoliaScheduler.forceFoliaThreading(Bukkit.getServer());
if (runGlobalDelayed(r, delay)) {
return;
}
throw new IllegalStateException("Failed to schedule delayed sync task (Folia scheduler unavailable, BukkitScheduler unsupported).", e);
} catch (Throwable e) { } catch (Throwable e) {
Iris.reportError(e); Iris.reportError(e);
} }
@@ -551,6 +580,11 @@ public class J {
return false; return false;
} }
if (isPrimaryThread()) {
runnable.run();
return true;
}
return FoliaScheduler.runGlobal(Iris.instance, runnable); return FoliaScheduler.runGlobal(Iris.instance, runnable);
} }
@@ -559,6 +593,10 @@ public class J {
return false; return false;
} }
if (delayTicks <= 0) {
return runGlobalImmediate(runnable);
}
return FoliaScheduler.runGlobal(Iris.instance, runnable, Math.max(0, delayTicks)); return FoliaScheduler.runGlobal(Iris.instance, runnable, Math.max(0, delayTicks));
} }

View File

@@ -1,19 +1,18 @@
package art.arcane.iris.util.project.context; package art.arcane.iris.util.project.context;
import art.arcane.iris.core.IrisHotPathMetricsMode; import art.arcane.iris.Iris;
import art.arcane.iris.core.IrisSettings;
import art.arcane.iris.engine.IrisComplex; import art.arcane.iris.engine.IrisComplex;
import art.arcane.iris.engine.framework.EngineMetrics; import art.arcane.iris.engine.framework.EngineMetrics;
import art.arcane.iris.engine.object.IrisBiome; import art.arcane.iris.engine.object.IrisBiome;
import art.arcane.iris.engine.object.IrisRegion; import art.arcane.iris.engine.object.IrisRegion;
import art.arcane.volmlib.util.atomics.AtomicRollingSequence; import art.arcane.iris.util.common.parallel.MultiBurst;
import org.bukkit.block.data.BlockData; import org.bukkit.block.data.BlockData;
import java.util.IdentityHashMap; import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
public class ChunkContext { public class ChunkContext {
private static final int PREFILL_METRICS_FLUSH_SIZE = 64;
private static final ThreadLocal<PrefillMetricsState> PREFILL_METRICS = ThreadLocal.withInitial(PrefillMetricsState::new);
private final int x; private final int x;
private final int z; private final int z;
private final ChunkedDataCache<Double> height; private final ChunkedDataCache<Double> height;
@@ -47,41 +46,45 @@ public class ChunkContext {
if (cache) { if (cache) {
PrefillPlan resolvedPlan = prefillPlan == null ? PrefillPlan.NO_CAVE : prefillPlan; PrefillPlan resolvedPlan = prefillPlan == null ? PrefillPlan.NO_CAVE : prefillPlan;
PrefillMetricsState metricsState = PREFILL_METRICS.get(); boolean capturePrefillMetric = metrics != null;
IrisSettings.IrisSettingsPregen pregen = IrisSettings.get().getPregen(); long totalStartNanos = capturePrefillMetric ? System.nanoTime() : 0L;
IrisHotPathMetricsMode metricsMode = pregen.getHotPathMetricsMode(); List<PrefillFillTask> fillTasks = new ArrayList<>(6);
boolean sampleMetrics = metricsMode != IrisHotPathMetricsMode.DISABLED
&& metricsState.shouldSample(metricsMode, pregen.getHotPathMetricsSampleStride());
long totalStartNanos = sampleMetrics ? System.nanoTime() : 0L;
if (resolvedPlan.height) { if (resolvedPlan.height) {
fill(height, metrics == null ? null : metrics.getContextPrefillHeight(), sampleMetrics, metricsState); fillTasks.add(new PrefillFillTask(height));
} }
if (resolvedPlan.biome) { if (resolvedPlan.biome) {
fill(biome, metrics == null ? null : metrics.getContextPrefillBiome(), sampleMetrics, metricsState); fillTasks.add(new PrefillFillTask(biome));
} }
if (resolvedPlan.rock) { if (resolvedPlan.rock) {
fill(rock, metrics == null ? null : metrics.getContextPrefillRock(), sampleMetrics, metricsState); fillTasks.add(new PrefillFillTask(rock));
} }
if (resolvedPlan.fluid) { if (resolvedPlan.fluid) {
fill(fluid, metrics == null ? null : metrics.getContextPrefillFluid(), sampleMetrics, metricsState); fillTasks.add(new PrefillFillTask(fluid));
} }
if (resolvedPlan.region) { if (resolvedPlan.region) {
fill(region, metrics == null ? null : metrics.getContextPrefillRegion(), sampleMetrics, metricsState); fillTasks.add(new PrefillFillTask(region));
} }
if (resolvedPlan.cave) { if (resolvedPlan.cave) {
fill(cave, metrics == null ? null : metrics.getContextPrefillCave(), sampleMetrics, metricsState); fillTasks.add(new PrefillFillTask(cave));
} }
if (metrics != null && sampleMetrics) {
metricsState.record(metrics.getContextPrefill(), System.nanoTime() - totalStartNanos);
}
}
}
private void fill(ChunkedDataCache<?> dataCache, AtomicRollingSequence metrics, boolean sampleMetrics, PrefillMetricsState metricsState) { if (fillTasks.size() <= 1 || Iris.instance == null) {
long startNanos = sampleMetrics ? System.nanoTime() : 0L; for (PrefillFillTask fillTask : fillTasks) {
dataCache.fill(); fillTask.run();
if (metrics != null && sampleMetrics) { }
metricsState.record(metrics, System.nanoTime() - startNanos); } else {
List<CompletableFuture<Void>> futures = new ArrayList<>(fillTasks.size());
for (PrefillFillTask fillTask : fillTasks) {
futures.add(CompletableFuture.runAsync(fillTask, MultiBurst.burst));
}
for (CompletableFuture<Void> future : futures) {
future.join();
}
}
if (capturePrefillMetric) {
metrics.getContextPrefill().put((System.nanoTime() - totalStartNanos) / 1_000_000D);
}
} }
} }
@@ -139,43 +142,16 @@ public class ChunkContext {
} }
} }
private static final class PrefillMetricsState { private static final class PrefillFillTask implements Runnable {
private long callCounter; private final ChunkedDataCache<?> dataCache;
private final IdentityHashMap<AtomicRollingSequence, MetricBucket> buckets = new IdentityHashMap<>();
private boolean shouldSample(IrisHotPathMetricsMode mode, int sampleStride) { private PrefillFillTask(ChunkedDataCache<?> dataCache) {
if (mode == IrisHotPathMetricsMode.EXACT) { this.dataCache = dataCache;
return true;
}
long current = callCounter++;
return (current & (sampleStride - 1L)) == 0L;
} }
private void record(AtomicRollingSequence sequence, long nanos) { @Override
if (sequence == null || nanos < 0L) { public void run() {
return; dataCache.fill();
}
MetricBucket bucket = buckets.get(sequence);
if (bucket == null) {
bucket = new MetricBucket();
buckets.put(sequence, bucket);
}
bucket.nanos += nanos;
bucket.samples++;
if (bucket.samples >= PREFILL_METRICS_FLUSH_SIZE) {
double averageMs = (bucket.nanos / (double) bucket.samples) / 1_000_000D;
sequence.put(averageMs);
bucket.nanos = 0L;
bucket.samples = 0;
}
} }
} }
private static final class MetricBucket {
private long nanos;
private int samples;
}
} }

View File

@@ -73,6 +73,8 @@ public class CNG {
private double power; private double power;
private NoiseStyle leakStyle; private NoiseStyle leakStyle;
private ProceduralStream<Double> customGenerator; private ProceduralStream<Double> customGenerator;
private transient boolean identityPostFastPath;
private transient boolean fastPathStateDirty = true;
public CNG(RNG random) { public CNG(RNG random) {
this(random, 1); this(random, 1);
@@ -112,6 +114,8 @@ public class CNG {
if (generator instanceof OctaveNoise) { if (generator instanceof OctaveNoise) {
((OctaveNoise) generator).setOctaves(octaves); ((OctaveNoise) generator).setOctaves(octaves);
} }
refreshFastPathState();
} }
public static CNG signature(RNG rng) { public static CNG signature(RNG rng) {
@@ -304,6 +308,7 @@ public class CNG {
public CNG bake() { public CNG bake() {
bakedScale *= scale; bakedScale *= scale;
scale = 1; scale = 1;
markFastPathStateDirty();
return this; return this;
} }
@@ -313,6 +318,7 @@ public class CNG {
} }
children.add(c); children.add(c);
markFastPathStateDirty();
return this; return this;
} }
@@ -323,32 +329,38 @@ public class CNG {
public CNG fractureWith(CNG c, double scale) { public CNG fractureWith(CNG c, double scale) {
fracture = c; fracture = c;
fscale = scale; fscale = scale;
markFastPathStateDirty();
return this; return this;
} }
public CNG scale(double c) { public CNG scale(double c) {
scale = c; scale = c;
markFastPathStateDirty();
return this; return this;
} }
public CNG patch(double c) { public CNG patch(double c) {
patch = c; patch = c;
markFastPathStateDirty();
return this; return this;
} }
public CNG up(double c) { public CNG up(double c) {
up = c; up = c;
markFastPathStateDirty();
return this; return this;
} }
public CNG down(double c) { public CNG down(double c) {
down = c; down = c;
markFastPathStateDirty();
return this; return this;
} }
public CNG injectWith(NoiseInjector i) { public CNG injectWith(NoiseInjector i) {
injector = i == null ? ADD : i; injector = i == null ? ADD : i;
injectorMode = resolveInjectorMode(injector); injectorMode = resolveInjectorMode(injector);
markFastPathStateDirty();
return this; return this;
} }
@@ -665,7 +677,7 @@ public class CNG {
return generator.noise(x * scl, 0D, 0D) * opacity; return generator.noise(x * scl, 0D, 0D) * opacity;
} }
double fx = x + ((fracture.noise(x) - 0.5D) * fscale); double fx = x + ((fracture.noiseFast1D(x) - 0.5D) * fscale);
return generator.noise(fx * scl, 0D, 0D) * opacity; return generator.noise(fx * scl, 0D, 0D) * opacity;
} }
@@ -676,8 +688,8 @@ public class CNG {
return generator.noise(x * scl, z * scl, 0D) * opacity; return generator.noise(x * scl, z * scl, 0D) * opacity;
} }
double fx = x + ((fracture.noise(x, z) - 0.5D) * fscale); double fx = x + ((fracture.noiseFast2D(x, z) - 0.5D) * fscale);
double fz = z + ((fracture.noise(z, x) - 0.5D) * fscale); double fz = z + ((fracture.noiseFast2D(z, x) - 0.5D) * fscale);
return generator.noise(fx * scl, fz * scl, 0D) * opacity; return generator.noise(fx * scl, fz * scl, 0D) * opacity;
} }
@@ -688,9 +700,9 @@ public class CNG {
return generator.noise(x * scl, y * scl, z * scl) * opacity; return generator.noise(x * scl, y * scl, z * scl) * opacity;
} }
double fx = x + ((fracture.noise(x, y, z) - 0.5D) * fscale); double fx = x + ((fracture.noiseFast3D(x, y, z) - 0.5D) * fscale);
double fy = y + ((fracture.noise(y, x) - 0.5D) * fscale); double fy = y + ((fracture.noiseFast2D(y, x) - 0.5D) * fscale);
double fz = z + ((fracture.noise(z, x, y) - 0.5D) * fscale); double fz = z + ((fracture.noiseFast3D(z, x, y) - 0.5D) * fscale);
return generator.noise(fx * scl, fy * scl, fz * scl) * opacity; return generator.noise(fx * scl, fy * scl, fz * scl) * opacity;
} }
@@ -913,6 +925,10 @@ public class CNG {
return cache.get((int) x, (int) z); return cache.get((int) x, (int) z);
} }
if (isIdentityPostFastPath()) {
return getNoise(x, z);
}
return applyPost(getNoise(x, z), x, z); return applyPost(getNoise(x, z), x, z);
} }
@@ -921,11 +937,16 @@ public class CNG {
} }
public double noiseFast3D(double x, double y, double z) { public double noiseFast3D(double x, double y, double z) {
if (isIdentityPostFastPath()) {
return getNoise(x, y, z);
}
return applyPost(getNoise(x, y, z), x, y, z); return applyPost(getNoise(x, y, z), x, y, z);
} }
public CNG pow(double power) { public CNG pow(double power) {
this.power = power; this.power = power;
markFastPathStateDirty();
return this; return this;
} }
@@ -942,6 +963,28 @@ public class CNG {
return generator != null && generator.isStatic(); return generator != null && generator.isStatic();
} }
private boolean isIdentityPostFastPath() {
if (fastPathStateDirty) {
refreshFastPathState();
}
return identityPostFastPath;
}
private void markFastPathStateDirty() {
fastPathStateDirty = true;
}
private void refreshFastPathState() {
identityPostFastPath = power == 1D
&& children == null
&& fracture == null
&& down == 0D
&& up == 0D
&& patch == 1D;
fastPathStateDirty = false;
}
private enum InjectorMode { private enum InjectorMode {
ADD, ADD,
SRC_SUBTRACT, SRC_SUBTRACT,

View File

@@ -0,0 +1,93 @@
package art.arcane.iris.core;
import art.arcane.volmlib.util.nbt.io.NBTDeserializer;
import art.arcane.volmlib.util.nbt.io.NBTSerializer;
import art.arcane.volmlib.util.nbt.io.NamedTag;
import art.arcane.volmlib.util.nbt.tag.CompoundTag;
import art.arcane.volmlib.util.nbt.tag.IntTag;
import art.arcane.volmlib.util.nbt.tag.ListTag;
import art.arcane.volmlib.util.nbt.tag.Tag;
import org.junit.Test;
import java.io.ByteArrayInputStream;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
/**
 * Verifies that structure-NBT rewriting retargets only jigsaw pool references
 * (for both compressed and uncompressed payloads) and leaves all other data,
 * including non-jigsaw payloads, untouched.
 */
public class ExternalDataPackPipelineNbtRewriteTest {
    /**
     * A block whose palette entry is "minecraft:jigsaw" must get its nested
     * "pool" string remapped; a block referencing any other palette entry must
     * keep its original pool string. Exercised once without and once with
     * compression enabled on the serializer.
     */
    @Test
    public void rewritesOnlyJigsawPoolReferencesForCompressedAndUncompressedNbt() throws Exception {
        for (boolean compressed : new boolean[]{false, true}) {
            byte[] source = encodeStructureNbt(compressed, true);
            Map<String, String> remapped = new HashMap<>();
            remapped.put("minecraft:witch_hut/foundation", "iris_external_1:witch_hut/foundation");
            byte[] rewritten = invokeRewrite(source, remapped);
            CompoundTag root = decodeRoot(rewritten, compressed);
            ListTag<?> blocks = root.getListTag("blocks");
            // Block 0 references palette state 0 (the jigsaw entry); block 1
            // references state 1 (plain stone) — see encodeStructureNbt.
            CompoundTag jigsawBlock = (CompoundTag) blocks.get(0);
            CompoundTag nonJigsawBlock = (CompoundTag) blocks.get(1);
            assertEquals("iris_external_1:witch_hut/foundation", jigsawBlock.getCompoundTag("nbt").getString("pool"));
            assertEquals("minecraft:witch_hut/foundation", nonJigsawBlock.getCompoundTag("nbt").getString("pool"));
        }
    }

    /** A structure with no jigsaw palette entry is returned byte-for-byte unchanged. */
    @Test
    public void nonJigsawPayloadIsLeftUnchanged() throws Exception {
        byte[] source = encodeStructureNbt(false, false);
        Map<String, String> remapped = new HashMap<>();
        remapped.put("minecraft:witch_hut/foundation", "iris_external_1:witch_hut/foundation");
        byte[] rewritten = invokeRewrite(source, remapped);
        assertArrayEquals(source, rewritten);
    }

    // Thin indirection so the rewriter under test is referenced in one place.
    private byte[] invokeRewrite(byte[] input, Map<String, String> remappedKeys) {
        return StructureNbtJigsawPoolRewriter.rewrite(input, remappedKeys);
    }

    /**
     * Builds a minimal structure NBT payload: a two-entry palette (first entry
     * jigsaw or stone depending on {@code includeJigsaw}, second always stone)
     * and two blocks that both carry the same pool string in their nbt tag.
     */
    private byte[] encodeStructureNbt(boolean compressed, boolean includeJigsaw) throws Exception {
        CompoundTag root = new CompoundTag();
        ListTag<CompoundTag> palette = new ListTag<>(CompoundTag.class);
        CompoundTag firstPalette = new CompoundTag();
        firstPalette.putString("Name", includeJigsaw ? "minecraft:jigsaw" : "minecraft:stone");
        palette.add(firstPalette);
        CompoundTag secondPalette = new CompoundTag();
        secondPalette.putString("Name", "minecraft:stone");
        palette.add(secondPalette);
        root.put("palette", palette);
        ListTag<CompoundTag> blocks = new ListTag<>(CompoundTag.class);
        blocks.add(blockTag(0, "minecraft:witch_hut/foundation"));
        blocks.add(blockTag(1, "minecraft:witch_hut/foundation"));
        root.put("blocks", blocks);
        NamedTag named = new NamedTag("test", root);
        return new NBTSerializer(compressed).toBytes(named);
    }

    /**
     * Builds one "blocks" list entry: a palette state index, an nbt compound
     * holding the pool string, and a zeroed position list.
     */
    private CompoundTag blockTag(int state, String pool) {
        CompoundTag block = new CompoundTag();
        block.putInt("state", state);
        CompoundTag nbt = new CompoundTag();
        nbt.putString("pool", pool);
        block.put("nbt", nbt);
        ListTag<IntTag> pos = new ListTag<>(IntTag.class);
        pos.add(new IntTag(0));
        pos.add(new IntTag(0));
        pos.add(new IntTag(0));
        block.put("pos", pos);
        return block;
    }

    /** Deserializes a payload back to its root compound for inspection. */
    private CompoundTag decodeRoot(byte[] bytes, boolean compressed) throws Exception {
        NamedTag namedTag = new NBTDeserializer(compressed).fromStream(new ByteArrayInputStream(bytes));
        Tag<?> rootTag = namedTag.getTag();
        return (CompoundTag) rootTag;
    }
}

View File

@@ -0,0 +1,54 @@
package art.arcane.iris.core.pregenerator;
import art.arcane.volmlib.util.collection.KList;
import art.arcane.volmlib.util.math.Position2;
import org.junit.Test;
import java.util.HashSet;
import java.util.Set;
import static org.junit.Assert.assertEquals;
/**
 * Checks that the interleaved chunk traversal visits exactly the same chunk
 * set as the plain traversal, and that its visit order is identical on every
 * invocation.
 */
public class PregenTaskInterleavedTraversalTest {
    @Test
    public void interleavedTraversalIsDeterministicAndComplete() {
        PregenTask task = PregenTask.builder()
                .center(new Position2(0, 0))
                .radiusX(1024)
                .radiusZ(1024)
                .build();
        KList<Long> sequential = new KList<>();
        task.iterateAllChunks((x, z) -> sequential.add(asKey(x, z)));
        KList<Long> interleavedFirstPass = new KList<>();
        task.iterateAllChunksInterleaved((regionX, regionZ, chunkX, chunkZ, firstChunkInRegion, lastChunkInRegion) -> {
            interleavedFirstPass.add(asKey(chunkX, chunkZ));
            return true;
        });
        KList<Long> interleavedSecondPass = new KList<>();
        task.iterateAllChunksInterleaved((regionX, regionZ, chunkX, chunkZ, firstChunkInRegion, lastChunkInRegion) -> {
            interleavedSecondPass.add(asKey(chunkX, chunkZ));
            return true;
        });
        // Same chunk count, identical order across passes, identical chunk set.
        assertEquals(sequential.size(), interleavedFirstPass.size());
        assertEquals(interleavedFirstPass, interleavedSecondPass);
        assertEquals(asSet(sequential), asSet(interleavedFirstPass));
    }

    /** Copies traversal keys into a set for order-insensitive comparison. */
    private Set<Long> asSet(KList<Long> keys) {
        Set<Long> unique = new HashSet<>();
        for (Long key : keys) {
            unique.add(key);
        }
        return unique;
    }

    /** Packs a chunk coordinate pair into one long: x in the high 32 bits. */
    private long asKey(int x, int z) {
        return ((long) x << 32) | (z & 0xFFFFFFFFL);
    }
}

View File

@@ -0,0 +1,32 @@
package art.arcane.iris.core.pregenerator.methods;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/**
 * Pins the recommended/runtime pregeneration concurrency caps for Paper-like
 * and Folia servers across representative worker-thread counts.
 */
public class AsyncPregenMethodConcurrencyCapTest {
    @Test
    public void paperLikeRecommendedCapTracksWorkerThreads() {
        // {worker threads, expected cap}
        int[][] cases = {{1, 8}, {4, 8}, {12, 24}, {80, 96}};
        for (int[] entry : cases) {
            assertEquals(entry[1], AsyncPregenMethod.computePaperLikeRecommendedCap(entry[0]));
        }
    }

    @Test
    public void foliaRecommendedCapTracksWorkerThreads() {
        // {worker threads, expected cap}
        int[][] cases = {{1, 64}, {12, 64}, {20, 80}, {80, 192}};
        for (int[] entry : cases) {
            assertEquals(entry[1], AsyncPregenMethod.computeFoliaRecommendedCap(entry[0]));
        }
    }

    @Test
    public void runtimeCapUsesGlobalCeilingAndWorkerRecommendation() {
        // Expectation (from these data points): the effective cap is the smaller
        // of the configured ceiling and the worker-derived recommendation.
        assertEquals(80, AsyncPregenMethod.applyRuntimeConcurrencyCap(256, true, 20));
        assertEquals(12, AsyncPregenMethod.applyRuntimeConcurrencyCap(12, true, 20));
        assertEquals(64, AsyncPregenMethod.applyRuntimeConcurrencyCap(256, true, 8));
        assertEquals(16, AsyncPregenMethod.applyRuntimeConcurrencyCap(256, false, 8));
        assertEquals(20, AsyncPregenMethod.applyRuntimeConcurrencyCap(20, false, 40));
    }
}

View File

@@ -0,0 +1,268 @@
package art.arcane.iris.engine.mantle.components;
import art.arcane.iris.core.loader.IrisData;
import art.arcane.iris.engine.framework.Engine;
import art.arcane.iris.engine.framework.EngineMetrics;
import art.arcane.iris.engine.framework.SeedManager;
import art.arcane.iris.engine.mantle.MantleWriter;
import art.arcane.iris.engine.object.IrisCaveProfile;
import art.arcane.iris.engine.object.IrisDimension;
import art.arcane.iris.engine.object.IrisGeneratorStyle;
import art.arcane.iris.engine.object.IrisRange;
import art.arcane.iris.engine.object.IrisStyledRange;
import art.arcane.iris.engine.object.IrisWorld;
import art.arcane.iris.engine.object.NoiseStyle;
import art.arcane.volmlib.util.mantle.runtime.Mantle;
import art.arcane.volmlib.util.mantle.runtime.MantleChunk;
import art.arcane.volmlib.util.matter.Matter;
import art.arcane.volmlib.util.matter.MatterCavern;
import art.arcane.volmlib.util.matter.MatterSlice;
import org.bukkit.Bukkit;
import org.bukkit.Material;
import org.bukkit.Server;
import org.bukkit.block.data.BlockData;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.logging.Logger;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
public class IrisCaveCarver3DNearParityTest {
@BeforeClass
public static void setupBukkit() {
if (Bukkit.getServer() != null) {
return;
}
Server server = mock(Server.class);
BlockData emptyBlockData = mock(BlockData.class);
doReturn(Logger.getLogger("IrisTest")).when(server).getLogger();
doReturn("IrisTestServer").when(server).getName();
doReturn("1.0").when(server).getVersion();
doReturn("1.0").when(server).getBukkitVersion();
doReturn(emptyBlockData).when(server).createBlockData(any(Material.class));
doReturn(emptyBlockData).when(server).createBlockData(anyString());
Bukkit.setServer(server);
}
@Test
public void carvedCellDistributionStableAcrossEquivalentCarvers() {
Engine engine = createEngine(128, 92);
IrisCaveCarver3D firstCarver = new IrisCaveCarver3D(engine, createProfile());
WriterCapture firstCapture = createWriterCapture(128);
int firstCarved = firstCarver.carve(firstCapture.writer, 7, -3);
IrisCaveCarver3D secondCarver = new IrisCaveCarver3D(engine, createProfile());
WriterCapture secondCapture = createWriterCapture(128);
int secondCarved = secondCarver.carve(secondCapture.writer, 7, -3);
assertTrue(firstCarved > 0);
assertEquals(firstCarved, secondCarved);
assertEquals(firstCapture.carvedCells, secondCapture.carvedCells);
}
@Test
public void latticePathCarvesChunkEdgesAndRespectsWorldHeightClipping() {
Engine engine = createEngine(48, 46);
IrisCaveCarver3D carver = new IrisCaveCarver3D(engine, createProfile());
WriterCapture capture = createWriterCapture(48);
double[] columnWeights = new double[256];
Arrays.fill(columnWeights, 1D);
int[] precomputedSurfaceHeights = new int[256];
Arrays.fill(precomputedSurfaceHeights, 46);
int carved = carver.carve(capture.writer, 0, 0, columnWeights, 0D, 0D, new IrisRange(0D, 80D), precomputedSurfaceHeights);
assertTrue(carved > 0);
assertTrue(hasX(capture.carvedCells, 14));
assertTrue(hasX(capture.carvedCells, 15));
assertTrue(hasZ(capture.carvedCells, 14));
assertTrue(hasZ(capture.carvedCells, 15));
assertTrue(maxY(capture.carvedCells) <= 47);
assertTrue(minY(capture.carvedCells) >= 0);
}
private Engine createEngine(int worldHeight, int sampledHeight) {
Engine engine = mock(Engine.class);
IrisData data = mock(IrisData.class);
IrisDimension dimension = mock(IrisDimension.class);
SeedManager seedManager = new SeedManager(942_337_445L);
EngineMetrics metrics = new EngineMetrics(16);
IrisWorld world = IrisWorld.builder().minHeight(0).maxHeight(worldHeight).build();
doReturn(data).when(engine).getData();
doReturn(dimension).when(engine).getDimension();
doReturn(seedManager).when(engine).getSeedManager();
doReturn(metrics).when(engine).getMetrics();
doReturn(world).when(engine).getWorld();
doReturn(sampledHeight).when(engine).getHeight(anyInt(), anyInt());
doReturn(18).when(dimension).getCaveLavaHeight();
doReturn(64).when(dimension).getFluidHeight();
return engine;
}
private IrisCaveProfile createProfile() {
IrisCaveProfile profile = new IrisCaveProfile();
profile.setEnabled(true);
profile.setVerticalRange(new IrisRange(0D, 120D));
profile.setVerticalEdgeFade(14);
profile.setVerticalEdgeFadeStrength(0.21D);
profile.setBaseDensityStyle(new IrisGeneratorStyle(NoiseStyle.SIMPLEX).zoomed(0.07D));
profile.setDetailDensityStyle(new IrisGeneratorStyle(NoiseStyle.SIMPLEX).zoomed(0.17D));
profile.setWarpStyle(new IrisGeneratorStyle(NoiseStyle.SIMPLEX).zoomed(0.12D));
profile.setSurfaceBreakStyle(new IrisGeneratorStyle(NoiseStyle.SIMPLEX).zoomed(0.09D));
profile.setBaseWeight(1D);
profile.setDetailWeight(0.48D);
profile.setWarpStrength(0.37D);
profile.setDensityThreshold(new IrisStyledRange(1D, 1D, new IrisGeneratorStyle(NoiseStyle.FLAT)));
profile.setThresholdBias(0D);
profile.setSampleStep(2);
profile.setMinCarveCells(0);
profile.setRecoveryThresholdBoost(0D);
profile.setSurfaceClearance(5);
profile.setAllowSurfaceBreak(true);
profile.setSurfaceBreakNoiseThreshold(0.16D);
profile.setSurfaceBreakDepth(12);
profile.setSurfaceBreakThresholdBoost(0.17D);
profile.setAllowWater(true);
profile.setWaterMinDepthBelowSurface(8);
profile.setWaterRequiresFloor(false);
profile.setAllowLava(true);
return profile;
}
private WriterCapture createWriterCapture(int worldHeight) {
MantleWriter writer = mock(MantleWriter.class);
@SuppressWarnings("unchecked")
Mantle<Matter> mantle = mock(Mantle.class);
@SuppressWarnings("unchecked")
MantleChunk<Matter> chunk = mock(MantleChunk.class);
Map<Integer, Matter> sections = new HashMap<>();
Map<Integer, Map<Integer, MatterCavern>> sectionCells = new HashMap<>();
Set<String> carvedCells = new HashSet<>();
doReturn(mantle).when(writer).getMantle();
doReturn(worldHeight).when(mantle).getWorldHeight();
doReturn(chunk).when(writer).acquireChunk(anyInt(), anyInt());
doAnswer(invocation -> {
int sectionIndex = invocation.getArgument(0);
Matter section = sections.get(sectionIndex);
if (section != null) {
return section;
}
Matter created = createSection(sectionIndex, sectionCells, carvedCells);
sections.put(sectionIndex, created);
return created;
}).when(chunk).getOrCreate(anyInt());
return new WriterCapture(writer, carvedCells);
}
private Matter createSection(int sectionIndex, Map<Integer, Map<Integer, MatterCavern>> sectionCells, Set<String> carvedCells) {
    // One mocked Matter per section; its MatterCavern slice reads and writes a
    // per-section map keyed by the packed local coordinate.
    Matter section = mock(Matter.class);
    @SuppressWarnings("unchecked")
    MatterSlice<MatterCavern> cavernSlice = mock(MatterSlice.class);
    Map<Integer, MatterCavern> cells = sectionCells.computeIfAbsent(sectionIndex, key -> new HashMap<>());
    doReturn(cavernSlice).when(section).slice(MatterCavern.class);
    // Reads resolve against whatever was stored for the packed local key.
    doAnswer(invocation -> {
        int x = invocation.getArgument(0);
        int y = invocation.getArgument(1);
        int z = invocation.getArgument(2);
        return cells.get(packLocal(x, y, z));
    }).when(cavernSlice).get(anyInt(), anyInt(), anyInt());
    // Writes store the value and record the cell, translating the
    // section-local Y back into a world Y for the capture set.
    doAnswer(invocation -> {
        int x = invocation.getArgument(0);
        int y = invocation.getArgument(1);
        int z = invocation.getArgument(2);
        MatterCavern value = invocation.getArgument(3);
        cells.put(packLocal(x, y, z), value);
        carvedCells.add(cellKey(x, (sectionIndex << 4) + y, z));
        return null;
    }).when(cavernSlice).set(anyInt(), anyInt(), anyInt(), any(MatterCavern.class));
    return section;
}
private int packLocal(int x, int y, int z) {
    // Nibble-packs section-local coordinates into one int:
    // x in bits 8+, y in bits 4-7, z in bits 0-3.
    int packed = x << 8;
    packed |= y << 4;
    packed |= z;
    return packed;
}
private String cellKey(int x, int y, int z) {
    // Canonical "x:y:z" identity for a single carved cell.
    return new StringBuilder()
            .append(x).append(':')
            .append(y).append(':')
            .append(z)
            .toString();
}
private boolean hasX(Set<String> carvedCells, int x) {
    // True when any recorded "x:y:z" cell key carries the requested X.
    return carvedCells.stream()
            .anyMatch(cell -> Integer.parseInt(cell.split(":")[0]) == x);
}
private boolean hasZ(Set<String> carvedCells, int z) {
    // True when any recorded "x:y:z" cell key carries the requested Z.
    return carvedCells.stream()
            .anyMatch(cell -> Integer.parseInt(cell.split(":")[2]) == z);
}
private int maxY(Set<String> carvedCells) {
    // Highest recorded world Y, or Integer.MIN_VALUE when nothing was carved.
    return carvedCells.stream()
            .mapToInt(cell -> Integer.parseInt(cell.split(":")[1]))
            .max()
            .orElse(Integer.MIN_VALUE);
}
private int minY(Set<String> carvedCells) {
    // Lowest recorded world Y, or Integer.MAX_VALUE when nothing was carved.
    return carvedCells.stream()
            .mapToInt(cell -> Integer.parseInt(cell.split(":")[1]))
            .min()
            .orElse(Integer.MAX_VALUE);
}
// Pairs the mocked writer handed to the code under test with the set of
// "x:y:z" world-space keys for every cell written through it.
private static final class WriterCapture {
// Mocked MantleWriter whose slice writes feed carvedCells.
private final MantleWriter writer;
// World-space "x:y:z" keys of every carved cell.
private final Set<String> carvedCells;
private WriterCapture(MantleWriter writer, Set<String> carvedCells) {
this.writer = writer;
this.carvedCells = carvedCells;
}
}
}

View File

@@ -0,0 +1,143 @@
package art.arcane.iris.engine.mantle.components;
import art.arcane.iris.engine.object.IrisCaveProfile;
import org.junit.BeforeClass;
import org.junit.Test;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
/**
 * Reflection-based unit tests for MantleCarvingComponent's private profile
 * blending helpers: limitAndMergeBlendedProfiles (keep the top-N weighted
 * profiles and fold the dropped weight into the survivors) and
 * expandTileWeightsToColumns (per-tile weights fanned out to 16x16 columns).
 * Reflection is required because WeightedProfile and both helpers are private.
 */
public class MantleCarvingComponentTop2BlendTest {
// Reflected handles resolved once in setup(); all target private members.
private static Constructor<?> weightedProfileConstructor;
private static Method limitMethod;
private static Method expandTileMethod;
private static Field profileField;
private static Field columnWeightsField;
@BeforeClass
public static void setup() throws Exception {
// WeightedProfile is a private nested type, so it is loaded by name.
Class<?> weightedProfileClass = Class.forName("art.arcane.iris.engine.mantle.components.MantleCarvingComponent$WeightedProfile");
// Constructor shape: (profile, columnWeights, averageWeight, IrisRange).
weightedProfileConstructor = weightedProfileClass.getDeclaredConstructor(IrisCaveProfile.class, double[].class, double.class, Class.forName("art.arcane.iris.engine.object.IrisRange"));
weightedProfileConstructor.setAccessible(true);
// limitAndMergeBlendedProfiles(List<WeightedProfile>, limit, columnCount).
limitMethod = MantleCarvingComponent.class.getDeclaredMethod("limitAndMergeBlendedProfiles", List.class, int.class, int.class);
limitMethod.setAccessible(true);
expandTileMethod = MantleCarvingComponent.class.getDeclaredMethod("expandTileWeightsToColumns", double[].class);
expandTileMethod.setAccessible(true);
profileField = weightedProfileClass.getDeclaredField("profile");
profileField.setAccessible(true);
columnWeightsField = weightedProfileClass.getDeclaredField("columnWeights");
columnWeightsField.setAccessible(true);
}
@Test
public void topTwoProfilesAreKeptAndDroppedWeightsAreMergedIntoDominantColumns() throws Exception {
WeightedInput input = createWeightedProfiles();
// Limit three weighted profiles down to the top two survivors.
List<?> limited = invokeLimit(input.weightedProfiles(), 2);
assertEquals(2, limited.size());
Map<IrisCaveProfile, double[]> byProfile = extractWeightsByProfile(limited);
IrisCaveProfile first = input.profiles().first();
IrisCaveProfile second = input.profiles().second();
// After the merge, each asserted column is expected to be fully (1.0) owned
// by the survivor that had the larger input weight there: first had 0.8 in
// column 1 (vs second's 0.1), second had 0.7 in column 0 (vs first's 0.2).
assertEquals(1.0D, byProfile.get(first)[1], 0D);
assertEquals(1.0D, byProfile.get(second)[0], 0D);
}
@Test
public void topTwoMergeIsDeterministicAcrossRuns() throws Exception {
// Two independently built but identical inputs must merge identically.
WeightedInput firstInput = createWeightedProfiles();
WeightedInput secondInput = createWeightedProfiles();
List<?> first = invokeLimit(firstInput.weightedProfiles(), 2);
List<?> second = invokeLimit(secondInput.weightedProfiles(), 2);
Map<IrisCaveProfile, double[]> firstByProfile = extractWeightsByProfile(first);
Map<IrisCaveProfile, double[]> secondByProfile = extractWeightsByProfile(second);
assertEquals(firstByProfile.get(firstInput.profiles().first())[0], secondByProfile.get(secondInput.profiles().first())[0], 0D);
assertEquals(firstByProfile.get(firstInput.profiles().first())[1], secondByProfile.get(secondInput.profiles().first())[1], 0D);
assertEquals(firstByProfile.get(firstInput.profiles().second())[0], secondByProfile.get(secondInput.profiles().second())[0], 0D);
assertEquals(firstByProfile.get(firstInput.profiles().second())[1], secondByProfile.get(secondInput.profiles().second())[1], 0D);
}
@Test
public void tileWeightsExpandIntoFourColumnsPerTile() throws Exception {
// Each tile covers a 2x2 block of columns; expanded index is (x << 4) | z.
double[] tileWeights = new double[64];
tileWeights[0] = 0.42D;
tileWeights[9] = 0.73D;
double[] expanded = invokeExpand(tileWeights);
// Tile 0 (tileX=0, tileZ=0) -> columns x in {0,1}, z in {0,1}.
assertEquals(0.42D, expanded[(0 << 4) | 0], 0D);
assertEquals(0.42D, expanded[(0 << 4) | 1], 0D);
assertEquals(0.42D, expanded[(1 << 4) | 0], 0D);
assertEquals(0.42D, expanded[(1 << 4) | 1], 0D);
// Tile 9 (tileX=1, tileZ=1 with 8 tiles per axis) -> columns x in {2,3}, z in {2,3}.
assertEquals(0.73D, expanded[(2 << 4) | 2], 0D);
assertEquals(0.73D, expanded[(2 << 4) | 3], 0D);
assertEquals(0.73D, expanded[(3 << 4) | 2], 0D);
assertEquals(0.73D, expanded[(3 << 4) | 3], 0D);
}
private WeightedInput createWeightedProfiles() throws Exception {
// Three enabled profiles with descending base weights; per-column weights
// are only populated for columns 0 and 1 (the columns asserted on above).
IrisCaveProfile first = new IrisCaveProfile().setEnabled(true).setBaseWeight(1.31D);
IrisCaveProfile second = new IrisCaveProfile().setEnabled(true).setBaseWeight(1.17D);
IrisCaveProfile third = new IrisCaveProfile().setEnabled(true).setBaseWeight(0.93D);
Profiles profiles = new Profiles(first, second, third);
double[] firstWeights = new double[64];
firstWeights[0] = 0.2D;
firstWeights[1] = 0.8D;
double[] secondWeights = new double[64];
secondWeights[0] = 0.7D;
secondWeights[1] = 0.1D;
double[] thirdWeights = new double[64];
thirdWeights[0] = 0.3D;
thirdWeights[1] = 0.4D;
List<Object> weighted = new ArrayList<>();
// The IrisRange argument is passed as null here; presumably the merge under
// test does not consult it -- TODO confirm against WeightedProfile.
weighted.add(weightedProfileConstructor.newInstance(first, firstWeights, average(firstWeights), null));
weighted.add(weightedProfileConstructor.newInstance(second, secondWeights, average(secondWeights), null));
weighted.add(weightedProfileConstructor.newInstance(third, thirdWeights, average(thirdWeights), null));
return new WeightedInput(weighted, profiles);
}
// Invokes the private limitAndMergeBlendedProfiles (static: target is null)
// with a fixed 64-column layout.
private List<?> invokeLimit(List<Object> weightedProfiles, int limit) throws Exception {
return (List<?>) limitMethod.invoke(null, weightedProfiles, limit, 64);
}
// Invokes the private expandTileWeightsToColumns; the cast to Object keeps
// the varargs invoke from spreading the array into separate arguments.
private double[] invokeExpand(double[] tileWeights) throws Exception {
return (double[]) expandTileMethod.invoke(null, (Object) tileWeights);
}
// Reads profile/columnWeights off each WeightedProfile via reflection, keyed
// by profile identity (IdentityHashMap avoids relying on profile equals()).
private Map<IrisCaveProfile, double[]> extractWeightsByProfile(List<?> weightedProfiles) throws Exception {
Map<IrisCaveProfile, double[]> byProfile = new IdentityHashMap<>();
for (Object weightedProfile : weightedProfiles) {
IrisCaveProfile profile = (IrisCaveProfile) profileField.get(weightedProfile);
double[] weights = (double[]) columnWeightsField.get(weightedProfile);
byProfile.put(profile, weights);
}
return byProfile;
}
// Arithmetic mean over all 64 per-column weights.
private double average(double[] weights) {
double total = 0D;
for (double weight : weights) {
total += weight;
}
return total / weights.length;
}
// The three source profiles, in creation order (descending base weight).
private record Profiles(IrisCaveProfile first, IrisCaveProfile second, IrisCaveProfile third) {
}
// Pairs the reflective WeightedProfile instances with their source profiles.
private record WeightedInput(List<Object> weightedProfiles, Profiles profiles) {
}
}

View File

@@ -0,0 +1,186 @@
package art.arcane.iris.engine.modifier;
import org.junit.BeforeClass;
import org.junit.Test;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import static org.junit.Assert.assertEquals;
/**
 * Parity tests showing that IrisCarveModifier's private ColumnMask (exercised
 * via add/nextSetBit/clear) resolves exactly the same carve zones as the
 * legacy resolver that sorted a raw height array. ColumnMask is private, so
 * every interaction goes through reflection.
 */
public class IrisCarveModifierZoneParityTest {
// Reflected handles to IrisCarveModifier$ColumnMask and its accessors.
private static Constructor<?> columnMaskConstructor;
private static Method addMethod;
private static Method nextSetBitMethod;
private static Method clearMethod;
@BeforeClass
public static void setup() throws Exception {
Class<?> columnMaskClass = Class.forName("art.arcane.iris.engine.modifier.IrisCarveModifier$ColumnMask");
columnMaskConstructor = columnMaskClass.getDeclaredConstructor();
addMethod = columnMaskClass.getDeclaredMethod("add", int.class);
nextSetBitMethod = columnMaskClass.getDeclaredMethod("nextSetBit", int.class);
clearMethod = columnMaskClass.getDeclaredMethod("clear");
columnMaskConstructor.setAccessible(true);
addMethod.setAccessible(true);
nextSetBitMethod.setAccessible(true);
clearMethod.setAccessible(true);
}
@Test
public void randomColumnZonesMatchLegacySortedResolver() throws Exception {
// 400 seeded-random scenarios: 1..180 distinct heights drawn from
// [-80, 399], deliberately including out-of-range values that both
// resolvers are expected to skip.
Object columnMask = columnMaskConstructor.newInstance();
Random random = new Random(913_447L);
int maxHeight = 320;
for (int scenario = 0; scenario < 400; scenario++) {
clearMethod.invoke(columnMask);
int sampleSize = 1 + random.nextInt(180);
Set<Integer> uniqueHeights = new HashSet<>();
while (uniqueHeights.size() < sampleSize) {
uniqueHeights.add(random.nextInt(480) - 80);
}
int[] heights = toIntArray(uniqueHeights);
for (int index = 0; index < heights.length; index++) {
addMethod.invoke(columnMask, heights[index]);
}
List<String> expectedZones = legacyZones(heights, maxHeight);
List<String> actualZones = bitsetZones(columnMask, maxHeight);
assertEquals("scenario=" + scenario, expectedZones, actualZones);
}
}
@Test
public void edgeColumnsMatchLegacySortedResolver() throws Exception {
// Hand-picked boundary cases: negative heights, heights above maxHeight,
// strictly alternating heights (no contiguous runs), and one long run.
Object columnMask = columnMaskConstructor.newInstance();
int maxHeight = 320;
int[][] scenarios = new int[][]{
{-10, -1, 0, 1, 2, 5, 6, 9, 10, 11, 12, 200, 201, 205},
{300, 301, 302, 304, 305, 307, 308, 309, 310, 400, 401},
{0, 2, 4, 6, 8, 10, 12},
{10, 11, 12, 13, 14, 15, 16, 17}
};
for (int scenario = 0; scenario < scenarios.length; scenario++) {
clearMethod.invoke(columnMask);
// Defensive copy so legacyZones' in-place sort never mutates the fixture.
int[] heights = Arrays.copyOf(scenarios[scenario], scenarios[scenario].length);
for (int index = 0; index < heights.length; index++) {
addMethod.invoke(columnMask, heights[index]);
}
List<String> expectedZones = legacyZones(heights, maxHeight);
List<String> actualZones = bitsetZones(columnMask, maxHeight);
assertEquals("edge-scenario=" + scenario, expectedZones, actualZones);
}
}
// Unboxes a set of heights into a plain int[] (iteration order does not
// matter: legacyZones sorts its own copy before resolving zones).
private int[] toIntArray(Set<Integer> values) {
int[] array = new int[values.size()];
int index = 0;
for (Integer value : values) {
array[index++] = value;
}
return array;
}
// Reference implementation: sort the heights, then walk them tracking the
// current contiguous run as (floor..buf) with ceiling marking the run's top.
// A run is emitted as a zone when it breaks and isValidZone accepts it.
// Out-of-range heights are skipped, but -- mirroring the legacy code --
// floor/buf are still seeded from sorted[0] even when it is out of range.
private List<String> legacyZones(int[] heights, int maxHeight) {
List<String> zones = new ArrayList<>();
if (heights.length == 0) {
return zones;
}
int[] sorted = Arrays.copyOf(heights, heights.length);
Arrays.sort(sorted);
int floor = sorted[0];
int ceiling = -1;
int buf = sorted[0] - 1;
for (int index = 0; index < sorted.length; index++) {
int y = sorted[index];
if (y < 0 || y > maxHeight) {
continue;
}
if (y == buf + 1) {
// Height extends the current contiguous run.
buf = y;
ceiling = buf;
} else if (isValidZone(floor, ceiling, maxHeight)) {
// Run broke and the finished run forms a valid zone: emit and restart.
zones.add(zoneKey(floor, ceiling));
floor = y;
ceiling = -1;
buf = y;
} else {
// Run broke but was not a valid zone: just restart from this height.
floor = y;
ceiling = -1;
buf = y;
}
}
// Flush the trailing run.
if (isValidZone(floor, ceiling, maxHeight)) {
zones.add(zoneKey(floor, ceiling));
}
return zones;
}
// Same state machine as legacyZones, but driven by the mask's set bits in
// ascending order via nextSetBit; must produce an identical zone list.
private List<String> bitsetZones(Object columnMask, int maxHeight) throws Exception {
List<String> zones = new ArrayList<>();
int firstHeight = nextSetBit(columnMask, 0);
if (firstHeight < 0) {
return zones;
}
int floor = firstHeight;
int ceiling = -1;
int buf = firstHeight - 1;
int y = firstHeight;
while (y >= 0) {
if (y >= 0 && y <= maxHeight) {
if (y == buf + 1) {
buf = y;
ceiling = buf;
} else if (isValidZone(floor, ceiling, maxHeight)) {
zones.add(zoneKey(floor, ceiling));
floor = y;
ceiling = -1;
buf = y;
} else {
floor = y;
ceiling = -1;
buf = y;
}
}
// nextSetBit returns a negative value once the mask is exhausted.
y = nextSetBit(columnMask, y + 1);
}
if (isValidZone(floor, ceiling, maxHeight)) {
zones.add(zoneKey(floor, ceiling));
}
return zones;
}
// Reflective wrapper around ColumnMask.nextSetBit(int).
private int nextSetBit(Object columnMask, int fromBit) throws Exception {
return (Integer) nextSetBitMethod.invoke(columnMask, fromBit);
}
// A zone is valid when floor < ceiling, both are within [0, maxHeight], and
// there is at least one block strictly between them.
private boolean isValidZone(int floor, int ceiling, int maxHeight) {
return floor < ceiling
&& floor >= 0
&& ceiling <= maxHeight
&& ((ceiling - floor) - 1) > 0;
}
// Canonical "floor:ceiling" identity used to compare zone lists.
private String zoneKey(int floor, int ceiling) {
return floor + ":" + ceiling;
}
}

View File

@@ -103,6 +103,27 @@ public class IrisDimensionCarvingResolverParityTest {
} }
} }
@Test
// Resolving the same chunk's tile plan twice with the same root entry and
// shared resolver state must yield the very same entry instances (assertSame,
// not just equals) for every tile, across a grid of chunk coordinates.
public void tileAnchoredChunkPlanResolutionIsStableAcrossRepeatedBuilds() {
Fixture fixture = createMixedDepthFixture();
IrisDimensionCarvingResolver.State state = new IrisDimensionCarvingResolver.State();
IrisDimensionCarvingEntry root = legacyResolveRootEntry(fixture.engine, 80);
// Sweep chunk coordinates from -24 to 24 in steps of 6 on both axes.
for (int chunkX = -24; chunkX <= 24; chunkX += 6) {
for (int chunkZ = -24; chunkZ <= 24; chunkZ += 6) {
IrisDimensionCarvingEntry[] firstPlan = buildTilePlan(fixture.engine, root, chunkX, chunkZ, state);
IrisDimensionCarvingEntry[] secondPlan = buildTilePlan(fixture.engine, root, chunkX, chunkZ, state);
for (int tileIndex = 0; tileIndex < firstPlan.length; tileIndex++) {
assertSame(
"tile plan mismatch at chunkX=" + chunkX + " chunkZ=" + chunkZ + " tileIndex=" + tileIndex,
firstPlan[tileIndex],
secondPlan[tileIndex]
);
}
}
}
}
private Fixture createFixture() { private Fixture createFixture() {
IrisBiome rootLowBiome = mock(IrisBiome.class); IrisBiome rootLowBiome = mock(IrisBiome.class);
IrisBiome rootHighBiome = mock(IrisBiome.class); IrisBiome rootHighBiome = mock(IrisBiome.class);
@@ -251,6 +272,19 @@ public class IrisDimensionCarvingResolverParityTest {
return new Fixture(engine); return new Fixture(engine);
} }
// Resolves the carving entry for each of the 64 tiles in a chunk: an 8x8 grid
// of 2-block tiles covering the 16x16 chunk, anchored at each tile's world
// coordinate. Tile index layout is (tileX * 8) + tileZ.
private IrisDimensionCarvingEntry[] buildTilePlan(Engine engine, IrisDimensionCarvingEntry rootEntry, int chunkX, int chunkZ, IrisDimensionCarvingResolver.State state) {
IrisDimensionCarvingEntry[] plan = new IrisDimensionCarvingEntry[64];
for (int tileX = 0; tileX < 8; tileX++) {
// (chunk << 4) is the chunk's world block origin; (tile << 1) steps 2 blocks per tile.
int worldX = (chunkX << 4) + (tileX << 1);
for (int tileZ = 0; tileZ < 8; tileZ++) {
int worldZ = (chunkZ << 4) + (tileZ << 1);
int tileIndex = (tileX * 8) + tileZ;
plan[tileIndex] = IrisDimensionCarvingResolver.resolveFromRoot(engine, rootEntry, worldX, worldZ, state);
}
}
return plan;
}
private IrisDimensionCarvingEntry buildEntry(String id, String biome, IrisRange worldRange, int depth, List<String> children) { private IrisDimensionCarvingEntry buildEntry(String id, String biome, IrisRange worldRange, int depth, List<String> children) {
IrisDimensionCarvingEntry entry = new IrisDimensionCarvingEntry(); IrisDimensionCarvingEntry entry = new IrisDimensionCarvingEntry();
entry.setId(id); entry.setId(id);

View File

@@ -0,0 +1,148 @@
package art.arcane.iris.util.project.context;
import art.arcane.iris.engine.IrisComplex;
import art.arcane.iris.engine.object.IrisBiome;
import art.arcane.iris.engine.object.IrisRegion;
import art.arcane.iris.util.project.stream.ProceduralStream;
import org.bukkit.block.data.BlockData;
import org.junit.Test;
import java.util.concurrent.atomic.AtomicInteger;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.anyDouble;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
/**
 * Verifies ChunkContext's prefill plans against counting mocked IrisComplex
 * streams: NO_CAVE eagerly fills every per-column cache except the cave biome
 * cache (which then fills lazily with one memoized call per column), while
 * ALL prefills the cave cache too so later cave reads never hit the stream.
 */
public class ChunkContextPrefillPlanTest {
@Test
public void noCavePrefillSkipsCaveCacheFill() {
AtomicInteger caveCalls = new AtomicInteger();
AtomicInteger heightCalls = new AtomicInteger();
AtomicInteger biomeCalls = new AtomicInteger();
AtomicInteger rockCalls = new AtomicInteger();
AtomicInteger fluidCalls = new AtomicInteger();
AtomicInteger regionCalls = new AtomicInteger();
ChunkContext context = createContext(
ChunkContext.PrefillPlan.NO_CAVE,
caveCalls,
heightCalls,
biomeCalls,
rockCalls,
fluidCalls,
regionCalls
);
// Every non-cave stream is sampled once per column: 16 x 16 = 256.
assertEquals(256, heightCalls.get());
assertEquals(256, biomeCalls.get());
assertEquals(256, rockCalls.get());
assertEquals(256, fluidCalls.get());
assertEquals(256, regionCalls.get());
// NO_CAVE must not touch the cave stream during construction.
assertEquals(0, caveCalls.get());
// The mocked height stream returns worldX * 1000 + worldZ, so 34051 means
// worldX=34, worldZ=51 -- local (2, 3) offset from the (32, 48) origin
// passed to the constructor.
assertEquals(34051D, context.getHeight().get(2, 3), 0D);
// Under NO_CAVE the cave cache fills lazily: the first read samples the
// stream, the second is served from the memoized cell.
context.getCave().get(2, 3);
context.getCave().get(2, 3);
assertEquals(1, caveCalls.get());
}
@Test
public void allPrefillIncludesCaveCacheFill() {
AtomicInteger caveCalls = new AtomicInteger();
AtomicInteger heightCalls = new AtomicInteger();
AtomicInteger biomeCalls = new AtomicInteger();
AtomicInteger rockCalls = new AtomicInteger();
AtomicInteger fluidCalls = new AtomicInteger();
AtomicInteger regionCalls = new AtomicInteger();
ChunkContext context = createContext(
ChunkContext.PrefillPlan.ALL,
caveCalls,
heightCalls,
biomeCalls,
rockCalls,
fluidCalls,
regionCalls
);
assertEquals(256, heightCalls.get());
assertEquals(256, biomeCalls.get());
assertEquals(256, rockCalls.get());
assertEquals(256, fluidCalls.get());
assertEquals(256, regionCalls.get());
// ALL prefills the cave cache up front...
assertEquals(256, caveCalls.get());
// ...so a later read is served from the cache with no extra stream call.
context.getCave().get(1, 1);
assertEquals(256, caveCalls.get());
}
// Builds a ChunkContext over a fully mocked IrisComplex whose streams count
// every sample, so the tests can assert exactly how often each stream is hit.
private ChunkContext createContext(
ChunkContext.PrefillPlan prefillPlan,
AtomicInteger caveCalls,
AtomicInteger heightCalls,
AtomicInteger biomeCalls,
AtomicInteger rockCalls,
AtomicInteger fluidCalls,
AtomicInteger regionCalls
) {
IrisComplex complex = mock(IrisComplex.class);
@SuppressWarnings("unchecked")
ProceduralStream<Double> heightStream = mock(ProceduralStream.class);
// Height encodes its input coordinates (worldX * 1000 + worldZ) so the
// tests can verify which world position was cached for a local column.
doAnswer(invocation -> {
heightCalls.incrementAndGet();
double worldX = invocation.getArgument(0);
double worldZ = invocation.getArgument(1);
return (worldX * 1000D) + worldZ;
}).when(heightStream).get(anyDouble(), anyDouble());
@SuppressWarnings("unchecked")
ProceduralStream<IrisBiome> biomeStream = mock(ProceduralStream.class);
IrisBiome biome = mock(IrisBiome.class);
doAnswer(invocation -> {
biomeCalls.incrementAndGet();
return biome;
}).when(biomeStream).get(anyDouble(), anyDouble());
@SuppressWarnings("unchecked")
ProceduralStream<IrisBiome> caveStream = mock(ProceduralStream.class);
IrisBiome caveBiome = mock(IrisBiome.class);
doAnswer(invocation -> {
caveCalls.incrementAndGet();
return caveBiome;
}).when(caveStream).get(anyDouble(), anyDouble());
@SuppressWarnings("unchecked")
ProceduralStream<BlockData> rockStream = mock(ProceduralStream.class);
BlockData rock = mock(BlockData.class);
doAnswer(invocation -> {
rockCalls.incrementAndGet();
return rock;
}).when(rockStream).get(anyDouble(), anyDouble());
@SuppressWarnings("unchecked")
ProceduralStream<BlockData> fluidStream = mock(ProceduralStream.class);
BlockData fluid = mock(BlockData.class);
doAnswer(invocation -> {
fluidCalls.incrementAndGet();
return fluid;
}).when(fluidStream).get(anyDouble(), anyDouble());
@SuppressWarnings("unchecked")
ProceduralStream<IrisRegion> regionStream = mock(ProceduralStream.class);
IrisRegion region = mock(IrisRegion.class);
doAnswer(invocation -> {
regionCalls.incrementAndGet();
return region;
}).when(regionStream).get(anyDouble(), anyDouble());
doReturn(heightStream).when(complex).getHeightStream();
doReturn(biomeStream).when(complex).getTrueBiomeStream();
doReturn(caveStream).when(complex).getCaveBiomeStream();
doReturn(rockStream).when(complex).getRockStream();
doReturn(fluidStream).when(complex).getFluidStream();
doReturn(regionStream).when(complex).getRegionStream();
// (32, 48) is the context origin -- presumably the chunk's world block
// origin (the height assertion above is consistent with that); the trailing
// (true, null) arguments' semantics are not visible here -- TODO confirm.
return new ChunkContext(32, 48, complex, true, prefillPlan, null);
}
}

View File

@@ -0,0 +1,100 @@
package art.arcane.iris.util.project.noise;
import art.arcane.volmlib.util.math.RNG;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.assertEquals;
public class CNGFastPathParityTest {
    /**
     * The baked fast-path samplers must return exactly what the general-purpose
     * noise() entry points return for plain, untransformed generators.
     */
    @Test
    public void identityFastPathMatchesLegacyAcrossSeedAndCoordinateGrid() {
        for (long seed = 3L; seed <= 11L; seed++) {
            assertFastPathParity("identity-seed-" + seed, createIdentityGenerator(seed));
        }
    }

    /**
     * Same parity check for generators wrapped in power/offset/fracture/child
     * transformations, so the fast path must honor every post-processing step.
     */
    @Test
    public void transformedGeneratorsMatchLegacyAcrossSeedAndCoordinateGrid() {
        for (long seed = 21L; seed <= 27L; seed++) {
            List<CNG> cases = createTransformedGenerators(seed);
            for (int caseIndex = 0; caseIndex < cases.size(); caseIndex++) {
                assertFastPathParity("transformed-seed-" + seed + "-case-" + caseIndex, cases.get(caseIndex));
            }
        }
    }

    // Sweeps a coarse 2D and 3D coordinate grid and demands 1e-12 agreement
    // between the legacy sampler and the fast-path sampler at every point.
    private void assertFastPathParity(String label, CNG generator) {
        for (int sampleX = -320; sampleX <= 320; sampleX += 19) {
            for (int sampleZ = -320; sampleZ <= 320; sampleZ += 23) {
                double legacy = generator.noise(sampleX, sampleZ);
                double fast = generator.noiseFast2D(sampleX, sampleZ);
                assertEquals(label + " 2D x=" + sampleX + " z=" + sampleZ, legacy, fast, 1.0E-12D);
            }
        }
        for (int sampleX = -128; sampleX <= 128; sampleX += 17) {
            for (int sampleY = -96; sampleY <= 96; sampleY += 13) {
                for (int sampleZ = -128; sampleZ <= 128; sampleZ += 19) {
                    double legacy = generator.noise(sampleX, sampleY, sampleZ);
                    double fast = generator.noiseFast3D(sampleX, sampleY, sampleZ);
                    assertEquals(label + " 3D x=" + sampleX + " y=" + sampleY + " z=" + sampleZ, legacy, fast, 1.0E-12D);
                }
            }
        }
    }

    // Single-octave CNG over a deterministic sine source; the offset varies
    // with the seed so different seeds produce different fields.
    private CNG createIdentityGenerator(long seed) {
        NoiseGenerator source = new DeterministicNoiseGenerator(0.31D + (seed * 0.01D));
        return new CNG(new RNG(seed), source, 1D, 1).bake();
    }

    // Transformed cases: power curve, up/down/patch offsets, fracture against
    // an independent generator, and a generator with two children.
    private List<CNG> createTransformedGenerators(long seed) {
        List<CNG> cases = new ArrayList<>();
        cases.add(createIdentityGenerator(seed).pow(1.27D));
        cases.add(createIdentityGenerator(seed + 1L).up(0.08D).down(0.03D).patch(0.91D));
        cases.add(createIdentityGenerator(seed + 2L).fractureWith(createIdentityGenerator(seed + 300L), 12.5D));
        CNG parent = createIdentityGenerator(seed + 3L);
        parent.child(createIdentityGenerator(seed + 400L));
        parent.child(createIdentityGenerator(seed + 401L));
        cases.add(parent);
        return cases;
    }

    // Deterministic, sine-based noise source bounded to roughly [0.2, 0.8].
    private static class DeterministicNoiseGenerator implements NoiseGenerator {
        private final double offset;

        private DeterministicNoiseGenerator(double offset) {
            this.offset = offset;
        }

        // Shared waveform shaping so every arity agrees on the output range.
        private double wave(double angle) {
            return 0.2D + (((Math.sin(angle) + 1D) * 0.5D) * 0.6D);
        }

        @Override
        public double noise(double x) {
            return wave((x * 0.011D) + offset);
        }

        @Override
        public double noise(double x, double z) {
            return wave((x * 0.013D) + (z * 0.017D) + offset);
        }

        @Override
        public double noise(double x, double y, double z) {
            return wave((x * 0.007D) + (y * 0.015D) + (z * 0.019D) + offset);
        }
    }
}

View File

@@ -45,12 +45,14 @@ import java.lang.reflect.Field;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.util.*; import java.util.*;
import java.util.concurrent.CompletableFuture; import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Supplier; import java.util.function.Supplier;
public class IrisChunkGenerator extends CustomChunkGenerator { public class IrisChunkGenerator extends CustomChunkGenerator {
private static final WrappedField<ChunkGenerator, BiomeSource> BIOME_SOURCE; private static final WrappedField<ChunkGenerator, BiomeSource> BIOME_SOURCE;
private static final WrappedReturningMethod<Heightmap, Object> SET_HEIGHT; private static final WrappedReturningMethod<Heightmap, Object> SET_HEIGHT;
private static final int EXTERNAL_FOUNDATION_MAX_DEPTH = 96; private static final int EXTERNAL_FOUNDATION_MAX_DEPTH = 96;
private static final Set<String> loggedExternalStructureFingerprintKeys = ConcurrentHashMap.newKeySet();
private final ChunkGenerator delegate; private final ChunkGenerator delegate;
private final Engine engine; private final Engine engine;
private volatile Registry<Structure> cachedStructureRegistry; private volatile Registry<Structure> cachedStructureRegistry;
@@ -389,9 +391,13 @@ public class IrisChunkGenerator extends CustomChunkGenerator {
} }
String normalized = structureKey.toLowerCase(Locale.ROOT); String normalized = structureKey.toLowerCase(Locale.ROOT);
return "minecraft:ancient_city".equals(normalized) if (!"minecraft:ancient_city".equals(normalized)
|| "minecraft:mineshaft".equals(normalized) && !"minecraft:mineshaft".equals(normalized)
|| "minecraft:mineshaft_mesa".equals(normalized); && !"minecraft:mineshaft_mesa".equals(normalized)) {
return false;
}
return loggedExternalStructureFingerprintKeys.add(normalized);
} }
private static void logExternalStructureFingerprint(String structureKey, StructureStart start) { private static void logExternalStructureFingerprint(String structureKey, StructureStart start) {