This commit is contained in:
Brian Neumann-Fopiano
2026-02-22 23:25:01 -05:00
parent 651dfa247e
commit 18d4dce1db
36 changed files with 2109 additions and 253 deletions

View File

@@ -1,6 +1,8 @@
import io.github.slimjar.resolver.data.Mirror
import org.ajoberstar.grgit.Grgit
import org.gradle.api.Task
import org.gradle.api.tasks.Copy
import org.gradle.api.tasks.TaskProvider
import org.gradle.api.tasks.compile.JavaCompile
import org.gradle.jvm.tasks.Jar
import org.gradle.jvm.toolchain.JavaLanguageVersion
@@ -228,6 +230,28 @@ tasks.named('processResources').configure {
}
}
// When any requested task name contains "test", detach processResources from the
// `classes` lifecycle and disable it outright, so test invocations skip resource processing.
def runningTestTasks = gradle.startParameter.taskNames.any { String taskName -> taskName.toLowerCase().contains('test') }
if (runningTestTasks) {
    TaskProvider<Task> processResourcesTask = tasks.named('processResources')
    tasks.named('classes').configure { Task classesTask ->
        // Snapshot dependsOn into a fresh set so we can filter without mutating during iteration.
        Set<Object> dependencies = new LinkedHashSet<Object>(classesTask.getDependsOn())
        dependencies.removeIf { Object dependency ->
            // Gradle dependency notations vary: match provider, realized task, or string/path forms.
            if (dependency instanceof TaskProvider) {
                return ((TaskProvider<?>) dependency).name == processResourcesTask.name
            }
            if (dependency instanceof Task) {
                return ((Task) dependency).name == processResourcesTask.name
            }
            // String notations may be a bare name or a qualified task path like ":sub:processResources".
            String dependencyName = String.valueOf(dependency)
            return dependencyName == 'processResources' || dependencyName.endsWith(':processResources')
        }
        classesTask.setDependsOn(dependencies)
    }
    processResourcesTask.configure { Task task ->
        task.enabled = false
    }
}
tasks.named('shadowJar', com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar).configure {
dependsOn(embeddedAgentJar)
mergeServiceFiles()

View File

@@ -0,0 +1,7 @@
package art.arcane.iris.core;
/**
 * Controls how hot-path metrics are collected.
 * Defaults to SAMPLED in IrisSettings, paired with a power-of-two sample stride.
 */
public enum IrisHotPathMetricsMode {
    // Record a sampled subset of events (default; see hotPathMetricsSampleStride in settings).
    SAMPLED,
    // Record every event. NOTE(review): presumably higher overhead than SAMPLED — confirm at usage sites.
    EXACT,
    // Record nothing.
    DISABLED
}

View File

@@ -0,0 +1,7 @@
package art.arcane.iris.core;
/**
 * Selects the pregen executor backend used on paper-like (non-Folia) runtimes.
 * AUTO resolves to SERVICE when virtual threads are enabled in pregen settings,
 * otherwise TICKET (see AsyncPregenMethod#resolvePaperLikeBackendMode).
 */
public enum IrisPaperLikeBackendMode {
    // Resolve automatically from pregen settings.
    AUTO,
    // Ticket-based chunk loading backend (logged as "paper-ticket").
    TICKET,
    // Executor-service backend (logged as "paper-service").
    SERVICE
}

View File

@@ -0,0 +1,70 @@
package art.arcane.iris.core;
import art.arcane.volmlib.util.scheduling.FoliaScheduler;
import org.bukkit.Bukkit;
import org.bukkit.Server;
import java.util.Locale;
/**
 * The scheduling model Iris should use for the current server runtime.
 */
public enum IrisRuntimeSchedulerMode {
    // Resolve from the detected runtime (see resolve()).
    AUTO,
    // Bukkit/Spigot/Paper-family scheduling (single global region).
    PAPER_LIKE,
    // Folia regionized scheduling.
    FOLIA;

    /**
     * Determines the effective scheduler mode.
     * <p>
     * A regionized (Folia) runtime always resolves to FOLIA regardless of config.
     * Otherwise an explicit, non-AUTO configured mode wins — except FOLIA, which
     * cannot be forced on a non-regionized runtime and degrades to PAPER_LIKE.
     * With AUTO (or no config), the brand strings are probed for "folia"; every
     * other brand uses PAPER_LIKE.
     *
     * @param pregen pregen settings (may be null)
     * @return the resolved mode; never AUTO, never null
     */
    public static IrisRuntimeSchedulerMode resolve(IrisSettings.IrisSettingsPregen pregen) {
        Server server = Bukkit.getServer();
        if (FoliaScheduler.isRegionizedRuntime(server)) {
            // A regionized runtime is authoritative: configuration cannot override it.
            return FOLIA;
        }
        IrisRuntimeSchedulerMode configuredMode = pregen == null ? null : pregen.getRuntimeSchedulerMode();
        if (configuredMode != null && configuredMode != AUTO) {
            // FOLIA cannot be forced onto a non-regionized runtime; degrade to PAPER_LIKE.
            return configuredMode == FOLIA ? PAPER_LIKE : configuredMode;
        }
        String bukkitName = Bukkit.getName();
        String bukkitVersion = Bukkit.getVersion();
        String serverClassName = server == null ? "" : server.getClass().getName();
        if (matchesAny("folia", bukkitName, bukkitVersion, serverClassName)) {
            return FOLIA;
        }
        // Every non-Folia brand (paper, purpur, pufferfish, spigot, craftbukkit, or
        // unknown) resolves to PAPER_LIKE: the previous per-brand chain and the
        // second regionizedRuntime check were unreachable/redundant and are removed.
        return PAPER_LIKE;
    }

    // True when any of the given values contains the token, case-insensitively.
    private static boolean matchesAny(String token, String... values) {
        for (String value : values) {
            if (containsIgnoreCase(value, token)) {
                return true;
            }
        }
        return false;
    }

    // Locale-independent, null/empty-safe case-insensitive containment check.
    private static boolean containsIgnoreCase(String value, String contains) {
        if (value == null || contains == null || contains.isEmpty()) {
            return false;
        }
        return value.toLowerCase(Locale.ROOT).contains(contains.toLowerCase(Locale.ROOT));
    }
}

View File

@@ -151,9 +151,16 @@ public class IrisSettings {
public boolean useHighPriority = false;
public boolean useVirtualThreads = false;
public boolean useTicketQueue = true;
public IrisRuntimeSchedulerMode runtimeSchedulerMode = IrisRuntimeSchedulerMode.AUTO;
public IrisPaperLikeBackendMode paperLikeBackendMode = IrisPaperLikeBackendMode.AUTO;
public IrisHotPathMetricsMode hotPathMetricsMode = IrisHotPathMetricsMode.SAMPLED;
public int hotPathMetricsSampleStride = 1024;
public int maxConcurrency = 256;
public int paperLikeMaxConcurrency = 96;
public int foliaMaxConcurrency = 32;
public int chunkLoadTimeoutSeconds = 15;
public int timeoutWarnIntervalMs = 500;
public int saveIntervalMs = 120_000;
public boolean startupNoisemapPrebake = true;
public boolean enablePregenPerformanceProfile = true;
public int pregenProfileNoiseCacheSize = 4_096;
@@ -167,6 +174,40 @@ public class IrisSettings {
// Configured warn interval with a 250ms floor so warnings cannot be made to spam.
public int getTimeoutWarnIntervalMs() {
    return timeoutWarnIntervalMs < 250 ? 250 : timeoutWarnIntervalMs;
}
// Paper-like pregen concurrency cap; never below 1.
public int getPaperLikeMaxConcurrency() {
    return paperLikeMaxConcurrency < 1 ? 1 : paperLikeMaxConcurrency;
}
// Folia pregen concurrency cap; never below 1.
public int getFoliaMaxConcurrency() {
    return foliaMaxConcurrency < 1 ? 1 : foliaMaxConcurrency;
}
// Configured backend mode; a null (unset/unparsable config) falls back to AUTO.
public IrisPaperLikeBackendMode getPaperLikeBackendMode() {
    return paperLikeBackendMode != null ? paperLikeBackendMode : IrisPaperLikeBackendMode.AUTO;
}
// Configured metrics mode; a null (unset/unparsable config) falls back to SAMPLED.
public IrisHotPathMetricsMode getHotPathMetricsMode() {
    return hotPathMetricsMode != null ? hotPathMetricsMode : IrisHotPathMetricsMode.SAMPLED;
}
/**
 * Sampling stride clamped to [1, 65536] and rounded DOWN to a power of two,
 * so callers can sample with a cheap bitmask instead of a modulo.
 */
public int getHotPathMetricsSampleStride() {
    int stride = Math.max(1, Math.min(hotPathMetricsSampleStride, 65_536));
    // highestOneBit(x) >= 1 for any x >= 1, so the old "<= 0" fallback was unreachable.
    return Integer.highestOneBit(stride);
}
// Pregen save interval clamped to [5s, 15min].
public int getSaveIntervalMs() {
    if (saveIntervalMs < 5_000) {
        return 5_000;
    }
    return Math.min(saveIntervalMs, 900_000);
}
}
@Data

View File

@@ -116,6 +116,10 @@ public class FoliaWorldsLink {
}
// The Folia worlds link is only meaningful on a Folia runtime; on anything else
// it is inactive regardless of provider/loader state.
public boolean isActive() {
    return J.isFolia() && (isWorldsProviderActive() || isPaperWorldLoaderActive());
}

View File

@@ -29,6 +29,8 @@ import art.arcane.volmlib.util.scheduling.PrecisionStopwatch;
import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
public class ImageResourceLoader extends ResourceLoader<IrisImage> {
@@ -67,12 +69,28 @@ public class ImageResourceLoader extends ResourceLoader<IrisImage> {
}
}
void getPNGFiles(File directory, Set<String> m) {
for (File file : directory.listFiles()) {
void getPNGFiles(File directory, Set<String> m, HashSet<String> visitedDirectories) {
if (directory == null || !directory.exists()) {
return;
}
if (directory.isDirectory()) {
String canonicalDirectory = toCanonicalPath(directory);
if (canonicalDirectory != null && !visitedDirectories.add(canonicalDirectory)) {
return;
}
}
File[] listedFiles = directory.listFiles();
if (listedFiles == null) {
return;
}
for (File file : listedFiles) {
if (file.isFile() && file.getName().endsWith(".png")) {
m.add(file.getName().replaceAll("\\Q.png\\E", ""));
} else if (file.isDirectory()) {
getPNGFiles(file, m);
getPNGFiles(file, m, visitedDirectories);
}
}
}
@@ -85,10 +103,11 @@ public class ImageResourceLoader extends ResourceLoader<IrisImage> {
Iris.debug("Building " + resourceTypeName + " Possibility Lists");
KSet<String> m = new KSet<>();
HashSet<String> visitedDirectories = new HashSet<>();
for (File i : getFolders()) {
getPNGFiles(i, m);
getPNGFiles(i, m, visitedDirectories);
}
// for (File i : getFolders()) {
@@ -116,6 +135,14 @@ public class ImageResourceLoader extends ResourceLoader<IrisImage> {
return possibleKeys;
}
// Canonical path of the file, or null when resolution fails (deliberate best-effort:
// a null simply disables cycle detection for that entry).
private String toCanonicalPath(File file) {
    String canonical;
    try {
        canonical = file.getCanonicalPath();
    } catch (IOException ignored) {
        canonical = null;
    }
    return canonical;
}
public File findFile(String name) {
for (File i : getFolders(name)) {
for (File j : i.listFiles()) {

View File

@@ -27,6 +27,8 @@ import art.arcane.volmlib.util.data.KCache;
import art.arcane.volmlib.util.scheduling.PrecisionStopwatch;
import java.io.File;
import java.io.IOException;
import java.util.HashSet;
public class MatterObjectResourceLoader extends ResourceLoader<IrisMatterObject> {
private String[] possibleKeys;
@@ -65,12 +67,28 @@ public class MatterObjectResourceLoader extends ResourceLoader<IrisMatterObject>
}
}
private void findMatFiles(File dir, KSet<String> m) {
for (File file : dir.listFiles()) {
private void findMatFiles(File dir, KSet<String> m, HashSet<String> visitedDirectories) {
if (dir == null || !dir.exists()) {
return;
}
if (dir.isDirectory()) {
String canonicalDirectory = toCanonicalPath(dir);
if (canonicalDirectory != null && !visitedDirectories.add(canonicalDirectory)) {
return;
}
}
File[] listedFiles = dir.listFiles();
if (listedFiles == null) {
return;
}
for (File file : listedFiles) {
if (file.isFile() && file.getName().endsWith(".mat")) {
m.add(file.getName().replaceAll("\\Q.mat\\E", ""));
} else if (file.isDirectory()) {
findMatFiles(file, m);
findMatFiles(file, m, visitedDirectories);
}
}
}
@@ -82,9 +100,10 @@ public class MatterObjectResourceLoader extends ResourceLoader<IrisMatterObject>
Iris.debug("Building " + resourceTypeName + " Possibility Lists");
KSet<String> m = new KSet<>();
HashSet<String> visitedDirectories = new HashSet<>();
for (File folder : getFolders()) {
findMatFiles(folder, m);
findMatFiles(folder, m, visitedDirectories);
}
KList<String> v = new KList<>(m);
@@ -92,6 +111,14 @@ public class MatterObjectResourceLoader extends ResourceLoader<IrisMatterObject>
return possibleKeys;
}
// Canonical path of the file, or null when resolution fails (deliberate best-effort:
// a null simply disables cycle detection for that entry).
private String toCanonicalPath(File file) {
    String canonical;
    try {
        canonical = file.getCanonicalPath();
    } catch (IOException ignored) {
        canonical = null;
    }
    return canonical;
}
// public String[] getPossibleKeys() {
// if (possibleKeys != null) {

View File

@@ -27,6 +27,8 @@ import art.arcane.volmlib.util.data.KCache;
import art.arcane.volmlib.util.scheduling.PrecisionStopwatch;
import java.io.File;
import java.io.IOException;
import java.util.HashSet;
public class ObjectResourceLoader extends ResourceLoader<IrisObject> {
public ObjectResourceLoader(File root, IrisData idm, String folderName, String resourceTypeName) {
@@ -75,26 +77,51 @@ public class ObjectResourceLoader extends ResourceLoader<IrisObject> {
}
Iris.debug("Building " + resourceTypeName + " Possibility Lists");
KSet<String> m = new KSet<>();
HashSet<String> visitedDirectories = new HashSet<>();
for (File i : getFolders()) {
m.addAll(getFiles(i, ".iob", true));
m.addAll(getFiles(i, ".iob", true, visitedDirectories));
}
possibleKeys = m.toArray(new String[0]);
return possibleKeys;
}
private KList<String> getFiles(File dir, String ext, boolean skipDirName) {
private KList<String> getFiles(File dir, String ext, boolean skipDirName, HashSet<String> visitedDirectories) {
KList<String> paths = new KList<>();
if (dir == null || !dir.exists()) {
return paths;
}
if (dir.isDirectory()) {
String canonicalDirectory = toCanonicalPath(dir);
if (canonicalDirectory != null && !visitedDirectories.add(canonicalDirectory)) {
return paths;
}
}
File[] listedFiles = dir.listFiles();
if (listedFiles == null) {
return paths;
}
String name = skipDirName ? "" : dir.getName() + "/";
for (File f : dir.listFiles()) {
for (File f : listedFiles) {
if (f.isFile() && f.getName().endsWith(ext)) {
paths.add(name + f.getName().replaceAll("\\Q" + ext + "\\E", ""));
} else if (f.isDirectory()) {
getFiles(f, ext, false).forEach(e -> paths.add(name + e));
getFiles(f, ext, false, visitedDirectories).forEach(e -> paths.add(name + e));
}
}
return paths;
}
// Canonical path of the file, or null when resolution fails (deliberate best-effort:
// a null simply disables cycle detection for that entry).
private String toCanonicalPath(File file) {
    String canonical;
    try {
        canonical = file.getCanonicalPath();
    } catch (IOException ignored) {
        canonical = null;
    }
    return canonical;
}
public File findFile(String name) {
for (File i : getFolders(name)) {
for (File j : i.listFiles()) {

View File

@@ -48,6 +48,10 @@ import java.io.*;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Consumer;
import java.util.function.Predicate;
@@ -61,6 +65,13 @@ import java.util.zip.GZIPOutputStream;
public class ResourceLoader<T extends IrisRegistrant> implements MeteredCache {
public static final AtomicDouble tlt = new AtomicDouble(0);
private static final int CACHE_SIZE = 100000;
private static final ExecutorService schemaBuildExecutor = Executors.newSingleThreadExecutor(runnable -> {
Thread thread = new Thread(runnable, "Iris-Schema-Builder");
thread.setDaemon(true);
thread.setPriority(Thread.MIN_PRIORITY);
return thread;
});
private static final Set<String> schemaBuildQueue = ConcurrentHashMap.newKeySet();
protected final AtomicCache<KList<File>> folderCache;
protected KSet<String> firstAccess;
protected File root;
@@ -102,7 +113,18 @@ public class ResourceLoader<T extends IrisRegistrant> implements MeteredCache {
o.put("fileMatch", new JSONArray(fm.toArray()));
o.put("url", "./.iris/schema/" + getFolderName() + "-schema.json");
File a = new File(getManager().getDataFolder(), ".iris/schema/" + getFolderName() + "-schema.json");
J.attemptAsync(() -> IO.writeAll(a, new SchemaBuilder(objectClass, manager).construct().toString(4)));
String schemaPath = a.getAbsolutePath();
if (!a.exists() && schemaBuildQueue.add(schemaPath)) {
schemaBuildExecutor.execute(() -> {
try {
IO.writeAll(a, new SchemaBuilder(objectClass, manager).construct().toString(4));
} catch (Throwable e) {
Iris.reportError(e);
} finally {
schemaBuildQueue.remove(schemaPath);
}
});
}
return o;
}
@@ -149,20 +171,44 @@ public class ResourceLoader<T extends IrisRegistrant> implements MeteredCache {
}
/**
 * Returns every file under {@code root} accepted by {@code f}, with canonical-path
 * tracking so symlink cycles cannot recurse forever.
 * (The stale pre-change body lines left over from the diff are removed.)
 */
private KList<File> matchAllFiles(File root, Predicate<File> f) {
    KList<File> files = new KList<>();
    HashSet<String> visitedDirectories = new HashSet<>();
    matchFiles(root, files, f, visitedDirectories);
    return files;
}
private void matchFiles(File at, KList<File> files, Predicate<File> f) {
private void matchFiles(File at, KList<File> files, Predicate<File> f, HashSet<String> visitedDirectories) {
if (at == null || !at.exists()) {
return;
}
if (at.isDirectory()) {
for (File i : at.listFiles()) {
matchFiles(i, files, f);
String canonicalPath = toCanonicalPath(at);
if (canonicalPath != null && !visitedDirectories.add(canonicalPath)) {
return;
}
} else {
if (f.test(at)) {
files.add(at);
File[] listedFiles = at.listFiles();
if (listedFiles == null) {
return;
}
for (File listedFile : listedFiles) {
matchFiles(listedFile, files, f, visitedDirectories);
}
return;
}
if (f.test(at)) {
files.add(at);
}
}
// Canonical path of the file, or null when resolution fails (deliberate best-effort:
// a null simply disables cycle detection for that entry).
private String toCanonicalPath(File file) {
    String canonical;
    try {
        canonical = file.getCanonicalPath();
    } catch (IOException ignored) {
        canonical = null;
    }
    return canonical;
}

View File

@@ -24,6 +24,7 @@ import art.arcane.iris.core.nms.container.BiomeColor;
import art.arcane.iris.core.nms.container.BlockProperty;
import art.arcane.iris.core.nms.container.StructurePlacement;
import art.arcane.iris.core.nms.datapack.DataVersion;
import art.arcane.iris.util.common.scheduling.J;
import art.arcane.iris.engine.framework.Engine;
import art.arcane.iris.engine.platform.PlatformChunkGenerator;
import art.arcane.volmlib.util.collection.KList;
@@ -108,11 +109,13 @@ public interface INMSBinding {
return CompletableFuture.failedFuture(new IllegalStateException("Missing dimension types to create world"));
}
FoliaWorldsLink link = FoliaWorldsLink.get();
if (link.isActive()) {
CompletableFuture<World> future = link.createWorld(c);
if (future != null) {
return future;
if (J.isFolia()) {
FoliaWorldsLink link = FoliaWorldsLink.get();
if (link.isActive()) {
CompletableFuture<World> future = link.createWorld(c);
if (future != null) {
return future;
}
}
}
return CompletableFuture.completedFuture(createWorld(c));

View File

@@ -19,6 +19,7 @@
package art.arcane.iris.core.pregenerator;
import art.arcane.iris.Iris;
import art.arcane.iris.core.IrisSettings;
import art.arcane.iris.core.tools.IrisPackBenchmarking;
import art.arcane.volmlib.util.collection.KList;
import art.arcane.volmlib.util.collection.KSet;
@@ -66,13 +67,14 @@ public class IrisPregenerator {
private final KSet<Position2> retry;
private final KSet<Position2> net;
private final ChronoLatch cl;
private final ChronoLatch saveLatch = new ChronoLatch(30000);
private final ChronoLatch saveLatch;
private final IrisPackBenchmarking benchmarking;
public IrisPregenerator(PregenTask task, PregeneratorMethod generator, PregenListener listener) {
benchmarking = IrisPackBenchmarking.getInstance();
this.listener = listenify(listener);
cl = new ChronoLatch(5000);
saveLatch = new ChronoLatch(IrisSettings.get().getPregen().getSaveIntervalMs());
generatedRegions = new KSet<>();
this.shutdown = new AtomicBoolean(false);
this.paused = new AtomicBoolean(false);

View File

@@ -19,10 +19,13 @@
package art.arcane.iris.core.pregenerator.methods;
import art.arcane.iris.Iris;
import art.arcane.iris.core.IrisPaperLikeBackendMode;
import art.arcane.iris.core.IrisRuntimeSchedulerMode;
import art.arcane.iris.core.IrisSettings;
import art.arcane.iris.core.pregenerator.PregenListener;
import art.arcane.iris.core.pregenerator.PregeneratorMethod;
import art.arcane.iris.core.tools.IrisToolbelt;
import art.arcane.iris.engine.framework.Engine;
import art.arcane.volmlib.util.mantle.runtime.Mantle;
import art.arcane.volmlib.util.matter.Matter;
import art.arcane.volmlib.util.math.M;
@@ -33,6 +36,7 @@ import org.bukkit.Chunk;
import org.bukkit.World;
import java.lang.reflect.InvocationTargetException;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
@@ -44,11 +48,13 @@ import java.util.concurrent.atomic.AtomicLong;
public class AsyncPregenMethod implements PregeneratorMethod {
private static final AtomicInteger THREAD_COUNT = new AtomicInteger();
private static final int FOLIA_MAX_CONCURRENCY = 32;
private static final int NON_FOLIA_MAX_CONCURRENCY = 96;
private static final int NON_FOLIA_CONCURRENCY_FACTOR = 2;
private static final int ADAPTIVE_TIMEOUT_STEP = 3;
private final World world;
private final IrisRuntimeSchedulerMode runtimeSchedulerMode;
private final IrisPaperLikeBackendMode paperLikeBackendMode;
private final boolean foliaRuntime;
private final String backendMode;
private final int workerPoolThreads;
private final Executor executor;
private final Semaphore semaphore;
private final int threads;
@@ -68,6 +74,8 @@ public class AsyncPregenMethod implements PregeneratorMethod {
private final AtomicLong failed = new AtomicLong();
private final AtomicLong lastProgressAt = new AtomicLong(M.ms());
private final AtomicLong lastPermitWaitLog = new AtomicLong(0L);
private final Object permitMonitor = new Object();
private volatile Engine metricsEngine;
public AsyncPregenMethod(World world, int unusedThreads) {
if (!PaperLib.isPaper()) {
@@ -75,20 +83,31 @@ public class AsyncPregenMethod implements PregeneratorMethod {
}
this.world = world;
if (J.isFolia()) {
IrisSettings.IrisSettingsPregen pregen = IrisSettings.get().getPregen();
this.runtimeSchedulerMode = IrisRuntimeSchedulerMode.resolve(pregen);
this.foliaRuntime = runtimeSchedulerMode == IrisRuntimeSchedulerMode.FOLIA;
if (foliaRuntime) {
this.paperLikeBackendMode = IrisPaperLikeBackendMode.AUTO;
this.backendMode = "folia-region";
this.executor = new FoliaRegionExecutor();
} else {
boolean useTicketQueue = IrisSettings.get().getPregen().isUseTicketQueue();
this.executor = useTicketQueue ? new TicketExecutor() : new ServiceExecutor();
this.paperLikeBackendMode = resolvePaperLikeBackendMode(pregen);
if (paperLikeBackendMode == IrisPaperLikeBackendMode.SERVICE) {
this.executor = new ServiceExecutor();
this.backendMode = "paper-service";
} else {
this.executor = new TicketExecutor();
this.backendMode = "paper-ticket";
}
}
IrisSettings.IrisSettingsPregen pregen = IrisSettings.get().getPregen();
int configuredThreads = pregen.getMaxConcurrency();
if (J.isFolia()) {
configuredThreads = Math.min(configuredThreads, FOLIA_MAX_CONCURRENCY);
if (foliaRuntime) {
configuredThreads = Math.min(configuredThreads, pregen.getFoliaMaxConcurrency());
} else {
configuredThreads = Math.min(configuredThreads, resolveNonFoliaConcurrencyCap());
configuredThreads = Math.min(configuredThreads, resolvePaperLikeConcurrencyCap(pregen.getPaperLikeMaxConcurrency()));
}
this.threads = Math.max(1, configuredThreads);
this.workerPoolThreads = resolveWorkerPoolThreads();
this.semaphore = new Semaphore(this.threads, true);
this.timeoutSeconds = pregen.getChunkLoadTimeoutSeconds();
this.timeoutWarnIntervalMs = pregen.getTimeoutWarnIntervalMs();
@@ -98,8 +117,32 @@ public class AsyncPregenMethod implements PregeneratorMethod {
this.adaptiveMinInFlightLimit = Math.max(4, Math.min(16, Math.max(1, this.threads / 4)));
}
// Resolves the paper-like executor backend: an explicit TICKET/SERVICE setting wins;
// AUTO maps to SERVICE when virtual threads are enabled, otherwise TICKET.
private IrisPaperLikeBackendMode resolvePaperLikeBackendMode(IrisSettings.IrisSettingsPregen pregen) {
    IrisPaperLikeBackendMode mode = pregen.getPaperLikeBackendMode();
    if (mode == IrisPaperLikeBackendMode.AUTO) {
        mode = pregen.isUseVirtualThreads() ? IrisPaperLikeBackendMode.SERVICE : IrisPaperLikeBackendMode.TICKET;
    }
    return mode;
}
// Best-effort reflective probe of the server's shared Moonrise chunk-system worker
// pool size, used only for diagnostics logging in init(). Returns the pool's core
// thread count, or -1 when the class/field/method is absent (non-Moonrise server)
// or reflection fails. NOTE(review): depends on internal Moonrise API shape
// ("WORKER_POOL" static field, "getCoreThreads" returning Thread[]) — verify per
// server version; any mismatch is silently swallowed below.
private int resolveWorkerPoolThreads() {
    try {
        Class<?> moonriseCommonClass = Class.forName("ca.spottedleaf.moonrise.common.util.MoonriseCommon");
        java.lang.reflect.Field workerPoolField = moonriseCommonClass.getDeclaredField("WORKER_POOL");
        Object workerPool = workerPoolField.get(null);
        Object coreThreads = workerPool.getClass().getDeclaredMethod("getCoreThreads").invoke(workerPool);
        if (coreThreads instanceof Thread[] threadsArray) {
            return threadsArray.length;
        }
    } catch (Throwable ignored) {
        // Deliberately ignored: the pool size is simply unknown on this runtime.
    }
    return -1;
}
private void unloadAndSaveAllChunks() {
if (J.isFolia()) {
if (foliaRuntime) {
// Folia requires world/chunk mutations to be region-owned; periodic global unload/save is unsafe.
lastUse.clear();
return;
@@ -190,6 +233,7 @@ public class AsyncPregenMethod implements PregeneratorMethod {
int next = Math.max(adaptiveMinInFlightLimit, current - 1);
if (adaptiveInFlightLimit.compareAndSet(current, next)) {
logAdaptiveLimit("decrease", next);
notifyPermitWaiters();
return;
}
}
@@ -205,6 +249,7 @@ public class AsyncPregenMethod implements PregeneratorMethod {
int next = Math.min(threads, current + 1);
if (adaptiveInFlightLimit.compareAndSet(current, next)) {
logAdaptiveLimit("increase", next);
notifyPermitWaiters();
return;
}
}
@@ -222,11 +267,8 @@ public class AsyncPregenMethod implements PregeneratorMethod {
}
}
private int resolveNonFoliaConcurrencyCap() {
int worldGenThreads = Math.max(1, IrisSettings.get().getConcurrency().getWorldGenThreads());
int recommended = worldGenThreads * NON_FOLIA_CONCURRENCY_FACTOR;
int bounded = Math.max(8, Math.min(NON_FOLIA_MAX_CONCURRENCY, recommended));
return bounded;
private int resolvePaperLikeConcurrencyCap(int configuredCap) {
return Math.max(8, configuredCap);
}
private String metricsSnapshot() {
@@ -259,6 +301,48 @@ public class AsyncPregenMethod implements PregeneratorMethod {
if (after < 0) {
inFlight.compareAndSet(after, 0);
}
notifyPermitWaiters();
}
// Wakes every producer blocked on permitMonitor (the adaptive in-flight wait loop)
// after the in-flight count drops or the adaptive limit changes, so waiters
// re-evaluate instead of sleeping out their full timeout.
private void notifyPermitWaiters() {
    synchronized (permitMonitor) {
        permitMonitor.notifyAll();
    }
}
// Publishes how long a producer stalled behind the adaptive in-flight limit.
// Dropped silently when no metrics engine is resolvable for this world.
private void recordAdaptiveWait(long waitedMs) {
    Engine engine = resolveMetricsEngine();
    if (engine == null) {
        return;
    }
    engine.getMetrics().getPregenWaitAdaptive().put(waitedMs);
}
// Publishes how long a producer waited to acquire a semaphore permit.
// Dropped silently when no metrics engine is resolvable for this world.
private void recordPermitWait(long waitedMs) {
    Engine engine = resolveMetricsEngine();
    if (engine == null) {
        return;
    }
    engine.getMetrics().getPregenWaitPermit().put(waitedMs);
}
/**
 * Lazily resolves and caches the Engine used for pregen wait metrics.
 * Returns null when the world is not an Iris world or access fails
 * (e.g. during world startup/shutdown).
 */
private Engine resolveMetricsEngine() {
    // Snapshot the volatile once so the null-check and return see the same value.
    Engine snapshot = metricsEngine;
    if (snapshot != null) {
        return snapshot;
    }
    if (!IrisToolbelt.isIrisWorld(world)) {
        return null;
    }
    try {
        Engine resolved = IrisToolbelt.access(world).getEngine();
        if (resolved != null) {
            // Benign race: concurrent callers may each resolve once; same engine wins.
            metricsEngine = resolved;
        }
        return resolved;
    } catch (Throwable ignored) {
        // Deliberate best-effort: metrics are optional, never fail the pregen path.
        return null;
    }
}
private void logPermitWaitIfNeeded(int x, int z, long waitedMs) {
@@ -276,9 +360,11 @@ public class AsyncPregenMethod implements PregeneratorMethod {
@Override
public void init() {
Iris.info("Async pregen init: world=" + world.getName()
+ ", mode=" + (J.isFolia() ? "folia" : "paper")
+ ", mode=" + runtimeSchedulerMode.name().toLowerCase(Locale.ROOT)
+ ", backend=" + backendMode
+ ", threads=" + threads
+ ", adaptiveLimit=" + adaptiveInFlightLimit.get()
+ ", workerPoolThreads=" + workerPoolThreads
+ ", urgent=" + urgent
+ ", timeout=" + timeoutSeconds + "s");
unloadAndSaveAllChunks();
@@ -318,17 +404,26 @@ public class AsyncPregenMethod implements PregeneratorMethod {
listener.onChunkGenerating(x, z);
try {
long waitStart = M.ms();
while (inFlight.get() >= adaptiveInFlightLimit.get()) {
long waited = Math.max(0L, M.ms() - waitStart);
logPermitWaitIfNeeded(x, z, waited);
if (!J.sleep(5)) {
return;
synchronized (permitMonitor) {
while (inFlight.get() >= adaptiveInFlightLimit.get()) {
long waited = Math.max(0L, M.ms() - waitStart);
logPermitWaitIfNeeded(x, z, waited);
permitMonitor.wait(5000L);
}
}
long adaptiveWait = Math.max(0L, M.ms() - waitStart);
if (adaptiveWait > 0L) {
recordAdaptiveWait(adaptiveWait);
}
long permitWaitStart = M.ms();
while (!semaphore.tryAcquire(5, TimeUnit.SECONDS)) {
logPermitWaitIfNeeded(x, z, Math.max(0L, M.ms() - waitStart));
}
long permitWait = Math.max(0L, M.ms() - permitWaitStart);
if (permitWait > 0L) {
recordPermitWait(permitWait);
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return;

View File

@@ -33,6 +33,7 @@ import art.arcane.volmlib.util.collection.KList;
import art.arcane.volmlib.util.collection.KMap;
import art.arcane.volmlib.util.collection.KSet;
import art.arcane.volmlib.util.exceptions.IrisException;
import art.arcane.iris.util.common.format.C;
import art.arcane.volmlib.util.format.Form;
import art.arcane.volmlib.util.io.IO;
import art.arcane.volmlib.util.json.JSONArray;
@@ -61,6 +62,8 @@ import java.util.Objects;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;
@SuppressWarnings("ALL")
@@ -165,6 +168,11 @@ public class IrisProject {
J.attemptAsync(() ->
{
try {
if (d == null) {
sender.sendMessage("Could not load dimension \"" + getName() + "\"");
return;
}
if (d.getLoader() == null) {
sender.sendMessage("Could not get dimension loader");
return;
@@ -176,12 +184,11 @@ public class IrisProject {
Iris.warn("Project missing code-workspace: " + ff.getAbsolutePath() + " Re-creating code workspace.");
try {
IO.writeAll(ff, createCodeWorkspaceConfig());
IO.writeAll(ff, createCodeWorkspaceConfig(false));
} catch (IOException e1) {
Iris.reportError(e1);
e1.printStackTrace();
}
updateWorkspace();
if (!doOpenVSCode(f)) {
Iris.warn("Tried creating code workspace but failed a second time. Your project is likely corrupt.");
}
@@ -198,16 +205,20 @@ public class IrisProject {
for (File i : Objects.requireNonNull(f.listFiles())) {
if (i.getName().endsWith(".code-workspace")) {
foundWork = true;
J.a(() ->
{
updateWorkspace();
});
if (IrisSettings.get().getStudio().isOpenVSCode()) {
if (!GraphicsEnvironment.isHeadless()) {
Iris.msg("Opening VSCode. You may see the output from VSCode.");
Iris.msg("VSCode output always starts with: '(node:#####) electron'");
Desktop.getDesktop().open(i);
Thread launcherThread = new Thread(() -> {
try {
Desktop.getDesktop().open(i);
} catch (Throwable e) {
Iris.reportError(e);
}
}, "Iris-VSCode-Launcher");
launcherThread.setDaemon(true);
launcherThread.start();
}
}
@@ -222,30 +233,121 @@ public class IrisProject {
close();
}
J.a(() -> {
IrisDimension d = IrisData.loadAnyDimension(getName(), null);
if (d == null) {
sender.sendMessage("Can't find dimension: " + getName());
return;
} else if (sender.isPlayer()) {
J.runEntity(sender.player(), () -> sender.player().setGameMode(GameMode.SPECTATOR));
}
AtomicReference<String> stage = new AtomicReference<>("Queued");
AtomicReference<Double> progress = new AtomicReference<>(0.01D);
AtomicBoolean complete = new AtomicBoolean(false);
AtomicBoolean failed = new AtomicBoolean(false);
startStudioOpenReporter(sender, stage, progress, complete, failed);
J.a(() -> {
World maintenanceWorld = null;
boolean maintenanceActive = false;
try {
stage.set("Loading dimension");
progress.set(0.05D);
IrisDimension d = IrisData.loadAnyDimension(getName(), null);
if (d == null) {
failed.set(true);
sender.sendMessage(C.RED + "Can't find dimension: " + getName());
return;
} else if (sender.isPlayer()) {
J.runEntity(sender.player(), () -> sender.player().setGameMode(GameMode.SPECTATOR));
}
stage.set("Creating world");
progress.set(0.12D);
activeProvider = (PlatformChunkGenerator) IrisToolbelt.createWorld()
.seed(seed)
.sender(sender)
.studio(true)
.name("iris-" + UUID.randomUUID())
.dimension(d.getLoadKey())
.studioProgressConsumer((value, currentStage) -> {
if (currentStage != null && !currentStage.isBlank()) {
stage.set(currentStage);
}
progress.set(Math.max(0D, Math.min(0.99D, value)));
})
.create().getGenerator();
onDone.accept(activeProvider.getTarget().getWorld().realWorld());
if (activeProvider != null) {
maintenanceWorld = activeProvider.getTarget().getWorld().realWorld();
if (maintenanceWorld != null) {
IrisToolbelt.beginWorldMaintenance(maintenanceWorld, "studio-open");
maintenanceActive = true;
}
onDone.accept(maintenanceWorld);
}
} catch (IrisException e) {
e.printStackTrace();
failed.set(true);
Iris.reportError(e);
sender.sendMessage(C.RED + "Failed to open studio world: " + e.getMessage());
} catch (Throwable e) {
failed.set(true);
Iris.reportError(e);
sender.sendMessage(C.RED + "Studio open failed: " + e.getMessage());
} finally {
if (activeProvider != null) {
stage.set("Opening workspace");
progress.set(Math.max(progress.get(), 0.95D));
openVSCode(sender);
}
if (maintenanceActive && maintenanceWorld != null) {
World worldToRelease = maintenanceWorld;
J.a(() -> {
J.sleep(15000);
IrisToolbelt.endWorldMaintenance(worldToRelease, "studio-open");
});
maintenanceActive = false;
}
if (maintenanceActive && maintenanceWorld != null) {
IrisToolbelt.endWorldMaintenance(maintenanceWorld, "studio-open");
}
complete.set(true);
}
});
}
private void startStudioOpenReporter(VolmitSender sender, AtomicReference<String> stage, AtomicReference<Double> progress, AtomicBoolean complete, AtomicBoolean failed) {
J.a(() -> {
String[] spinner = {"|", "/", "-", "\\"};
int spinIndex = 0;
long nextConsoleUpdate = 0L;
while (!complete.get()) {
double currentProgress = Math.max(0D, Math.min(0.97D, progress.get()));
String currentStage = stage.get();
String currentSpinner = spinner[spinIndex % spinner.length];
if (sender.isPlayer()) {
sender.sendProgress(currentProgress, "Studio " + currentSpinner + " " + currentStage);
} else {
long now = System.currentTimeMillis();
if (now >= nextConsoleUpdate) {
sender.sendMessage(C.WHITE + "Studio " + Form.pc(currentProgress, 0) + C.GRAY + " - " + currentStage);
nextConsoleUpdate = now + 1500L;
}
}
spinIndex++;
J.sleep(120);
}
if (activeProvider != null) {
openVSCode(sender);
if (failed.get()) {
if (sender.isPlayer()) {
sender.sendProgress(1D, "Studio open failed");
} else {
sender.sendMessage(C.RED + "Studio open failed.");
}
return;
}
if (sender.isPlayer()) {
sender.sendProgress(1D, "Studio ready");
} else {
sender.sendMessage(C.GREEN + "Studio ready.");
}
});
}
@@ -361,6 +463,10 @@ public class IrisProject {
}
// Public entry point kept for compatibility: builds the full VSCode workspace
// configuration including JSON schema generation (includeSchemas = true).
public JSONObject createCodeWorkspaceConfig() {
    return createCodeWorkspaceConfig(true);
}
private JSONObject createCodeWorkspaceConfig(boolean includeSchemas) {
JSONObject ws = new JSONObject();
JSONArray folders = new JSONArray();
JSONObject folder = new JSONObject();
@@ -391,43 +497,50 @@ public class IrisProject {
settings.put("[json]", jc);
settings.put("json.maxItemsComputed", 30000);
JSONArray schemas = new JSONArray();
IrisData dm = IrisData.get(getPath());
for (ResourceLoader<?> r : dm.getLoaders().v()) {
if (r.supportsSchemas()) {
schemas.put(r.buildSchema());
}
}
for (Class<?> i : dm.resolveSnippets()) {
try {
String snipType = i.getDeclaredAnnotation(Snippet.class).value();
JSONObject o = new JSONObject();
KList<String> fm = new KList<>();
for (int g = 1; g < 8; g++) {
fm.add("/snippet/" + snipType + Form.repeat("/*", g) + ".json");
IrisData dm = null;
if (includeSchemas) {
dm = IrisData.get(getPath());
for (ResourceLoader<?> r : dm.getLoaders().v()) {
if (r.supportsSchemas()) {
schemas.put(r.buildSchema());
}
}
o.put("fileMatch", new JSONArray(fm.toArray()));
o.put("url", "./.iris/schema/snippet/" + snipType + "-schema.json");
schemas.put(o);
File a = new File(dm.getDataFolder(), ".iris/schema/snippet/" + snipType + "-schema.json");
J.attemptAsync(() -> {
try {
IO.writeAll(a, new SchemaBuilder(i, dm).construct().toString(4));
} catch (Throwable e) {
e.printStackTrace();
for (Class<?> i : dm.resolveSnippets()) {
try {
String snipType = i.getDeclaredAnnotation(Snippet.class).value();
JSONObject o = new JSONObject();
KList<String> fm = new KList<>();
for (int g = 1; g < 8; g++) {
fm.add("/snippet/" + snipType + Form.repeat("/*", g) + ".json");
}
});
} catch (Throwable e) {
e.printStackTrace();
o.put("fileMatch", new JSONArray(fm.toArray()));
o.put("url", "./.iris/schema/snippet/" + snipType + "-schema.json");
schemas.put(o);
IrisData snippetData = dm;
File a = new File(snippetData.getDataFolder(), ".iris/schema/snippet/" + snipType + "-schema.json");
J.attemptAsync(() -> {
try {
IO.writeAll(a, new SchemaBuilder(i, snippetData).construct().toString(4));
} catch (Throwable e) {
e.printStackTrace();
}
});
} catch (Throwable e) {
e.printStackTrace();
}
}
}
settings.put("json.schemas", schemas);
ws.put("settings", settings);
if (!includeSchemas) {
return ws;
}
File schemasFile = new File(path, ".idea" + File.separator + "jsonSchemas.xml");
Document doc = IO.read(schemasFile);
Element mappings = (Element) doc.selectSingleNode("//component[@name='JsonSchemaMappingsProjectConfiguration']");

View File

@@ -20,6 +20,7 @@ package art.arcane.iris.core.tools;
import com.google.common.util.concurrent.AtomicDouble;
import art.arcane.iris.Iris;
import art.arcane.iris.core.IrisRuntimeSchedulerMode;
import art.arcane.iris.core.IrisWorlds;
import art.arcane.iris.core.IrisSettings;
import art.arcane.iris.core.ServerConfigurator;
@@ -42,6 +43,7 @@ import art.arcane.volmlib.util.io.IO;
import art.arcane.iris.util.common.plugin.VolmitSender;
import art.arcane.iris.util.common.scheduling.J;
import art.arcane.volmlib.util.scheduling.O;
import art.arcane.volmlib.util.scheduling.FoliaScheduler;
import io.papermc.lib.PaperLib;
import lombok.Data;
import lombok.experimental.Accessors;
@@ -60,6 +62,7 @@ import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.function.IntSupplier;
@@ -102,6 +105,7 @@ public class IrisCreator {
* Benchmark mode
*/
private boolean benchmark = false;
private BiConsumer<Double, String> studioProgressConsumer;
public static boolean removeFromBukkitYml(String name) throws IOException {
YamlConfiguration yml = YamlConfiguration.loadConfiguration(BUKKIT_YML);
@@ -132,6 +136,8 @@ public class IrisCreator {
throw new IrisException("You cannot invoke create() on the main thread.");
}
reportStudioProgress(0.02D, "Preparing studio open");
if (studio()) {
World existing = Bukkit.getWorld(name());
if (existing == null) {
@@ -141,6 +147,7 @@ public class IrisCreator {
}
}
reportStudioProgress(0.08D, "Resolving dimension");
IrisDimension d = IrisToolbelt.getDimension(dimension());
if (d == null) {
@@ -150,11 +157,18 @@ public class IrisCreator {
if (sender == null)
sender = Iris.getSender();
reportStudioProgress(0.16D, "Preparing world pack");
if (!studio() || benchmark) {
Iris.service(StudioSVC.class).installIntoWorld(sender, d.getLoadKey(), new File(Bukkit.getWorldContainer(), name()));
}
if (studio()) {
IrisRuntimeSchedulerMode runtimeSchedulerMode = IrisRuntimeSchedulerMode.resolve(IrisSettings.get().getPregen());
Iris.info("Studio create scheduling: mode=" + runtimeSchedulerMode.name().toLowerCase(Locale.ROOT)
+ ", regionizedRuntime=" + FoliaScheduler.isRegionizedRuntime(Bukkit.getServer()));
}
prebakeNoisemapsBeforeWorldCreate(d);
reportStudioProgress(0.28D, "Installing datapacks");
AtomicDouble pp = new AtomicDouble(0);
O<Boolean> done = new O<>();
done.set(false);
@@ -180,6 +194,7 @@ public class IrisCreator {
if (ServerConfigurator.installDataPacks(verifyDataPacks, includeExternalDataPacks, extraWorldDatapackFoldersByPack)) {
throw new IrisException("Datapacks were missing!");
}
reportStudioProgress(0.40D, "Datapacks ready");
PlatformChunkGenerator access = (PlatformChunkGenerator) wc.generator();
if (access == null) throw new IrisException("Access is null. Something bad happened.");
@@ -195,7 +210,10 @@ public class IrisCreator {
int req = access.getSpawnChunks().join();
for (int c = 0; c < req && !done.get(); c = g.getAsInt()) {
double v = (double) c / req;
if (sender.isPlayer()) {
if (studioProgressConsumer != null) {
reportStudioProgress(0.40D + (0.42D * v), "Generating spawn");
J.sleep(16);
} else if (sender.isPlayer()) {
sender.sendProgress(v, "Generating");
J.sleep(16);
} else {
@@ -208,6 +226,7 @@ public class IrisCreator {
World world;
reportStudioProgress(0.46D, "Creating world");
try {
world = J.sfut(() -> INMS.get().createWorldAsync(wc))
.thenCompose(Function.identity())
@@ -224,6 +243,7 @@ public class IrisCreator {
}
done.set(true);
reportStudioProgress(0.86D, "World created");
if (sender.isPlayer() && !benchmark) {
Player senderPlayer = sender.player();
@@ -267,6 +287,7 @@ public class IrisCreator {
addToBukkitYml();
J.s(() -> Iris.linkMultiverseCore.updateWorld(world, dimension));
}
reportStudioProgress(0.93D, "Applying world settings");
if (pregen != null) {
CompletableFuture<Boolean> ff = new CompletableFuture<>();
@@ -296,9 +317,24 @@ public class IrisCreator {
e.printStackTrace();
}
}
reportStudioProgress(0.98D, "Finalizing");
return world;
}
/**
 * Forwards a studio-creation progress update to the configured consumer, if any.
 * The progress value is clamped into [0, 1] before delivery; any throwable raised
 * by the consumer is reported rather than propagated, so progress reporting can
 * never abort world creation.
 *
 * @param progress raw progress fraction (clamped to [0, 1])
 * @param stage    human-readable description of the current stage
 */
private void reportStudioProgress(double progress, String stage) {
    final BiConsumer<Double, String> sink = studioProgressConsumer;
    if (sink != null) {
        double bounded = Math.min(1D, Math.max(0D, progress));
        try {
            sink.accept(bounded, stage);
        } catch (Throwable t) {
            Iris.reportError(t);
        }
    }
}
private void prebakeNoisemapsBeforeWorldCreate(IrisDimension dimension) {
IrisSettings.IrisSettingsPregen pregenSettings = IrisSettings.get().getPregen();
if (!pregenSettings.isStartupNoisemapPrebake()) {

View File

@@ -19,6 +19,7 @@
package art.arcane.iris.core.tools;
import art.arcane.iris.Iris;
import art.arcane.iris.core.IrisRuntimeSchedulerMode;
import art.arcane.iris.core.IrisSettings;
import art.arcane.iris.core.gui.PregeneratorJob;
import art.arcane.iris.core.loader.IrisData;
@@ -233,7 +234,11 @@ public class IrisToolbelt {
*/
public static PregeneratorJob pregenerate(PregenTask task, PregeneratorMethod method, Engine engine, boolean cached) {
applyPregenPerformanceProfile(engine);
boolean useCachedWrapper = cached && engine != null && !J.isFolia();
boolean useCachedWrapper = false;
if (cached && engine != null) {
IrisRuntimeSchedulerMode runtimeSchedulerMode = IrisRuntimeSchedulerMode.resolve(IrisSettings.get().getPregen());
useCachedWrapper = runtimeSchedulerMode != IrisRuntimeSchedulerMode.FOLIA;
}
return new PregeneratorJob(task, useCachedWrapper ? new CachedPregenMethod(method, engine.getWorld().name()) : method, engine);
}

View File

@@ -19,6 +19,7 @@
package art.arcane.iris.engine;
import art.arcane.iris.Iris;
import art.arcane.iris.core.IrisHotPathMetricsMode;
import art.arcane.iris.core.IrisSettings;
import art.arcane.iris.core.loader.IrisData;
import art.arcane.iris.engine.data.cache.Cache;
@@ -29,6 +30,7 @@ import art.arcane.iris.util.project.context.IrisContext;
import art.arcane.iris.util.common.data.DataProvider;
import art.arcane.volmlib.util.math.M;
import art.arcane.volmlib.util.math.RNG;
import art.arcane.iris.util.project.interpolation.IrisInterpolation.NoiseBounds;
import art.arcane.iris.util.project.noise.CNG;
import art.arcane.iris.util.project.stream.ProceduralStream;
import art.arcane.iris.util.project.stream.interpolation.Interpolated;
@@ -47,6 +49,9 @@ import java.util.*;
@ToString(exclude = "data")
public class IrisComplex implements DataProvider {
private static final BlockData AIR = Material.AIR.createBlockData();
private static final NoiseBounds ZERO_NOISE_BOUNDS = new NoiseBounds(0D, 0D);
private static final int HOT_PATH_METRICS_FLUSH_SIZE = 64;
private static final ThreadLocal<HotPathMetricsState> HOT_PATH_METRICS = ThreadLocal.withInitial(HotPathMetricsState::new);
private RNG rng;
private double fluidHeight;
private IrisData data;
@@ -84,6 +89,7 @@ public class IrisComplex implements DataProvider {
private IrisRegion focusRegion;
private Map<IrisInterpolator, IdentityHashMap<IrisBiome, GeneratorBounds>> generatorBounds;
private Set<IrisBiome> generatorBiomes;
private final Map<IrisBiome, ChildSelectionPlan> childSelectionPlans = Collections.synchronizedMap(new IdentityHashMap<>());
public IrisComplex(Engine engine) {
this(engine, false);
@@ -318,10 +324,15 @@ public class IrisComplex implements DataProvider {
return 0;
}
IrisSettings.IrisSettingsPregen pregen = IrisSettings.get().getPregen();
IrisHotPathMetricsMode metricsMode = pregen.getHotPathMetricsMode();
HotPathMetricsState metricsState = metricsMode == IrisHotPathMetricsMode.DISABLED ? null : HOT_PATH_METRICS.get();
boolean sampleMetrics = metricsState != null && metricsState.shouldSample(metricsMode, pregen.getHotPathMetricsSampleStride());
long interpolateStartNanos = sampleMetrics ? System.nanoTime() : 0L;
CoordinateBiomeCache sampleCache = new CoordinateBiomeCache(64);
IdentityHashMap<IrisBiome, GeneratorBounds> cachedBounds = generatorBounds.get(interpolator);
IdentityHashMap<IrisBiome, GeneratorBounds> localBounds = new IdentityHashMap<>(8);
double hi = interpolator.interpolate(x, z, (xx, zz) -> {
NoiseBounds sampledBounds = interpolator.interpolateBounds(x, z, (xx, zz) -> {
try {
IrisBiome bx = sampleCache.get(xx, zz);
if (bx == null) {
@@ -329,57 +340,32 @@ public class IrisComplex implements DataProvider {
sampleCache.put(xx, zz, bx);
}
GeneratorBounds bounds = cachedBounds == null ? null : cachedBounds.get(bx);
if (bounds == null) {
bounds = localBounds.get(bx);
if (bounds == null) {
bounds = computeGeneratorBounds(engine, generators, bx);
localBounds.put(bx, bounds);
}
}
return bounds.max;
GeneratorBounds bounds = resolveGeneratorBounds(engine, generators, bx, cachedBounds, localBounds);
return bounds.noiseBounds;
} catch (Throwable e) {
Iris.reportError(e);
e.printStackTrace();
Iris.error("Failed to sample hi biome at " + xx + " " + zz + "...");
Iris.error("Failed to sample interpolated biome bounds at " + xx + " " + zz + "...");
}
return 0;
return ZERO_NOISE_BOUNDS;
});
if (sampleMetrics) {
metricsState.recordInterpolate(engine, System.nanoTime() - interpolateStartNanos);
}
double lo = interpolator.interpolate(x, z, (xx, zz) -> {
try {
IrisBiome bx = sampleCache.get(xx, zz);
if (bx == null) {
bx = baseBiomeStream.get(xx, zz);
sampleCache.put(xx, zz, bx);
}
GeneratorBounds bounds = cachedBounds == null ? null : cachedBounds.get(bx);
if (bounds == null) {
bounds = localBounds.get(bx);
if (bounds == null) {
bounds = computeGeneratorBounds(engine, generators, bx);
localBounds.put(bx, bounds);
}
}
return bounds.min;
} catch (Throwable e) {
Iris.reportError(e);
e.printStackTrace();
Iris.error("Failed to sample lo biome at " + xx + " " + zz + "...");
}
return 0;
});
double hi = sampledBounds.max();
double lo = sampledBounds.min();
long generatorStartNanos = sampleMetrics ? System.nanoTime() : 0L;
double d = 0;
for (IrisGenerator i : generators) {
d += M.lerp(lo, hi, i.getHeight(x, z, seed + 239945));
}
if (sampleMetrics) {
metricsState.recordGenerator(engine, System.nanoTime() - generatorStartNanos);
}
return d / generators.size();
}
@@ -443,6 +429,28 @@ public class IrisComplex implements DataProvider {
return new GeneratorBounds(min, max);
}
/**
 * Resolves the generator height bounds for a biome, consulting the shared
 * per-interpolator cache first, then the call-local cache, and only computing
 * (and memoizing locally) on a miss.
 *
 * @param engine      engine used when bounds must be computed
 * @param generators  generators contributing to the bounds
 * @param biome       biome whose bounds are requested
 * @param cachedBounds shared cache for the active interpolator; may be null
 * @param localBounds  per-call scratch cache, populated on misses
 * @return the resolved bounds (never null)
 */
private GeneratorBounds resolveGeneratorBounds(
    Engine engine,
    Set<IrisGenerator> generators,
    IrisBiome biome,
    IdentityHashMap<IrisBiome, GeneratorBounds> cachedBounds,
    IdentityHashMap<IrisBiome, GeneratorBounds> localBounds
) {
    if (cachedBounds != null) {
        GeneratorBounds shared = cachedBounds.get(biome);
        if (shared != null) {
            return shared;
        }
    }
    GeneratorBounds scoped = localBounds.get(biome);
    if (scoped == null) {
        scoped = computeGeneratorBounds(engine, generators, biome);
        localBounds.put(biome, scoped);
    }
    return scoped;
}
private IrisBiome implode(IrisBiome b, Double x, Double z) {
if (b.getChildren().isEmpty()) {
return b;
@@ -461,20 +469,48 @@ public class IrisComplex implements DataProvider {
}
CNG childCell = b.getChildrenGenerator(rng, 123, b.getChildShrinkFactor());
KList<IrisBiome> chx = b.getRealChildren(this).copy();
chx.add(b);
IrisBiome biome = childCell.fitRarity(chx, x, z);
ChildSelectionPlan childSelectionPlan = resolveChildSelectionPlan(b);
IrisBiome biome = childSelectionPlan.select(childCell, x, z);
biome.setInferredType(b.getInferredType());
return implode(biome, x, z, max - 1);
}
/**
 * Returns the cached child-selection plan for a biome, building it on first use.
 * Uses a double-checked pattern: a lock-free read first, then a locked re-check
 * before constructing. Synchronizing on {@code childSelectionPlans} is correct
 * here because the field is a {@code Collections.synchronizedMap}, whose own
 * operations lock on the map instance itself.
 *
 * @param biome the parent biome whose children (plus itself) form the plan
 * @return the (possibly newly created) selection plan for this biome
 */
private ChildSelectionPlan resolveChildSelectionPlan(IrisBiome biome) {
    // Fast path: plan already built by this or another thread.
    ChildSelectionPlan cachedPlan = childSelectionPlans.get(biome);
    if (cachedPlan != null) {
        return cachedPlan;
    }
    synchronized (childSelectionPlans) {
        // Re-check under the lock in case another thread built it in the gap.
        ChildSelectionPlan synchronizedPlan = childSelectionPlans.get(biome);
        if (synchronizedPlan != null) {
            return synchronizedPlan;
        }
        KList<IrisBiome> children = biome.getRealChildren(this);
        KList<IrisBiome> options = new KList<>();
        // Drop null children so the plan's rarity table only holds real biomes.
        for (IrisBiome child : children) {
            if (child != null) {
                options.add(child);
            }
        }
        // The parent itself is always a candidate, matching the old fitRarity input.
        options.add(biome);
        ChildSelectionPlan createdPlan = ChildSelectionPlan.create(options);
        childSelectionPlans.put(biome, createdPlan);
        return createdPlan;
    }
}
// Immutable min/max generator-height envelope for a biome. Also pre-builds the
// NoiseBounds view consumed by interpolateBounds so the hot path avoids a
// per-sample allocation. The raw min/max fields are retained alongside
// noiseBounds — presumably still read elsewhere in this file; verify before removing.
private static class GeneratorBounds {
    private final double min;
    private final double max;
    private final NoiseBounds noiseBounds;

    private GeneratorBounds(double min, double max) {
        this.min = min;
        this.max = max;
        this.noiseBounds = new NoiseBounds(min, max);
    }
}
@@ -528,6 +564,141 @@ public class IrisComplex implements DataProvider {
}
}
/**
 * Precomputed, immutable rarity table for child-biome selection. Each candidate
 * biome appears {@code (maxRarity + 1) - rarity} times, alternately appended and
 * prepended, reproducing the weighted list that {@code CNG.fitRarity} would build
 * per sample — but built once and indexed directly on the hot path.
 */
private static class ChildSelectionPlan {
    private final IrisBiome[] mappedBiomes;
    private final int maxIndex;

    private ChildSelectionPlan(IrisBiome[] mappedBiomes) {
        this.mappedBiomes = mappedBiomes;
        this.maxIndex = mappedBiomes.length - 1;
    }

    /**
     * Builds the flattened rarity table from the candidate list.
     * Ordering is identical to the previous implementation; only the prepend
     * now uses an ArrayDeque (O(1) addFirst) instead of KList.add(0, ...)
     * which was O(n) per insert — an accidental O(n^2) table build.
     *
     * @param options candidate biomes (nulls tolerated and skipped)
     * @return a plan over the expanded candidates; empty plan for empty input
     */
    private static ChildSelectionPlan create(KList<IrisBiome> options) {
        if (options.isEmpty()) {
            return new ChildSelectionPlan(new IrisBiome[0]);
        }
        int maxRarity = 1;
        for (IrisBiome biome : options) {
            if (biome != null && biome.getRarity() > maxRarity) {
                maxRarity = biome.getRarity();
            }
        }
        int rarityMax = maxRarity + 1;
        boolean flip = false;
        ArrayDeque<IrisBiome> mapped = new ArrayDeque<>();
        for (IrisBiome biome : options) {
            if (biome == null) {
                continue;
            }
            int rarity = Math.max(1, biome.getRarity());
            int count = rarityMax - rarity;
            // Alternate tail/head insertion to interleave biomes across the table,
            // matching the original flip behavior exactly.
            for (int index = 0; index < count; index++) {
                flip = !flip;
                if (flip) {
                    mapped.addLast(biome);
                } else {
                    mapped.addFirst(biome);
                }
            }
        }
        if (mapped.isEmpty()) {
            // All candidates filtered out; fall back to the first raw option.
            return new ChildSelectionPlan(new IrisBiome[]{options.get(0)});
        }
        return new ChildSelectionPlan(mapped.toArray(new IrisBiome[0]));
    }

    /**
     * Selects a biome by asking the child cell noise for an index into the table.
     *
     * @param childCell cell noise generator seeded for this parent biome
     * @param x world x coordinate
     * @param z world z coordinate
     * @return the selected biome, or null for an empty plan
     */
    private IrisBiome select(CNG childCell, double x, double z) {
        if (mappedBiomes.length == 0) {
            return null;
        }
        if (mappedBiomes.length == 1) {
            return mappedBiomes[0];
        }
        // Clamp defensively in case fit2D ever strays outside [0, maxIndex].
        int selectedIndex = childCell.fit2D(0, maxIndex, x, z);
        return mappedBiomes[Math.max(0, Math.min(maxIndex, selectedIndex))];
    }
}
/**
 * Thread-local accumulator for hot-path timing metrics. Samples are batched and
 * flushed into the engine's rolling metrics every {@code HOT_PATH_METRICS_FLUSH_SIZE}
 * samples to keep metric updates off the per-call critical path.
 */
private static class HotPathMetricsState {
    private long callCounter;          // total calls seen by this thread (for stride sampling)
    private long interpolateNanos;     // accumulated interpolate time since last flush
    private int interpolateSamples;
    private long generatorNanos;       // accumulated generator time since last flush
    private int generatorSamples;

    /**
     * Decides whether this call should be timed.
     * EXACT mode times every call; SAMPLED mode times one call per stride.
     * Fix: the old {@code (current & (sampleStride - 1L)) == 0L} bitmask was
     * only correct for power-of-two strides (stride 0 produced a -1 mask and
     * sampled exactly once, ever). A guarded modulo handles any stride and is
     * identical to the mask for positive power-of-two strides.
     *
     * @param mode         metrics mode (DISABLED is filtered out by the caller)
     * @param sampleStride sample one call in every {@code sampleStride}; values <= 1 sample every call
     * @return true when this call should be timed
     */
    private boolean shouldSample(IrisHotPathMetricsMode mode, int sampleStride) {
        if (mode == IrisHotPathMetricsMode.EXACT) {
            return true;
        }
        long current = callCounter++;
        if (sampleStride <= 1) {
            return true;
        }
        return current % sampleStride == 0L;
    }

    // Accumulates one interpolate timing; flushes when the batch is full.
    private void recordInterpolate(Engine engine, long nanos) {
        if (nanos < 0L) {
            return;
        }
        interpolateNanos += nanos;
        interpolateSamples++;
        if (interpolateSamples >= HOT_PATH_METRICS_FLUSH_SIZE) {
            flushInterpolate(engine);
        }
    }

    // Accumulates one generator timing; flushes when the batch is full.
    private void recordGenerator(Engine engine, long nanos) {
        if (nanos < 0L) {
            return;
        }
        generatorNanos += nanos;
        generatorSamples++;
        if (generatorSamples >= HOT_PATH_METRICS_FLUSH_SIZE) {
            flushGenerator(engine);
        }
    }

    // Publishes the batched interpolate average (ms) into the engine metrics and resets the batch.
    private void flushInterpolate(Engine engine) {
        if (interpolateSamples <= 0) {
            return;
        }
        double averageMs = (interpolateNanos / (double) interpolateSamples) / 1_000_000D;
        engine.getMetrics().getNoiseHeightInterpolate().put(averageMs);
        interpolateNanos = 0L;
        interpolateSamples = 0;
    }

    // Publishes the batched generator average (ms) into the engine metrics and resets the batch.
    private void flushGenerator(Engine engine) {
        if (generatorSamples <= 0) {
            return;
        }
        double averageMs = (generatorNanos / (double) generatorSamples) / 1_000_000D;
        engine.getMetrics().getNoiseHeightGenerator().put(averageMs);
        generatorNanos = 0L;
        generatorSamples = 0;
    }
}
// Intentionally empty: this complex holds no resources that require explicit release here.
public void close() {
}

View File

@@ -324,6 +324,18 @@ public interface Engine extends DataProvider, Fallible, LootProvider, BlockUpdat
return;
}
if (!J.isFolia() && !J.isPrimaryThread()) {
CompletableFuture<?> scheduled = J.sfut(() -> updateChunk(c));
if (scheduled != null) {
try {
scheduled.join();
} catch (Throwable e) {
Iris.reportError(e);
}
}
return;
}
var chunk = mantle.getChunk(c).use();
try {
Runnable tileTask = () -> {
@@ -424,7 +436,7 @@ public interface Engine extends DataProvider, Fallible, LootProvider, BlockUpdat
}
if (!J.isFolia()) {
return J.isPrimaryThread();
return true;
}
return J.isOwnedByCurrentRegion(chunk.getWorld(), chunk.getX(), chunk.getZ());

View File

@@ -39,6 +39,17 @@ public class EngineMetrics {
private final AtomicRollingSequence deposit;
private final AtomicRollingSequence carveResolve;
private final AtomicRollingSequence carveApply;
private final AtomicRollingSequence noiseHeightInterpolate;
private final AtomicRollingSequence noiseHeightGenerator;
private final AtomicRollingSequence contextPrefill;
private final AtomicRollingSequence contextPrefillHeight;
private final AtomicRollingSequence contextPrefillBiome;
private final AtomicRollingSequence contextPrefillRock;
private final AtomicRollingSequence contextPrefillFluid;
private final AtomicRollingSequence contextPrefillRegion;
private final AtomicRollingSequence contextPrefillCave;
private final AtomicRollingSequence pregenWaitPermit;
private final AtomicRollingSequence pregenWaitAdaptive;
public EngineMetrics(int mem) {
this.total = new AtomicRollingSequence(mem);
@@ -56,6 +67,17 @@ public class EngineMetrics {
this.deposit = new AtomicRollingSequence(mem);
this.carveResolve = new AtomicRollingSequence(mem);
this.carveApply = new AtomicRollingSequence(mem);
this.noiseHeightInterpolate = new AtomicRollingSequence(mem);
this.noiseHeightGenerator = new AtomicRollingSequence(mem);
this.contextPrefill = new AtomicRollingSequence(mem);
this.contextPrefillHeight = new AtomicRollingSequence(mem);
this.contextPrefillBiome = new AtomicRollingSequence(mem);
this.contextPrefillRock = new AtomicRollingSequence(mem);
this.contextPrefillFluid = new AtomicRollingSequence(mem);
this.contextPrefillRegion = new AtomicRollingSequence(mem);
this.contextPrefillCave = new AtomicRollingSequence(mem);
this.pregenWaitPermit = new AtomicRollingSequence(mem);
this.pregenWaitAdaptive = new AtomicRollingSequence(mem);
}
public KMap<String, Double> pull() {
@@ -75,6 +97,17 @@ public class EngineMetrics {
v.put("deposit", deposit.getAverage());
v.put("carve.resolve", carveResolve.getAverage());
v.put("carve.apply", carveApply.getAverage());
v.put("noise.height.interpolate", noiseHeightInterpolate.getAverage());
v.put("noise.height.generator", noiseHeightGenerator.getAverage());
v.put("context.prefill", contextPrefill.getAverage());
v.put("context.prefill.height", contextPrefillHeight.getAverage());
v.put("context.prefill.biome", contextPrefillBiome.getAverage());
v.put("context.prefill.rock", contextPrefillRock.getAverage());
v.put("context.prefill.fluid", contextPrefillFluid.getAverage());
v.put("context.prefill.region", contextPrefillRegion.getAverage());
v.put("context.prefill.cave", contextPrefillCave.getAverage());
v.put("pregen.wait.permit", pregenWaitPermit.getAverage());
v.put("pregen.wait.adaptive", pregenWaitAdaptive.getAverage());
return v;
}

View File

@@ -78,7 +78,8 @@ public interface EngineMode extends Staged {
cacheContext = false;
}
}
ChunkContext ctx = new ChunkContext(x, z, getComplex(), cacheContext);
ChunkContext.PrefillPlan prefillPlan = cacheContext ? ChunkContext.PrefillPlan.NO_CAVE : ChunkContext.PrefillPlan.NONE;
ChunkContext ctx = new ChunkContext(x, z, getComplex(), cacheContext, prefillPlan, getEngine().getMetrics());
IrisContext.getOr(getEngine()).setChunkContext(ctx);
EngineStage[] stages = getStages().toArray(new EngineStage[0]);

View File

@@ -25,12 +25,13 @@ import art.arcane.iris.engine.object.IrisCaveFieldModule;
import art.arcane.iris.engine.object.IrisCaveProfile;
import art.arcane.iris.engine.object.IrisRange;
import art.arcane.iris.util.project.noise.CNG;
import art.arcane.volmlib.util.collection.KList;
import art.arcane.volmlib.util.math.RNG;
import art.arcane.volmlib.util.matter.MatterCavern;
import art.arcane.volmlib.util.scheduling.PrecisionStopwatch;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class IrisCaveCarver3D {
private static final byte LIQUID_AIR = 0;
@@ -46,7 +47,7 @@ public class IrisCaveCarver3D {
private final CNG warpDensity;
private final CNG surfaceBreakDensity;
private final RNG thresholdRng;
private final KList<ModuleState> modules;
private final ModuleState[] modules;
private final double normalization;
private final MatterCavern carveAir;
private final MatterCavern carveLava;
@@ -64,7 +65,7 @@ public class IrisCaveCarver3D {
this.carveAir = new MatterCavern(true, "", LIQUID_AIR);
this.carveLava = new MatterCavern(true, "", LIQUID_LAVA);
this.carveForcedAir = new MatterCavern(true, "", LIQUID_FORCED_AIR);
this.modules = new KList<>();
List<ModuleState> moduleStates = new ArrayList<>();
RNG baseRng = new RNG(engine.getSeedManager().getCarve());
this.baseDensity = profile.getBaseDensityStyle().create(baseRng.nextParallelRNG(934_447), data);
@@ -82,13 +83,14 @@ public class IrisCaveCarver3D {
for (IrisCaveFieldModule module : profile.getModules()) {
CNG moduleDensity = module.getStyle().create(baseRng.nextParallelRNG(1_000_003L + (index * 65_537L)), data);
ModuleState state = new ModuleState(module, moduleDensity);
modules.add(state);
moduleStates.add(state);
weight += Math.abs(state.weight);
index++;
}
this.modules = moduleStates.toArray(new ModuleState[0]);
normalization = weight <= 0 ? 1 : weight;
hasModules = !modules.isEmpty();
hasModules = modules.length > 0;
}
public int carve(MantleWriter writer, int chunkX, int chunkZ) {
@@ -171,7 +173,7 @@ public class IrisCaveCarver3D {
int columnSurfaceY = engine.getHeight(x, z);
int clearanceTopY = Math.min(maxY, Math.max(minY, columnSurfaceY - surfaceClearance));
boolean breakColumn = allowSurfaceBreak
&& signed(surfaceBreakDensity.noise(x, z)) >= surfaceBreakNoiseThreshold;
&& signed(surfaceBreakDensity.noiseFast2D(x, z)) >= surfaceBreakNoiseThreshold;
int columnTopY = breakColumn
? Math.min(maxY, Math.max(minY, columnSurfaceY))
: clearanceTopY;
@@ -329,8 +331,8 @@ public class IrisCaveCarver3D {
private double sampleDensity(int x, int y, int z) {
if (!hasWarp && !hasModules) {
double density = signed(baseDensity.noise(x, y, z)) * baseWeight;
density += signed(detailDensity.noise(x, y, z)) * detailWeight;
double density = signed(baseDensity.noiseFast3D(x, y, z)) * baseWeight;
density += signed(detailDensity.noiseFast3D(x, y, z)) * detailWeight;
return density / normalization;
}
@@ -338,8 +340,8 @@ public class IrisCaveCarver3D {
double warpedY = y;
double warpedZ = z;
if (hasWarp) {
double warpA = signed(warpDensity.noise(x, y, z));
double warpB = signed(warpDensity.noise(x + 31.37D, y - 17.21D, z + 23.91D));
double warpA = signed(warpDensity.noiseFast3D(x, y, z));
double warpB = signed(warpDensity.noiseFast3D(x + 31.37D, y - 17.21D, z + 23.91D));
double offsetX = warpA * warpStrength;
double offsetY = warpB * warpStrength;
double offsetZ = (warpA - warpB) * 0.5D * warpStrength;
@@ -348,16 +350,17 @@ public class IrisCaveCarver3D {
warpedZ += offsetZ;
}
double density = signed(baseDensity.noise(warpedX, warpedY, warpedZ)) * baseWeight;
density += signed(detailDensity.noise(warpedX, warpedY, warpedZ)) * detailWeight;
double density = signed(baseDensity.noiseFast3D(warpedX, warpedY, warpedZ)) * baseWeight;
density += signed(detailDensity.noiseFast3D(warpedX, warpedY, warpedZ)) * detailWeight;
if (hasModules) {
for (ModuleState module : modules) {
for (int moduleIndex = 0; moduleIndex < modules.length; moduleIndex++) {
ModuleState module = modules[moduleIndex];
if (y < module.minY || y > module.maxY) {
continue;
}
double moduleDensity = signed(module.density.noise(warpedX, warpedY, warpedZ)) - module.threshold;
double moduleDensity = signed(module.density.noiseFast3D(warpedX, warpedY, warpedZ)) - module.threshold;
if (module.invert) {
moduleDensity = -moduleDensity;
}

View File

@@ -18,6 +18,7 @@
package art.arcane.iris.engine.mantle.components;
import art.arcane.iris.engine.data.cache.Cache;
import art.arcane.iris.engine.mantle.ComponentFlag;
import art.arcane.iris.engine.mantle.EngineMantle;
import art.arcane.iris.engine.mantle.IrisMantleComponent;
@@ -32,6 +33,7 @@ import art.arcane.iris.util.project.context.ChunkContext;
import art.arcane.volmlib.util.documentation.ChunkCoordinates;
import art.arcane.volmlib.util.mantle.flag.ReservedFlag;
import art.arcane.volmlib.util.scheduling.PrecisionStopwatch;
import it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap;
import java.util.ArrayList;
import java.util.Comparator;
@@ -75,8 +77,9 @@ public class MantleCarvingComponent extends IrisMantleComponent {
@Override
public void generateLayer(MantleWriter writer, int x, int z, ChunkContext context) {
IrisDimensionCarvingResolver.State resolverState = new IrisDimensionCarvingResolver.State();
Long2ObjectOpenHashMap<IrisBiome> caveBiomeCache = new Long2ObjectOpenHashMap<>(FIELD_SIZE * FIELD_SIZE);
PrecisionStopwatch resolveStopwatch = PrecisionStopwatch.start();
List<WeightedProfile> weightedProfiles = resolveWeightedProfiles(x, z, resolverState);
List<WeightedProfile> weightedProfiles = resolveWeightedProfiles(x, z, resolverState, caveBiomeCache);
getEngineMantle().getEngine().getMetrics().getCarveResolve().put(resolveStopwatch.getMilliseconds());
for (WeightedProfile weightedProfile : weightedProfiles) {
carveProfile(weightedProfile, writer, x, z);
@@ -89,8 +92,8 @@ public class MantleCarvingComponent extends IrisMantleComponent {
carver.carve(writer, cx, cz, weightedProfile.columnWeights, MIN_WEIGHT, THRESHOLD_PENALTY, weightedProfile.worldYRange);
}
private List<WeightedProfile> resolveWeightedProfiles(int chunkX, int chunkZ, IrisDimensionCarvingResolver.State resolverState) {
IrisCaveProfile[] profileField = buildProfileField(chunkX, chunkZ, resolverState);
private List<WeightedProfile> resolveWeightedProfiles(int chunkX, int chunkZ, IrisDimensionCarvingResolver.State resolverState, Long2ObjectOpenHashMap<IrisBiome> caveBiomeCache) {
IrisCaveProfile[] profileField = buildProfileField(chunkX, chunkZ, resolverState, caveBiomeCache);
Map<IrisCaveProfile, double[]> profileWeights = new IdentityHashMap<>();
IrisCaveProfile[] columnProfiles = new IrisCaveProfile[KERNEL_SIZE];
double[] columnProfileWeights = new double[KERNEL_SIZE];
@@ -215,7 +218,7 @@ public class MantleCarvingComponent extends IrisMantleComponent {
return weightedProfiles;
}
private IrisCaveProfile[] buildProfileField(int chunkX, int chunkZ, IrisDimensionCarvingResolver.State resolverState) {
private IrisCaveProfile[] buildProfileField(int chunkX, int chunkZ, IrisDimensionCarvingResolver.State resolverState, Long2ObjectOpenHashMap<IrisBiome> caveBiomeCache) {
IrisCaveProfile[] profileField = new IrisCaveProfile[FIELD_SIZE * FIELD_SIZE];
int startX = (chunkX << 4) - BLEND_RADIUS;
int startZ = (chunkZ << 4) - BLEND_RADIUS;
@@ -224,7 +227,7 @@ public class MantleCarvingComponent extends IrisMantleComponent {
int worldX = startX + fieldX;
for (int fieldZ = 0; fieldZ < FIELD_SIZE; fieldZ++) {
int worldZ = startZ + fieldZ;
profileField[(fieldX * FIELD_SIZE) + fieldZ] = resolveColumnProfile(worldX, worldZ, resolverState);
profileField[(fieldX * FIELD_SIZE) + fieldZ] = resolveColumnProfile(worldX, worldZ, resolverState, caveBiomeCache);
}
}
@@ -241,7 +244,7 @@ public class MantleCarvingComponent extends IrisMantleComponent {
return -1;
}
private IrisCaveProfile resolveColumnProfile(int worldX, int worldZ, IrisDimensionCarvingResolver.State resolverState) {
private IrisCaveProfile resolveColumnProfile(int worldX, int worldZ, IrisDimensionCarvingResolver.State resolverState, Long2ObjectOpenHashMap<IrisBiome> caveBiomeCache) {
IrisCaveProfile resolved = null;
IrisCaveProfile dimensionProfile = getDimension().getCaveProfile();
if (isProfileEnabled(dimensionProfile)) {
@@ -266,7 +269,14 @@ public class MantleCarvingComponent extends IrisMantleComponent {
int surfaceY = getEngineMantle().getEngine().getHeight(worldX, worldZ, true);
int sampleY = Math.max(1, surfaceY - 56);
IrisBiome caveBiome = getEngineMantle().getEngine().getCaveBiome(worldX, sampleY, worldZ, resolverState);
long cacheKey = Cache.key(worldX, worldZ);
IrisBiome caveBiome = caveBiomeCache.get(cacheKey);
if (caveBiome == null) {
caveBiome = getEngineMantle().getEngine().getCaveBiome(worldX, sampleY, worldZ, resolverState);
if (caveBiome != null) {
caveBiomeCache.put(cacheKey, caveBiome);
}
}
if (caveBiome != null) {
IrisCaveProfile caveProfile = caveBiome.getCaveProfile();
if (isProfileEnabled(caveProfile)) {

View File

@@ -33,12 +33,14 @@ import art.arcane.iris.util.project.hunk.Hunk;
import art.arcane.volmlib.util.collection.KList;
import art.arcane.volmlib.util.mantle.runtime.Mantle;
import art.arcane.volmlib.util.mantle.runtime.MantleChunk;
import art.arcane.volmlib.util.math.BlockPosition;
import art.arcane.volmlib.util.math.M;
import art.arcane.volmlib.util.math.RNG;
import art.arcane.volmlib.util.matter.Matter;
import art.arcane.volmlib.util.matter.MatterCavern;
import art.arcane.volmlib.util.matter.slices.MarkerMatter;
import art.arcane.volmlib.util.scheduling.PrecisionStopwatch;
import it.unimi.dsi.fastutil.longs.Long2ObjectOpenHashMap;
import lombok.Data;
import org.bukkit.Material;
import org.bukkit.block.data.BlockData;
@@ -64,6 +66,7 @@ public class IrisCarveModifier extends EngineAssignedModifier<BlockData> {
Mantle<Matter> mantle = getEngine().getMantle().getMantle();
MantleChunk<Matter> mc = mantle.getChunk(x, z).use();
IrisDimensionCarvingResolver.State resolverState = new IrisDimensionCarvingResolver.State();
Long2ObjectOpenHashMap<IrisBiome> caveBiomeCache = new Long2ObjectOpenHashMap<>(2048);
int[][] columnHeights = new int[256][];
int[] columnHeightSizes = new int[256];
PackedWallBuffer walls = new PackedWallBuffer(512);
@@ -129,7 +132,7 @@ public class IrisCarveModifier extends EngineAssignedModifier<BlockData> {
int worldX = rx + (x << 4);
int worldZ = rz + (z << 4);
IrisBiome biome = cavern.getCustomBiome().isEmpty()
? getEngine().getCaveBiome(worldX, yy, worldZ, resolverState)
? resolveCaveBiome(caveBiomeCache, worldX, yy, worldZ, resolverState)
: getEngine().getData().getBiomeLoader().load(cavern.getCustomBiome());
if (biome != null) {
@@ -166,7 +169,7 @@ public class IrisCarveModifier extends EngineAssignedModifier<BlockData> {
buf = y;
zone.ceiling = buf;
} else if (zone.isValid(getEngine())) {
processZone(output, mc, mantle, zone, rx, rz, rx + (x << 4), rz + (z << 4), resolverState);
processZone(output, mc, mantle, zone, rx, rz, rx + (x << 4), rz + (z << 4), resolverState, caveBiomeCache);
zone = new CaveZone();
zone.setFloor(y);
buf = y;
@@ -178,7 +181,7 @@ public class IrisCarveModifier extends EngineAssignedModifier<BlockData> {
}
if (zone.isValid(getEngine())) {
processZone(output, mc, mantle, zone, rx, rz, rx + (x << 4), rz + (z << 4), resolverState);
processZone(output, mc, mantle, zone, rx, rz, rx + (x << 4), rz + (z << 4), resolverState, caveBiomeCache);
}
}
} finally {
@@ -190,7 +193,7 @@ public class IrisCarveModifier extends EngineAssignedModifier<BlockData> {
}
}
private void processZone(Hunk<BlockData> output, MantleChunk<Matter> mc, Mantle<Matter> mantle, CaveZone zone, int rx, int rz, int xx, int zz, IrisDimensionCarvingResolver.State resolverState) {
private void processZone(Hunk<BlockData> output, MantleChunk<Matter> mc, Mantle<Matter> mantle, CaveZone zone, int rx, int rz, int xx, int zz, IrisDimensionCarvingResolver.State resolverState, Long2ObjectOpenHashMap<IrisBiome> caveBiomeCache) {
int center = (zone.floor + zone.ceiling) / 2;
String customBiome = "";
@@ -221,7 +224,7 @@ public class IrisCarveModifier extends EngineAssignedModifier<BlockData> {
}
IrisBiome biome = customBiome.isEmpty()
? getEngine().getCaveBiome(xx, center, zz, resolverState)
? resolveCaveBiome(caveBiomeCache, xx, center, zz, resolverState)
: getEngine().getData().getBiomeLoader().load(customBiome);
if (biome == null) {
@@ -286,6 +289,20 @@ public class IrisCarveModifier extends EngineAssignedModifier<BlockData> {
}
}
/**
 * Resolves the cave biome at a world position, memoizing results in the supplied
 * per-chunk cache keyed by the packed block position. A null resolution is
 * deliberately not cached, so unresolved positions are retried on later calls.
 */
private IrisBiome resolveCaveBiome(Long2ObjectOpenHashMap<IrisBiome> caveBiomeCache, int x, int y, int z, IrisDimensionCarvingResolver.State resolverState) {
    long packedPosition = BlockPosition.toLong(x, y, z);
    IrisBiome biome = caveBiomeCache.get(packedPosition);
    if (biome == null) {
        biome = getEngine().getCaveBiome(x, y, z, resolverState);
        if (biome != null) {
            caveBiomeCache.put(packedPosition, biome);
        }
    }
    return biome;
}
private void appendColumnHeight(int[][] heights, int[] sizes, int columnIndex, int y) {
int[] column = heights[columnIndex];
int size = sizes[columnIndex];

View File

@@ -1,6 +1,7 @@
package art.arcane.iris.engine.object;
import art.arcane.iris.engine.framework.Engine;
import art.arcane.iris.util.project.interpolation.IrisInterpolation;
import art.arcane.volmlib.util.collection.KList;
import art.arcane.iris.util.project.noise.CNG;
@@ -153,7 +154,8 @@ public final class IrisDimensionCarvingResolver {
long seed = resolveChildSeed(engine, state);
CNG childGenerator = parent.getChildrenGenerator(seed, engine.getData());
int selectedIndex = childGenerator.fit(0, selectionPlan.maxIndex, worldX, worldZ);
double sample = childGenerator.noiseFast2D(worldX, worldZ);
int selectedIndex = (int) Math.round(IrisInterpolation.lerp(0, selectionPlan.maxIndex, sample));
CarvingChoice selected = selectionPlan.get(selectedIndex);
if (selected == null || selected.entry == null) {
return parent;

View File

@@ -25,6 +25,8 @@ import art.arcane.iris.engine.object.annotations.Required;
import art.arcane.volmlib.util.function.NoiseProvider;
import art.arcane.iris.util.project.interpolation.InterpolationMethod;
import art.arcane.iris.util.project.interpolation.IrisInterpolation;
import art.arcane.iris.util.project.interpolation.IrisInterpolation.NoiseBounds;
import art.arcane.iris.util.project.interpolation.IrisInterpolation.NoiseBoundsProvider;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
@@ -71,4 +73,12 @@ public class IrisInterpolator {
public double interpolate(int x, int z, NoiseProvider provider) {
return IrisInterpolation.getNoise(getFunction(), x, z, getHorizontalScale(), provider);
}
public NoiseBounds interpolateBounds(double x, double z, NoiseBoundsProvider provider) {
return interpolateBounds((int) Math.round(x), (int) Math.round(z), provider);
}
public NoiseBounds interpolateBounds(int x, int z, NoiseBoundsProvider provider) {
return IrisInterpolation.getNoiseBounds(getFunction(), x, z, getHorizontalScale(), provider);
}
}

View File

@@ -106,7 +106,8 @@ public class IrisNoiseGenerator {
g += 819;
}
double n = getGenerator(superSeed, data).fitDouble(0, opacity, (x / zoom) + offsetX, (z / zoom) + offsetZ);
CNG cng = getGenerator(superSeed, data);
double n = cng.noiseFast2D((x / zoom) + offsetX, (z / zoom) + offsetZ) * opacity;
n = negative ? (-n + opacity) : n;
n = (exponent != 1 ? n < 0 ? -Math.pow(-n, exponent) : Math.pow(n, exponent) : n) + offsetY;
n = parametric ? IrisInterpolation.parametric(n, 1) : n;

View File

@@ -168,7 +168,7 @@ public class J {
}
public static boolean isFolia() {
return FoliaScheduler.isFolia(Iris.instance);
return FoliaScheduler.isFolia(Bukkit.getServer());
}
public static boolean isPrimaryThread() {
@@ -176,10 +176,26 @@ public class J {
}
public static boolean isOwnedByCurrentRegion(Entity entity) {
if (entity == null) {
return false;
}
if (!isFolia()) {
return isPrimaryThread();
}
return FoliaScheduler.isOwnedByCurrentRegion(entity);
}
public static boolean isOwnedByCurrentRegion(World world, int chunkX, int chunkZ) {
if (world == null) {
return false;
}
if (!isFolia()) {
return isPrimaryThread();
}
return FoliaScheduler.isOwnedByCurrentRegion(world, chunkX, chunkZ);
}
@@ -531,34 +547,66 @@ public class J {
}
private static boolean runGlobalImmediate(Runnable runnable) {
if (!isFolia()) {
return false;
}
return FoliaScheduler.runGlobal(Iris.instance, runnable);
}
private static boolean runGlobalDelayed(Runnable runnable, int delayTicks) {
if (!isFolia()) {
return false;
}
return FoliaScheduler.runGlobal(Iris.instance, runnable, Math.max(0, delayTicks));
}
private static boolean runRegionImmediate(World world, int chunkX, int chunkZ, Runnable runnable) {
if (!isFolia()) {
return false;
}
return FoliaScheduler.runRegion(Iris.instance, world, chunkX, chunkZ, runnable);
}
private static boolean runRegionDelayed(World world, int chunkX, int chunkZ, Runnable runnable, int delayTicks) {
if (!isFolia()) {
return false;
}
return FoliaScheduler.runRegion(Iris.instance, world, chunkX, chunkZ, runnable, Math.max(0, delayTicks));
}
private static boolean runAsyncImmediate(Runnable runnable) {
if (!isFolia()) {
return false;
}
return FoliaScheduler.runAsync(Iris.instance, runnable);
}
private static boolean runAsyncDelayed(Runnable runnable, int delayTicks) {
if (!isFolia()) {
return false;
}
return FoliaScheduler.runAsync(Iris.instance, runnable, Math.max(0, delayTicks));
}
private static boolean runEntityImmediate(Entity entity, Runnable runnable) {
if (!isFolia()) {
return false;
}
return FoliaScheduler.runEntity(Iris.instance, entity, runnable);
}
private static boolean runEntityDelayed(Entity entity, Runnable runnable, int delayTicks) {
if (!isFolia()) {
return false;
}
return FoliaScheduler.runEntity(Iris.instance, entity, runnable, Math.max(0, delayTicks));
}

View File

@@ -1,17 +1,19 @@
package art.arcane.iris.util.project.context;
import art.arcane.iris.core.IrisHotPathMetricsMode;
import art.arcane.iris.core.IrisSettings;
import art.arcane.iris.engine.IrisComplex;
import art.arcane.iris.engine.framework.EngineMetrics;
import art.arcane.iris.engine.object.IrisBiome;
import art.arcane.iris.engine.object.IrisRegion;
import art.arcane.iris.util.common.parallel.MultiBurst;
import art.arcane.volmlib.util.atomics.AtomicRollingSequence;
import org.bukkit.block.data.BlockData;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.IdentityHashMap;
public class ChunkContext {
private static final int PREFILL_METRICS_FLUSH_SIZE = 64;
private static final ThreadLocal<PrefillMetricsState> PREFILL_METRICS = ThreadLocal.withInitial(PrefillMetricsState::new);
private final int x;
private final int z;
private final ChunkedDataCache<Double> height;
@@ -22,10 +24,18 @@ public class ChunkContext {
private final ChunkedDataCache<IrisRegion> region;
public ChunkContext(int x, int z, IrisComplex complex) {
this(x, z, complex, true);
this(x, z, complex, true, PrefillPlan.NO_CAVE, null);
}
public ChunkContext(int x, int z, IrisComplex complex, boolean cache) {
this(x, z, complex, cache, PrefillPlan.NO_CAVE, null);
}
public ChunkContext(int x, int z, IrisComplex complex, boolean cache, EngineMetrics metrics) {
this(x, z, complex, cache, PrefillPlan.NO_CAVE, metrics);
}
public ChunkContext(int x, int z, IrisComplex complex, boolean cache, PrefillPlan prefillPlan, EngineMetrics metrics) {
this.x = x;
this.z = z;
this.height = new ChunkedDataCache<>(complex.getHeightStream(), x, z, cache);
@@ -36,17 +46,42 @@ public class ChunkContext {
this.region = new ChunkedDataCache<>(complex.getRegionStream(), x, z, cache);
if (cache) {
Executor executor = MultiBurst.burst;
List<CompletableFuture<Void>> tasks = new ArrayList<>(6);
tasks.add(CompletableFuture.runAsync(() -> height.fill(executor), executor));
tasks.add(CompletableFuture.runAsync(() -> biome.fill(executor), executor));
tasks.add(CompletableFuture.runAsync(() -> cave.fill(executor), executor));
tasks.add(CompletableFuture.runAsync(() -> rock.fill(executor), executor));
tasks.add(CompletableFuture.runAsync(() -> fluid.fill(executor), executor));
tasks.add(CompletableFuture.runAsync(() -> region.fill(executor), executor));
for (CompletableFuture<Void> task : tasks) {
task.join();
PrefillPlan resolvedPlan = prefillPlan == null ? PrefillPlan.NO_CAVE : prefillPlan;
PrefillMetricsState metricsState = PREFILL_METRICS.get();
IrisSettings.IrisSettingsPregen pregen = IrisSettings.get().getPregen();
IrisHotPathMetricsMode metricsMode = pregen.getHotPathMetricsMode();
boolean sampleMetrics = metricsMode != IrisHotPathMetricsMode.DISABLED
&& metricsState.shouldSample(metricsMode, pregen.getHotPathMetricsSampleStride());
long totalStartNanos = sampleMetrics ? System.nanoTime() : 0L;
if (resolvedPlan.height) {
fill(height, metrics == null ? null : metrics.getContextPrefillHeight(), sampleMetrics, metricsState);
}
if (resolvedPlan.biome) {
fill(biome, metrics == null ? null : metrics.getContextPrefillBiome(), sampleMetrics, metricsState);
}
if (resolvedPlan.rock) {
fill(rock, metrics == null ? null : metrics.getContextPrefillRock(), sampleMetrics, metricsState);
}
if (resolvedPlan.fluid) {
fill(fluid, metrics == null ? null : metrics.getContextPrefillFluid(), sampleMetrics, metricsState);
}
if (resolvedPlan.region) {
fill(region, metrics == null ? null : metrics.getContextPrefillRegion(), sampleMetrics, metricsState);
}
if (resolvedPlan.cave) {
fill(cave, metrics == null ? null : metrics.getContextPrefillCave(), sampleMetrics, metricsState);
}
if (metrics != null && sampleMetrics) {
metricsState.record(metrics.getContextPrefill(), System.nanoTime() - totalStartNanos);
}
}
}
private void fill(ChunkedDataCache<?> dataCache, AtomicRollingSequence metrics, boolean sampleMetrics, PrefillMetricsState metricsState) {
long startNanos = sampleMetrics ? System.nanoTime() : 0L;
dataCache.fill();
if (metrics != null && sampleMetrics) {
metricsState.record(metrics, System.nanoTime() - startNanos);
}
}
@@ -81,4 +116,66 @@ public class ChunkContext {
public ChunkedDataCache<IrisRegion> getRegion() {
return region;
}
/**
 * Selects which of the chunk-context data caches are eagerly filled during
 * ChunkContext construction. Each flag corresponds to one ChunkedDataCache
 * field (height, biome, cave, rock, fluid, region).
 */
public enum PrefillPlan {
    // Prefill every cache, including the cave stream.
    ALL(true, true, true, true, true, true),
    // Default plan: prefill everything except the cave stream.
    NO_CAVE(true, true, false, true, true, true),
    // Construct the caches but fill nothing up front.
    NONE(false, false, false, false, false, false);

    private final boolean height;
    private final boolean biome;
    private final boolean cave;
    private final boolean rock;
    private final boolean fluid;
    private final boolean region;

    PrefillPlan(boolean height, boolean biome, boolean cave, boolean rock, boolean fluid, boolean region) {
        this.height = height;
        this.biome = biome;
        this.cave = cave;
        this.rock = rock;
        this.fluid = fluid;
        this.region = region;
    }
}
/**
 * Per-thread state for sampled timing of chunk-context prefill work.
 * Decides which invocations get timed ({@link #shouldSample}) and batches the
 * measured durations per metric sequence ({@link #record}), flushing an average
 * into the rolling sequence every PREFILL_METRICS_FLUSH_SIZE samples to keep
 * the hot path cheap.
 */
private static final class PrefillMetricsState {
    // Monotonic count of prefill invocations on this thread; drives stride sampling.
    private long callCounter;
    // Per-metric accumulation buckets, keyed by sequence identity (sequences are singletons).
    private final IdentityHashMap<AtomicRollingSequence, MetricBucket> buckets = new IdentityHashMap<>();

    /**
     * Returns whether this invocation should be timed. EXACT always samples;
     * SAMPLED times every {@code sampleStride}-th call. (DISABLED is filtered
     * out by the caller before reaching this method.)
     */
    private boolean shouldSample(IrisHotPathMetricsMode mode, int sampleStride) {
        if (mode == IrisHotPathMetricsMode.EXACT) {
            return true;
        }
        // Fix: the previous mask check ((counter & (stride - 1)) == 0) is only
        // correct when the configured stride is a power of two, and degenerates
        // for stride <= 0. Plain modulo with a floor of 1 handles any stride and
        // is identical to the mask for power-of-two strides.
        long stride = Math.max(1L, sampleStride);
        return (callCounter++ % stride) == 0L;
    }

    /**
     * Accumulates one timed duration for {@code sequence}; once enough samples
     * collect, publishes their average (in milliseconds) and resets the bucket.
     * Null sequences and negative durations are ignored.
     */
    private void record(AtomicRollingSequence sequence, long nanos) {
        if (sequence == null || nanos < 0L) {
            return;
        }
        MetricBucket bucket = buckets.computeIfAbsent(sequence, unused -> new MetricBucket());
        bucket.nanos += nanos;
        bucket.samples++;
        if (bucket.samples >= PREFILL_METRICS_FLUSH_SIZE) {
            double averageMs = (bucket.nanos / (double) bucket.samples) / 1_000_000D;
            sequence.put(averageMs);
            bucket.nanos = 0L;
            bucket.samples = 0;
        }
    }
}
// Accumulator pairing total elapsed nanoseconds with the number of samples folded
// in; PrefillMetricsState flushes it as an average once enough samples collect.
private static final class MetricBucket {
    private long nanos;
    private int samples;
}
}

View File

@@ -1,13 +1,9 @@
package art.arcane.iris.util.project.context;
import art.arcane.iris.util.project.stream.ProceduralStream;
import art.arcane.iris.util.project.stream.utility.CachedStream2D;
import art.arcane.volmlib.util.documentation.BlockCoordinates;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;
import java.util.concurrent.ForkJoinPool;
public class ChunkedDataCache<T> {
private final int x;
@@ -31,7 +27,7 @@ public class ChunkedDataCache<T> {
}
public void fill() {
fill(ForkJoinPool.commonPool());
fill(null);
}
public void fill(Executor executor) {
@@ -39,20 +35,17 @@ public class ChunkedDataCache<T> {
return;
}
List<CompletableFuture<Void>> tasks = new ArrayList<>(16);
for (int j = 0; j < 16; j++) {
int row = j;
tasks.add(CompletableFuture.runAsync(() -> {
int rowOffset = row * 16;
double zz = (z + row);
for (int i = 0; i < 16; i++) {
data[rowOffset + i] = stream.get(x + i, zz);
}
}, executor));
if (stream instanceof CachedStream2D<?> cachedStream) {
cachedStream.fillChunk(x, z, data);
return;
}
for (CompletableFuture<Void> task : tasks) {
task.join();
for (int row = 0; row < 16; row++) {
int rowOffset = row * 16;
int worldZ = z + row;
for (int column = 0; column < 16; column++) {
data[rowOffset + column] = stream.get(x + column, worldZ);
}
}
}
@@ -63,11 +56,14 @@ public class ChunkedDataCache<T> {
return stream.get(this.x + x, this.z + z);
}
T value = (T) data[(z * 16) + x];
int index = (z * 16) + x;
T value = (T) data[index];
if (value != null) {
return value;
}
return stream.get(this.x + x, this.z + z);
T sampled = stream.get(this.x + x, this.z + z);
data[index] = sampled;
return sampled;
}
}

View File

@@ -37,6 +37,7 @@ import java.util.HashMap;
public class IrisInterpolation {
public static CNG cng = NoiseStyle.SIMPLEX.create(new RNG());
private static final ThreadLocal<NoiseSampleCache2D> NOISE_SAMPLE_CACHE_2D = ThreadLocal.withInitial(() -> new NoiseSampleCache2D(64));
private static final ThreadLocal<NoiseBoundsSampleCache2D> NOISE_BOUNDS_SAMPLE_CACHE_2D = ThreadLocal.withInitial(() -> new NoiseBoundsSampleCache2D(64));
public static double bezier(double t) {
return t * t * (3.0d - 2.0d * t);
@@ -1041,6 +1042,16 @@ public class IrisInterpolation {
return n.noise(x, z);
}
/**
 * Interpolates both the minimum and maximum noise surfaces at (x, z) using the
 * given method and horizontal scale. A thread-local sample cache is cleared and
 * shared between the two passes so each source coordinate is sampled only once
 * even though min and max are interpolated separately.
 */
public static NoiseBounds getNoiseBounds(InterpolationMethod method, int x, int z, double h, NoiseBoundsProvider noise) {
    NoiseBoundsSampleCache2D sharedSamples = NOISE_BOUNDS_SAMPLE_CACHE_2D.get();
    sharedSamples.clear();
    double lower = getNoise(method, x, z, h, (sampleX, sampleZ) -> sharedSamples.getOrSampleMin(sampleX, sampleZ, noise));
    double upper = getNoise(method, x, z, h, (sampleX, sampleZ) -> sharedSamples.getOrSampleMax(sampleX, sampleZ, noise));
    return new NoiseBounds(lower, upper);
}
private static boolean usesSampleCache(InterpolationMethod method) {
return switch (method) {
case BILINEAR_STARCAST_3,
@@ -1176,6 +1187,170 @@ public class IrisInterpolation {
}
}
/**
 * Samples both the minimum and maximum noise value at a 2D coordinate in a
 * single call, allowing interpolation to derive min/max surfaces from one pass
 * over the underlying source.
 */
@FunctionalInterface
public interface NoiseBoundsProvider {
    NoiseBounds noise(double x, double z);
}
/**
 * Immutable min/max pair produced by a bounds-aware noise sample.
 * Converted to a record: the canonical constructor {@code new NoiseBounds(min, max)}
 * and the accessors {@code min()} / {@code max()} keep the exact shape callers use,
 * while value-based equals/hashCode/toString come for free (the file already relies
 * on Java 16+ features such as pattern instanceof).
 */
public record NoiseBounds(double min, double max) {
}
/**
 * Open-addressed (linear-probing) hash cache mapping a 2D double coordinate to
 * the min/max noise pair sampled there. Used per-thread by getNoiseBounds so the
 * separate min and max interpolation passes share one underlying sample per
 * coordinate. Keys are the raw IEEE-754 bit patterns of the coordinates; slot
 * occupancy is tracked in a parallel byte array (0 = empty, 1 = filled). The
 * table length is always a power of two so slots can be masked instead of
 * modulo'd.
 */
private static class NoiseBoundsSampleCache2D {
    private long[] xBits;
    private long[] zBits;
    private double[] minValues;
    private double[] maxValues;
    private byte[] states;
    // tableLength - 1; valid because the table length is always a power of two.
    private int mask;
    // Grow once size reaches ~75% of the table to keep probe chains short.
    private int resizeThreshold;
    private int size;

    public NoiseBoundsSampleCache2D(int initialCapacity) {
        // Size the table to ~3x the requested capacity, rounded up to a power of two.
        int minimumCapacity = Math.max(8, initialCapacity);
        int tableSize = tableSizeFor((minimumCapacity << 1) + minimumCapacity);
        xBits = new long[tableSize];
        zBits = new long[tableSize];
        minValues = new double[tableSize];
        maxValues = new double[tableSize];
        states = new byte[tableSize];
        mask = tableSize - 1;
        resizeThreshold = Math.max(1, (tableSize * 3) >> 2);
        size = 0;
    }

    // Marks every slot empty; value arrays are left stale since states gates reads.
    public void clear() {
        if (size == 0) {
            return;
        }
        Arrays.fill(states, (byte) 0);
        size = 0;
    }

    /**
     * Returns the cached minimum at (sampleX, sampleZ), sampling and caching both
     * bounds via {@code provider} on a miss.
     */
    public double getOrSampleMin(double sampleX, double sampleZ, NoiseBoundsProvider provider) {
        long xBitsValue = Double.doubleToLongBits(sampleX);
        long zBitsValue = Double.doubleToLongBits(sampleZ);
        int slot = findSlot(xBitsValue, zBitsValue);
        if (states[slot] != 0) {
            return minValues[slot];
        }
        NoiseBounds bounds = provider.noise(sampleX, sampleZ);
        insert(slot, xBitsValue, zBitsValue, bounds.min(), bounds.max());
        return bounds.min();
    }

    /**
     * Returns the cached maximum at (sampleX, sampleZ), sampling and caching both
     * bounds via {@code provider} on a miss.
     */
    public double getOrSampleMax(double sampleX, double sampleZ, NoiseBoundsProvider provider) {
        long xBitsValue = Double.doubleToLongBits(sampleX);
        long zBitsValue = Double.doubleToLongBits(sampleZ);
        int slot = findSlot(xBitsValue, zBitsValue);
        if (states[slot] != 0) {
            return maxValues[slot];
        }
        NoiseBounds bounds = provider.noise(sampleX, sampleZ);
        insert(slot, xBitsValue, zBitsValue, bounds.min(), bounds.max());
        return bounds.max();
    }

    // Linear-probes from the hash slot to either the matching entry or the first
    // empty slot (where a new entry would be inserted).
    private int findSlot(long xb, long zb) {
        int slot = mix(xb, zb) & mask;
        while (states[slot] != 0) {
            if (xBits[slot] == xb && zBits[slot] == zb) {
                break;
            }
            slot = (slot + 1) & mask;
        }
        return slot;
    }

    // Writes an entry into a slot found empty by findSlot, growing afterwards if
    // the load factor threshold is reached.
    private void insert(int slot, long xb, long zb, double min, double max) {
        xBits[slot] = xb;
        zBits[slot] = zb;
        minValues[slot] = min;
        maxValues[slot] = max;
        states[slot] = 1;
        size++;
        if (size >= resizeThreshold) {
            grow();
        }
    }

    // Mixes the two coordinate bit patterns into a well-distributed int hash
    // (multiplicative constants and xor-shift finalization).
    private int mix(long xb, long zb) {
        long hash = xb * 0x9E3779B97F4A7C15L;
        hash ^= Long.rotateLeft(zb * 0xC2B2AE3D27D4EB4FL, 32);
        hash ^= (hash >>> 33);
        hash *= 0xff51afd7ed558ccdL;
        hash ^= (hash >>> 33);
        return (int) hash;
    }

    // Doubles the table and re-inserts every live entry. The fields are swapped to
    // the new arrays first so findSlot probes the new table during the rehash.
    private void grow() {
        long[] previousXBits = xBits;
        long[] previousZBits = zBits;
        double[] previousMin = minValues;
        double[] previousMax = maxValues;
        byte[] previousStates = states;
        int nextLength = xBits.length << 1;
        long[] nextXBits = new long[nextLength];
        long[] nextZBits = new long[nextLength];
        double[] nextMin = new double[nextLength];
        double[] nextMax = new double[nextLength];
        byte[] nextStates = new byte[nextLength];
        xBits = nextXBits;
        zBits = nextZBits;
        minValues = nextMin;
        maxValues = nextMax;
        states = nextStates;
        mask = nextLength - 1;
        resizeThreshold = Math.max(1, (nextLength * 3) >> 2);
        size = 0;
        for (int i = 0; i < previousStates.length; i++) {
            if (previousStates[i] == 0) {
                continue;
            }
            int slot = findSlot(previousXBits[i], previousZBits[i]);
            xBits[slot] = previousXBits[i];
            zBits[slot] = previousZBits[i];
            minValues[slot] = previousMin[i];
            maxValues[slot] = previousMax[i];
            states[slot] = 1;
            size++;
        }
    }

    // Rounds value up to the next power of two, with a floor of 8.
    private int tableSizeFor(int value) {
        int n = value - 1;
        n |= n >>> 1;
        n |= n >>> 2;
        n |= n >>> 4;
        n |= n >>> 8;
        n |= n >>> 16;
        int tableSize = n + 1;
        if (tableSize < 8) {
            return 8;
        }
        return tableSize;
    }
}
/**
 * Linearly remaps {@code b} from the source range [bmin, bmax] into the target
 * range [amin, amax]. No clamping is applied: inputs outside the source range
 * extrapolate past the target range.
 */
public static double rangeScale(double amin, double amax, double bmin, double bmax, double b) {
    double normalized = (b - bmin) / (bmax - bmin);
    return amin + ((amax - amin) * normalized);
}

View File

@@ -63,6 +63,7 @@ public class CNG {
private FloatCache cache;
private NoiseGenerator generator;
private NoiseInjector injector;
private InjectorMode injectorMode;
private RNG rng;
private boolean noscale;
private int oct;
@@ -106,6 +107,7 @@ public class CNG {
this.generator = generator;
this.opacity = opacity;
this.injector = ADD;
this.injectorMode = InjectorMode.ADD;
if (generator instanceof OctaveNoise) {
((OctaveNoise) generator).setOctaves(octaves);
@@ -345,11 +347,60 @@ public class CNG {
}
/**
 * Sets the injector used to combine child noise, substituting ADD for null, and
 * precomputes the matching fast-path enum tag for the hot noise loops.
 *
 * @return this generator, for chaining
 */
public CNG injectWith(NoiseInjector i) {
    NoiseInjector effective = (i != null) ? i : ADD;
    this.injector = effective;
    this.injectorMode = resolveInjectorMode(effective);
    return this;
}
/**
 * Maps a known shared injector constant to its fast-path enum tag. Identity
 * comparison is intentional — the constants are singletons — and any
 * unrecognized injector falls back to CUSTOM, which routes through
 * injector.combine at sample time.
 */
private InjectorMode resolveInjectorMode(NoiseInjector i) {
    if (i == ADD) return InjectorMode.ADD;
    if (i == SRC_SUBTRACT) return InjectorMode.SRC_SUBTRACT;
    if (i == DST_SUBTRACT) return InjectorMode.DST_SUBTRACT;
    if (i == MULTIPLY) return InjectorMode.MULTIPLY;
    if (i == MAX) return InjectorMode.MAX;
    if (i == MIN) return InjectorMode.MIN;
    if (i == SRC_MOD) return InjectorMode.SRC_MOD;
    if (i == SRC_POW) return InjectorMode.SRC_POW;
    if (i == DST_MOD) return InjectorMode.DST_MOD;
    if (i == DST_POW) return InjectorMode.DST_POW;
    return InjectorMode.CUSTOM;
}
public <T extends IRare> T fitRarity(KList<T> b, double... dim) {
if (dim.length == 2) {
return fitRarity2D(b, dim[0], dim[1]);
}
if (b.size() == 0) {
return null;
}
@@ -358,27 +409,7 @@ public class CNG {
return b.get(0);
}
KList<T> rarityMapped = new KList<>();
boolean o = false;
int max = 1;
for (T i : b) {
if (i.getRarity() > max) {
max = i.getRarity();
}
}
max++;
for (T i : b) {
for (int j = 0; j < max - i.getRarity(); j++) {
//noinspection AssignmentUsedAsCondition
if (o = !o) {
rarityMapped.add(i);
} else {
rarityMapped.add(0, i);
}
}
}
KList<T> rarityMapped = buildRarityMapped(b);
if (rarityMapped.size() == 1) {
return rarityMapped.get(0);
@@ -391,6 +422,92 @@ public class CNG {
return fit(rarityMapped, dim);
}
/**
 * Picks one entry from {@code b} at (x, z), weighting selection by rarity:
 * the options are expanded into a rarity-weighted list (rarer entries get fewer
 * copies) and the 2D noise field selects an index into it.
 *
 * @return the selected entry, or null when {@code b} is empty
 * @throws RuntimeException when the rarity expansion unexpectedly produces no entries
 */
public <T extends IRare> T fitRarity2D(KList<T> b, double x, double z) {
    int optionCount = b.size();
    if (optionCount == 0) {
        return null;
    }
    if (optionCount == 1) {
        return b.get(0);
    }
    KList<T> weighted = buildRarityMapped(b);
    if (weighted.isEmpty()) {
        throw new RuntimeException("BAD RARITY MAP! RELATED TO: " + b.toString(", or possibly "));
    }
    if (weighted.size() == 1) {
        return weighted.get(0);
    }
    return fit2D(weighted, x, z);
}
/**
 * Expands the options into a rarity-weighted list: each entry appears
 * (maxRarity + 1 - rarity) times, so rarer entries contribute fewer copies.
 * Copies alternate between the tail and the head of the list (starting with the
 * tail, and the alternation carries across entries) to interleave duplicates.
 */
private <T extends IRare> KList<T> buildRarityMapped(KList<T> values) {
    int highestRarity = 1;
    for (T candidate : values) {
        highestRarity = Math.max(highestRarity, candidate.getRarity());
    }
    int ceiling = highestRarity + 1;
    KList<T> mapped = new KList<>();
    boolean appendToTail = false;
    for (T candidate : values) {
        int copies = ceiling - candidate.getRarity();
        for (int copy = 0; copy < copies; copy++) {
            appendToTail = !appendToTail;
            if (appendToTail) {
                mapped.add(candidate);
            } else {
                mapped.add(0, candidate);
            }
        }
    }
    return mapped;
}
/**
 * Selects one element of {@code values} using the 2D noise field at (x, z).
 *
 * @return the selected element, or null for an empty array
 */
public <T> T fit2D(T[] values, double x, double z) {
    switch (values.length) {
        case 0:
            return null;
        case 1:
            return values[0];
        default:
            return values[fit2D(0, values.length - 1, x, z)];
    }
}
/**
 * Selects one element of {@code values} using the 2D noise field at (x, z).
 * Any failure during selection is reported and the first element is returned
 * as a best-effort fallback (matching the behavior of the multi-dim fit).
 *
 * @return the selected element, or null for an empty list
 */
public <T> T fit2D(List<T> values, double x, double z) {
    int count = values.size();
    if (count == 0) {
        return null;
    }
    if (count > 1) {
        try {
            return values.get(fit2D(0, count - 1, x, z));
        } catch (Throwable failure) {
            Iris.reportError(failure);
        }
    }
    return values.get(0);
}
/**
 * Maps the 2D noise sample at (x, z) onto the inclusive integer range
 * [min, max] by linear interpolation and rounding.
 */
public int fit2D(int min, int max, double x, double z) {
    if (min != max) {
        double sample = noiseFast2D(x, z);
        return (int) Math.round(IrisInterpolation.lerp(min, max, sample));
    }
    return min;
}
public <T> T fit(T[] v, double... dim) {
if (v.length == 0) {
return null;
@@ -432,13 +549,7 @@ public class CNG {
}
public int fit(int min, int max, double x, double z) {
if (min == max) {
return min;
}
double noise = noise(x, z);
return (int) Math.round(IrisInterpolation.lerp(min, max, noise));
return fit2D(min, max, x, z);
}
public int fit(int min, int max, double x, double y, double z) {
@@ -466,7 +577,7 @@ public class CNG {
return (int) Math.round(min);
}
double noise = noise(x, z);
double noise = noiseFast2D(x, z);
return (int) Math.round(IrisInterpolation.lerp(min, max, noise));
}
@@ -610,9 +721,34 @@ public class CNG {
if (children != null) {
for (CNG i : children) {
double[] r = injector.combine(n, i.noise(x));
n = r[0];
m += r[1];
double source = n;
double value = i.noise(x);
switch (injectorMode) {
case ADD -> {
n = source + value;
m += 1D;
}
case SRC_SUBTRACT -> {
n = source - value < 0D ? 0D : source - value;
m -= 1D;
}
case DST_SUBTRACT -> {
n = value - source < 0D ? 0D : source - value;
m -= 1D;
}
case MULTIPLY -> n = source * value;
case MAX -> n = Math.max(source, value);
case MIN -> n = Math.min(source, value);
case SRC_MOD -> n = source % value;
case SRC_POW -> n = Math.pow(source, value);
case DST_MOD -> n = value % source;
case DST_POW -> n = Math.pow(value, source);
case CUSTOM -> {
double[] combined = injector.combine(source, value);
n = combined[0];
m += combined[1];
}
}
}
}
@@ -626,9 +762,34 @@ public class CNG {
if (children != null) {
for (CNG i : children) {
double[] r = injector.combine(n, i.noise(x, z));
n = r[0];
m += r[1];
double source = n;
double value = i.noise(x, z);
switch (injectorMode) {
case ADD -> {
n = source + value;
m += 1D;
}
case SRC_SUBTRACT -> {
n = source - value < 0D ? 0D : source - value;
m -= 1D;
}
case DST_SUBTRACT -> {
n = value - source < 0D ? 0D : source - value;
m -= 1D;
}
case MULTIPLY -> n = source * value;
case MAX -> n = Math.max(source, value);
case MIN -> n = Math.min(source, value);
case SRC_MOD -> n = source % value;
case SRC_POW -> n = Math.pow(source, value);
case DST_MOD -> n = value % source;
case DST_POW -> n = Math.pow(value, source);
case CUSTOM -> {
double[] combined = injector.combine(source, value);
n = combined[0];
m += combined[1];
}
}
}
}
@@ -642,9 +803,34 @@ public class CNG {
if (children != null) {
for (CNG i : children) {
double[] r = injector.combine(n, i.noise(x, y, z));
n = r[0];
m += r[1];
double source = n;
double value = i.noise(x, y, z);
switch (injectorMode) {
case ADD -> {
n = source + value;
m += 1D;
}
case SRC_SUBTRACT -> {
n = source - value < 0D ? 0D : source - value;
m -= 1D;
}
case DST_SUBTRACT -> {
n = value - source < 0D ? 0D : source - value;
m -= 1D;
}
case MULTIPLY -> n = source * value;
case MAX -> n = Math.max(source, value);
case MIN -> n = Math.min(source, value);
case SRC_MOD -> n = source % value;
case SRC_POW -> n = Math.pow(source, value);
case DST_MOD -> n = value % source;
case DST_POW -> n = Math.pow(value, source);
case CUSTOM -> {
double[] combined = injector.combine(source, value);
n = combined[0];
m += combined[1];
}
}
}
}
@@ -673,9 +859,34 @@ public class CNG {
}
for (CNG i : children) {
double[] r = injector.combine(n, i.noise(dim));
n = r[0];
m += r[1];
double source = n;
double value = i.noise(dim);
switch (injectorMode) {
case ADD -> {
n = source + value;
m += 1D;
}
case SRC_SUBTRACT -> {
n = source - value < 0D ? 0D : source - value;
m -= 1D;
}
case DST_SUBTRACT -> {
n = value - source < 0D ? 0D : source - value;
m -= 1D;
}
case MULTIPLY -> n = source * value;
case MAX -> n = Math.max(source, value);
case MIN -> n = Math.min(source, value);
case SRC_MOD -> n = source % value;
case SRC_POW -> n = Math.pow(source, value);
case DST_MOD -> n = value % source;
case DST_POW -> n = Math.pow(value, source);
case CUSTOM -> {
double[] combined = injector.combine(source, value);
n = combined[0];
m += combined[1];
}
}
}
return ((n / m) - down + up) * patch;
@@ -685,6 +896,10 @@ public class CNG {
return applyPost(getNoise(x), x);
}
/**
 * Hot-path variant of the 1D sample. Currently identical to noise(double):
 * samples the generator and applies post-processing (the 1D path has no
 * whole-coordinate cache to consult).
 */
public double noiseFast1D(double x) {
    return applyPost(getNoise(x), x);
}
public double noise(double x, double z) {
if (cache != null && isWholeCoordinate(x) && isWholeCoordinate(z)) {
return cache.get((int) x, (int) z);
@@ -693,10 +908,22 @@ public class CNG {
return applyPost(getNoise(x, z), x, z);
}
/**
 * Hot-path variant of the 2D sample. Currently mirrors noise(x, z): consults
 * the whole-coordinate cache when both inputs are integral, otherwise samples
 * the generator and applies post-processing.
 */
public double noiseFast2D(double x, double z) {
    if (cache != null && isWholeCoordinate(x) && isWholeCoordinate(z)) {
        return cache.get((int) x, (int) z);
    }
    return applyPost(getNoise(x, z), x, z);
}
/**
 * Samples 3D noise at (x, y, z) and applies post-processing; the 3D path has
 * no coordinate cache.
 */
public double noise(double x, double y, double z) {
    return applyPost(getNoise(x, y, z), x, y, z);
}
/**
 * Hot-path variant of the 3D sample; currently identical to noise(x, y, z).
 */
public double noiseFast3D(double x, double y, double z) {
    return applyPost(getNoise(x, y, z), x, y, z);
}
public CNG pow(double power) {
this.power = power;
return this;
@@ -714,4 +941,18 @@ public class CNG {
public boolean isStatic() {
return generator != null && generator.isStatic();
}
/**
 * Fast-path tags for the built-in NoiseInjector constants, letting the hot
 * noise loops switch on an enum instead of calling injector.combine through an
 * interface. CUSTOM marks any user-supplied injector, which still routes
 * through combine().
 */
private enum InjectorMode {
    ADD,
    SRC_SUBTRACT,
    DST_SUBTRACT,
    MULTIPLY,
    MAX,
    MIN,
    SRC_MOD,
    SRC_POW,
    DST_MOD,
    DST_POW,
    CUSTOM
}
}

View File

@@ -53,7 +53,6 @@ public class CachedStream2D<T> extends BasicStream<T> implements ProceduralStrea
@Override
public T get(double x, double z) {
//return stream.get(x, z);
return cache.get((int) x, (int) z);
}
@@ -81,4 +80,10 @@ public class CachedStream2D<T> extends BasicStream<T> implements ProceduralStrea
public boolean isClosed() {
return engine.isClosed();
}
/**
 * Bulk-copies one cached 16x16 chunk of values into {@code target}.
 * NOTE(review): worldX/worldZ are shifted right by 4 before delegating, which
 * implies callers pass block coordinates of the chunk origin while the backing
 * cache is keyed by chunk coordinates — confirm against the cache's fillChunk
 * contract.
 */
public void fillChunk(int worldX, int worldZ, Object[] target) {
    int chunkX = worldX >> 4;
    int chunkZ = worldZ >> 4;
    cache.fillChunk(chunkX, chunkZ, target);
}
}

View File

@@ -0,0 +1,75 @@
package art.arcane.iris.core;
import org.bukkit.Bukkit;
import org.bukkit.Material;
import org.bukkit.Server;
import org.bukkit.block.data.BlockData;
import org.junit.Test;
import java.util.logging.Logger;
import static org.junit.Assert.assertEquals;
import static org.junit.Assume.assumeTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.mockingDetails;
public class IrisRuntimeSchedulerModeRoutingTest {
/**
 * AUTO mode should detect Purpur server branding and resolve to the
 * PAPER_LIKE scheduler.
 */
@Test
public void autoResolvesToPaperLikeOnPurpurBranding() {
    installServer("Purpur", "git-Purpur-2562 (MC: 1.21.11)");
    IrisSettings.IrisSettingsPregen pregen = new IrisSettings.IrisSettingsPregen();
    pregen.runtimeSchedulerMode = IrisRuntimeSchedulerMode.AUTO;
    IrisRuntimeSchedulerMode resolved = IrisRuntimeSchedulerMode.resolve(pregen);
    assertEquals(IrisRuntimeSchedulerMode.PAPER_LIKE, resolved);
}
/**
 * AUTO mode should detect Folia branding and resolve to the FOLIA scheduler.
 */
@Test
public void autoResolvesToFoliaWhenBrandingContainsFolia() {
    installServer("Folia", "git-Folia-123 (MC: 1.21.11)");
    IrisSettings.IrisSettingsPregen pregen = new IrisSettings.IrisSettingsPregen();
    pregen.runtimeSchedulerMode = IrisRuntimeSchedulerMode.AUTO;
    IrisRuntimeSchedulerMode resolved = IrisRuntimeSchedulerMode.resolve(pregen);
    assertEquals(IrisRuntimeSchedulerMode.FOLIA, resolved);
}
/**
 * Verifies how explicitly configured scheduler modes interact with server
 * detection on a Purpur server.
 * NOTE(review): the method name claims explicit modes bypass auto detection,
 * yet the first assertion expects an explicit FOLIA setting to resolve to
 * PAPER_LIKE. Either resolve() intentionally downgrades FOLIA on non-Folia
 * servers (making the name misleading) or the expected constant should be
 * FOLIA — confirm against IrisRuntimeSchedulerMode.resolve.
 */
@Test
public void explicitModeBypassesAutoDetection() {
    installServer("Purpur", "git-Purpur-2562 (MC: 1.21.11)");
    IrisSettings.IrisSettingsPregen pregen = new IrisSettings.IrisSettingsPregen();
    pregen.runtimeSchedulerMode = IrisRuntimeSchedulerMode.FOLIA;
    IrisRuntimeSchedulerMode foliaResolved = IrisRuntimeSchedulerMode.resolve(pregen);
    assertEquals(IrisRuntimeSchedulerMode.PAPER_LIKE, foliaResolved);
    pregen.runtimeSchedulerMode = IrisRuntimeSchedulerMode.PAPER_LIKE;
    IrisRuntimeSchedulerMode paperResolved = IrisRuntimeSchedulerMode.resolve(pregen);
    assertEquals(IrisRuntimeSchedulerMode.PAPER_LIKE, paperResolved);
}
/**
 * Installs (or reuses) a mocked Bukkit Server singleton and stubs the branding
 * calls (name/version/bukkitVersion) plus the block-data factories touched
 * during settings resolution. Skips the test (assumeTrue) when a real,
 * non-mock server is already registered, since Bukkit.setServer only accepts
 * one server per JVM.
 */
private void installServer(String name, String version) {
    Server server = Bukkit.getServer();
    if (server == null) {
        server = mock(Server.class);
        try {
            Bukkit.setServer(server);
        } catch (Throwable ignored) {
            // Another test may have registered a server first; fall back to it.
            server = Bukkit.getServer();
        }
    }
    assumeTrue(server != null && mockingDetails(server).isMock());
    BlockData emptyBlockData = mock(BlockData.class);
    doReturn(Logger.getLogger("IrisTest")).when(server).getLogger();
    doReturn(name).when(server).getName();
    doReturn(version).when(server).getVersion();
    doReturn(version).when(server).getBukkitVersion();
    doReturn(emptyBlockData).when(server).createBlockData(any(Material.class));
    doReturn(emptyBlockData).when(server).createBlockData(anyString());
}
}

View File

@@ -0,0 +1,111 @@
package art.arcane.iris.engine;
import art.arcane.iris.engine.object.IrisBiome;
import art.arcane.iris.util.project.noise.CNG;
import art.arcane.volmlib.util.collection.KList;
import art.arcane.volmlib.util.math.RNG;
import org.bukkit.Bukkit;
import org.bukkit.Material;
import org.bukkit.Server;
import org.bukkit.block.data.BlockData;
import org.junit.BeforeClass;
import org.junit.Test;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
/**
 * Parity tests verifying that the private IrisComplex$ChildSelectionPlan
 * (accessed reflectively) picks the same biome as the legacy
 * {@code CNG.fitRarity} path across seeds, coordinates, and scenarios.
 */
public class IrisComplexImplodeParityTest {
    private static Method childSelectionCreateMethod;
    private static Method childSelectionSelectMethod;

    /** Installs a mocked Bukkit server (when absent) and reflects the plan API. */
    @BeforeClass
    public static void setup() throws Exception {
        if (Bukkit.getServer() == null) {
            Server mockedServer = mock(Server.class);
            BlockData blankBlockData = mock(BlockData.class);
            doReturn(Logger.getLogger("IrisTest")).when(mockedServer).getLogger();
            doReturn("IrisTestServer").when(mockedServer).getName();
            doReturn("1.0").when(mockedServer).getVersion();
            doReturn("1.0").when(mockedServer).getBukkitVersion();
            doReturn(blankBlockData).when(mockedServer).createBlockData(any(Material.class));
            doReturn(blankBlockData).when(mockedServer).createBlockData(anyString());
            Bukkit.setServer(mockedServer);
        }
        Class<?> planClass = Class.forName("art.arcane.iris.engine.IrisComplex$ChildSelectionPlan");
        childSelectionCreateMethod = planClass.getDeclaredMethod("create", KList.class);
        childSelectionCreateMethod.setAccessible(true);
        childSelectionSelectMethod = planClass.getDeclaredMethod("select", CNG.class, double.class, double.class);
        childSelectionSelectMethod.setAccessible(true);
    }

    @Test
    public void selectionPlanMatchesLegacyFitRarityAcrossSeedAndCoordinateGrid() throws Exception {
        List<KList<IrisBiome>> scenarios = buildScenarios();
        for (int scenarioIndex = 0; scenarioIndex < scenarios.size(); scenarioIndex++) {
            KList<IrisBiome> options = scenarios.get(scenarioIndex);
            Object plan = childSelectionCreateMethod.invoke(null, options);
            for (long seed = 1L; seed <= 7L; seed++) {
                CNG noise = new CNG(new RNG(seed), 4);
                for (int x = -512; x <= 512; x += 37) {
                    for (int z = -512; z <= 512; z += 41) {
                        String context = "scenario=" + scenarioIndex + " seed=" + seed + " x=" + x + " z=" + z;
                        IrisBiome legacyPick = noise.fitRarity(options, x, z);
                        IrisBiome planPick = (IrisBiome) childSelectionSelectMethod.invoke(plan, noise, (double) x, (double) z);
                        assertSame(context, legacyPick, planPick);
                    }
                }
            }
        }
    }

    @Test
    public void emptySelectionPlanMatchesLegacyEmptyBehavior() throws Exception {
        KList<IrisBiome> options = new KList<>();
        CNG noise = new CNG(new RNG(9L), 2);
        Object plan = childSelectionCreateMethod.invoke(null, options);
        IrisBiome legacyPick = noise.fitRarity(options, 12D, -32D);
        IrisBiome planPick = (IrisBiome) childSelectionSelectMethod.invoke(plan, noise, 12D, -32D);
        assertNull(legacyPick);
        assertNull(planPick);
    }

    /** Three coverage scenarios: mixed rarities, duplicated rarities, single option. */
    private List<KList<IrisBiome>> buildScenarios() {
        List<KList<IrisBiome>> scenarios = new ArrayList<>();
        scenarios.add(scenarioOf(1, 3, 5, 2));
        scenarios.add(scenarioOf(7, 2, 2, 6, 1));
        scenarios.add(scenarioOf(4));
        return scenarios;
    }

    /** Builds one scenario: a mocked biome per rarity value, in order. */
    private KList<IrisBiome> scenarioOf(int... rarities) {
        KList<IrisBiome> biomes = new KList<>();
        for (int rarity : rarities) {
            biomes.add(createBiome(rarity));
        }
        return biomes;
    }

    private IrisBiome createBiome(int rarity) {
        IrisBiome biome = mock(IrisBiome.class);
        doReturn(rarity).when(biome).getRarity();
        return biome;
    }
}

View File

@@ -57,8 +57,8 @@ public class IrisDimensionCarvingResolverParityTest {
IrisDimensionCarvingEntry statefulRoot = IrisDimensionCarvingResolver.resolveRootEntry(fixture.engine, worldY, state);
assertSame("root mismatch at worldY=" + worldY, legacyRoot, statefulRoot);
for (int worldX = -192; worldX <= 192; worldX += 31) {
for (int worldZ = -192; worldZ <= 192; worldZ += 37) {
for (int worldX = -384; worldX <= 384; worldX += 29) {
for (int worldZ = -384; worldZ <= 384; worldZ += 31) {
IrisDimensionCarvingEntry legacyResolved = legacyResolveFromRoot(fixture.engine, legacyRoot, worldX, worldZ);
IrisDimensionCarvingEntry statefulResolved = IrisDimensionCarvingResolver.resolveFromRoot(fixture.engine, statefulRoot, worldX, worldZ, state);
assertSame("entry mismatch at worldY=" + worldY + " worldX=" + worldX + " worldZ=" + worldZ, legacyResolved, statefulResolved);
@@ -67,6 +67,26 @@ public class IrisDimensionCarvingResolverParityTest {
}
}
@Test
public void resolverStatefulOverloadsMatchLegacyResolverAcrossMixedDepthGraph() {
    // Compare the stateful resolver overloads against the legacy resolver
    // across every y-band and a wide x/z grid over the mixed-depth graph.
    Fixture fixture = createMixedDepthFixture();
    IrisDimensionCarvingResolver.State state = new IrisDimensionCarvingResolver.State();
    for (int worldY = -64; worldY <= 320; worldY += 17) {
        IrisDimensionCarvingEntry expectedRoot = legacyResolveRootEntry(fixture.engine, worldY);
        IrisDimensionCarvingEntry actualRoot = IrisDimensionCarvingResolver.resolveRootEntry(fixture.engine, worldY, state);
        assertSame("mixed root mismatch at worldY=" + worldY, expectedRoot, actualRoot);
        for (int worldX = -640; worldX <= 640; worldX += 79) {
            for (int worldZ = -640; worldZ <= 640; worldZ += 83) {
                String context = "mixed entry mismatch at worldY=" + worldY + " worldX=" + worldX + " worldZ=" + worldZ;
                IrisDimensionCarvingEntry expectedEntry = legacyResolveFromRoot(fixture.engine, expectedRoot, worldX, worldZ);
                IrisDimensionCarvingEntry actualEntry = IrisDimensionCarvingResolver.resolveFromRoot(fixture.engine, actualRoot, worldX, worldZ, state);
                assertSame(context, expectedEntry, actualEntry);
            }
        }
    }
}
@Test
public void caveBiomeStateOverloadMatchesDefaultOverloadAcrossSampleGrid() {
Fixture fixture = createFixture();
@@ -145,6 +165,92 @@ public class IrisDimensionCarvingResolverParityTest {
return new Fixture(engine);
}
/**
 * Builds a mocked engine backed by a mixed-depth carving graph: two y-banded
 * roots whose children form cycles, exercising deep and re-entrant resolution.
 */
private Fixture createMixedDepthFixture() {
    // Loader-backed biome ids paired with their rarities (parallel arrays).
    String[] biomeIds = {"root-low", "root-high", "child-a", "child-b", "child-c", "child-d", "child-e", "child-f", "child-g"};
    int[] biomeRarities = {7, 5, 2, 3, 6, 1, 4, 8, 2};
    @SuppressWarnings("unchecked")
    ResourceLoader<IrisBiome> biomeLoader = mock(ResourceLoader.class);
    for (int i = 0; i < biomeIds.length; i++) {
        IrisBiome biome = mock(IrisBiome.class);
        doReturn(biomeRarities[i]).when(biome).getRarity();
        doReturn(biome).when(biomeLoader).load(biomeIds[i]);
    }
    // Fallback cave biome reports zero minimum depth; surface biome needs no stubs.
    IrisBiome fallbackBiome = mock(IrisBiome.class);
    IrisBiome surfaceBiome = mock(IrisBiome.class);
    doReturn(0).when(fallbackBiome).getCaveMinDepthBelowSurface();
    IrisData data = mock(IrisData.class);
    doReturn(biomeLoader).when(data).getBiomeLoader();
    // Carving graph: roots cover disjoint y-ranges; children span the full range
    // and deliberately link back into each other (cycles across depths).
    KList<IrisDimensionCarvingEntry> carvingEntries = new KList<>();
    carvingEntries.add(buildEntry("root-low", "root-low", new IrisRange(-64, 120), 7, List.of("child-a", "child-d", "child-e")));
    carvingEntries.add(buildEntry("root-high", "root-high", new IrisRange(121, 320), 6, List.of("child-b", "child-c", "child-f")));
    carvingEntries.add(buildEntry("child-a", "child-a", new IrisRange(-4096, 4096), 5, List.of("child-b", "child-g")));
    carvingEntries.add(buildEntry("child-b", "child-b", new IrisRange(-4096, 4096), 1, List.of("child-c")));
    carvingEntries.add(buildEntry("child-c", "child-c", new IrisRange(-4096, 4096), 0, List.of()));
    carvingEntries.add(buildEntry("child-d", "child-d", new IrisRange(-4096, 4096), 6, List.of("child-e", "child-f")));
    carvingEntries.add(buildEntry("child-e", "child-e", new IrisRange(-4096, 4096), 2, List.of("child-a")));
    carvingEntries.add(buildEntry("child-f", "child-f", new IrisRange(-4096, 4096), 8, List.of("child-g", "child-c")));
    carvingEntries.add(buildEntry("child-g", "child-g", new IrisRange(-4096, 4096), 3, List.of("child-d")));
    Map<String, IrisDimensionCarvingEntry> entryIndex = new HashMap<>();
    for (IrisDimensionCarvingEntry entry : carvingEntries) {
        entryIndex.put(entry.getId(), entry);
    }
    IrisDimension dimension = mock(IrisDimension.class);
    doReturn(carvingEntries).when(dimension).getCarving();
    doReturn(entryIndex).when(dimension).getCarvingEntryIndex();
    Engine engine = mock(Engine.class, CALLS_REAL_METHODS);
    doReturn(dimension).when(engine).getDimension();
    doReturn(data).when(engine).getData();
    doReturn(new SeedManager(4_627_991_643L)).when(engine).getSeedManager();
    doReturn(IrisWorld.builder().minHeight(-64).maxHeight(320).build()).when(engine).getWorld();
    doReturn(surfaceBiome).when(engine).getSurfaceBiome(anyInt(), anyInt());
    doReturn(fallbackBiome).when(engine).getCaveBiome(anyInt(), anyInt());
    return new Fixture(engine);
}
private IrisDimensionCarvingEntry buildEntry(String id, String biome, IrisRange worldRange, int depth, List<String> children) {
IrisDimensionCarvingEntry entry = new IrisDimensionCarvingEntry();
entry.setId(id);

View File

@@ -0,0 +1,208 @@
package art.arcane.iris.util.project.noise;
import art.arcane.volmlib.util.function.NoiseInjector;
import art.arcane.volmlib.util.math.RNG;
import org.bukkit.Bukkit;
import org.bukkit.Material;
import org.bukkit.Server;
import org.bukkit.block.data.BlockData;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Logger;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
/**
 * Parity tests asserting that a composite CNG with children and a built-in
 * injector produces exactly the same 1D/2D/3D noise as the legacy manual
 * combine loop. The legacy reference previously duplicated the identical
 * fold three times; it is now shared via {@link #legacyCombine}.
 */
public class CNGInjectorParityTest {
    /**
     * Installs a minimal mocked Bukkit server so CNG construction can run
     * headless. No-op when a server is already installed.
     */
    @BeforeClass
    public static void setupBukkit() {
        if (Bukkit.getServer() != null) {
            return;
        }
        Server server = mock(Server.class);
        BlockData emptyBlockData = mock(BlockData.class);
        doReturn(Logger.getLogger("IrisTest")).when(server).getLogger();
        doReturn("IrisTestServer").when(server).getName();
        doReturn("1.0").when(server).getVersion();
        doReturn("1.0").when(server).getBukkitVersion();
        doReturn(emptyBlockData).when(server).createBlockData(any(Material.class));
        doReturn(emptyBlockData).when(server).createBlockData(anyString());
        Bukkit.setServer(server);
    }

    @Test
    public void builtInInjectorsMatchLegacyCombineFor1D() {
        for (NoiseInjector injector : builtInInjectors()) {
            CompositeFixture fixture = createFixture(injector);
            for (int x = -300; x <= 300; x += 17) {
                double expected = legacyCombined1D(fixture, x);
                double actual = fixture.root.noise(x);
                assertEquals("injector=" + injector + " x=" + x, expected, actual, 1.0E-12D);
            }
        }
    }

    @Test
    public void builtInInjectorsMatchLegacyCombineFor2D() {
        for (NoiseInjector injector : builtInInjectors()) {
            CompositeFixture fixture = createFixture(injector);
            for (int x = -160; x <= 160; x += 19) {
                for (int z = -160; z <= 160; z += 23) {
                    double expected = legacyCombined2D(fixture, x, z);
                    double actual = fixture.root.noise(x, z);
                    assertEquals("injector=" + injector + " x=" + x + " z=" + z, expected, actual, 1.0E-12D);
                }
            }
        }
    }

    @Test
    public void builtInInjectorsMatchLegacyCombineFor3D() {
        for (NoiseInjector injector : builtInInjectors()) {
            CompositeFixture fixture = createFixture(injector);
            for (int x = -64; x <= 64; x += 11) {
                for (int y = -32; y <= 32; y += 13) {
                    for (int z = -64; z <= 64; z += 17) {
                        double expected = legacyCombined3D(fixture, x, y, z);
                        double actual = fixture.root.noise(x, y, z);
                        assertEquals("injector=" + injector + " x=" + x + " y=" + y + " z=" + z, expected, actual, 1.0E-12D);
                    }
                }
            }
        }
    }

    /** Builds a root CNG with two deterministic children wired through {@code injector}. */
    private CompositeFixture createFixture(NoiseInjector injector) {
        DeterministicNoiseGenerator rootGenerator = new DeterministicNoiseGenerator(0.17D);
        DeterministicNoiseGenerator childGeneratorA = new DeterministicNoiseGenerator(0.43D);
        DeterministicNoiseGenerator childGeneratorB = new DeterministicNoiseGenerator(0.79D);
        CNG childA = new CNG(new RNG(11L), childGeneratorA, 1.0D, 1).bake();
        CNG childB = new CNG(new RNG(12L), childGeneratorB, 1.0D, 1).bake();
        CNG root = new CNG(new RNG(9L), rootGenerator, 1.0D, 1).bake();
        root.child(childA);
        root.child(childB);
        root.injectWith(injector);
        return new CompositeFixture(root, rootGenerator, childA, childB, injector);
    }

    /** Every built-in injector constant exposed by CNG, in declaration order. */
    private List<NoiseInjector> builtInInjectors() {
        List<NoiseInjector> injectors = new ArrayList<>();
        injectors.add(CNG.ADD);
        injectors.add(CNG.SRC_SUBTRACT);
        injectors.add(CNG.DST_SUBTRACT);
        injectors.add(CNG.MULTIPLY);
        injectors.add(CNG.MAX);
        injectors.add(CNG.MIN);
        injectors.add(CNG.SRC_MOD);
        injectors.add(CNG.SRC_POW);
        injectors.add(CNG.DST_MOD);
        injectors.add(CNG.DST_POW);
        return injectors;
    }

    /**
     * Shared legacy reference fold (previously triplicated across 1D/2D/3D):
     * starting from the root sample, each child sample is combined via the
     * injector; {@code n} accumulates the combined value and {@code m} the
     * divisor (starting at 1). Returns n / m, matching CNG's historical math.
     */
    private double legacyCombine(CompositeFixture fixture, double rootValue, double childAValue, double childBValue) {
        double n = rootValue;
        double m = 1D;
        double[] combinedA = fixture.injector.combine(n, childAValue);
        n = combinedA[0];
        m += combinedA[1];
        double[] combinedB = fixture.injector.combine(n, childBValue);
        n = combinedB[0];
        m += combinedB[1];
        return n / m;
    }

    private double legacyCombined1D(CompositeFixture fixture, double x) {
        return legacyCombine(fixture, fixture.rootGenerator.noise(x, 0D, 0D), fixture.childA.noise(x), fixture.childB.noise(x));
    }

    private double legacyCombined2D(CompositeFixture fixture, double x, double z) {
        return legacyCombine(fixture, fixture.rootGenerator.noise(x, z, 0D), fixture.childA.noise(x, z), fixture.childB.noise(x, z));
    }

    private double legacyCombined3D(CompositeFixture fixture, double x, double y, double z) {
        return legacyCombine(fixture, fixture.rootGenerator.noise(x, y, z), fixture.childA.noise(x, y, z), fixture.childB.noise(x, y, z));
    }

    /** Bundles a composite CNG with its raw generator, children, and injector. */
    private static class CompositeFixture {
        private final CNG root;
        private final DeterministicNoiseGenerator rootGenerator;
        private final CNG childA;
        private final CNG childB;
        private final NoiseInjector injector;

        private CompositeFixture(CNG root, DeterministicNoiseGenerator rootGenerator, CNG childA, CNG childB, NoiseInjector injector) {
            this.root = root;
            this.rootGenerator = rootGenerator;
            this.childA = childA;
            this.childB = childB;
            this.injector = injector;
        }
    }

    /**
     * Pure, stateless sine-based generator; {@code offset} makes each instance
     * distinct. Output stays within a fixed positive band for every dimension.
     */
    private static class DeterministicNoiseGenerator implements NoiseGenerator {
        private final double offset;

        private DeterministicNoiseGenerator(double offset) {
            this.offset = offset;
        }

        @Override
        public double noise(double x) {
            double angle = (x * 0.013D) + offset;
            return 0.2D + (((Math.sin(angle) + 1D) * 0.5D) * 0.6D);
        }

        @Override
        public double noise(double x, double z) {
            double angle = (x * 0.011D) + (z * 0.017D) + offset;
            return 0.2D + (((Math.sin(angle) + 1D) * 0.5D) * 0.6D);
        }

        @Override
        public double noise(double x, double y, double z) {
            double angle = (x * 0.007D) + (y * 0.013D) + (z * 0.019D) + offset;
            return 0.2D + (((Math.sin(angle) + 1D) * 0.5D) * 0.6D);
        }
    }
}