Mirror of https://github.com/VolmitSoftware/Iris.git (synced 2025-07-01 15:36:45 +00:00)
Commit 2c7b7c8c91

build.gradle
@@ -36,7 +36,7 @@ plugins {
}

version '3.6.8-1.20.1-1.21.4'
version '3.6.8-1.20.1-1.21.5'

// ADD YOURSELF AS A NEW LINE IF YOU WANT YOUR OWN BUILD TASK GENERATED
// ======================== WINDOWS =============================

@@ -62,6 +62,7 @@ def MAX_HEAP_SIZE = "8G"
def COLOR = "truecolor"

def NMS_BINDINGS = Map.of(
        "v1_21_R4", "1.21.5-R0.1-SNAPSHOT",
        "v1_21_R3", "1.21.4-R0.1-SNAPSHOT",
        "v1_21_R2", "1.21.3-R0.1-SNAPSHOT",
        "v1_21_R1", "1.21.1-R0.1-SNAPSHOT",

@@ -113,6 +114,11 @@ shadowJar {
    relocate 'net.kyori', 'com.volmit.iris.util.kyori'
    relocate 'org.bstats', 'com.volmit.util.metrics'
    archiveFileName.set("Iris-${project.version}.jar")

    dependencies {
        exclude(dependency("org.ow2.asm:asm:"))
        exclude(dependency("org.jetbrains:"))
    }
}

dependencies {

@@ -139,7 +145,7 @@ allprojects {
        maven { url "https://repo.triumphteam.dev/snapshots" }
        maven { url "https://repo.mineinabyss.com/releases" }
        maven { url 'https://hub.jeff-media.com/nexus/repository/jeff-media-public/' }
        maven { url "https://repo.nexomc.com/snapshots/" }
        maven { url "https://repo.nexomc.com/releases/" }
        maven { url "https://libraries.minecraft.net" }
    }

@@ -62,7 +62,7 @@ dependencies {

    // Third Party Integrations
    compileOnly 'com.ticxo.playeranimator:PlayerAnimator:R1.2.7'
    compileOnly 'com.nexomc:nexo:1.0.0-dev.38'
    compileOnly 'com.nexomc:nexo:1.6.0'
    compileOnly 'com.github.LoneDev6:api-itemsadder:3.4.1-r4'
    compileOnly 'com.github.PlaceholderAPI:placeholderapi:2.11.3'
    compileOnly 'com.github.Ssomar-Developement:SCore:4.23.10.8'

Iris.java

@@ -576,9 +576,19 @@ public class Iris extends VolmitPlugin implements Listener {
        Bukkit.getScheduler().cancelTasks(this);
        HandlerList.unregisterAll((Plugin) this);
        postShutdown.forEach(Runnable::run);
        services.clear();
        MultiBurst.burst.close();
        super.onDisable();

        J.attempt(new JarScanner(instance.getJarFile(), "", false)::scan);
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            Bukkit.getWorlds()
                    .stream()
                    .map(IrisToolbelt::access)
                    .filter(Objects::nonNull)
                    .forEach(PlatformChunkGenerator::close);

            MultiBurst.burst.close();
            services.clear();
        }));
    }

    private void setupPapi() {

IrisSettings.java

@@ -23,6 +23,7 @@ import com.volmit.iris.Iris;
import com.volmit.iris.util.io.IO;
import com.volmit.iris.util.json.JSONException;
import com.volmit.iris.util.json.JSONObject;
import com.volmit.iris.util.misc.getHardware;
import com.volmit.iris.util.plugin.VolmitSender;
import lombok.AllArgsConstructor;
import lombok.Data;

@@ -154,12 +155,22 @@ public class IrisSettings {

    @Data
    public static class IrisSettingsPerformance {
        private IrisSettingsEngineSVC engineSVC = new IrisSettingsEngineSVC();
        public boolean trimMantleInStudio = false;
        public int mantleKeepAlive = 30;
        public int cacheSize = 4_096;
        public int resourceLoaderCacheSize = 1_024;
        public int objectLoaderCacheSize = 4_096;
        public int scriptLoaderCacheSize = 512;
        public int tectonicPlateSize = -1;
        public int mantleCleanupDelay = 200;

        public int getTectonicPlateSize() {
            if (tectonicPlateSize > 0)
                return tectonicPlateSize;

            return (int) (getHardware.getProcessMemory() / 200L);
        }
    }

    @Data

@@ -191,6 +202,7 @@ public class IrisSettings {
        public boolean DoomsdayAnnihilationSelfDestructMode = false;
        public boolean commandSounds = true;
        public boolean debug = false;
        public boolean dumpMantleOnError = false;
        public boolean disableNMS = false;
        public boolean pluginMetrics = true;
        public boolean splashLogoStartup = true;

@@ -231,4 +243,14 @@ public class IrisSettings {
        public boolean disableTimeAndWeather = true;
        public boolean autoStartDefaultStudio = false;
    }

    @Data
    public static class IrisSettingsEngineSVC {
        public boolean useVirtualThreads = true;
        public int priority = Thread.NORM_PRIORITY;

        public int getPriority() {
            return Math.max(Math.min(priority, Thread.MAX_PRIORITY), Thread.MIN_PRIORITY);
        }
    }
}
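
A quick sanity check of the two derived values above (a sketch; the 8 GB figure is an assumed example, not something taken from the commit):

    // Hypothetical numbers, illustrating getTectonicPlateSize() and getPriority().
    long processMemoryMb = 8192L;                                    // assumed process memory
    int plateLimit = (int) (processMemoryMb / 200L);                 // -> 40 when tectonicPlateSize is left at -1
    int clamped = Math.max(Math.min(12, Thread.MAX_PRIORITY), Thread.MIN_PRIORITY); // 12 clamps to 10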
@ -20,7 +20,6 @@ package com.volmit.iris.core.commands;
|
||||
|
||||
import com.volmit.iris.Iris;
|
||||
import com.volmit.iris.core.ServerConfigurator;
|
||||
import com.volmit.iris.core.loader.IrisData;
|
||||
import com.volmit.iris.core.nms.datapack.DataVersion;
|
||||
import com.volmit.iris.core.service.IrisEngineSVC;
|
||||
import com.volmit.iris.core.tools.IrisPackBenchmarking;
|
||||
@ -68,53 +67,8 @@ public class CommandDeveloper implements DecreeExecutor {
|
||||
|
||||
@Decree(description = "Get Loaded TectonicPlates Count", origin = DecreeOrigin.BOTH, sync = true)
|
||||
public void EngineStatus() {
|
||||
List<World> IrisWorlds = new ArrayList<>();
|
||||
int TotalLoadedChunks = 0;
|
||||
int TotalQueuedTectonicPlates = 0;
|
||||
int TotalNotQueuedTectonicPlates = 0;
|
||||
int TotalTectonicPlates = 0;
|
||||
|
||||
long lowestUnloadDuration = 0;
|
||||
long highestUnloadDuration = 0;
|
||||
|
||||
for (World world : Bukkit.getWorlds()) {
|
||||
try {
|
||||
if (IrisToolbelt.access(world).getEngine() != null) {
|
||||
IrisWorlds.add(world);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
// no
|
||||
}
|
||||
}
|
||||
|
||||
for (World world : IrisWorlds) {
|
||||
Engine engine = IrisToolbelt.access(world).getEngine();
|
||||
TotalQueuedTectonicPlates += (int) engine.getMantle().getToUnload();
|
||||
TotalNotQueuedTectonicPlates += (int) engine.getMantle().getNotQueuedLoadedRegions();
|
||||
TotalTectonicPlates += engine.getMantle().getLoadedRegionCount();
|
||||
if (highestUnloadDuration <= (long) engine.getMantle().getTectonicDuration()) {
|
||||
highestUnloadDuration = (long) engine.getMantle().getTectonicDuration();
|
||||
}
|
||||
if (lowestUnloadDuration >= (long) engine.getMantle().getTectonicDuration()) {
|
||||
lowestUnloadDuration = (long) engine.getMantle().getTectonicDuration();
|
||||
}
|
||||
for (Chunk chunk : world.getLoadedChunks()) {
|
||||
if (chunk.isLoaded()) {
|
||||
TotalLoadedChunks++;
|
||||
}
|
||||
}
|
||||
}
|
||||
Iris.info("-------------------------");
|
||||
Iris.info(C.DARK_PURPLE + "Engine Status");
|
||||
Iris.info(C.DARK_PURPLE + "Total Loaded Chunks: " + C.LIGHT_PURPLE + TotalLoadedChunks);
|
||||
Iris.info(C.DARK_PURPLE + "Tectonic Limit: " + C.LIGHT_PURPLE + IrisEngineSVC.getTectonicLimit());
|
||||
Iris.info(C.DARK_PURPLE + "Tectonic Total Plates: " + C.LIGHT_PURPLE + TotalTectonicPlates);
|
||||
Iris.info(C.DARK_PURPLE + "Tectonic Active Plates: " + C.LIGHT_PURPLE + TotalNotQueuedTectonicPlates);
|
||||
Iris.info(C.DARK_PURPLE + "Tectonic ToUnload: " + C.LIGHT_PURPLE + TotalQueuedTectonicPlates);
|
||||
Iris.info(C.DARK_PURPLE + "Lowest Tectonic Unload Duration: " + C.LIGHT_PURPLE + Form.duration(lowestUnloadDuration));
|
||||
Iris.info(C.DARK_PURPLE + "Highest Tectonic Unload Duration: " + C.LIGHT_PURPLE + Form.duration(highestUnloadDuration));
|
||||
Iris.info(C.DARK_PURPLE + "Cache Size: " + C.LIGHT_PURPLE + Form.f(IrisData.cacheSize()));
|
||||
Iris.info("-------------------------");
|
||||
Iris.service(IrisEngineSVC.class)
|
||||
.engineStatus(sender());
|
||||
}
|
||||
|
||||
@Decree(description = "Test")
|
||||
@ -166,7 +120,7 @@ public class CommandDeveloper implements DecreeExecutor {
|
||||
|
||||
File tectonicplates = new File(folder, "mantle");
|
||||
for (File i : Objects.requireNonNull(tectonicplates.listFiles())) {
|
||||
TectonicPlate.read(maxHeight, i);
|
||||
TectonicPlate.read(maxHeight, i, true);
|
||||
c++;
|
||||
Iris.info("Loaded count: " + c );
|
||||
|
||||
@ -272,7 +226,8 @@ public class CommandDeveloper implements DecreeExecutor {
|
||||
@Param(description = "base IrisWorld") World world,
|
||||
@Param(description = "raw TectonicPlate File") String path,
|
||||
@Param(description = "Algorithm to Test") String algorithm,
|
||||
@Param(description = "Amount of Tests") int amount) {
|
||||
@Param(description = "Amount of Tests") int amount,
|
||||
@Param(description = "Is versioned", defaultValue = "false") boolean versioned) {
|
||||
if (!IrisToolbelt.isIrisWorld(world)) {
|
||||
sender().sendMessage(C.RED + "This is not an Iris world. Iris worlds: " + String.join(", ", Bukkit.getServer().getWorlds().stream().filter(IrisToolbelt::isIrisWorld).map(World::getName).toList()));
|
||||
return;
|
||||
@ -289,7 +244,7 @@ public class CommandDeveloper implements DecreeExecutor {
|
||||
service.submit(() -> {
|
||||
try {
|
||||
CountingDataInputStream raw = CountingDataInputStream.wrap(new FileInputStream(file));
|
||||
TectonicPlate plate = new TectonicPlate(height, raw);
|
||||
TectonicPlate plate = new TectonicPlate(height, raw, versioned);
|
||||
raw.close();
|
||||
|
||||
double d1 = 0;
|
||||
@ -308,7 +263,7 @@ public class CommandDeveloper implements DecreeExecutor {
|
||||
size = tmp.length();
|
||||
start = System.currentTimeMillis();
|
||||
CountingDataInputStream din = createInput(tmp, algorithm);
|
||||
new TectonicPlate(height, din);
|
||||
new TectonicPlate(height, din, true);
|
||||
din.close();
|
||||
d2 += System.currentTimeMillis() - start;
|
||||
tmp.delete();
|
||||
|
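
The CommandDeveloper hunks above thread a new boolean through TectonicPlate.read(...) and the TectonicPlate constructor. A minimal sketch of a caller branching on it, assuming only the signatures visible in this diff and the "pv." file prefix introduced in the Mantle hunks further down:

    // Sketch: load every plate file, treating "pv.*" files as versioned.
    File mantleDir = new File(folder, "mantle");
    for (File plateFile : Objects.requireNonNull(mantleDir.listFiles())) {
        boolean versioned = plateFile.getName().startsWith("pv.");
        TectonicPlate plate = TectonicPlate.read(maxHeight, plateFile, versioned);
    }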
@ -43,6 +43,7 @@ import java.awt.image.BufferedImage;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
import java.util.concurrent.locks.ReentrantLock;
|
||||
import java.util.function.Consumer;
|
||||
|
||||
@ -55,7 +56,7 @@ public class PregeneratorJob implements PregenListener {
|
||||
private static final Color COLOR_NETWORK_GENERATING = parseColor("#836b8c");
|
||||
private static final Color COLOR_GENERATED = parseColor("#65c295");
|
||||
private static final Color COLOR_CLEANED = parseColor("#34eb93");
|
||||
public static PregeneratorJob instance;
|
||||
private static final AtomicReference<PregeneratorJob> instance = new AtomicReference<>();
|
||||
private final MemoryMonitor monitor;
|
||||
private final PregenTask task;
|
||||
private final boolean saving;
|
||||
@ -73,8 +74,14 @@ public class PregeneratorJob implements PregenListener {
|
||||
private String[] info;
|
||||
|
||||
public PregeneratorJob(PregenTask task, PregeneratorMethod method, Engine engine) {
|
||||
instance.updateAndGet(old -> {
|
||||
if (old != null) {
|
||||
old.pregenerator.close();
|
||||
old.close();
|
||||
}
|
||||
return this;
|
||||
});
|
||||
this.engine = engine;
|
||||
instance = this;
|
||||
monitor = new MemoryMonitor(50);
|
||||
saving = false;
|
||||
info = new String[]{"Initializing..."};
|
||||
@ -103,37 +110,40 @@ public class PregeneratorJob implements PregenListener {
|
||||
}
|
||||
|
||||
public static boolean shutdownInstance() {
|
||||
if (instance == null) {
|
||||
PregeneratorJob inst = instance.get();
|
||||
if (inst == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
J.a(() -> instance.pregenerator.close());
|
||||
J.a(inst.pregenerator::close);
|
||||
return true;
|
||||
}
|
||||
|
||||
public static PregeneratorJob getInstance() {
|
||||
return instance;
|
||||
return instance.get();
|
||||
}
|
||||
|
||||
public static boolean pauseResume() {
|
||||
if (instance == null) {
|
||||
PregeneratorJob inst = instance.get();
|
||||
if (inst == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (isPaused()) {
|
||||
instance.pregenerator.resume();
|
||||
inst.pregenerator.resume();
|
||||
} else {
|
||||
instance.pregenerator.pause();
|
||||
inst.pregenerator.pause();
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
public static boolean isPaused() {
|
||||
if (instance == null) {
|
||||
PregeneratorJob inst = instance.get();
|
||||
if (inst == null) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return instance.paused();
|
||||
return inst.paused();
|
||||
}
|
||||
|
||||
private static Color parseColor(String c) {
|
||||
@ -183,7 +193,7 @@ public class PregeneratorJob implements PregenListener {
|
||||
J.a(() -> {
|
||||
pregenerator.close();
|
||||
close();
|
||||
instance = null;
|
||||
instance.compareAndSet(this, null);
|
||||
});
|
||||
}
|
||||
|
||||
@ -311,7 +321,7 @@ public class PregeneratorJob implements PregenListener {
|
||||
@Override
|
||||
public void onClose() {
|
||||
close();
|
||||
instance = null;
|
||||
instance.compareAndSet(this, null);
|
||||
whenDone.forEach(Runnable::run);
|
||||
service.shutdownNow();
|
||||
}
|
||||
|
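
PregeneratorJob above moves from a raw static field to an AtomicReference singleton, so a new job atomically closes the one it replaces and the static helpers are null-safe. Condensed from the diff (a sketch, not the full class):

    // Construction swaps the singleton and shuts down the previous job, if any.
    instance.updateAndGet(old -> {
        if (old != null) {
            old.pregenerator.close();
            old.close();
        }
        return this;
    });

    // Callers read the reference once and tolerate "no job running".
    PregeneratorJob job = PregeneratorJob.getInstance();
    if (job != null && !PregeneratorJob.isPaused()) {
        PregeneratorJob.pauseResume();
    }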

INMS.java

@@ -34,7 +34,8 @@ public class INMS {
            "1.21.1", "v1_21_R1",
            "1.21.2", "v1_21_R2",
            "1.21.3", "v1_21_R2",
            "1.21.4", "v1_21_R3"
            "1.21.4", "v1_21_R3",
            "1.21.5", "v1_21_R4"
    );
    private static final List<Version> PACKS = List.of(
            new Version(21, 4, "31020"),
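
INMS gains the 1.21.5 -> v1_21_R4 binding. A hedged sketch of how such a table is typically consulted; the helper below is illustrative and not part of the diff:

    // Illustrative lookup; the entries mirror the Map.of(...) above (uses java.util.Map).
    private static final Map<String, String> REVISIONS = Map.of(
            "1.21.4", "v1_21_R3",
            "1.21.5", "v1_21_R4");

    static String revisionFor(String minecraftVersion) {
        String revision = REVISIONS.get(minecraftVersion);
        if (revision == null) {
            throw new IllegalStateException("Unsupported Minecraft version: " + minecraftVersion);
        }
        return revision;
    }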

ChunkUpdater.java

@@ -28,6 +28,7 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

public class ChunkUpdater {
    private static final String REGION_PATH = "region" + File.separator + "r.";
    private final AtomicBoolean paused = new AtomicBoolean();
    private final AtomicBoolean cancelled = new AtomicBoolean();
    private final KMap<Long, Pair<Long, AtomicInteger>> lastUse = new KMap<>();

@@ -162,12 +163,12 @@ public class ChunkUpdater {
            J.sleep(50);
        }

        if (rX < dimensions.min.getX() || rX > dimensions.max.getX() || rZ < dimensions.min.getZ() || rZ > dimensions.max.getZ()) {
            return;
        }
        if (!new File(world.getWorldFolder(), "region" + File.separator + rX + "." + rZ + ".mca").exists()) {
            return;
        }
        if (rX < dimensions.min.getX() ||
                rX > dimensions.max.getX() ||
                rZ < dimensions.min.getZ() ||
                rZ > dimensions.max.getZ() ||
                !new File(world.getWorldFolder(), REGION_PATH + rX + "." + rZ + ".mca").exists()
        ) return;

        task.iterateChunks(rX, rZ, (x, z) -> {
            while (paused.get() && !cancelled.get()) {
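
The ChunkUpdater guard above folds the four bound checks and the region-file check into a single early return built on the new REGION_PATH constant. The same predicate extracted into a standalone helper (a sketch; the helper name and the Dimensions type are assumptions, not part of the diff):

    private boolean shouldSkipRegion(World world, Dimensions dimensions, int rX, int rZ) {
        return rX < dimensions.min.getX() || rX > dimensions.max.getX()
                || rZ < dimensions.min.getZ() || rZ > dimensions.max.getZ()
                || !new File(world.getWorldFolder(), REGION_PATH + rX + "." + rZ + ".mca").exists();
    }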
@ -1,317 +1,242 @@
|
||||
package com.volmit.iris.core.service;
|
||||
|
||||
import com.google.common.util.concurrent.AtomicDouble;
|
||||
import com.volmit.iris.Iris;
|
||||
import com.volmit.iris.core.IrisSettings;
|
||||
import com.volmit.iris.core.loader.IrisData;
|
||||
import com.volmit.iris.core.tools.IrisToolbelt;
|
||||
import com.volmit.iris.engine.framework.Engine;
|
||||
import com.volmit.iris.engine.platform.PlatformChunkGenerator;
|
||||
import com.volmit.iris.util.collection.KList;
|
||||
import com.volmit.iris.util.collection.KMap;
|
||||
import com.volmit.iris.util.format.C;
|
||||
import com.volmit.iris.util.format.Form;
|
||||
import com.volmit.iris.util.mantle.TectonicPlate;
|
||||
import com.volmit.iris.util.misc.getHardware;
|
||||
import com.volmit.iris.util.math.RNG;
|
||||
import com.volmit.iris.util.plugin.IrisService;
|
||||
import com.volmit.iris.util.scheduling.ChronoLatch;
|
||||
import com.volmit.iris.util.plugin.VolmitSender;
|
||||
import com.volmit.iris.util.scheduling.Looper;
|
||||
import com.volmit.iris.util.scheduling.PrecisionStopwatch;
|
||||
import lombok.Synchronized;
|
||||
import org.bukkit.Bukkit;
|
||||
import org.bukkit.World;
|
||||
import org.bukkit.event.EventHandler;
|
||||
import org.bukkit.event.server.PluginDisableEvent;
|
||||
import org.bukkit.event.server.ServerLoadEvent;
|
||||
import org.bukkit.event.world.WorldLoadEvent;
|
||||
import org.bukkit.event.world.WorldUnloadEvent;
|
||||
import org.checkerframework.checker.units.qual.A;
|
||||
import org.jetbrains.annotations.Nullable;
|
||||
|
||||
import java.util.*;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.concurrent.*;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
import java.util.concurrent.locks.ReentrantLock;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
public class IrisEngineSVC implements IrisService {
|
||||
public static IrisEngineSVC instance;
|
||||
public boolean isServerShuttingDown = false;
|
||||
public boolean isServerLoaded = false;
|
||||
private static final AtomicInteger tectonicLimit = new AtomicInteger(30);
|
||||
private ReentrantLock lastUseLock;
|
||||
private KMap<World, Long> lastUse;
|
||||
private List<World> IrisWorlds;
|
||||
private Looper cacheTicker;
|
||||
private Looper trimTicker;
|
||||
private Looper unloadTicker;
|
||||
private final AtomicInteger tectonicLimit = new AtomicInteger(30);
|
||||
private final AtomicInteger tectonicPlates = new AtomicInteger();
|
||||
private final AtomicInteger queuedTectonicPlates = new AtomicInteger();
|
||||
private final AtomicInteger trimmerAlive = new AtomicInteger();
|
||||
private final AtomicInteger unloaderAlive = new AtomicInteger();
|
||||
private final AtomicInteger totalWorlds = new AtomicInteger();
|
||||
private final AtomicDouble maxIdleDuration = new AtomicDouble();
|
||||
private final AtomicDouble minIdleDuration = new AtomicDouble();
|
||||
private final AtomicLong loadedChunks = new AtomicLong();
|
||||
private final KMap<World, Registered> worlds = new KMap<>();
|
||||
private ScheduledExecutorService service;
|
||||
private Looper updateTicker;
|
||||
private PrecisionStopwatch trimAlive;
|
||||
private PrecisionStopwatch unloadAlive;
|
||||
public PrecisionStopwatch trimActiveAlive;
|
||||
public PrecisionStopwatch unloadActiveAlive;
|
||||
private AtomicInteger TotalTectonicPlates;
|
||||
private AtomicInteger TotalQueuedTectonicPlates;
|
||||
private AtomicInteger TotalNotQueuedTectonicPlates;
|
||||
private AtomicBoolean IsUnloadAlive;
|
||||
private AtomicBoolean IsTrimAlive;
|
||||
ChronoLatch cl;
|
||||
|
||||
public List<World> corruptedIrisWorlds = new ArrayList<>();
|
||||
|
||||
@Override
|
||||
public void onEnable() {
|
||||
this.cl = new ChronoLatch(5000);
|
||||
lastUse = new KMap<>();
|
||||
lastUseLock = new ReentrantLock();
|
||||
IrisWorlds = new ArrayList<>();
|
||||
IsUnloadAlive = new AtomicBoolean(true);
|
||||
IsTrimAlive = new AtomicBoolean(true);
|
||||
trimActiveAlive = new PrecisionStopwatch();
|
||||
unloadActiveAlive = new PrecisionStopwatch();
|
||||
trimAlive = new PrecisionStopwatch();
|
||||
unloadAlive = new PrecisionStopwatch();
|
||||
TotalTectonicPlates = new AtomicInteger();
|
||||
TotalQueuedTectonicPlates = new AtomicInteger();
|
||||
TotalNotQueuedTectonicPlates = new AtomicInteger();
|
||||
tectonicLimit.set(2);
|
||||
long t = getHardware.getProcessMemory();
|
||||
while (t > 200) {
|
||||
tectonicLimit.getAndAdd(1);
|
||||
t = t - 200;
|
||||
}
|
||||
this.setup();
|
||||
this.TrimLogic();
|
||||
this.UnloadLogic();
|
||||
|
||||
trimAlive.begin();
|
||||
unloadAlive.begin();
|
||||
trimActiveAlive.begin();
|
||||
unloadActiveAlive.begin();
|
||||
|
||||
updateTicker.start();
|
||||
cacheTicker.start();
|
||||
//trimTicker.start();
|
||||
//unloadTicker.start();
|
||||
instance = this;
|
||||
|
||||
var settings = IrisSettings.get().getPerformance();
|
||||
var engine = settings.getEngineSVC();
|
||||
service = Executors.newScheduledThreadPool(0,
|
||||
(engine.isUseVirtualThreads()
|
||||
? Thread.ofVirtual()
|
||||
: Thread.ofPlatform().priority(engine.getPriority()))
|
||||
.name("Iris EngineSVC-", 0)
|
||||
.factory());
|
||||
tectonicLimit.set(settings.getTectonicPlateSize());
|
||||
Bukkit.getWorlds().forEach(this::add);
|
||||
setup();
|
||||
}
|
||||
|
||||
public void engineStatus() {
|
||||
boolean trimAlive = trimTicker.isAlive();
|
||||
boolean unloadAlive = unloadTicker.isAlive();
|
||||
Iris.info("Status:");
|
||||
Iris.info("- Trim: " + trimAlive);
|
||||
Iris.info("- Unload: " + unloadAlive);
|
||||
|
||||
@Override
|
||||
public void onDisable() {
|
||||
service.shutdown();
|
||||
updateTicker.interrupt();
|
||||
worlds.keySet().forEach(this::remove);
|
||||
worlds.clear();
|
||||
}
|
||||
|
||||
public static int getTectonicLimit() {
|
||||
return tectonicLimit.get();
|
||||
public void engineStatus(VolmitSender sender) {
|
||||
sender.sendMessage(C.DARK_PURPLE + "-------------------------");
|
||||
sender.sendMessage(C.DARK_PURPLE + "Status:");
|
||||
sender.sendMessage(C.DARK_PURPLE + "- Service: " + C.LIGHT_PURPLE + (service.isShutdown() ? "Shutdown" : "Running"));
|
||||
sender.sendMessage(C.DARK_PURPLE + "- Updater: " + C.LIGHT_PURPLE + (updateTicker.isAlive() ? "Running" : "Stopped"));
|
||||
sender.sendMessage(C.DARK_PURPLE + "- Trimmers: " + C.LIGHT_PURPLE + trimmerAlive.get());
|
||||
sender.sendMessage(C.DARK_PURPLE + "- Unloaders: " + C.LIGHT_PURPLE + unloaderAlive.get());
|
||||
sender.sendMessage(C.DARK_PURPLE + "Tectonic Plates:");
|
||||
sender.sendMessage(C.DARK_PURPLE + "- Limit: " + C.LIGHT_PURPLE + tectonicLimit.get());
|
||||
sender.sendMessage(C.DARK_PURPLE + "- Total: " + C.LIGHT_PURPLE + tectonicPlates.get());
|
||||
sender.sendMessage(C.DARK_PURPLE + "- Queued: " + C.LIGHT_PURPLE + queuedTectonicPlates.get());
|
||||
sender.sendMessage(C.DARK_PURPLE + "- Max Idle Duration: " + C.LIGHT_PURPLE + Form.duration(maxIdleDuration.get(), 2));
|
||||
sender.sendMessage(C.DARK_PURPLE + "- Min Idle Duration: " + C.LIGHT_PURPLE + Form.duration(minIdleDuration.get(), 2));
|
||||
sender.sendMessage(C.DARK_PURPLE + "Other:");
|
||||
sender.sendMessage(C.DARK_PURPLE + "- Iris Worlds: " + C.LIGHT_PURPLE + totalWorlds.get());
|
||||
sender.sendMessage(C.DARK_PURPLE + "- Loaded Chunks: " + C.LIGHT_PURPLE + loadedChunks.get());
|
||||
sender.sendMessage(C.DARK_PURPLE + "- Cache Size: " + C.LIGHT_PURPLE + Form.f(IrisData.cacheSize()));
|
||||
sender.sendMessage(C.DARK_PURPLE + "-------------------------");
|
||||
}
|
||||
|
||||
@EventHandler
|
||||
public void onWorldUnload(WorldUnloadEvent event) {
|
||||
updateWorlds();
|
||||
remove(event.getWorld());
|
||||
}
|
||||
|
||||
@EventHandler
|
||||
public void onWorldLoad(WorldLoadEvent event) {
|
||||
updateWorlds();
|
||||
add(event.getWorld());
|
||||
}
|
||||
|
||||
@EventHandler
|
||||
public void onServerBoot(ServerLoadEvent event) {
|
||||
isServerLoaded = true;
|
||||
private void remove(World world) {
|
||||
var entry = worlds.remove(world);
|
||||
if (entry == null) return;
|
||||
entry.close();
|
||||
}
|
||||
|
||||
@EventHandler
|
||||
public void onPluginDisable(PluginDisableEvent event) {
|
||||
if (event.getPlugin().equals(Iris.instance)) {
|
||||
isServerShuttingDown = true;
|
||||
}
|
||||
private void add(World world) {
|
||||
var access = IrisToolbelt.access(world);
|
||||
if (access == null) return;
|
||||
worlds.put(world, new Registered(world.getName(), access));
|
||||
}
|
||||
|
||||
public void updateWorlds() {
|
||||
for (World world : Bukkit.getWorlds()) {
|
||||
try {
|
||||
if (IrisToolbelt.access(world).getEngine() != null) {
|
||||
IrisWorlds.add(world);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
// no
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void setup() {
|
||||
cacheTicker = new Looper() {
|
||||
@Override
|
||||
protected long loop() {
|
||||
long now = System.currentTimeMillis();
|
||||
lastUseLock.lock();
|
||||
try {
|
||||
for (World key : new ArrayList<>(lastUse.keySet())) {
|
||||
Long last = lastUse.get(key);
|
||||
if (last == null)
|
||||
continue;
|
||||
if (now - last > 60000) {
|
||||
lastUse.remove(key);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
lastUseLock.unlock();
|
||||
}
|
||||
return 1000;
|
||||
}
|
||||
};
|
||||
private synchronized void setup() {
|
||||
if (updateTicker != null && updateTicker.isAlive())
|
||||
return;
|
||||
|
||||
updateTicker = new Looper() {
|
||||
@Override
|
||||
protected long loop() {
|
||||
try {
|
||||
TotalQueuedTectonicPlates.set(0);
|
||||
TotalNotQueuedTectonicPlates.set(0);
|
||||
TotalTectonicPlates.set(0);
|
||||
for (World world : IrisWorlds) {
|
||||
Engine engine = Objects.requireNonNull(IrisToolbelt.access(world)).getEngine();
|
||||
TotalQueuedTectonicPlates.addAndGet((int) engine.getMantle().getToUnload());
|
||||
TotalNotQueuedTectonicPlates.addAndGet((int) engine.getMantle().getNotQueuedLoadedRegions());
|
||||
TotalTectonicPlates.addAndGet(engine.getMantle().getLoadedRegionCount());
|
||||
}
|
||||
if (!isServerShuttingDown && isServerLoaded) {
|
||||
if (!trimTicker.isAlive()) {
|
||||
Iris.info(C.RED + "TrimTicker found dead! Booting it up!");
|
||||
try {
|
||||
TrimLogic();
|
||||
} catch (Exception e) {
|
||||
Iris.error("What happened?");
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
queuedTectonicPlates.set(0);
|
||||
tectonicPlates.set(0);
|
||||
loadedChunks.set(0);
|
||||
unloaderAlive.set(0);
|
||||
trimmerAlive.set(0);
|
||||
totalWorlds.set(0);
|
||||
|
||||
if (!unloadTicker.isAlive()) {
|
||||
Iris.info(C.RED + "UnloadTicker found dead! Booting it up!");
|
||||
try {
|
||||
UnloadLogic();
|
||||
} catch (Exception e) {
|
||||
Iris.error("What happened?");
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
double maxDuration = Long.MIN_VALUE;
|
||||
double minDuration = Long.MAX_VALUE;
|
||||
for (var entry : worlds.entrySet()) {
|
||||
var registered = entry.getValue();
|
||||
if (registered.closed) continue;
|
||||
|
||||
} catch (Exception e) {
|
||||
return -1;
|
||||
totalWorlds.incrementAndGet();
|
||||
unloaderAlive.addAndGet(registered.unloaderAlive() ? 1 : 0);
|
||||
trimmerAlive.addAndGet(registered.trimmerAlive() ? 1 : 0);
|
||||
|
||||
var engine = registered.getEngine();
|
||||
if (engine == null) continue;
|
||||
|
||||
queuedTectonicPlates.addAndGet((int) engine.getMantle().getUnloadRegionCount());
|
||||
tectonicPlates.addAndGet(engine.getMantle().getLoadedRegionCount());
|
||||
loadedChunks.addAndGet(entry.getKey().getLoadedChunks().length);
|
||||
|
||||
double duration = engine.getMantle().getAdjustedIdleDuration();
|
||||
if (duration > maxDuration) maxDuration = duration;
|
||||
if (duration < minDuration) minDuration = duration;
|
||||
}
|
||||
maxIdleDuration.set(maxDuration);
|
||||
minIdleDuration.set(minDuration);
|
||||
|
||||
worlds.values().forEach(Registered::update);
|
||||
} catch (Throwable e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
return 1000;
|
||||
}
|
||||
};
|
||||
updateTicker.start();
|
||||
}
|
||||
public void TrimLogic() {
|
||||
if (trimTicker == null || !trimTicker.isAlive()) {
|
||||
trimTicker = new Looper() {
|
||||
private final Supplier<Engine> supplier = createSupplier();
|
||||
|
||||
@Override
|
||||
protected long loop() {
|
||||
long start = System.currentTimeMillis();
|
||||
trimAlive.reset();
|
||||
try {
|
||||
Engine engine = supplier.get();
|
||||
if (engine != null) {
|
||||
engine.getMantle().trim(tectonicLimit.get() / lastUse.size());
|
||||
private final class Registered {
|
||||
private final String name;
|
||||
private final PlatformChunkGenerator access;
|
||||
private transient ScheduledFuture<?> trimmer;
|
||||
private transient ScheduledFuture<?> unloader;
|
||||
private transient boolean closed;
|
||||
|
||||
private Registered(String name, PlatformChunkGenerator access) {
|
||||
this.name = name;
|
||||
this.access = access;
|
||||
update();
|
||||
}
|
||||
|
||||
private boolean unloaderAlive() {
|
||||
return unloader != null && !unloader.isDone() && !unloader.isCancelled();
|
||||
}
|
||||
|
||||
private boolean trimmerAlive() {
|
||||
return trimmer != null && !trimmer.isDone() && !trimmer.isCancelled();
|
||||
}
|
||||
|
||||
@Synchronized
|
||||
private void update() {
|
||||
if (closed || service == null || service.isShutdown())
|
||||
return;
|
||||
|
||||
if (trimmer == null || trimmer.isDone() || trimmer.isCancelled()) {
|
||||
trimmer = service.scheduleAtFixedRate(() -> {
|
||||
Engine engine = getEngine();
|
||||
if (engine == null || !engine.getMantle().getMantle().shouldReduce(engine))
|
||||
return;
|
||||
|
||||
try {
|
||||
engine.getMantle().trim(tectonicLimit.get() / worlds.size());
|
||||
} catch (Throwable e) {
|
||||
Iris.reportError(e);
|
||||
Iris.info(C.RED + "EngineSVC: Failed to trim.");
|
||||
Iris.error("EngineSVC: Failed to trim for " + name);
|
||||
e.printStackTrace();
|
||||
return -1;
|
||||
}
|
||||
}, RNG.r.nextInt(1000), 1000, TimeUnit.MILLISECONDS);
|
||||
}
|
||||
|
||||
int size = lastUse.size();
|
||||
long time = (size > 0 ? 1000 / size : 1000) - (System.currentTimeMillis() - start);
|
||||
if (time <= 0)
|
||||
return 0;
|
||||
return time;
|
||||
}
|
||||
};
|
||||
trimTicker.start();
|
||||
}
|
||||
}
|
||||
public void UnloadLogic() {
|
||||
if (unloadTicker == null || !unloadTicker.isAlive()) {
|
||||
unloadTicker = new Looper() {
|
||||
private final Supplier<Engine> supplier = createSupplier();
|
||||
if (unloader == null || unloader.isDone() || unloader.isCancelled()) {
|
||||
unloader = service.scheduleAtFixedRate(() -> {
|
||||
Engine engine = getEngine();
|
||||
if (engine == null || !engine.getMantle().getMantle().shouldReduce(engine))
|
||||
return;
|
||||
|
||||
@Override
|
||||
protected long loop() {
|
||||
long start = System.currentTimeMillis();
|
||||
unloadAlive.reset();
|
||||
try {
|
||||
Engine engine = supplier.get();
|
||||
if (engine != null) {
|
||||
long unloadStart = System.currentTimeMillis();
|
||||
int count = engine.getMantle().unloadTectonicPlate(tectonicLimit.get() / lastUse.size());
|
||||
int count = engine.getMantle().unloadTectonicPlate(tectonicLimit.get() / worlds.size());
|
||||
if (count > 0) {
|
||||
Iris.debug(C.GOLD + "Unloaded " + C.YELLOW + count + " TectonicPlates in " + C.RED + Form.duration(System.currentTimeMillis() - unloadStart, 2));
|
||||
}
|
||||
}
|
||||
} catch (Throwable e) {
|
||||
Iris.reportError(e);
|
||||
Iris.info(C.RED + "EngineSVC: Failed to unload.");
|
||||
Iris.error("EngineSVC: Failed to unload for " + name);
|
||||
e.printStackTrace();
|
||||
return -1;
|
||||
}
|
||||
|
||||
int size = lastUse.size();
|
||||
long time = (size > 0 ? 1000 / size : 1000) - (System.currentTimeMillis() - start);
|
||||
if (time <= 0)
|
||||
return 0;
|
||||
return time;
|
||||
}
|
||||
};
|
||||
unloadTicker.start();
|
||||
}, RNG.r.nextInt(1000), 1000, TimeUnit.MILLISECONDS);
|
||||
}
|
||||
}
|
||||
|
||||
private Supplier<Engine> createSupplier() {
|
||||
AtomicInteger i = new AtomicInteger();
|
||||
return () -> {
|
||||
List<World> worlds = Bukkit.getWorlds();
|
||||
if (i.get() >= worlds.size()) {
|
||||
i.set(0);
|
||||
}
|
||||
try {
|
||||
for (int j = 0; j < worlds.size(); j++) {
|
||||
World world = worlds.get(i.getAndIncrement());
|
||||
PlatformChunkGenerator generator = IrisToolbelt.access(world);
|
||||
if (i.get() >= worlds.size()) {
|
||||
i.set(0);
|
||||
@Synchronized
|
||||
private void close() {
|
||||
if (closed) return;
|
||||
closed = true;
|
||||
|
||||
if (trimmer != null) {
|
||||
trimmer.cancel(false);
|
||||
trimmer = null;
|
||||
}
|
||||
|
||||
if (generator != null) {
|
||||
Engine engine = generator.getEngine();
|
||||
boolean closed = engine.getMantle().getData().isClosed();
|
||||
if (engine != null && !engine.isStudio() && !closed) {
|
||||
lastUseLock.lock();
|
||||
lastUse.put(world, System.currentTimeMillis());
|
||||
lastUseLock.unlock();
|
||||
return engine;
|
||||
if (unloader != null) {
|
||||
unloader.cancel(false);
|
||||
unloader = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Throwable e) {
|
||||
Iris.info(C.RED + "EngineSVC: Failed to create supplier.");
|
||||
e.printStackTrace();
|
||||
Iris.reportError(e);
|
||||
}
|
||||
return null;
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onDisable() {
|
||||
cacheTicker.interrupt();
|
||||
trimTicker.interrupt();
|
||||
unloadTicker.interrupt();
|
||||
lastUse.clear();
|
||||
@Nullable
|
||||
private Engine getEngine() {
|
||||
if (closed) return null;
|
||||
return access.getEngine();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
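
The IrisEngineSVC rewrite above replaces the global trim/unload Loopers with one ScheduledExecutorService (optionally backed by virtual threads) plus a per-world Registered entry that owns its own trimmer and unloader futures. A reduced sketch of that scheduling shape, using the same Java 21 thread-builder calls that appear in the diff; the worldCount parameter and the Math.max guard are simplifications:

    // Needs java.util.concurrent.*; names mirror IrisEngineSVC but details are condensed.
    ScheduledExecutorService service = Executors.newScheduledThreadPool(0,
            (useVirtualThreads ? Thread.ofVirtual() : Thread.ofPlatform().priority(priority))
                    .name("Iris EngineSVC-", 0)
                    .factory());

    static ScheduledFuture<?> scheduleTrimmer(ScheduledExecutorService service,
                                              PlatformChunkGenerator access,
                                              int tectonicLimit, int worldCount) {
        return service.scheduleAtFixedRate(() -> {
            Engine engine = access.getEngine();
            if (engine == null || !engine.getMantle().getMantle().shouldReduce(engine)) return;
            engine.getMantle().trim(tectonicLimit / Math.max(1, worldCount));
        }, RNG.r.nextInt(1000), 1000, TimeUnit.MILLISECONDS);
    }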

IrisWorldManager.java

@@ -55,6 +55,7 @@ import org.bukkit.event.block.BlockPlaceEvent;
import org.bukkit.event.player.PlayerTeleportEvent;
import org.bukkit.inventory.ItemStack;

import java.lang.ref.WeakReference;
import java.util.List;
import java.util.Map;
import java.util.Set;

@@ -422,9 +423,16 @@ public class IrisWorldManager extends EngineAssignedWorldManager {
            return;
        }

        var ref = new WeakReference<>(e.getWorld());
        int x = e.getX(), z = e.getZ();
        J.s(() -> {
            World world = ref.get();
            if (world == null || !world.isChunkLoaded(x, z))
                return;
            energy += 0.3;
            fixEnergy();
            getEngine().cleanupMantleChunk(e.getX(), e.getZ());
            getEngine().cleanupMantleChunk(x, z);
        }, IrisSettings.get().getPerformance().mantleCleanupDelay);

        if (generated) {
            //INMS.get().injectBiomesFromMantle(e, getMantle());
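
The IrisWorldManager change defers the mantle cleanup through a WeakReference so a queued task cannot keep an unloaded world alive, and re-checks that the chunk is still loaded before doing any work. The pattern in isolation (a sketch; J.s is the delayed sync scheduler used in the hunk):

    WeakReference<World> ref = new WeakReference<>(event.getWorld());
    int x = event.getX(), z = event.getZ();
    J.s(() -> {
        World world = ref.get();
        if (world == null || !world.isChunkLoaded(x, z)) return; // world unloaded or chunk gone: nothing to clean
        getEngine().cleanupMantleChunk(x, z);
    }, IrisSettings.get().getPerformance().mantleCleanupDelay);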

Engine.java

@@ -288,11 +288,10 @@ public interface Engine extends DataProvider, Fallible, LootProvider, BlockUpdat
            return;
        }

        var chunk = mantle.getChunk(c);
        if (chunk.isFlagged(MantleFlag.ETCHED)) return;
        chunk.flag(MantleFlag.ETCHED, true);

        var chunk = mantle.getChunk(c).use();
        try {
            Semaphore semaphore = new Semaphore(3);
            chunk.raiseFlag(MantleFlag.ETCHED, () -> {
                chunk.raiseFlag(MantleFlag.TILE, run(semaphore, () -> J.s(() -> {
                    mantle.iterateChunk(c.getX(), c.getZ(), TileWrapper.class, (x, y, z, v) -> {
                        int betterY = y + getWorld().minHeight();

@@ -354,10 +353,14 @@ public interface Engine extends DataProvider, Fallible, LootProvider, BlockUpdat
                    mantle.deleteChunkSlice(c.getX(), c.getZ(), MatterUpdate.class);
                    getMetrics().getUpdates().put(p.getMilliseconds());
                }, RNG.r.i(0, 20))));
            });

            try {
                semaphore.acquire(3);
            } catch (InterruptedException ignored) {}
        } finally {
            chunk.release();
        }
    }

    private static Runnable run(Semaphore semaphore, Runnable runnable) {
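
In the Engine hunk a Semaphore(3) is used together with a run(semaphore, ...) wrapper so the caller can block on acquire(3) until the wrapped tasks have finished; the wrapper's body is not part of the hunk. A standalone equivalent of that wait-for-N idiom (uses java.util.concurrent):

    Semaphore done = new Semaphore(0);
    ExecutorService pool = Executors.newFixedThreadPool(3);
    for (int i = 0; i < 3; i++) {
        pool.submit(() -> {
            try {
                // ... one unit of work ...
            } finally {
                done.release(); // one permit per finished task
            }
        });
    }
    try {
        done.acquire(3);        // wait for all three, like the acquire(3) above
    } catch (InterruptedException ignored) {
    }
    pool.shutdown();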

EngineMantle.java

@@ -289,23 +289,25 @@ public interface EngineMantle extends IObjectPlacer {
    }

    default void cleanupChunk(int x, int z) {
        if (!getMantle().hasFlag(x, z, MantleFlag.CLEANED) && isCovered(x, z)) {
            getMantle().raiseFlag(x, z, MantleFlag.CLEANED, () -> {
                getMantle().deleteChunkSlice(x, z, BlockData.class);
                getMantle().deleteChunkSlice(x, z, String.class);
                getMantle().deleteChunkSlice(x, z, MatterCavern.class);
                getMantle().deleteChunkSlice(x, z, MatterFluidBody.class);
        if (!isCovered(x, z)) return;
        MantleChunk chunk = getMantle().getChunk(x, z).use();
        try {
            chunk.raiseFlag(MantleFlag.CLEANED, () -> {
                chunk.deleteSlices(BlockData.class);
                chunk.deleteSlices(String.class);
                chunk.deleteSlices(MatterCavern.class);
                chunk.deleteSlices(MatterFluidBody.class);
            });
        } finally {
            chunk.release();
        }
    }

    default long getToUnload(){
        return getMantle().getToUnload().size();
    default long getUnloadRegionCount() {
        return getMantle().getUnloadRegionCount();
    }
    default long getNotQueuedLoadedRegions(){
        return getMantle().getLoadedRegions().size() - getMantle().getToUnload().size();
    }
    default double getTectonicDuration(){
        return getMantle().getAdjustedIdleDuration().get();

    default double getAdjustedIdleDuration() {
        return getMantle().getAdjustedIdleDuration();
    }
}

IrisCarveModifier.java

@@ -43,7 +43,6 @@ import org.bukkit.block.data.BlockData;
public class IrisCarveModifier extends EngineAssignedModifier<BlockData> {
    private final RNG rng;
    private final BlockData AIR = Material.CAVE_AIR.createBlockData();
    private final BlockData WATER = Material.WATER.createBlockData();
    private final BlockData LAVA = Material.LAVA.createBlockData();
    private final IrisDecorantActuator decorant;

@@ -103,7 +102,7 @@ public class IrisCarveModifier extends EngineAssignedModifier<BlockData> {
            }

            if (c.isWater()) {
                output.set(rx, yy, rz, WATER);
                output.set(rx, yy, rz, context.getFluid().get(rx, rz));
            } else if (c.isLava()) {
                output.set(rx, yy, rz, LAVA);
            } else {

IrisEntitySpawn.java

@@ -28,7 +28,6 @@ import com.volmit.iris.util.math.RNG;
import com.volmit.iris.util.math.Vector3d;
import com.volmit.iris.util.matter.MatterMarker;
import com.volmit.iris.util.matter.slices.MarkerMatter;
import io.lumine.mythic.bukkit.adapters.BukkitEntity;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

@@ -38,9 +37,6 @@ import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.World;
import org.bukkit.entity.Entity;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.LivingEntity;
import org.bukkit.util.BoundingBox;

@Snippet("entity-spawn")
@Accessors(chain = true)

@@ -116,7 +112,7 @@ public class IrisEntitySpawn implements IRare {
        World world = gen.getWorld().realWorld();
        if (spawns > 0) {

            if (referenceMarker != null) {
            if (referenceMarker != null && referenceMarker.shouldExhaust()) {
                gen.getMantle().getMantle().remove(c.getX(), c.getY() - gen.getWorld().minHeight(), c.getZ(), MatterMarker.class);
            }

IrisMarker.java

@@ -51,10 +51,10 @@ public class IrisMarker extends IrisRegistrant {
    private boolean emptyAbove = true;

    @Desc("If this marker is used, what is the chance it removes itself. For example 25% (0.25) would mean that on average 4 uses will remove a specific marker. Set this below 0 (-1) to never exhaust & set this to 1 or higher to always exhaust on first use.")
    private double exhaustionChance = 0.33;
    private double exhaustionChance = 0;

    public boolean shouldExhaust() {
        return RNG.r.chance(exhaustionChance);
        return exhaustionChance > RNG.r.nextDouble();
    }

    @Override
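
With the new default of 0 and the strict comparison, markers never exhaust unless a pack opts in, and a value of 1 or more always exhausts, which matches the @Desc text. A tiny illustration of the check (java.util.Random standing in for Iris's RNG):

    double exhaustionChance = 0.25;  // example pack value: roughly 1 in 4 uses removes the marker
    boolean shouldExhaust = exhaustionChance > new java.util.Random().nextDouble();
    // exhaustionChance == 0  -> never true, since nextDouble() is in [0, 1)
    // exhaustionChance >= 1  -> always true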
@ -40,6 +40,7 @@ import com.volmit.iris.util.io.ReactiveFolder;
|
||||
import com.volmit.iris.util.scheduling.ChronoLatch;
|
||||
import com.volmit.iris.util.scheduling.J;
|
||||
import com.volmit.iris.util.scheduling.Looper;
|
||||
import io.papermc.lib.PaperLib;
|
||||
import lombok.Data;
|
||||
import lombok.EqualsAndHashCode;
|
||||
import lombok.Setter;
|
||||
@ -86,12 +87,12 @@ public class BukkitChunkGenerator extends ChunkGenerator implements PlatformChun
|
||||
private final boolean studio;
|
||||
private final AtomicInteger a = new AtomicInteger(0);
|
||||
private final CompletableFuture<Integer> spawnChunks = new CompletableFuture<>();
|
||||
private Engine engine;
|
||||
private Looper hotloader;
|
||||
private StudioMode lastMode;
|
||||
private DummyBiomeProvider dummyBiomeProvider;
|
||||
private volatile Engine engine;
|
||||
private volatile Looper hotloader;
|
||||
private volatile StudioMode lastMode;
|
||||
private volatile DummyBiomeProvider dummyBiomeProvider;
|
||||
@Setter
|
||||
private StudioGenerator studioGenerator;
|
||||
private volatile StudioGenerator studioGenerator;
|
||||
|
||||
private boolean initialized = false;
|
||||
|
||||
@ -110,20 +111,6 @@ public class BukkitChunkGenerator extends ChunkGenerator implements PlatformChun
|
||||
Bukkit.getServer().getPluginManager().registerEvents(this, Iris.instance);
|
||||
}
|
||||
|
||||
private static Field getField(Class clazz, String fieldName)
|
||||
throws NoSuchFieldException {
|
||||
try {
|
||||
return clazz.getDeclaredField(fieldName);
|
||||
} catch (NoSuchFieldException e) {
|
||||
Class superClass = clazz.getSuperclass();
|
||||
if (superClass == null) {
|
||||
throw e;
|
||||
} else {
|
||||
return getField(superClass, fieldName);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@EventHandler(priority = EventPriority.LOWEST)
|
||||
public void onWorldInit(WorldInitEvent event) {
|
||||
try {
|
||||
@ -158,6 +145,20 @@ public class BukkitChunkGenerator extends ChunkGenerator implements PlatformChun
|
||||
}
|
||||
}
|
||||
|
||||
@Nullable
|
||||
@Override
|
||||
public Location getFixedSpawnLocation(@NotNull World world, @NotNull Random random) {
|
||||
Location location = new Location(world, 0, 64, 0);
|
||||
PaperLib.getChunkAtAsync(location)
|
||||
.thenAccept(c -> {
|
||||
World w = c.getWorld();
|
||||
if (!w.getSpawnLocation().equals(location))
|
||||
return;
|
||||
w.setSpawnLocation(location.add(0, w.getHighestBlockYAt(location) - 64, 0));
|
||||
});
|
||||
return location;
|
||||
}
|
||||
|
||||
private void setupEngine() {
|
||||
IrisData data = IrisData.get(dataLocation);
|
||||
IrisDimension dimension = data.getDimensionLoader().load(dimensionKey);
|
||||
@ -301,7 +302,9 @@ public class BukkitChunkGenerator extends ChunkGenerator implements PlatformChun
|
||||
hotloader.interrupt();
|
||||
}
|
||||
|
||||
getEngine().close();
|
||||
final Engine engine = getEngine();
|
||||
if (engine != null && !engine.isClosed())
|
||||
engine.close();
|
||||
folder.clear();
|
||||
populators.clear();
|
||||
|
||||
|

AtomicAverage.java

@@ -31,11 +31,11 @@ import com.volmit.iris.util.data.DoubleArrayUtils;
 */
public class AtomicAverage {
    protected final AtomicDoubleArray values;
    protected int cursor;
    private double average;
    private double lastSum;
    private boolean dirty;
    private boolean brandNew;
    protected transient int cursor;
    private transient double average;
    private transient double lastSum;
    private transient boolean dirty;
    private transient boolean brandNew;

    /**
     * Create an average holder

@@ -57,7 +57,7 @@ public class AtomicAverage {
     *
     * @param i the value
     */
    public void put(double i) {
    public synchronized void put(double i) {

        try {
            dirty = true;
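
Making put(double) synchronized serialises writers so the cursor and running sum stay consistent, while the transient markers keep the derived fields out of any serialized form. A minimal standalone analogue of such a rolling average (a sketch, not the Iris class):

    final class RollingAverage {
        private final double[] values;
        private transient int cursor;   // derived bookkeeping, mirrors the transient fields above
        private transient double sum;

        RollingAverage(int size) {
            this.values = new double[size];
        }

        synchronized void put(double v) {           // one writer at a time, like AtomicAverage.put
            sum += v - values[cursor];
            values[cursor] = v;
            cursor = (cursor + 1) % values.length;
        }

        synchronized double average() {
            return sum / values.length;
        }
    }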
@ -18,29 +18,67 @@
|
||||
|
||||
package com.volmit.iris.util.collection;
|
||||
|
||||
import java.util.Collection;
|
||||
import java.util.HashSet;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
|
||||
public class KSet<T> extends HashSet<T> {
|
||||
import java.io.Serializable;
|
||||
import java.util.AbstractSet;
|
||||
import java.util.Collection;
|
||||
import java.util.Iterator;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
|
||||
public class KSet<T> extends AbstractSet<T> implements Serializable {
|
||||
private static final long serialVersionUID = 1L;
|
||||
private final ConcurrentHashMap<T, Boolean> map;
|
||||
|
||||
public KSet() {
|
||||
super();
|
||||
map = new ConcurrentHashMap<>();
|
||||
}
|
||||
|
||||
public KSet(Collection<? extends T> c) {
|
||||
super(c);
|
||||
this();
|
||||
addAll(c);
|
||||
}
|
||||
|
||||
public KSet(int initialCapacity, float loadFactor) {
|
||||
super(initialCapacity, loadFactor);
|
||||
map = new ConcurrentHashMap<>(initialCapacity, loadFactor);
|
||||
}
|
||||
|
||||
public KSet(int initialCapacity) {
|
||||
super(initialCapacity);
|
||||
map = new ConcurrentHashMap<>(initialCapacity);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return map.size();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean contains(Object o) {
|
||||
return map.containsKey(o);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean add(T t) {
|
||||
return map.putIfAbsent(t, Boolean.TRUE) == null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean remove(Object o) {
|
||||
return map.remove(o) != null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void clear() {
|
||||
map.clear();
|
||||
}
|
||||
|
||||
@NotNull
|
||||
@Override
|
||||
public Iterator<T> iterator() {
|
||||
return map.keySet().iterator();
|
||||
}
|
||||
|
||||
public KSet<T> copy() {
|
||||
return new KSet<T>(this);
|
||||
return new KSet<>(this);
|
||||
}
|
||||
}
|
||||
|
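
The KSet rewrite above trades HashSet inheritance for an AbstractSet view over a ConcurrentHashMap, making the set safe for concurrent use. The JDK exposes the same shape directly through ConcurrentHashMap.newKeySet(); this is only the standard alternative for comparison, not what the commit does:

    // Uses java.util.Set and java.util.concurrent.ConcurrentHashMap.
    Set<String> set = ConcurrentHashMap.newKeySet();
    set.add("a");
    boolean added = set.add("a");     // false: duplicate insert is a no-op
    boolean present = set.contains("a");
    set.remove("a");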

JarScanner.java

@@ -31,16 +31,22 @@ public class JarScanner {
    private final KSet<Class<?>> classes;
    private final File jar;
    private final String superPackage;
    private final boolean report;

    /**
     * Create a scanner
     *
     * @param jar the path to the jar
     */
    public JarScanner(File jar, String superPackage) {
    public JarScanner(File jar, String superPackage, boolean report) {
        this.jar = jar;
        this.classes = new KSet<>();
        this.superPackage = superPackage;
        this.report = report;
    }

    public JarScanner(File jar, String superPackage) {
        this(jar, superPackage, true);
    }

    /**

@@ -65,7 +71,8 @@ public class JarScanner {
                try {
                    Class<?> clazz = Class.forName(c);
                    classes.add(clazz);
                } catch (ClassNotFoundException e) {
                } catch (Throwable e) {
                    if (!report) continue;
                    Iris.reportError(e);
                    e.printStackTrace();
                }
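
The extra constructor parameter lets callers scan a jar without emitting error reports; the Iris.java hunk near the top uses exactly that from the shutdown path. Usage as it appears in the diff, plus the unchanged two-argument form (the package string below is only a placeholder):

    // Quiet scan, as used in onDisable above.
    J.attempt(new JarScanner(instance.getJarFile(), "", false)::scan);

    // Default behaviour still reports errors.
    JarScanner reporting = new JarScanner(jarFile, "com.example.placeholder");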
@ -21,14 +21,13 @@ package com.volmit.iris.util.mantle;
|
||||
import com.google.common.util.concurrent.AtomicDouble;
|
||||
import com.volmit.iris.Iris;
|
||||
import com.volmit.iris.core.IrisSettings;
|
||||
import com.volmit.iris.core.service.IrisEngineSVC;
|
||||
import com.volmit.iris.core.tools.IrisToolbelt;
|
||||
import com.volmit.iris.engine.data.cache.Cache;
|
||||
import com.volmit.iris.engine.framework.Engine;
|
||||
import com.volmit.iris.engine.mantle.EngineMantle;
|
||||
import com.volmit.iris.engine.mantle.MantleWriter;
|
||||
import com.volmit.iris.util.collection.KList;
|
||||
import com.volmit.iris.util.collection.KMap;
|
||||
import com.volmit.iris.util.collection.KSet;
|
||||
import com.volmit.iris.util.documentation.BlockCoordinates;
|
||||
import com.volmit.iris.util.documentation.ChunkCoordinates;
|
||||
import com.volmit.iris.util.documentation.RegionCoordinates;
|
||||
@ -51,8 +50,6 @@ import java.util.*;
|
||||
import java.util.concurrent.*;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
import java.util.concurrent.locks.ReentrantLock;
|
||||
|
||||
/**
|
||||
* The mantle can store any type of data slice anywhere and manage regions & IO on its own.
|
||||
@ -60,18 +57,18 @@ import java.util.concurrent.locks.ReentrantLock;
|
||||
*/
|
||||
|
||||
public class Mantle {
|
||||
private static final boolean disableClear = System.getProperty("disableClear", "false").equals("true");
|
||||
private final File dataFolder;
|
||||
@Getter
|
||||
private final int worldHeight;
|
||||
private final Map<Long, Long> lastUse;
|
||||
@Getter
|
||||
private final Map<Long, TectonicPlate> loadedRegions;
|
||||
private final HyperLock hyperLock;
|
||||
private final AtomicBoolean closed;
|
||||
private final MultiBurst ioBurst;
|
||||
private final AtomicBoolean ioTrim;
|
||||
private final AtomicBoolean ioTectonicUnload;
|
||||
private final AtomicDouble adjustedIdleDuration;
|
||||
private final KSet<Long> toUnload;
|
||||
|
||||
/**
|
||||
* Create a new mantle
|
||||
@ -91,6 +88,8 @@ public class Mantle {
|
||||
loadedRegions = new KMap<>();
|
||||
lastUse = new KMap<>();
|
||||
ioBurst = MultiBurst.burst;
|
||||
adjustedIdleDuration = new AtomicDouble(0);
|
||||
toUnload = new KSet<>();
|
||||
Iris.debug("Opened The Mantle " + C.DARK_AQUA + dataFolder.getAbsolutePath());
|
||||
}
|
||||
|
||||
@ -103,7 +102,7 @@ public class Mantle {
|
||||
* @return the file
|
||||
*/
|
||||
public static File fileForRegion(File folder, int x, int z) {
|
||||
return fileForRegion(folder, key(x, z));
|
||||
return fileForRegion(folder, key(x, z), true);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -113,12 +112,28 @@ public class Mantle {
|
||||
* @param key the region key
|
||||
* @return the file
|
||||
*/
|
||||
public static File fileForRegion(File folder, Long key) {
|
||||
File f = new File(folder, "p." + key + ".ttp.lz4b");
|
||||
if (!f.getParentFile().exists()) {
|
||||
f.getParentFile().mkdirs();
|
||||
}
|
||||
public static File fileForRegion(File folder, Long key, boolean convert) {
|
||||
File f = oldFileForRegion(folder, key);
|
||||
File fv = new File(folder, "pv." + key + ".ttp.lz4b");
|
||||
if (f.exists() && !fv.exists() && convert)
|
||||
return f;
|
||||
|
||||
if (!fv.getParentFile().exists()) {
|
||||
fv.getParentFile().mkdirs();
|
||||
}
|
||||
return fv;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Get the old file for the given region
|
||||
*
|
||||
* @param folder the data folder
|
||||
* @param key the region key
|
||||
* @return the file
|
||||
*/
|
||||
public static File oldFileForRegion(File folder, Long key) {
|
||||
return new File(folder, "p." + key + ".ttp.lz4b");
|
||||
}
|
||||
|
||||
/**
|
||||
@ -210,7 +225,7 @@ public class Mantle {
|
||||
@RegionCoordinates
|
||||
public boolean hasTectonicPlate(int x, int z) {
|
||||
Long k = key(x, z);
|
||||
return loadedRegions.containsKey(k) || fileForRegion(dataFolder, k).exists();
|
||||
return loadedRegions.containsKey(k) || fileForRegion(dataFolder, k, true).exists();
|
||||
}
|
||||
|
||||
/**
|
||||
@ -359,16 +374,19 @@ public class Mantle {
|
||||
}
|
||||
|
||||
closed.set(true);
|
||||
BurstExecutor b = ioBurst.burst(loadedRegions.size());
|
||||
for (Long i : loadedRegions.keySet()) {
|
||||
b.queue(() -> {
|
||||
hyperLock.disable();
|
||||
BurstExecutor b = ioBurst.burst(toUnload.size());
|
||||
loadedRegions.forEach((i, plate) -> b.queue(() -> {
|
||||
try {
|
||||
loadedRegions.get(i).write(fileForRegion(dataFolder, i));
|
||||
} catch (IOException e) {
|
||||
plate.close();
|
||||
plate.write(fileForRegion(dataFolder, i, false));
|
||||
oldFileForRegion(dataFolder, i).delete();
|
||||
} catch (Throwable e) {
|
||||
Iris.error("Failed to write Tectonic Plate " + C.DARK_GREEN + Cache.keyX(i) + " " + Cache.keyZ(i));
|
||||
e.printStackTrace();
|
||||
}
|
||||
});
|
||||
}
|
||||
}));
|
||||
loadedRegions.clear();
|
||||
|
||||
try {
|
||||
b.complete();
|
||||
@ -376,7 +394,6 @@ public class Mantle {
|
||||
Iris.reportError(e);
|
||||
}
|
||||
|
||||
loadedRegions.clear();
|
||||
Iris.debug("The Mantle has Closed " + C.DARK_AQUA + dataFolder.getAbsolutePath());
|
||||
}
|
||||
|
||||
@ -392,16 +409,6 @@ public class Mantle {
|
||||
return numberOfEntries * bytesPerEntry;
|
||||
}
|
||||
|
||||
@Getter
|
||||
private final AtomicDouble adjustedIdleDuration = new AtomicDouble(0);
|
||||
@Getter
|
||||
private final AtomicInteger forceAggressiveThreshold = new AtomicInteger(30);
|
||||
@Getter
|
||||
private final AtomicLong oldestTectonicPlate = new AtomicLong(0);
|
||||
private final ReentrantLock unloadLock = new ReentrantLock();
|
||||
@Getter
|
||||
private final KList<Long> toUnload = new KList<>();
|
||||
|
||||
/**
|
||||
* Save & unload regions that have not been used for more than the
|
||||
* specified amount of milliseconds
|
||||
@ -414,93 +421,81 @@ public class Mantle {
|
||||
}
|
||||
|
||||
adjustedIdleDuration.set(baseIdleDuration);
|
||||
|
||||
if (loadedRegions != null) {
|
||||
if (loadedRegions.size() > tectonicLimit) {
|
||||
// todo update this correctly and maybe do something when its above a 100%
|
||||
adjustedIdleDuration.set(Math.max(adjustedIdleDuration.get() - (1000 * (((loadedRegions.size() - tectonicLimit) / (double) tectonicLimit) * 100) * 0.4), 4000));
|
||||
}
|
||||
}
|
||||
|
||||
ioTrim.set(true);
|
||||
unloadLock.lock();
|
||||
try {
|
||||
if (lastUse != null && IrisEngineSVC.instance != null) {
|
||||
if (!lastUse.isEmpty()) {
|
||||
Iris.debug("Trimming Tectonic Plates older than " + Form.duration(adjustedIdleDuration.get(), 0));
|
||||
for (long i : new ArrayList<>(lastUse.keySet())) {
|
||||
double finalAdjustedIdleDuration = adjustedIdleDuration.get();
|
||||
hyperLock.withLong(i, () -> {
|
||||
Long lastUseTime = lastUse.get(i);
|
||||
if (lastUseTime != null && M.ms() - lastUseTime >= finalAdjustedIdleDuration) {
|
||||
toUnload.add(i);
|
||||
double adjustedIdleDuration = this.adjustedIdleDuration.get();
|
||||
Iris.debug("Trimming Tectonic Plates older than " + Form.duration(adjustedIdleDuration, 0));
|
||||
|
||||
if (lastUse.isEmpty()) return;
|
||||
double unloadTime = M.ms() - adjustedIdleDuration;
|
||||
for (long id : lastUse.keySet()) {
|
||||
hyperLock.withLong(id, () -> {
|
||||
Long lastUseTime = lastUse.get(id);
|
||||
if (lastUseTime != null && lastUseTime < unloadTime) {
|
||||
toUnload.add(id);
|
||||
Iris.debug("Tectonic Region added to unload");
|
||||
IrisEngineSVC.instance.trimActiveAlive.reset();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
} catch (Throwable e) {
|
||||
Iris.reportError(e);
|
||||
} finally {
|
||||
ioTrim.set(false);
|
||||
unloadLock.unlock();
|
||||
}
|
||||
}
|
||||
|
||||
public synchronized int unloadTectonicPlate(int tectonicLimit) {
|
||||
AtomicInteger i = new AtomicInteger();
|
||||
unloadLock.lock();
|
||||
BurstExecutor burst = null;
|
||||
if (IrisEngineSVC.instance != null) {
|
||||
try {
|
||||
KList<Long> copy = toUnload.copy();
|
||||
if (!disableClear) toUnload.clear();
|
||||
burst = MultiBurst.burst.burst(copy.size());
|
||||
burst.setMulticore(copy.size() > tectonicLimit);
|
||||
for (int j = 0; j < copy.size(); j++) {
|
||||
Long id = copy.get(j);
|
||||
if (id == null) {
|
||||
Iris.error("Null id in unloadTectonicPlate at index " + j);
|
||||
continue;
|
||||
if (closed.get()) {
|
||||
throw new RuntimeException("The Mantle is closed");
|
||||
}
|
||||
|
||||
burst.queue(() ->
|
||||
hyperLock.withLong(id, () -> {
|
||||
AtomicInteger i = new AtomicInteger();
|
||||
BurstExecutor burst = ioBurst.burst(toUnload.size());
|
||||
burst.setMulticore(toUnload.size() > tectonicLimit);
|
||||
|
||||
ioTectonicUnload.set(true);
|
||||
try {
|
||||
for (long id : toUnload) {
|
||||
burst.queue(() -> hyperLock.withLong(id, () -> {
|
||||
TectonicPlate m = loadedRegions.get(id);
|
||||
if (m != null) {
|
||||
if (m.inUse()) {
|
||||
Iris.debug("Tectonic Plate was added to unload while in use " + C.DARK_GREEN + m.getX() + " " + m.getZ());
|
||||
if (disableClear) toUnload.remove(id);
|
||||
lastUse.put(id, M.ms());
|
||||
if (m == null) {
|
||||
Iris.debug("Tectonic Plate was added to unload while not loaded " + C.DARK_GREEN + Cache.keyX(id) + " " + Cache.keyZ(id));
|
||||
toUnload.remove(id);
|
||||
return;
|
||||
}
|
||||
|
||||
if (m.inUse()) {
|
||||
Iris.debug("Tectonic Plate was added to unload while in use " + C.DARK_GREEN + m.getX() + " " + m.getZ());
|
||||
lastUse.put(id, M.ms());
|
||||
toUnload.remove(id);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
m.write(fileForRegion(dataFolder, id));
|
||||
m.write(fileForRegion(dataFolder, id, false));
|
||||
oldFileForRegion(dataFolder, id).delete();
|
||||
loadedRegions.remove(id);
|
||||
lastUse.remove(id);
|
||||
if (disableClear) toUnload.remove(id);
|
||||
toUnload.remove(id);
|
||||
i.incrementAndGet();
|
||||
Iris.debug("Unloaded Tectonic Plate " + C.DARK_GREEN + Cache.keyX(id) + " " + Cache.keyZ(id));
|
||||
IrisEngineSVC.instance.unloadActiveAlive.reset();
|
||||
} catch (IOException e) {
|
||||
Iris.reportError(e);
|
||||
}
|
||||
}
|
||||
}));
|
||||
}
|
||||
burst.complete();
|
||||
} catch (Throwable e) {
|
||||
Iris.reportError(e);
|
||||
e.printStackTrace();
|
||||
if (burst != null)
|
||||
burst.complete();
|
||||
} finally {
|
||||
unloadLock.unlock();
|
||||
ioTectonicUnload.set(true);
|
||||
}
|
||||
return i.get();
|
||||
ioTectonicUnload.set(false);
|
||||
}
|
||||
return i.get();
|
||||
}
|
||||
@ -516,7 +511,7 @@ public class Mantle {
|
||||
*/
|
||||
@RegionCoordinates
|
||||
private TectonicPlate get(int x, int z) {
|
||||
if (ioTrim.get()) {
|
||||
if (ioTrim.get() || ioTectonicUnload.get()) {
|
||||
try {
|
||||
return getSafe(x, z).get();
|
||||
} catch (InterruptedException e) {
|
||||
@ -576,7 +571,7 @@ public class Mantle {
|
||||
if (file.exists()) {
|
||||
try {
|
||||
Iris.addPanic("reading.tectonic-plate", file.getAbsolutePath());
|
||||
region = TectonicPlate.read(worldHeight, file);
|
||||
region = TectonicPlate.read(worldHeight, file, file.getName().startsWith("pv."));
|
||||
|
||||
if (region.getX() != x || region.getZ() != z) {
|
||||
Iris.warn("Loaded Tectonic Plate " + x + "," + z + " but read it as " + region.getX() + "," + region.getZ() + "... Assuming " + x + "," + z);
|
||||
@ -626,6 +621,14 @@ public class Mantle {
|
||||
return loadedRegions.size();
|
||||
}
|
||||
|
||||
public int getUnloadRegionCount() {
|
||||
return toUnload.size();
|
||||
}
|
||||
|
||||
public double getAdjustedIdleDuration() {
|
||||
return adjustedIdleDuration.get();
|
||||
}
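The accessors above expose the unload queue size and the adaptive idle window for monitoring. A minimal sketch of how a caller might poll them; the monitor method itself is hypothetical, only getUnloadRegionCount() and getAdjustedIdleDuration() come from this hunk:

    // Hypothetical monitor; only the two getters are taken from the change above.
    void logMantlePressure(Mantle mantle) {
        int queued = mantle.getUnloadRegionCount();        // tectonic plates queued for unload
        double idle = mantle.getAdjustedIdleDuration();    // adaptive idle threshold (assumed milliseconds)
        Iris.debug("Mantle: " + queued + " plates queued, idle window " + idle + "ms");
    }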
|
||||
|
||||
public <T> void set(int x, int y, int z, MatterSlice<T> slice) {
|
||||
if (slice.isEmpty()) {
|
||||
return;
|
||||
|
@ -19,6 +19,7 @@
|
||||
package com.volmit.iris.util.mantle;
|
||||
|
||||
import com.volmit.iris.Iris;
|
||||
import com.volmit.iris.util.data.Varint;
|
||||
import com.volmit.iris.util.documentation.ChunkCoordinates;
|
||||
import com.volmit.iris.util.function.Consumer4;
|
||||
import com.volmit.iris.util.io.CountingDataInputStream;
|
||||
@ -30,7 +31,8 @@ import lombok.Getter;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.DataOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.Semaphore;
|
||||
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
import java.util.concurrent.atomic.AtomicIntegerArray;
|
||||
import java.util.concurrent.atomic.AtomicReferenceArray;
|
||||
|
||||
@ -45,7 +47,8 @@ public class MantleChunk {
|
||||
private final int z;
|
||||
private final AtomicIntegerArray flags;
|
||||
private final AtomicReferenceArray<Matter> sections;
|
||||
private final AtomicInteger ref = new AtomicInteger();
|
||||
private final Semaphore ref = new Semaphore(Integer.MAX_VALUE, true);
|
||||
private final AtomicBoolean closed = new AtomicBoolean(false);
|
||||
|
||||
/**
|
||||
* Create a mantle chunk
|
||||
@ -72,11 +75,12 @@ public class MantleChunk {
|
||||
* @throws IOException shit happens
|
||||
* @throws ClassNotFoundException shit happens
|
||||
*/
|
||||
public MantleChunk(int sectionHeight, CountingDataInputStream din) throws IOException {
|
||||
public MantleChunk(int version, int sectionHeight, CountingDataInputStream din) throws IOException {
|
||||
this(sectionHeight, din.readByte(), din.readByte());
|
||||
int s = din.readByte();
|
||||
int l = version < 0 ? flags.length() : Varint.readUnsignedVarInt(din);
|
||||
|
||||
for (int i = 0; i < flags.length(); i++) {
|
||||
for (int i = 0; i < flags.length() && i < l; i++) {
|
||||
flags.set(i, din.readBoolean() ? 1 : 0);
|
||||
}
|
||||
|
||||
@ -85,6 +89,10 @@ public class MantleChunk {
|
||||
long size = din.readInt();
|
||||
if (size == 0) continue;
|
||||
long start = din.count();
|
||||
if (i >= sectionHeight) {
|
||||
din.skipTo(start + size);
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
sections.set(i, Matter.readDin(din));
|
||||
@ -103,20 +111,27 @@ public class MantleChunk {
|
||||
}
|
||||
}
|
||||
|
||||
public void close() throws InterruptedException {
|
||||
closed.set(true);
|
||||
ref.acquire(Integer.MAX_VALUE);
|
||||
}
|
||||
|
||||
public boolean inUse() {
|
||||
return ref.get() > 0;
|
||||
return ref.availablePermits() < Integer.MAX_VALUE;
|
||||
}
|
||||
|
||||
public MantleChunk use() {
|
||||
ref.incrementAndGet();
|
||||
if (closed.get()) throw new IllegalStateException("Chunk is closed!");
|
||||
ref.acquireUninterruptibly();
|
||||
return this;
|
||||
}
|
||||
|
||||
public void release() {
|
||||
ref.decrementAndGet();
|
||||
ref.release();
|
||||
}
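The reference counter above moves from a plain AtomicInteger to a fair Semaphore so that close() can block until every in-flight user has called release(). A minimal, self-contained sketch of the same pattern (the class name is illustrative):

    import java.util.concurrent.Semaphore;
    import java.util.concurrent.atomic.AtomicBoolean;

    // Sketch only: mirrors the Semaphore-based reference counting used above.
    final class RefGate {
        private static final int MAX = Integer.MAX_VALUE;
        private final Semaphore ref = new Semaphore(MAX, true);
        private final AtomicBoolean closed = new AtomicBoolean(false);

        void use() {
            if (closed.get()) throw new IllegalStateException("closed");
            ref.acquireUninterruptibly();          // one permit per active user
        }

        void release() {
            ref.release();                         // hand the permit back
        }

        boolean inUse() {
            return ref.availablePermits() < MAX;   // any missing permit means someone is inside
        }

        void close() throws InterruptedException {
            closed.set(true);
            ref.acquire(MAX);                      // blocks until every user has released
        }
    }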
|
||||
|
||||
public void flag(MantleFlag flag, boolean f) {
|
||||
if (closed.get()) throw new IllegalStateException("Chunk is closed!");
|
||||
flags.set(flag.ordinal(), f ? 1 : 0);
|
||||
}
|
||||
|
||||
@ -201,6 +216,7 @@ public class MantleChunk {
|
||||
dos.writeByte(x);
|
||||
dos.writeByte(z);
|
||||
dos.writeByte(sections.length());
|
||||
Varint.writeUnsignedVarInt(flags.length(), dos);
|
||||
|
||||
for (int i = 0; i < flags.length(); i++) {
|
||||
dos.writeBoolean(flags.get(i) == 1);
|
||||
|
@ -19,9 +19,10 @@
|
||||
package com.volmit.iris.util.mantle;
|
||||
|
||||
import com.volmit.iris.Iris;
|
||||
import com.volmit.iris.core.IrisSettings;
|
||||
import com.volmit.iris.engine.EnginePanic;
|
||||
import com.volmit.iris.engine.data.cache.Cache;
|
||||
import com.volmit.iris.util.collection.KSet;
|
||||
import com.volmit.iris.util.data.Varint;
|
||||
import com.volmit.iris.util.documentation.ChunkCoordinates;
|
||||
import com.volmit.iris.util.format.C;
|
||||
import com.volmit.iris.util.format.Form;
|
||||
@ -44,7 +45,9 @@ import java.util.concurrent.atomic.AtomicReferenceArray;
|
||||
* Tectonic Plates are fully atomic & thread safe
|
||||
*/
|
||||
public class TectonicPlate {
|
||||
private static final KSet<Thread> errors = new KSet<>();
|
||||
private static final ThreadLocal<Boolean> errors = ThreadLocal.withInitial(() -> false);
|
||||
public static final int MISSING = -1;
|
||||
public static final int CURRENT = 0;
|
||||
|
||||
private final int sectionHeight;
|
||||
private final AtomicReferenceArray<MantleChunk> chunks;
|
||||
@ -74,11 +77,12 @@ public class TectonicPlate {
|
||||
* @param din the data input
|
||||
* @throws IOException shit happens yo
|
||||
*/
|
||||
public TectonicPlate(int worldHeight, CountingDataInputStream din) throws IOException {
|
||||
public TectonicPlate(int worldHeight, CountingDataInputStream din, boolean versioned) throws IOException {
|
||||
this(worldHeight, din.readInt(), din.readInt());
|
||||
if (!din.markSupported())
|
||||
throw new IOException("Mark not supported!");
|
||||
|
||||
int v = versioned ? Varint.readUnsignedVarInt(din) : MISSING;
|
||||
for (int i = 0; i < chunks.length(); i++) {
|
||||
long size = din.readInt();
|
||||
if (size == 0) continue;
|
||||
@ -86,7 +90,7 @@ public class TectonicPlate {
|
||||
|
||||
try {
|
||||
Iris.addPanic("read-chunk", "Chunk[" + i + "]");
|
||||
chunks.set(i, new MantleChunk(sectionHeight, din));
|
||||
chunks.set(i, new MantleChunk(v, sectionHeight, din));
|
||||
EnginePanic.saveLast();
|
||||
} catch (Throwable e) {
|
||||
long end = start + size;
|
||||
@ -103,7 +107,7 @@ public class TectonicPlate {
|
||||
}
|
||||
}
|
||||
|
||||
public static TectonicPlate read(int worldHeight, File file) throws IOException {
|
||||
public static TectonicPlate read(int worldHeight, File file, boolean versioned) throws IOException {
|
||||
try (FileChannel fc = FileChannel.open(file.toPath(), StandardOpenOption.READ, StandardOpenOption.WRITE, StandardOpenOption.SYNC)) {
|
||||
fc.lock();
|
||||
|
||||
@ -111,10 +115,10 @@ public class TectonicPlate {
|
||||
LZ4BlockInputStream lz4 = new LZ4BlockInputStream(fin);
|
||||
BufferedInputStream bis = new BufferedInputStream(lz4);
|
||||
try (CountingDataInputStream din = CountingDataInputStream.wrap(bis)) {
|
||||
return new TectonicPlate(worldHeight, din);
|
||||
return new TectonicPlate(worldHeight, din, versioned);
|
||||
}
|
||||
} finally {
|
||||
if (errors.remove(Thread.currentThread())) {
|
||||
if (IrisSettings.get().getGeneral().isDumpMantleOnError() && errors.get()) {
|
||||
File dump = Iris.instance.getDataFolder("dump", file.getName() + ".bin");
|
||||
try (FileChannel fc = FileChannel.open(file.toPath(), StandardOpenOption.READ, StandardOpenOption.WRITE, StandardOpenOption.SYNC)) {
|
||||
fc.lock();
|
||||
@ -124,6 +128,7 @@ public class TectonicPlate {
|
||||
Files.copy(lz4, dump.toPath(), StandardCopyOption.REPLACE_EXISTING);
|
||||
}
|
||||
}
|
||||
errors.remove();
|
||||
}
|
||||
}
|
||||
|
||||
@ -136,6 +141,15 @@ public class TectonicPlate {
|
||||
return false;
|
||||
}
|
||||
|
||||
public void close() throws InterruptedException {
|
||||
for (int i = 0; i < chunks.length(); i++) {
|
||||
MantleChunk chunk = chunks.get(i);
|
||||
if (chunk != null) {
|
||||
chunk.close();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a chunk exists in this plate or not (same as get(x, z) != null)
|
||||
*
|
||||
@ -208,15 +222,13 @@ public class TectonicPlate {
|
||||
*/
|
||||
public void write(File file) throws IOException {
|
||||
PrecisionStopwatch p = PrecisionStopwatch.start();
|
||||
try (FileChannel fc = FileChannel.open(file.toPath(), StandardOpenOption.READ, StandardOpenOption.WRITE, StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING, StandardOpenOption.SYNC)) {
|
||||
fc.lock();
|
||||
|
||||
OutputStream fos = Channels.newOutputStream(fc);
|
||||
try (DataOutputStream dos = new DataOutputStream(new LZ4BlockOutputStream(fos))) {
|
||||
File temp = File.createTempFile("iris-tectonic-plate", ".bin");
|
||||
try (DataOutputStream dos = new DataOutputStream(new LZ4BlockOutputStream(new FileOutputStream(temp)))) {
|
||||
write(dos);
|
||||
Iris.debug("Saved Tectonic Plate " + C.DARK_GREEN + file.getName().split("\\Q.\\E")[0] + C.RED + " in " + Form.duration(p.getMilliseconds(), 2));
|
||||
}
|
||||
}
|
||||
Files.move(temp.toPath(), file.toPath(), StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.ATOMIC_MOVE);
|
||||
Iris.debug("Saved Tectonic Plate " + C.DARK_GREEN + file.getName() + C.RED + " in " + Form.duration(p.getMilliseconds(), 2));
|
||||
temp.delete();
|
||||
}
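write(File) above now streams to a temporary file and then moves it over the target, so a crash mid-write can no longer truncate an existing plate. A reduced sketch of the pattern; the payload argument stands in for the LZ4-compressed plate data:

    import java.io.*;
    import java.nio.file.*;

    // Sketch: write-to-temp-then-atomic-move, as in TectonicPlate.write(File) above.
    static void atomicWrite(File target, byte[] payload) throws IOException {
        File temp = File.createTempFile("iris-tectonic-plate", ".bin");
        try (OutputStream out = new FileOutputStream(temp)) {
            out.write(payload);                    // stand-in for the DataOutputStream/LZ4 body
        }
        // note: ATOMIC_MOVE requires source and target to live on the same file store
        Files.move(temp.toPath(), target.toPath(),
                StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.ATOMIC_MOVE);
    }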
|
||||
|
||||
/**
|
||||
@ -228,6 +240,7 @@ public class TectonicPlate {
|
||||
public void write(DataOutputStream dos) throws IOException {
|
||||
dos.writeInt(x);
|
||||
dos.writeInt(z);
|
||||
Varint.writeUnsignedVarInt(CURRENT, dos);
|
||||
|
||||
var bytes = new ByteArrayOutputStream(8192);
|
||||
var sub = new DataOutputStream(bytes);
|
||||
@ -249,6 +262,6 @@ public class TectonicPlate {
|
||||
}
|
||||
|
||||
public static void addError() {
|
||||
errors.add(Thread.currentThread());
|
||||
errors.set(true);
|
||||
}
|
||||
}
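The error marker above changes from a shared KSet<Thread> to a ThreadLocal<Boolean>, so each reading thread tracks its own "this plate had a bad slice" flag and no thread references are retained. A stripped-down sketch of that flag, assuming each plate is read on a single thread as the read(...) path above does:

    // Sketch of the per-thread read-error flag used above.
    final class ReadErrorFlag {
        private static final ThreadLocal<Boolean> ERROR = ThreadLocal.withInitial(() -> false);

        static void mark() { ERROR.set(true); }   // called when a slice fails to decode
        static boolean consume() {
            boolean e = ERROR.get();
            ERROR.remove();                       // always reset so pooled threads start clean
            return e;
        }
    }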
|
||||
|
@ -154,16 +154,17 @@ public interface Matter {
|
||||
matter.putSlice(type, slice);
|
||||
} catch (Throwable e) {
|
||||
long end = start + size;
|
||||
if (!(e instanceof ClassNotFoundException)) {
|
||||
Iris.error("Failed to read matter slice, skipping it.");
|
||||
Iris.addPanic("read.byte.range", start + " " + end);
|
||||
Iris.addPanic("read.byte.current", din.count() + "");
|
||||
Iris.reportError(e);
|
||||
e.printStackTrace();
|
||||
Iris.panic();
|
||||
|
||||
din.skipTo(end);
|
||||
TectonicPlate.addError();
|
||||
}
|
||||
din.skipTo(end);
|
||||
}
|
||||
}
|
||||
|
||||
return matter;
|
||||
|
@ -143,5 +143,6 @@ public class HyperLock {
|
||||
|
||||
public void disable() {
|
||||
enabled = false;
|
||||
locks.values().forEach(ReentrantLock::lock);
|
||||
}
|
||||
}
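disable() above now also acquires every existing per-key lock after clearing the enabled flag; since those locks are never released, any late caller that still reaches one parks instead of mutating closed state. A sketch of the shutdown order from the caller's side (only disable() comes from this hunk):

    // Illustrative shutdown sequence; disable() is the method changed above.
    void shutdownMantleIo(HyperLock hyperLock) {
        hyperLock.disable();   // stop new work and hold all existing key locks
        // ... flush and close whatever the locks were protecting ...
    }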
|
||||
|
@ -32,6 +32,7 @@ import java.util.concurrent.*;
|
||||
import java.util.concurrent.atomic.AtomicLong;
|
||||
|
||||
public class MultiBurst implements ExecutorService {
|
||||
private static final long TIMEOUT = Long.getLong("iris.burst.timeout", 15000);
|
||||
public static final MultiBurst burst = new MultiBurst();
|
||||
private final AtomicLong last;
|
||||
private final String name;
|
||||
@ -231,7 +232,7 @@ public class MultiBurst implements ExecutorService {
|
||||
try {
|
||||
while (!service.awaitTermination(1, TimeUnit.SECONDS)) {
|
||||
Iris.info("Still waiting to shutdown burster...");
|
||||
if (p.getMilliseconds() > 7000) {
|
||||
if (p.getMilliseconds() > TIMEOUT) {
|
||||
Iris.warn("Forcing Shutdown...");
|
||||
|
||||
try {
|
||||
|
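The forced-shutdown threshold above is no longer a hard-coded 7000 ms; it reads the iris.burst.timeout system property with a 15000 ms default, so it can be raised on slow disks without a rebuild (for example by starting the JVM with -Diris.burst.timeout=30000):

    // How the value is resolved: Long.getLong reads a JVM system property.
    long timeout = Long.getLong("iris.burst.timeout", 15000); // milliseconds; 15s unless overridden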
@ -0,0 +1,169 @@
|
||||
package com.volmit.iris.core.nms.v1_21_R4;
|
||||
|
||||
import com.mojang.serialization.MapCodec;
|
||||
import com.volmit.iris.Iris;
|
||||
import com.volmit.iris.engine.data.cache.AtomicCache;
|
||||
import com.volmit.iris.engine.framework.Engine;
|
||||
import com.volmit.iris.engine.object.IrisBiome;
|
||||
import com.volmit.iris.engine.object.IrisBiomeCustom;
|
||||
import com.volmit.iris.util.collection.KMap;
|
||||
import com.volmit.iris.util.math.RNG;
|
||||
import net.minecraft.core.Holder;
|
||||
import net.minecraft.core.Registry;
|
||||
import net.minecraft.core.RegistryAccess;
|
||||
import net.minecraft.core.registries.Registries;
|
||||
import net.minecraft.resources.ResourceKey;
|
||||
import net.minecraft.resources.ResourceLocation;
|
||||
import net.minecraft.world.level.biome.Biome;
|
||||
import net.minecraft.world.level.biome.BiomeSource;
|
||||
import net.minecraft.world.level.biome.Climate;
|
||||
import org.bukkit.Bukkit;
|
||||
import org.bukkit.World;
|
||||
import org.bukkit.craftbukkit.v1_21_R4.CraftServer;
|
||||
import org.bukkit.craftbukkit.v1_21_R4.CraftWorld;
|
||||
|
||||
import java.lang.reflect.Field;
|
||||
import java.lang.reflect.Method;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.stream.Stream;
|
||||
|
||||
public class CustomBiomeSource extends BiomeSource {
|
||||
|
||||
private final long seed;
|
||||
private final Engine engine;
|
||||
private final Registry<Biome> biomeCustomRegistry;
|
||||
private final Registry<Biome> biomeRegistry;
|
||||
private final AtomicCache<RegistryAccess> registryAccess = new AtomicCache<>();
|
||||
private final RNG rng;
|
||||
private final KMap<String, Holder<Biome>> customBiomes;
|
||||
|
||||
public CustomBiomeSource(long seed, Engine engine, World world) {
|
||||
this.engine = engine;
|
||||
this.seed = seed;
|
||||
this.biomeCustomRegistry = registry().lookup(Registries.BIOME).orElse(null);
|
||||
this.biomeRegistry = ((RegistryAccess) getFor(RegistryAccess.Frozen.class, ((CraftServer) Bukkit.getServer()).getHandle().getServer())).lookup(Registries.BIOME).orElse(null);
|
||||
this.rng = new RNG(engine.getSeedManager().getBiome());
|
||||
this.customBiomes = fillCustomBiomes(biomeCustomRegistry, engine);
|
||||
}
|
||||
|
||||
private static List<Holder<Biome>> getAllBiomes(Registry<Biome> customRegistry, Registry<Biome> registry, Engine engine) {
|
||||
List<Holder<Biome>> b = new ArrayList<>();
|
||||
|
||||
for (IrisBiome i : engine.getAllBiomes()) {
|
||||
if (i.isCustom()) {
|
||||
for (IrisBiomeCustom j : i.getCustomDerivitives()) {
|
||||
b.add(customRegistry.get(customRegistry.getResourceKey(customRegistry
|
||||
.getValue(ResourceLocation.fromNamespaceAndPath(engine.getDimension().getLoadKey(), j.getId()))).get()).get());
|
||||
}
|
||||
} else {
|
||||
b.add(NMSBinding.biomeToBiomeBase(registry, i.getVanillaDerivative()));
|
||||
}
|
||||
}
|
||||
|
||||
return b;
|
||||
}
|
||||
|
||||
private static Object getFor(Class<?> type, Object source) {
|
||||
Object o = fieldFor(type, source);
|
||||
|
||||
if (o != null) {
|
||||
return o;
|
||||
}
|
||||
|
||||
return invokeFor(type, source);
|
||||
}
|
||||
|
||||
private static Object fieldFor(Class<?> returns, Object in) {
|
||||
return fieldForClass(returns, in.getClass(), in);
|
||||
}
|
||||
|
||||
private static Object invokeFor(Class<?> returns, Object in) {
|
||||
for (Method i : in.getClass().getMethods()) {
|
||||
if (i.getReturnType().equals(returns)) {
|
||||
i.setAccessible(true);
|
||||
try {
|
||||
Iris.debug("[NMS] Found " + returns.getSimpleName() + " in " + in.getClass().getSimpleName() + "." + i.getName() + "()");
|
||||
return i.invoke(in);
|
||||
} catch (Throwable e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private static <T> T fieldForClass(Class<T> returnType, Class<?> sourceType, Object in) {
|
||||
for (Field i : sourceType.getDeclaredFields()) {
|
||||
if (i.getType().equals(returnType)) {
|
||||
i.setAccessible(true);
|
||||
try {
|
||||
Iris.debug("[NMS] Found " + returnType.getSimpleName() + " in " + sourceType.getSimpleName() + "." + i.getName());
|
||||
return (T) i.get(in);
|
||||
} catch (IllegalAccessException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Stream<Holder<Biome>> collectPossibleBiomes() {
|
||||
return getAllBiomes(
|
||||
((RegistryAccess) getFor(RegistryAccess.Frozen.class, ((CraftServer) Bukkit.getServer()).getHandle().getServer()))
|
||||
.lookup(Registries.BIOME).orElse(null),
|
||||
((CraftWorld) engine.getWorld().realWorld()).getHandle().registryAccess().lookup(Registries.BIOME).orElse(null),
|
||||
engine).stream();
|
||||
}
|
||||
private KMap<String, Holder<Biome>> fillCustomBiomes(Registry<Biome> customRegistry, Engine engine) {
|
||||
KMap<String, Holder<Biome>> m = new KMap<>();
|
||||
|
||||
for (IrisBiome i : engine.getAllBiomes()) {
|
||||
if (i.isCustom()) {
|
||||
for (IrisBiomeCustom j : i.getCustomDerivitives()) {
|
||||
ResourceLocation resourceLocation = ResourceLocation.fromNamespaceAndPath(engine.getDimension().getLoadKey(), j.getId());
|
||||
Biome biome = customRegistry.getValue(resourceLocation);
|
||||
Optional<ResourceKey<Biome>> optionalBiomeKey = customRegistry.getResourceKey(biome);
|
||||
if (optionalBiomeKey.isEmpty()) {
|
||||
Iris.error("Cannot find biome for IrisBiomeCustom " + j.getId() + " from engine " + engine.getName());
|
||||
continue;
|
||||
}
|
||||
ResourceKey<Biome> biomeKey = optionalBiomeKey.get();
|
||||
Optional<Holder.Reference<Biome>> optionalReferenceHolder = customRegistry.get(biomeKey);
|
||||
if (optionalReferenceHolder.isEmpty()) {
|
||||
Iris.error("Cannot find reference to biome " + biomeKey + " for engine " + engine.getName());
|
||||
continue;
|
||||
}
|
||||
m.put(j.getId(), optionalReferenceHolder.get());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return m;
|
||||
}
|
||||
|
||||
private RegistryAccess registry() {
|
||||
return registryAccess.aquire(() -> (RegistryAccess) getFor(RegistryAccess.Frozen.class, ((CraftServer) Bukkit.getServer()).getHandle().getServer()));
|
||||
}
|
||||
|
||||
@Override
|
||||
protected MapCodec<? extends BiomeSource> codec() {
|
||||
throw new UnsupportedOperationException("Not supported");
|
||||
}
|
||||
|
||||
@Override
|
||||
public Holder<Biome> getNoiseBiome(int x, int y, int z, Climate.Sampler sampler) {
|
||||
int m = (y - engine.getMinHeight()) << 2;
|
||||
IrisBiome ib = engine.getComplex().getTrueBiomeStream().get(x << 2, z << 2);
|
||||
if (ib.isCustom()) {
|
||||
return customBiomes.get(ib.getCustomBiome(rng, x << 2, m, z << 2).getId());
|
||||
} else {
|
||||
org.bukkit.block.Biome v = ib.getSkyBiome(rng, x << 2, m, z << 2);
|
||||
return NMSBinding.biomeToBiomeBase(biomeRegistry, v);
|
||||
}
|
||||
}
|
||||
}
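getNoiseBiome above works in biome "quart" coordinates (one sample per 4x4x4 block cell), which is why x and z are shifted left by two before querying the Iris biome streams; if I read the shifts correctly this matches the vanilla convention. A tiny sketch of the conversion:

    // Sketch: the block <-> quart conversion assumed by the shifts in getNoiseBiome above.
    static int quartToBlock(int quart) { return quart << 2; } // lowest block of the 4x4x4 cell
    static int blockToQuart(int block) { return block >> 2; } // one biome sample per cell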
|
@ -0,0 +1,311 @@
|
||||
package com.volmit.iris.core.nms.v1_21_R4;
|
||||
|
||||
import com.mojang.datafixers.util.Pair;
|
||||
import com.mojang.serialization.MapCodec;
|
||||
import com.volmit.iris.Iris;
|
||||
import com.volmit.iris.engine.framework.Engine;
|
||||
import com.volmit.iris.engine.framework.ResultLocator;
|
||||
import com.volmit.iris.engine.framework.WrongEngineBroException;
|
||||
import com.volmit.iris.engine.object.IrisJigsawStructure;
|
||||
import com.volmit.iris.engine.object.IrisJigsawStructurePlacement;
|
||||
import com.volmit.iris.util.collection.KList;
|
||||
import com.volmit.iris.util.collection.KMap;
|
||||
import com.volmit.iris.util.collection.KSet;
|
||||
import com.volmit.iris.util.mantle.MantleFlag;
|
||||
import com.volmit.iris.util.math.Position2;
|
||||
import com.volmit.iris.util.reflect.WrappedField;
|
||||
import net.minecraft.core.*;
|
||||
import net.minecraft.core.registries.Registries;
|
||||
import net.minecraft.resources.ResourceKey;
|
||||
import net.minecraft.resources.ResourceLocation;
|
||||
import net.minecraft.server.level.ServerLevel;
|
||||
import net.minecraft.server.level.WorldGenRegion;
|
||||
import net.minecraft.tags.StructureTags;
|
||||
import net.minecraft.tags.TagKey;
|
||||
import net.minecraft.util.random.WeightedList;
|
||||
import net.minecraft.world.entity.MobCategory;
|
||||
import net.minecraft.world.level.*;
|
||||
import net.minecraft.world.level.biome.*;
|
||||
import net.minecraft.world.level.chunk.ChunkAccess;
|
||||
import net.minecraft.world.level.chunk.ChunkGenerator;
|
||||
import net.minecraft.world.level.chunk.ChunkGeneratorStructureState;
|
||||
import net.minecraft.world.level.levelgen.Heightmap;
|
||||
import net.minecraft.world.level.levelgen.RandomState;
|
||||
import net.minecraft.world.level.levelgen.blending.Blender;
|
||||
import net.minecraft.world.level.levelgen.structure.Structure;
|
||||
import net.minecraft.world.level.levelgen.structure.StructureSet;
|
||||
import net.minecraft.world.level.levelgen.structure.templatesystem.StructureTemplateManager;
|
||||
import org.bukkit.World;
|
||||
import org.bukkit.craftbukkit.v1_21_R4.CraftWorld;
|
||||
import org.bukkit.craftbukkit.v1_21_R4.generator.CustomChunkGenerator;
|
||||
import org.spigotmc.SpigotWorldConfig;
|
||||
|
||||
import javax.annotation.Nullable;
|
||||
import java.lang.reflect.Field;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.Optional;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
import java.util.concurrent.ExecutionException;
|
||||
|
||||
public class IrisChunkGenerator extends CustomChunkGenerator {
|
||||
private static final WrappedField<ChunkGenerator, BiomeSource> BIOME_SOURCE;
|
||||
private final ChunkGenerator delegate;
|
||||
private final Engine engine;
|
||||
private final KMap<ResourceKey<Structure>, KSet<String>> structures = new KMap<>();
|
||||
|
||||
public IrisChunkGenerator(ChunkGenerator delegate, long seed, Engine engine, World world) {
|
||||
super(((CraftWorld) world).getHandle(), edit(delegate, new CustomBiomeSource(seed, engine, world)), null);
|
||||
this.delegate = delegate;
|
||||
this.engine = engine;
|
||||
var dimension = engine.getDimension();
|
||||
|
||||
KSet<IrisJigsawStructure> placements = new KSet<>();
|
||||
addAll(dimension.getJigsawStructures(), placements);
|
||||
for (var region : dimension.getAllRegions(engine)) {
|
||||
addAll(region.getJigsawStructures(), placements);
|
||||
for (var biome : region.getAllBiomes(engine))
|
||||
addAll(biome.getJigsawStructures(), placements);
|
||||
}
|
||||
var stronghold = dimension.getStronghold();
|
||||
if (stronghold != null)
|
||||
placements.add(engine.getData().getJigsawStructureLoader().load(stronghold));
|
||||
placements.removeIf(Objects::isNull);
|
||||
|
||||
var registry = ((CraftWorld) world).getHandle().registryAccess().lookup(Registries.STRUCTURE).orElseThrow();
|
||||
for (var s : placements) {
|
||||
try {
|
||||
String raw = s.getStructureKey();
|
||||
if (raw == null) continue;
|
||||
boolean tag = raw.startsWith("#");
|
||||
if (tag) raw = raw.substring(1);
|
||||
|
||||
var location = ResourceLocation.parse(raw);
|
||||
if (!tag) {
|
||||
structures.computeIfAbsent(ResourceKey.create(Registries.STRUCTURE, location), k -> new KSet<>()).add(s.getLoadKey());
|
||||
continue;
|
||||
}
|
||||
|
||||
var key = TagKey.create(Registries.STRUCTURE, location);
|
||||
var set = registry.get(key).orElse(null);
|
||||
if (set == null) {
|
||||
Iris.error("Could not find structure tag: " + raw);
|
||||
continue;
|
||||
}
|
||||
for (var holder : set) {
|
||||
var resourceKey = holder.unwrapKey().orElse(null);
|
||||
if (resourceKey == null) continue;
|
||||
structures.computeIfAbsent(resourceKey, k -> new KSet<>()).add(s.getLoadKey());
|
||||
}
|
||||
} catch (Throwable e) {
|
||||
Iris.error("Failed to load structure: " + s.getLoadKey());
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void addAll(KList<IrisJigsawStructurePlacement> placements, KSet<IrisJigsawStructure> structures) {
|
||||
if (placements == null) return;
|
||||
placements.stream()
|
||||
.map(IrisJigsawStructurePlacement::getStructure)
|
||||
.map(engine.getData().getJigsawStructureLoader()::load)
|
||||
.filter(Objects::nonNull)
|
||||
.forEach(structures::add);
|
||||
}
|
||||
|
||||
@Override
|
||||
public @Nullable Pair<BlockPos, Holder<Structure>> findNearestMapStructure(ServerLevel level, HolderSet<Structure> holders, BlockPos pos, int radius, boolean findUnexplored) {
|
||||
if (holders.size() == 0) return null;
|
||||
if (holders.unwrapKey().orElse(null) == StructureTags.EYE_OF_ENDER_LOCATED) {
|
||||
var next = engine.getNearestStronghold(new Position2(pos.getX(), pos.getZ()));
|
||||
return next == null ? null : new Pair<>(new BlockPos(next.getX(), 0, next.getZ()), holders.get(0));
|
||||
}
|
||||
if (engine.getDimension().isDisableExplorerMaps())
|
||||
return null;
|
||||
|
||||
KMap<String, Holder<Structure>> structures = new KMap<>();
|
||||
for (var holder : holders) {
|
||||
if (holder == null) continue;
|
||||
var key = holder.unwrapKey().orElse(null);
|
||||
var set = this.structures.get(key);
|
||||
if (set == null) continue;
|
||||
for (var structure : set) {
|
||||
structures.put(structure, holder);
|
||||
}
|
||||
}
|
||||
if (structures.isEmpty())
|
||||
return null;
|
||||
|
||||
var locator = ResultLocator.locateStructure(structures.keySet())
|
||||
.then((e, p , s) -> structures.get(s.getLoadKey()));
|
||||
if (findUnexplored)
|
||||
locator = locator.then((e, p, s) -> e.getMantle().getMantle().getChunk(p.getX(), p.getZ()).isFlagged(MantleFlag.DISCOVERED) ? null : s);
|
||||
|
||||
try {
|
||||
var result = locator.find(engine, new Position2(pos.getX() >> 4, pos.getZ() >> 4), radius * 10L, i -> {}, false).get();
|
||||
if (result == null) return null;
|
||||
var blockPos = new BlockPos(result.getBlockX(), 0, result.getBlockZ());
|
||||
return Pair.of(blockPos, result.obj());
|
||||
} catch (WrongEngineBroException | ExecutionException | InterruptedException e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
protected MapCodec<? extends ChunkGenerator> codec() {
|
||||
return MapCodec.unit(null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ChunkGenerator getDelegate() {
|
||||
if (delegate instanceof CustomChunkGenerator chunkGenerator)
|
||||
return chunkGenerator.getDelegate();
|
||||
return delegate;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getMinY() {
|
||||
return delegate.getMinY();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getSeaLevel() {
|
||||
return delegate.getSeaLevel();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void createStructures(RegistryAccess iregistrycustom, ChunkGeneratorStructureState chunkgeneratorstructurestate, StructureManager structuremanager, ChunkAccess ichunkaccess, StructureTemplateManager structuretemplatemanager, ResourceKey<Level> resourcekey) {
|
||||
delegate.createStructures(iregistrycustom, chunkgeneratorstructurestate, structuremanager, ichunkaccess, structuretemplatemanager, resourcekey);
|
||||
}
|
||||
|
||||
@Override
|
||||
public ChunkGeneratorStructureState createState(HolderLookup<StructureSet> holderlookup, RandomState randomstate, long i, SpigotWorldConfig conf) {
|
||||
return delegate.createState(holderlookup, randomstate, i, conf);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void createReferences(WorldGenLevel generatoraccessseed, StructureManager structuremanager, ChunkAccess ichunkaccess) {
|
||||
delegate.createReferences(generatoraccessseed, structuremanager, ichunkaccess);
|
||||
}
|
||||
|
||||
@Override
|
||||
public CompletableFuture<ChunkAccess> createBiomes(RandomState randomstate, Blender blender, StructureManager structuremanager, ChunkAccess ichunkaccess) {
|
||||
return delegate.createBiomes(randomstate, blender, structuremanager, ichunkaccess);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void buildSurface(WorldGenRegion regionlimitedworldaccess, StructureManager structuremanager, RandomState randomstate, ChunkAccess ichunkaccess) {
|
||||
delegate.buildSurface(regionlimitedworldaccess, structuremanager, randomstate, ichunkaccess);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void applyCarvers(WorldGenRegion regionlimitedworldaccess, long seed, RandomState randomstate, BiomeManager biomemanager, StructureManager structuremanager, ChunkAccess ichunkaccess) {
|
||||
delegate.applyCarvers(regionlimitedworldaccess, seed, randomstate, biomemanager, structuremanager, ichunkaccess);
|
||||
}
|
||||
|
||||
@Override
|
||||
public CompletableFuture<ChunkAccess> fillFromNoise(Blender blender, RandomState randomstate, StructureManager structuremanager, ChunkAccess ichunkaccess) {
|
||||
return delegate.fillFromNoise(blender, randomstate, structuremanager, ichunkaccess);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getBaseHeight(int i, int j, Heightmap.Types heightmap_type, LevelHeightAccessor levelheightaccessor, RandomState randomstate) {
|
||||
return delegate.getBaseHeight(i, j, heightmap_type, levelheightaccessor, randomstate);
|
||||
}
|
||||
|
||||
@Override
|
||||
public WeightedList<MobSpawnSettings.SpawnerData> getMobsAt(Holder<Biome> holder, StructureManager structuremanager, MobCategory enumcreaturetype, BlockPos blockposition) {
|
||||
return delegate.getMobsAt(holder, structuremanager, enumcreaturetype, blockposition);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void applyBiomeDecoration(WorldGenLevel generatoraccessseed, ChunkAccess ichunkaccess, StructureManager structuremanager) {
|
||||
delegate.applyBiomeDecoration(generatoraccessseed, ichunkaccess, structuremanager);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addDebugScreenInfo(List<String> list, RandomState randomstate, BlockPos blockposition) {
|
||||
delegate.addDebugScreenInfo(list, randomstate, blockposition);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void applyBiomeDecoration(WorldGenLevel generatoraccessseed, ChunkAccess ichunkaccess, StructureManager structuremanager, boolean vanilla) {
|
||||
delegate.applyBiomeDecoration(generatoraccessseed, ichunkaccess, structuremanager, vanilla);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getFirstFreeHeight(int i, int j, Heightmap.Types heightmap_type, LevelHeightAccessor levelheightaccessor, RandomState randomstate) {
|
||||
return delegate.getFirstFreeHeight(i, j, heightmap_type, levelheightaccessor, randomstate);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getFirstOccupiedHeight(int i, int j, Heightmap.Types heightmap_type, LevelHeightAccessor levelheightaccessor, RandomState randomstate) {
|
||||
return delegate.getFirstOccupiedHeight(i, j, heightmap_type, levelheightaccessor, randomstate);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void addVanillaDecorations(WorldGenLevel generatoraccessseed, ChunkAccess ichunkaccess, StructureManager structuremanager) {
|
||||
delegate.addVanillaDecorations(generatoraccessseed, ichunkaccess, structuremanager);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void spawnOriginalMobs(WorldGenRegion regionlimitedworldaccess) {
|
||||
delegate.spawnOriginalMobs(regionlimitedworldaccess);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getSpawnHeight(LevelHeightAccessor levelheightaccessor) {
|
||||
return delegate.getSpawnHeight(levelheightaccessor);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getGenDepth() {
|
||||
return delegate.getGenDepth();
|
||||
}
|
||||
|
||||
@Override
|
||||
public NoiseColumn getBaseColumn(int i, int j, LevelHeightAccessor levelheightaccessor, RandomState randomstate) {
|
||||
return delegate.getBaseColumn(i, j, levelheightaccessor, randomstate);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Optional<ResourceKey<MapCodec<? extends ChunkGenerator>>> getTypeNameForDataFixer() {
|
||||
return delegate.getTypeNameForDataFixer();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void validate() {
|
||||
delegate.validate();
|
||||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("deprecation")
|
||||
public BiomeGenerationSettings getBiomeGenerationSettings(Holder<Biome> holder) {
|
||||
return delegate.getBiomeGenerationSettings(holder);
|
||||
}
|
||||
|
||||
static {
|
||||
Field biomeSource = null;
|
||||
for (Field field : ChunkGenerator.class.getDeclaredFields()) {
|
||||
if (!field.getType().equals(BiomeSource.class))
|
||||
continue;
|
||||
biomeSource = field;
|
||||
break;
|
||||
}
|
||||
if (biomeSource == null)
|
||||
throw new RuntimeException("Could not find biomeSource field in ChunkGenerator!");
|
||||
BIOME_SOURCE = new WrappedField<>(ChunkGenerator.class, biomeSource.getName());
|
||||
}
|
||||
|
||||
private static ChunkGenerator edit(ChunkGenerator generator, BiomeSource source) {
|
||||
try {
|
||||
BIOME_SOURCE.set(generator, source);
|
||||
if (generator instanceof CustomChunkGenerator custom)
|
||||
BIOME_SOURCE.set(custom.getDelegate(), source);
|
||||
|
||||
return generator;
|
||||
} catch (IllegalAccessException e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
}
|
||||
}
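The constructor above resolves configured structure keys in two forms: a plain key names a single Structure, while a leading '#' names a structure tag that is expanded through the STRUCTURE registry into every structure it contains. A short sketch of that convention; the example keys are hypothetical:

    // Hypothetical keys illustrating the '#' convention handled in the constructor above.
    static boolean isTagKey(String raw) {
        return raw.startsWith("#");   // "#minecraft:village"      -> expand the whole tag
    }                                 // "minecraft:ancient_city"  -> a single structure key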
|
@ -0,0 +1,765 @@
|
||||
package com.volmit.iris.core.nms.v1_21_R4;
|
||||
|
||||
import com.mojang.brigadier.exceptions.CommandSyntaxException;
|
||||
import com.mojang.serialization.Lifecycle;
|
||||
import com.volmit.iris.Iris;
|
||||
import com.volmit.iris.core.nms.INMSBinding;
|
||||
import com.volmit.iris.core.nms.container.AutoClosing;
|
||||
import com.volmit.iris.core.nms.container.BiomeColor;
|
||||
import com.volmit.iris.core.nms.datapack.DataVersion;
|
||||
import com.volmit.iris.engine.data.cache.AtomicCache;
|
||||
import com.volmit.iris.engine.framework.Engine;
|
||||
import com.volmit.iris.util.collection.KList;
|
||||
import com.volmit.iris.util.collection.KMap;
|
||||
import com.volmit.iris.util.hunk.Hunk;
|
||||
import com.volmit.iris.util.json.JSONObject;
|
||||
import com.volmit.iris.util.mantle.Mantle;
|
||||
import com.volmit.iris.util.math.Vector3d;
|
||||
import com.volmit.iris.util.matter.MatterBiomeInject;
|
||||
import com.volmit.iris.util.nbt.mca.NBTWorld;
|
||||
import com.volmit.iris.util.nbt.mca.palette.*;
|
||||
import com.volmit.iris.util.nbt.tag.CompoundTag;
|
||||
import com.volmit.iris.util.scheduling.J;
|
||||
import it.unimi.dsi.fastutil.objects.Object2IntMap;
|
||||
import lombok.SneakyThrows;
|
||||
import net.minecraft.core.Registry;
|
||||
import net.minecraft.core.*;
|
||||
import net.minecraft.core.component.DataComponents;
|
||||
import net.minecraft.core.registries.Registries;
|
||||
import net.minecraft.nbt.Tag;
|
||||
import net.minecraft.nbt.*;
|
||||
import net.minecraft.resources.ResourceKey;
|
||||
import net.minecraft.resources.ResourceLocation;
|
||||
import net.minecraft.server.MinecraftServer;
|
||||
import net.minecraft.server.WorldLoader;
|
||||
import net.minecraft.server.commands.data.BlockDataAccessor;
|
||||
import net.minecraft.server.level.ServerLevel;
|
||||
import net.minecraft.tags.TagKey;
|
||||
import net.minecraft.world.entity.EntityType;
|
||||
import net.minecraft.world.item.component.CustomData;
|
||||
import net.minecraft.world.level.LevelReader;
|
||||
import net.minecraft.world.level.biome.Biomes;
|
||||
import net.minecraft.world.level.block.Block;
|
||||
import net.minecraft.world.level.block.Blocks;
|
||||
import net.minecraft.world.level.block.EntityBlock;
|
||||
import net.minecraft.world.level.block.entity.BlockEntity;
|
||||
import net.minecraft.world.level.block.state.BlockState;
|
||||
import net.minecraft.world.level.chunk.ChunkAccess;
|
||||
import net.minecraft.world.level.chunk.LevelChunk;
|
||||
import net.minecraft.world.level.chunk.status.ChunkStatus;
|
||||
import net.minecraft.world.level.chunk.status.WorldGenContext;
|
||||
import net.minecraft.world.level.dimension.DimensionType;
|
||||
import net.minecraft.world.level.dimension.LevelStem;
|
||||
import net.minecraft.world.level.levelgen.FlatLevelSource;
|
||||
import net.minecraft.world.level.levelgen.flat.FlatLayerInfo;
|
||||
import net.minecraft.world.level.levelgen.flat.FlatLevelGeneratorSettings;
|
||||
import org.bukkit.*;
|
||||
import org.bukkit.block.Biome;
|
||||
import org.bukkit.block.data.BlockData;
|
||||
import org.bukkit.craftbukkit.v1_21_R4.CraftChunk;
|
||||
import org.bukkit.craftbukkit.v1_21_R4.CraftServer;
|
||||
import org.bukkit.craftbukkit.v1_21_R4.CraftWorld;
|
||||
import org.bukkit.craftbukkit.v1_21_R4.block.CraftBlockState;
|
||||
import org.bukkit.craftbukkit.v1_21_R4.block.CraftBlockStates;
|
||||
import org.bukkit.craftbukkit.v1_21_R4.block.data.CraftBlockData;
|
||||
import org.bukkit.craftbukkit.v1_21_R4.inventory.CraftItemStack;
|
||||
import org.bukkit.craftbukkit.v1_21_R4.util.CraftNamespacedKey;
|
||||
import org.bukkit.entity.Entity;
|
||||
import org.bukkit.event.entity.CreatureSpawnEvent;
|
||||
import org.bukkit.generator.ChunkGenerator;
|
||||
import org.bukkit.inventory.ItemStack;
|
||||
import org.jetbrains.annotations.Contract;
|
||||
import org.jetbrains.annotations.NotNull;
|
||||
|
||||
import java.awt.Color;
|
||||
import java.lang.reflect.Field;
|
||||
import java.lang.reflect.Method;
|
||||
import java.lang.reflect.Modifier;
|
||||
import java.util.List;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
import java.util.concurrent.locks.ReentrantLock;
|
||||
|
||||
public class NMSBinding implements INMSBinding {
|
||||
private final KMap<Biome, Object> baseBiomeCache = new KMap<>();
|
||||
private final BlockData AIR = Material.AIR.createBlockData();
|
||||
private final AtomicCache<MCAIdMap<net.minecraft.world.level.biome.Biome>> biomeMapCache = new AtomicCache<>();
|
||||
private final AtomicCache<WorldLoader.DataLoadContext> dataLoadContext = new AtomicCache<>();
|
||||
private final AtomicCache<MCAIdMapper<BlockState>> registryCache = new AtomicCache<>();
|
||||
private final AtomicCache<MCAPalette<BlockState>> globalCache = new AtomicCache<>();
|
||||
private final AtomicCache<RegistryAccess> registryAccess = new AtomicCache<>();
|
||||
private final ReentrantLock dataContextLock = new ReentrantLock(true);
|
||||
private final AtomicCache<Method> byIdRef = new AtomicCache<>();
|
||||
private Field biomeStorageCache = null;
|
||||
|
||||
private static Object getFor(Class<?> type, Object source) {
|
||||
Object o = fieldFor(type, source);
|
||||
|
||||
if (o != null) {
|
||||
return o;
|
||||
}
|
||||
|
||||
return invokeFor(type, source);
|
||||
}
|
||||
|
||||
private static Object invokeFor(Class<?> returns, Object in) {
|
||||
for (Method i : in.getClass().getMethods()) {
|
||||
if (i.getReturnType().equals(returns)) {
|
||||
i.setAccessible(true);
|
||||
try {
|
||||
Iris.debug("[NMS] Found " + returns.getSimpleName() + " in " + in.getClass().getSimpleName() + "." + i.getName() + "()");
|
||||
return i.invoke(in);
|
||||
} catch (Throwable e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private static Object fieldFor(Class<?> returns, Object in) {
|
||||
return fieldForClass(returns, in.getClass(), in);
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private static <T> T fieldForClass(Class<T> returnType, Class<?> sourceType, Object in) {
|
||||
for (Field i : sourceType.getDeclaredFields()) {
|
||||
if (i.getType().equals(returnType)) {
|
||||
i.setAccessible(true);
|
||||
try {
|
||||
Iris.debug("[NMS] Found " + returnType.getSimpleName() + " in " + sourceType.getSimpleName() + "." + i.getName());
|
||||
return (T) i.get(in);
|
||||
} catch (IllegalAccessException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private static Class<?> getClassType(Class<?> type, int ordinal) {
|
||||
return type.getDeclaredClasses()[ordinal];
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasTile(Material material) {
|
||||
return !CraftBlockState.class.equals(CraftBlockStates.getBlockStateType(material));
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean hasTile(Location l) {
|
||||
return ((CraftWorld) l.getWorld()).getHandle().getBlockEntity(new BlockPos(l.getBlockX(), l.getBlockY(), l.getBlockZ()), false) != null;
|
||||
}
|
||||
|
||||
@Override
|
||||
@SuppressWarnings("unchecked")
|
||||
public KMap<String, Object> serializeTile(Location location) {
|
||||
BlockEntity e = ((CraftWorld) location.getWorld()).getHandle().getBlockEntity(new BlockPos(location.getBlockX(), location.getBlockY(), location.getBlockZ()), false);
|
||||
|
||||
if (e == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
net.minecraft.nbt.CompoundTag tag = e.saveWithoutMetadata(registry());
|
||||
return (KMap<String, Object>) convertFromTag(tag, 0, 64);
|
||||
}
|
||||
|
||||
@Contract(value = "null, _, _ -> null", pure = true)
|
||||
private Object convertFromTag(net.minecraft.nbt.Tag tag, int depth, int maxDepth) {
|
||||
if (tag == null || depth > maxDepth) return null;
|
||||
return switch (tag) {
|
||||
case CollectionTag collection -> {
|
||||
KList<Object> list = new KList<>();
|
||||
|
||||
for (Object i : collection) {
|
||||
if (i instanceof net.minecraft.nbt.Tag t)
|
||||
list.add(convertFromTag(t, depth + 1, maxDepth));
|
||||
else list.add(i);
|
||||
}
|
||||
yield list;
|
||||
}
|
||||
case net.minecraft.nbt.CompoundTag compound -> {
|
||||
KMap<String, Object> map = new KMap<>();
|
||||
|
||||
for (String key : compound.keySet()) {
|
||||
var child = compound.get(key);
|
||||
if (child == null) continue;
|
||||
var value = convertFromTag(child, depth + 1, maxDepth);
|
||||
if (value == null) continue;
|
||||
map.put(key, value);
|
||||
}
|
||||
yield map;
|
||||
}
|
||||
case NumericTag numeric -> numeric.box();
|
||||
default -> tag.asString().orElse(null);
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deserializeTile(KMap<String, Object> map, Location pos) {
|
||||
net.minecraft.nbt.CompoundTag tag = (net.minecraft.nbt.CompoundTag) convertToTag(map, 0, 64);
|
||||
var level = ((CraftWorld) pos.getWorld()).getHandle();
|
||||
var blockPos = new BlockPos(pos.getBlockX(), pos.getBlockY(), pos.getBlockZ());
|
||||
J.s(() -> merge(level, blockPos, tag));
|
||||
}
|
||||
|
||||
private void merge(ServerLevel level, BlockPos blockPos, net.minecraft.nbt.CompoundTag tag) {
|
||||
var blockEntity = level.getBlockEntity(blockPos);
|
||||
if (blockEntity == null) {
|
||||
Iris.warn("[NMS] BlockEntity not found at " + blockPos);
|
||||
var state = level.getBlockState(blockPos);
|
||||
if (!state.hasBlockEntity())
|
||||
return;
|
||||
|
||||
blockEntity = ((EntityBlock) state.getBlock())
|
||||
.newBlockEntity(blockPos, state);
|
||||
}
|
||||
var accessor = new BlockDataAccessor(blockEntity, blockPos);
|
||||
accessor.setData(accessor.getData().merge(tag));
|
||||
}
|
||||
|
||||
private Tag convertToTag(Object object, int depth, int maxDepth) {
|
||||
if (object == null || depth > maxDepth) return EndTag.INSTANCE;
|
||||
return switch (object) {
|
||||
case Map<?, ?> map -> {
|
||||
var tag = new net.minecraft.nbt.CompoundTag();
|
||||
for (var i : map.entrySet()) {
|
||||
tag.put(i.getKey().toString(), convertToTag(i.getValue(), depth + 1, maxDepth));
|
||||
}
|
||||
yield tag;
|
||||
}
|
||||
case List<?> list -> {
|
||||
var tag = new net.minecraft.nbt.ListTag();
|
||||
for (var i : list) {
|
||||
tag.add(convertToTag(i, depth + 1, maxDepth));
|
||||
}
|
||||
yield tag;
|
||||
}
|
||||
case Byte number -> ByteTag.valueOf(number);
|
||||
case Short number -> ShortTag.valueOf(number);
|
||||
case Integer number -> IntTag.valueOf(number);
|
||||
case Long number -> LongTag.valueOf(number);
|
||||
case Float number -> FloatTag.valueOf(number);
|
||||
case Double number -> DoubleTag.valueOf(number);
|
||||
case String string -> StringTag.valueOf(string);
|
||||
default -> EndTag.INSTANCE;
|
||||
};
|
||||
}
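convertFromTag and convertToTag above cap recursion with a (depth, maxDepth) pair so a pathological or self-referencing NBT tree cannot overflow the stack. A generic, dependency-free sketch of the same guard using plain collections:

    import java.util.*;

    // Sketch of the depth-limited copy idea used by convertToTag/convertFromTag above:
    // recursion carries (depth, maxDepth) so an oversized tree is cut off instead of recursing forever.
    static Object copyLimited(Object value, int depth, int maxDepth) {
        if (value == null || depth > maxDepth) return null;
        if (value instanceof Map<?, ?> map) {
            Map<String, Object> out = new HashMap<>();
            for (var e : map.entrySet()) {
                Object child = copyLimited(e.getValue(), depth + 1, maxDepth);
                if (child != null) out.put(String.valueOf(e.getKey()), child);
            }
            return out;
        }
        if (value instanceof List<?> list) {
            List<Object> out = new ArrayList<>();
            for (Object i : list) out.add(copyLimited(i, depth + 1, maxDepth));
            return out;
        }
        return value; // numbers and strings pass through, mirroring the NumericTag/StringTag cases
    }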
|
||||
|
||||
@Override
|
||||
public CompoundTag serializeEntity(Entity location) {
|
||||
return null;// TODO:
|
||||
}
|
||||
|
||||
@Override
|
||||
public Entity deserializeEntity(CompoundTag s, Location newPosition) {
|
||||
return null;// TODO:
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean supportsCustomHeight() {
|
||||
return true;
|
||||
}
|
||||
|
||||
private RegistryAccess registry() {
|
||||
return registryAccess.aquire(() -> (RegistryAccess) getFor(RegistryAccess.Frozen.class, ((CraftServer) Bukkit.getServer()).getHandle().getServer()));
|
||||
}
|
||||
|
||||
private Registry<net.minecraft.world.level.biome.Biome> getCustomBiomeRegistry() {
|
||||
return registry().lookup(Registries.BIOME).orElse(null);
|
||||
}
|
||||
|
||||
private Registry<Block> getBlockRegistry() {
|
||||
return registry().lookup(Registries.BLOCK).orElse(null);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getBiomeBaseFromId(int id) {
|
||||
return getCustomBiomeRegistry().get(id);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getMinHeight(World world) {
|
||||
return world.getMinHeight();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean supportsCustomBiomes() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getTrueBiomeBaseId(Object biomeBase) {
|
||||
return getCustomBiomeRegistry().getId(((Holder<net.minecraft.world.level.biome.Biome>) biomeBase).value());
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getTrueBiomeBase(Location location) {
|
||||
return ((CraftWorld) location.getWorld()).getHandle().getBiome(new BlockPos(location.getBlockX(), location.getBlockY(), location.getBlockZ()));
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTrueBiomeBaseKey(Location location) {
|
||||
return getKeyForBiomeBase(getTrueBiomeBase(location));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getCustomBiomeBaseFor(String mckey) {
|
||||
return getCustomBiomeRegistry().getValue(ResourceLocation.parse(mckey));
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getCustomBiomeBaseHolderFor(String mckey) {
|
||||
return getCustomBiomeRegistry().get(getTrueBiomeBaseId(getCustomBiomeRegistry().get(ResourceLocation.parse(mckey)))).orElse(null);
|
||||
}
|
||||
|
||||
public int getBiomeBaseIdForKey(String key) {
|
||||
return getCustomBiomeRegistry().getId(getCustomBiomeRegistry().get(ResourceLocation.parse(key)).map(Holder::value).orElse(null));
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getKeyForBiomeBase(Object biomeBase) {
|
||||
return getCustomBiomeRegistry().getKey((net.minecraft.world.level.biome.Biome) biomeBase).getPath(); // something, not something:something
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getBiomeBase(World world, Biome biome) {
|
||||
return biomeToBiomeBase(((CraftWorld) world).getHandle()
|
||||
.registryAccess().lookup(Registries.BIOME).orElse(null), biome);
|
||||
}
|
||||
|
||||
@Override
|
||||
public Object getBiomeBase(Object registry, Biome biome) {
|
||||
Object v = baseBiomeCache.get(biome);
|
||||
|
||||
if (v != null) {
|
||||
return v;
|
||||
}
|
||||
//noinspection unchecked
|
||||
v = biomeToBiomeBase((Registry<net.minecraft.world.level.biome.Biome>) registry, biome);
|
||||
if (v == null) {
|
||||
// Ok so there is this new biome name called "CUSTOM" in Paper's new releases.
|
||||
// But, this does NOT exist within CraftBukkit which makes it return an error.
|
||||
// So, we will just return the ID that the plains biome returns instead.
|
||||
//noinspection unchecked
|
||||
return biomeToBiomeBase((Registry<net.minecraft.world.level.biome.Biome>) registry, Biome.PLAINS);
|
||||
}
|
||||
baseBiomeCache.put(biome, v);
|
||||
return v;
|
||||
}
|
||||
|
||||
@Override
|
||||
public KList<Biome> getBiomes() {
|
||||
return new KList<>(Biome.values()).qadd(Biome.CHERRY_GROVE).qdel(Biome.CUSTOM);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isBukkit() {
|
||||
return true;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getBiomeId(Biome biome) {
|
||||
for (World i : Bukkit.getWorlds()) {
|
||||
if (i.getEnvironment().equals(World.Environment.NORMAL)) {
|
||||
Registry<net.minecraft.world.level.biome.Biome> registry = ((CraftWorld) i).getHandle().registryAccess().lookup(Registries.BIOME).orElse(null);
|
||||
return registry.getId((net.minecraft.world.level.biome.Biome) getBiomeBase(registry, biome));
|
||||
}
|
||||
}
|
||||
|
||||
return biome.ordinal();
|
||||
}
|
||||
|
||||
private MCAIdMap<net.minecraft.world.level.biome.Biome> getBiomeMapping() {
|
||||
return biomeMapCache.aquire(() -> new MCAIdMap<>() {
|
||||
@NotNull
|
||||
@Override
|
||||
public Iterator<net.minecraft.world.level.biome.Biome> iterator() {
|
||||
return getCustomBiomeRegistry().iterator();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getId(net.minecraft.world.level.biome.Biome paramT) {
|
||||
return getCustomBiomeRegistry().getId(paramT);
|
||||
}
|
||||
|
||||
@Override
|
||||
public net.minecraft.world.level.biome.Biome byId(int paramInt) {
|
||||
return (net.minecraft.world.level.biome.Biome) getBiomeBaseFromId(paramInt);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@NotNull
|
||||
private MCABiomeContainer getBiomeContainerInterface(MCAIdMap<net.minecraft.world.level.biome.Biome> biomeMapping, MCAChunkBiomeContainer<net.minecraft.world.level.biome.Biome> base) {
|
||||
return new MCABiomeContainer() {
|
||||
@Override
|
||||
public int[] getData() {
|
||||
return base.writeBiomes();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setBiome(int x, int y, int z, int id) {
|
||||
base.setBiome(x, y, z, biomeMapping.byId(id));
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getBiome(int x, int y, int z) {
|
||||
return biomeMapping.getId(base.getBiome(x, y, z));
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@Override
|
||||
public MCABiomeContainer newBiomeContainer(int min, int max) {
|
||||
MCAChunkBiomeContainer<net.minecraft.world.level.biome.Biome> base = new MCAChunkBiomeContainer<>(getBiomeMapping(), min, max);
|
||||
return getBiomeContainerInterface(getBiomeMapping(), base);
|
||||
}
|
||||
|
||||
@Override
|
||||
public MCABiomeContainer newBiomeContainer(int min, int max, int[] data) {
|
||||
MCAChunkBiomeContainer<net.minecraft.world.level.biome.Biome> base = new MCAChunkBiomeContainer<>(getBiomeMapping(), min, max, data);
|
||||
return getBiomeContainerInterface(getBiomeMapping(), base);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int countCustomBiomes() {
|
||||
AtomicInteger a = new AtomicInteger(0);
|
||||
|
||||
getCustomBiomeRegistry().keySet().forEach((i) -> {
|
||||
if (i.getNamespace().equals("minecraft")) {
|
||||
return;
|
||||
}
|
||||
|
||||
a.incrementAndGet();
|
||||
Iris.debug("Custom Biome: " + i);
|
||||
});
|
||||
|
||||
return a.get();
|
||||
}
|
||||
|
||||
public boolean supportsDataPacks() {
|
||||
return true;
|
||||
}
|
||||
|
||||
public void setBiomes(int cx, int cz, World world, Hunk<Object> biomes) {
|
||||
LevelChunk c = ((CraftWorld) world).getHandle().getChunk(cx, cz);
|
||||
biomes.iterateSync((x, y, z, b) -> c.setBiome(x, y, z, (Holder<net.minecraft.world.level.biome.Biome>) b));
|
||||
c.markUnsaved();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void forceBiomeInto(int x, int y, int z, Object somethingVeryDirty, ChunkGenerator.BiomeGrid chunk) {
|
||||
try {
|
||||
ChunkAccess s = (ChunkAccess) getFieldForBiomeStorage(chunk).get(chunk);
|
||||
Holder<net.minecraft.world.level.biome.Biome> biome = (Holder<net.minecraft.world.level.biome.Biome>) somethingVeryDirty;
|
||||
s.setBiome(x, y, z, biome);
|
||||
} catch (IllegalAccessException e) {
|
||||
Iris.reportError(e);
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
private Field getFieldForBiomeStorage(Object storage) {
|
||||
Field f = biomeStorageCache;
|
||||
|
||||
if (f != null) {
|
||||
return f;
|
||||
}
|
||||
try {
|
||||
f = storage.getClass().getDeclaredField("biome");
|
||||
f.setAccessible(true);
|
||||
return f;
|
||||
} catch (Throwable e) {
|
||||
Iris.reportError(e);
|
||||
e.printStackTrace();
|
||||
Iris.error(storage.getClass().getCanonicalName());
|
||||
}
|
||||
|
||||
biomeStorageCache = f;
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public MCAPaletteAccess createPalette() {
|
||||
MCAIdMapper<BlockState> registry = registryCache.aquireNasty(() -> {
|
||||
Field cf = net.minecraft.core.IdMapper.class.getDeclaredField("tToId");
|
||||
Field df = net.minecraft.core.IdMapper.class.getDeclaredField("idToT");
|
||||
Field bf = net.minecraft.core.IdMapper.class.getDeclaredField("nextId");
|
||||
cf.setAccessible(true);
|
||||
df.setAccessible(true);
|
||||
bf.setAccessible(true);
|
||||
net.minecraft.core.IdMapper<BlockState> blockData = Block.BLOCK_STATE_REGISTRY;
|
||||
int b = bf.getInt(blockData);
|
||||
Object2IntMap<BlockState> c = (Object2IntMap<BlockState>) cf.get(blockData);
|
||||
List<BlockState> d = (List<BlockState>) df.get(blockData);
|
||||
return new MCAIdMapper<BlockState>(c, d, b);
|
||||
});
|
||||
MCAPalette<BlockState> global = globalCache.aquireNasty(() -> new MCAGlobalPalette<>(registry, ((CraftBlockData) AIR).getState()));
|
||||
MCAPalettedContainer<BlockState> container = new MCAPalettedContainer<>(global, registry,
|
||||
i -> ((CraftBlockData) NBTWorld.getBlockData(i)).getState(),
|
||||
i -> NBTWorld.getCompound(CraftBlockData.fromData(i)),
|
||||
((CraftBlockData) AIR).getState());
|
||||
return new MCAWrappedPalettedContainer<>(container,
|
||||
i -> NBTWorld.getCompound(CraftBlockData.fromData(i)),
|
||||
i -> ((CraftBlockData) NBTWorld.getBlockData(i)).getState());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void injectBiomesFromMantle(Chunk e, Mantle mantle) {
|
||||
ChunkAccess chunk = ((CraftChunk) e).getHandle(ChunkStatus.FULL);
|
||||
AtomicInteger c = new AtomicInteger();
|
||||
AtomicInteger r = new AtomicInteger();
|
||||
mantle.iterateChunk(e.getX(), e.getZ(), MatterBiomeInject.class, (x, y, z, b) -> {
|
||||
if (b != null) {
|
||||
if (b.isCustom()) {
|
||||
chunk.setBiome(x, y, z, getCustomBiomeRegistry().get(b.getBiomeId()).get());
|
||||
c.getAndIncrement();
|
||||
} else {
|
||||
chunk.setBiome(x, y, z, (Holder<net.minecraft.world.level.biome.Biome>) getBiomeBase(e.getWorld(), b.getBiome()));
|
||||
r.getAndIncrement();
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
public ItemStack applyCustomNbt(ItemStack itemStack, KMap<String, Object> customNbt) throws IllegalArgumentException {
|
||||
if (customNbt != null && !customNbt.isEmpty()) {
|
||||
net.minecraft.world.item.ItemStack s = CraftItemStack.asNMSCopy(itemStack);
|
||||
|
||||
try {
|
||||
net.minecraft.nbt.CompoundTag tag = TagParser.parseCompoundFully((new JSONObject(customNbt)).toString());
|
||||
tag.merge(s.getOrDefault(DataComponents.CUSTOM_DATA, CustomData.EMPTY).getUnsafe());
|
||||
s.set(DataComponents.CUSTOM_DATA, CustomData.of(tag));
|
||||
} catch (CommandSyntaxException var5) {
|
||||
throw new IllegalArgumentException(var5);
|
||||
}
|
||||
|
||||
return CraftItemStack.asBukkitCopy(s);
|
||||
} else {
|
||||
return itemStack;
|
||||
}
|
||||
}
|
||||
|
||||
public void inject(long seed, Engine engine, World world) throws NoSuchFieldException, IllegalAccessException {
|
||||
var chunkMap = ((CraftWorld)world).getHandle().getChunkSource().chunkMap;
|
||||
var worldGenContextField = getField(chunkMap.getClass(), WorldGenContext.class);
|
||||
worldGenContextField.setAccessible(true);
|
||||
var worldGenContext = (WorldGenContext) worldGenContextField.get(chunkMap);
|
||||
var dimensionType = chunkMap.level.dimensionTypeRegistration().unwrapKey().orElse(null);
|
||||
if (dimensionType != null && !dimensionType.location().getNamespace().equals("iris"))
|
||||
Iris.error("Loaded world %s with invalid dimension type! (%s)", world.getName(), dimensionType.location().toString());
|
||||
|
||||
var newContext = new WorldGenContext(
|
||||
worldGenContext.level(), new IrisChunkGenerator(worldGenContext.generator(), seed, engine, world),
|
||||
worldGenContext.structureManager(), worldGenContext.lightEngine(), worldGenContext.mainThreadExecutor(), worldGenContext.unsavedListener());
|
||||
|
||||
worldGenContextField.set(chunkMap, newContext);
|
||||
}
|
||||
|
||||
public Vector3d getBoundingbox(org.bukkit.entity.EntityType entity) {
|
||||
Field[] fields = EntityType.class.getDeclaredFields();
|
||||
for (Field field : fields) {
|
||||
if (Modifier.isStatic(field.getModifiers()) && field.getType().equals(EntityType.class)) {
|
||||
try {
|
||||
EntityType entityType = (EntityType) field.get(null);
|
||||
if (entityType.getDescriptionId().equals("entity.minecraft." + entity.name().toLowerCase())) {
|
||||
Vector<Float> v1 = new Vector<>();
|
||||
v1.add(entityType.getHeight());
|
||||
entityType.getDimensions();
|
||||
Vector3d box = new Vector3d( entityType.getWidth(), entityType.getHeight(), entityType.getWidth());
|
||||
//System.out.println("Entity Type: " + entityType.getDescriptionId() + ", " + "Height: " + height + ", Width: " + width);
|
||||
return box;
|
||||
}
|
||||
} catch (IllegalAccessException e) {
|
||||
Iris.error("Unable to get entity dimensions!");
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public Entity spawnEntity(Location location, org.bukkit.entity.EntityType type, CreatureSpawnEvent.SpawnReason reason) {
|
||||
return ((CraftWorld) location.getWorld()).spawn(location, type.getEntityClass(), null, reason);
|
||||
}
    @Override
    public java.awt.Color getBiomeColor(Location location, BiomeColor type) {
        LevelReader reader = ((CraftWorld) location.getWorld()).getHandle();
        var holder = reader.getBiome(new BlockPos(location.getBlockX(), location.getBlockY(), location.getBlockZ()));
        var biome = holder.value();
        if (biome == null) throw new IllegalArgumentException("Invalid biome: " + holder.unwrapKey().orElse(null));

        int rgba = switch (type) {
            case FOG -> biome.getFogColor();
            case WATER -> biome.getWaterColor();
            case WATER_FOG -> biome.getWaterFogColor();
            case SKY -> biome.getSkyColor();
            case FOLIAGE -> biome.getFoliageColor();
            case GRASS -> biome.getGrassColor(location.getBlockX(), location.getBlockZ());
        };
        if (rgba == 0) {
            if (BiomeColor.FOLIAGE == type && biome.getSpecialEffects().getFoliageColorOverride().isEmpty())
                return null;
            if (BiomeColor.GRASS == type && biome.getSpecialEffects().getGrassColorOverride().isEmpty())
                return null;
        }
        return new Color(rgba, true);
    }
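
    // Finds the first declared field of the given type, walking up the superclass chain if necessary.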
    private static Field getField(Class<?> clazz, Class<?> fieldType) throws NoSuchFieldException {
        try {
            for (Field f : clazz.getDeclaredFields()) {
                if (f.getType().equals(fieldType))
                    return f;
            }
            throw new NoSuchFieldException(fieldType.getName());
        } catch (NoSuchFieldException var4) {
            Class<?> superClass = clazz.getSuperclass();
            if (superClass == null) {
                throw var4;
            } else {
                return getField(superClass, fieldType);
            }
        }
    }

    public static Holder<net.minecraft.world.level.biome.Biome> biomeToBiomeBase(Registry<net.minecraft.world.level.biome.Biome> registry, Biome biome) {
        return registry.getOrThrow(ResourceKey.create(Registries.BIOME, CraftNamespacedKey.toMinecraft(biome.getKey())));
    }

    @Override
    public DataVersion getDataVersion() {
        return DataVersion.V1213;
    }
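
    // Derives the spawn chunk count from the SPAWN_CHUNK_RADIUS game rule: a radius r covers a (2r + 1) x (2r + 1) square of chunks.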
    @Override
    public int getSpawnChunkCount(World world) {
        var radius = Optional.ofNullable(world.getGameRuleValue(GameRule.SPAWN_CHUNK_RADIUS))
                .orElseGet(() -> world.getGameRuleDefault(GameRule.SPAWN_CHUNK_RADIUS));
        if (radius == null) throw new IllegalStateException("GameRule.SPAWN_CHUNK_RADIUS is null!");
        return (int) Math.pow(2 * radius + 1, 2);
    }
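
    // Collects every structure key plus every structure tag (prefixed with '#') from the structure registry.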
    @Override
    public KList<String> getStructureKeys() {
        KList<String> keys = new KList<>();

        var registry = registry().lookup(Registries.STRUCTURE).orElse(null);
        if (registry == null) return keys;
        registry.keySet().stream().map(ResourceLocation::toString).forEach(keys::add);
        registry.getTags()
                .map(HolderSet.Named::key)
                .map(TagKey::location)
                .map(ResourceLocation::toString)
                .map(s -> "#" + s)
                .forEach(keys::add);

        return keys;
    }
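
    // Temporarily swaps the server's WorldLoader.DataLoadContext for one whose datapack dimensions contain the Iris
    // level stems; the returned AutoClosing restores the original context and releases the lock.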
    @Override
    @SneakyThrows
    public AutoClosing injectLevelStems() {
        if (!dataContextLock.tryLock()) throw new IllegalStateException("Failed to inject data context!");

        var server = ((CraftServer) Bukkit.getServer());
        var field = getField(MinecraftServer.class, WorldLoader.DataLoadContext.class);
        var nmsServer = server.getServer();
        var old = nmsServer.worldLoader;

        field.setAccessible(true);
        field.set(nmsServer, dataLoadContext.aquire(() -> new WorldLoader.DataLoadContext(
                old.resources(),
                old.dataConfiguration(),
                old.datapackWorldgen(),
                createRegistryAccess(old.datapackDimensions(), false, true, true, true)
        )));

        return new AutoClosing(() -> {
            field.set(nmsServer, old);
            dataContextLock.unlock();
        });
    }
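
    // Temporarily replaces the LEVEL_STEM registry inside the server's ImmutableRegistryAccess map with one containing
    // the requested Iris stems; closing the returned handle restores the original registry map.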
    @Override
    @SneakyThrows
    public AutoClosing injectUncached(boolean overworld, boolean nether, boolean end) {
        var reg = registry();
        var field = getField(RegistryAccess.ImmutableRegistryAccess.class, Map.class);
        field.setAccessible(true);

        var access = createRegistryAccess(((CraftServer) Bukkit.getServer()).getServer().worldLoader.datapackDimensions(), true, overworld, nether, end);
        var injected = access.lookupOrThrow(Registries.LEVEL_STEM);
        var old = (Map<ResourceKey<? extends Registry<?>>, Registry<?>>) field.get(reg);
        var fake = new HashMap<>(old);
        fake.put(Registries.LEVEL_STEM, injected);
        field.set(reg, fake);

        return new AutoClosing(() -> field.set(reg, old));
    }
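
    // Returns true if any of the requested Iris dimension types are missing from the DIMENSION_TYPE registry.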
    @Override
    public boolean missingDimensionTypes(boolean overworld, boolean nether, boolean end) {
        var registry = registry().lookupOrThrow(Registries.DIMENSION_TYPE);
        if (overworld) overworld = !registry.containsKey(createIrisKey(LevelStem.OVERWORLD));
        if (nether) nether = !registry.containsKey(createIrisKey(LevelStem.NETHER));
        if (end) end = !registry.containsKey(createIrisKey(LevelStem.END));
        return overworld || nether || end;
    }
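
    // Clears the world's customDimensions; 'L' is the obfuscated field name (presumably the primary level data) in these mappings.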
    @Override
    public void removeCustomDimensions(World world) {
        ((CraftWorld) world).getHandle().L.customDimensions = null;
    }
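
    // Builds a frozen RegistryAccess whose LEVEL_STEM registry holds Iris-keyed stems backed by a void superflat
    // generator, plus any stems from the datapack registry (and, when copy is set, from the live registry).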
    private RegistryAccess.Frozen createRegistryAccess(RegistryAccess.Frozen datapack, boolean copy, boolean overworld, boolean nether, boolean end) {
        var access = registry();
        var dimensions = access.lookupOrThrow(Registries.DIMENSION_TYPE);

        var settings = new FlatLevelGeneratorSettings(
                Optional.empty(),
                access.lookupOrThrow(Registries.BIOME).getOrThrow(Biomes.THE_VOID),
                List.of()
        );
        settings.getLayersInfo().add(new FlatLayerInfo(1, Blocks.AIR));
        settings.updateLayers();

        var source = new FlatLevelSource(settings);
        var fake = new MappedRegistry<>(Registries.LEVEL_STEM, Lifecycle.experimental());
        if (overworld) register(fake, dimensions, source, LevelStem.OVERWORLD);
        if (nether) register(fake, dimensions, source, LevelStem.NETHER);
        if (end) register(fake, dimensions, source, LevelStem.END);
        copy(fake, datapack.lookup(Registries.LEVEL_STEM).orElse(null));

        if (copy) copy(fake, access.lookupOrThrow(Registries.LEVEL_STEM));

        return new RegistryAccess.ImmutableRegistryAccess(List.of(fake)).freeze();
    }

    private void register(MappedRegistry<LevelStem> target, Registry<DimensionType> dimensions, FlatLevelSource source, ResourceKey<LevelStem> key) {
        var loc = createIrisKey(key);
        target.register(key, new LevelStem(
                dimensions.get(loc).orElseThrow(() -> new IllegalStateException("Missing dimension type " + loc + " in " + dimensions.keySet())),
                source
        ), RegistrationInfo.BUILT_IN);
    }

    private void copy(MappedRegistry<LevelStem> target, Registry<LevelStem> source) {
        if (source == null) return;
        source.listElementIds().forEach(key -> {
            var value = source.getValue(key);
            var info = source.registrationInfo(key).orElse(null);
            if (value != null && info != null && !target.containsKey(key))
                target.register(key, value, info);
        });
    }

    private ResourceLocation createIrisKey(ResourceKey<LevelStem> key) {
        return ResourceLocation.fromNamespaceAndPath("iris", key.location().getPath());
    }
}
@ -30,6 +30,7 @@ rootProject.name = 'Iris'

include(':core')
include(
        ':nms:v1_21_R4',
        ':nms:v1_21_R3',
        ':nms:v1_21_R2',
        ':nms:v1_21_R1',