Merge branch 'dev' into feat/folia

Julian Krings 2025-07-25 22:34:17 +02:00
commit 2f16c0cfb7
No known key found for this signature in database
GPG Key ID: 208C6E08C3B718D2
9 changed files with 212 additions and 68 deletions

View File

@@ -1,5 +1,6 @@
import io.github.slimjar.func.slimjar
import io.github.slimjar.resolver.data.Mirror
import org.ajoberstar.grgit.Grgit
import java.net.URI
/*
@@ -26,6 +27,7 @@ plugins {
alias(libs.plugins.shadow)
alias(libs.plugins.sentry)
alias(libs.plugins.slimjar)
alias(libs.plugins.grgit)
}
val apiVersion = "1.19"
@@ -140,6 +142,15 @@ tasks {
"version" to rootProject.version,
"apiVersion" to apiVersion,
"main" to main,
"environment" to if (project.hasProperty("release")) "production" else "development",
"commit" to provider {
val res = runCatching { project.extensions.getByType<Grgit>().head().id }
res.getOrDefault("")
.takeIf { it.length == 40 } ?: {
logger.error("Git commit hash not found", res.exceptionOrNull())
"unknown"
}()
},
)
filesMatching("**/plugin.yml") {
expand(inputs.properties)

View File

@@ -3,10 +3,14 @@ package com.volmit.iris.core.link.data;
import com.volmit.iris.Iris;
import com.volmit.iris.core.link.ExternalDataProvider;
import com.volmit.iris.core.link.Identifier;
import com.volmit.iris.engine.framework.Engine;
import com.volmit.iris.util.collection.KList;
import com.volmit.iris.util.collection.KMap;
import com.volmit.iris.util.data.B;
import com.volmit.iris.util.data.IrisCustomData;
import dev.lone.itemsadder.api.CustomBlock;
import dev.lone.itemsadder.api.CustomStack;
import org.bukkit.block.Block;
import org.bukkit.block.data.BlockData;
import org.bukkit.inventory.ItemStack;
import org.jetbrains.annotations.NotNull;
@@ -40,7 +44,11 @@ public class ItemAdderDataProvider extends ExternalDataProvider {
@NotNull
@Override
public BlockData getBlockData(@NotNull Identifier blockId, @NotNull KMap<String, String> state) throws MissingResourceException {
return CustomBlock.getBaseBlockData(blockId.toString());
CustomBlock block = CustomBlock.getInstance(blockId.toString());
if (block == null) {
throw new MissingResourceException("Failed to find BlockData!", blockId.namespace(), blockId.key());
}
return new IrisCustomData(B.getAir(), blockId);
}
@NotNull
@@ -53,6 +61,11 @@ public class ItemAdderDataProvider extends ExternalDataProvider {
return stack.getItemStack();
}
@Override
public void processUpdate(@NotNull Engine engine, @NotNull Block block, @NotNull Identifier blockId) {
CustomBlock.place(blockId.toString(), block.getLocation());
}
@Override
public @NotNull Collection<@NotNull Identifier> getTypes(@NotNull DataType dataType) {
return switch (dataType) {

View File

@@ -261,7 +261,7 @@ public interface Engine extends DataProvider, Fallible, LootProvider, BlockUpdat
getMantle().updateBlock(x, y, z);
}
if (data instanceof IrisCustomData) {
getMantle().getMantle().flag(x >> 4, z >> 4, MantleFlag.CUSTOM, true);
getMantle().getMantle().flag(x >> 4, z >> 4, MantleFlag.CUSTOM_ACTIVE, true);
}
}

View File

@@ -47,7 +47,6 @@ import org.bukkit.Chunk;
import java.io.EOFException;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
@@ -58,16 +57,18 @@ import java.util.concurrent.atomic.AtomicInteger;
*/
public class Mantle {
private static final int LOCK_SIZE = Short.MAX_VALUE;
private final File dataFolder;
@Getter
private final int worldHeight;
private final Map<Long, Long> lastUse;
private final Map<Long, TectonicPlate> loadedRegions;
private final KMap<Long, Long> lastUse;
private final KMap<Long, TectonicPlate> loadedRegions;
private final HyperLock hyperLock;
private final AtomicBoolean closed;
private final MultiBurst ioBurst;
private final AtomicBoolean ioTrim;
private final AtomicBoolean ioTectonicUnload;
private final Semaphore ioTrim;
private final Semaphore ioTectonicUnload;
private final AtomicDouble adjustedIdleDuration;
private final KSet<Long> toUnload;
@@ -83,8 +84,8 @@ public class Mantle {
this.closed = new AtomicBoolean(false);
this.dataFolder = dataFolder;
this.worldHeight = worldHeight;
this.ioTrim = new AtomicBoolean(false);
this.ioTectonicUnload = new AtomicBoolean(false);
this.ioTrim = new Semaphore(LOCK_SIZE, true);
this.ioTectonicUnload = new Semaphore(LOCK_SIZE, true);
loadedRegions = new KMap<>();
lastUse = new KMap<>();
ioBurst = MultiBurst.burst;
@@ -421,19 +422,19 @@ public class Mantle {
throw new RuntimeException("The Mantle is closed");
}
adjustedIdleDuration.set(baseIdleDuration);
double idleDuration = baseIdleDuration;
if (loadedRegions.size() > tectonicLimit) {
// todo update this correctly and maybe do something when it's above 100%
adjustedIdleDuration.set(Math.max(adjustedIdleDuration.get() - (1000 * (((loadedRegions.size() - tectonicLimit) / (double) tectonicLimit) * 100) * 0.4), 4000));
idleDuration = Math.max(idleDuration - (1000 * (((loadedRegions.size() - tectonicLimit) / (double) tectonicLimit) * 100) * 0.4), 4000);
}
adjustedIdleDuration.set(idleDuration);
ioTrim.set(true);
ioTrim.acquireUninterruptibly(LOCK_SIZE);
try {
double adjustedIdleDuration = this.adjustedIdleDuration.get();
Iris.debug("Trimming Tectonic Plates older than " + Form.duration(adjustedIdleDuration, 0));
Iris.debug("Trimming Tectonic Plates older than " + Form.duration(idleDuration, 0));
if (lastUse.isEmpty()) return;
double unloadTime = M.ms() - adjustedIdleDuration;
double unloadTime = M.ms() - idleDuration;
for (long id : lastUse.keySet()) {
hyperLock.withLong(id, () -> {
Long lastUseTime = lastUse.get(id);
@@ -446,7 +447,7 @@ public class Mantle {
} catch (Throwable e) {
Iris.reportError(e);
} finally {
ioTrim.set(false);
ioTrim.release(LOCK_SIZE);
}
}
@@ -459,9 +460,10 @@ public class Mantle {
BurstExecutor burst = ioBurst.burst(toUnload.size());
burst.setMulticore(toUnload.size() > tectonicLimit);
ioTectonicUnload.set(true);
ioTectonicUnload.acquireUninterruptibly(LOCK_SIZE);
try {
for (long id : toUnload) {
double unloadTime = M.ms() - adjustedIdleDuration.get();
burst.queue(() -> hyperLock.withLong(id, () -> {
TectonicPlate m = loadedRegions.get(id);
if (m == null) {
@@ -470,17 +472,21 @@ public class Mantle {
return;
}
var used = lastUse.getOrDefault(id, 0L);
if (!toUnload.contains(id) || used >= unloadTime) {
return;
}
if (m.inUse()) {
Iris.debug("Tectonic Plate was added to unload while in use " + C.DARK_GREEN + m.getX() + " " + m.getZ());
lastUse.put(id, M.ms());
toUnload.remove(id);
use(id);
return;
}
try {
m.write(fileForRegion(dataFolder, id, false));
oldFileForRegion(dataFolder, id).delete();
loadedRegions.remove(id);
loadedRegions.remove(id, m);
lastUse.remove(id);
toUnload.remove(id);
i.incrementAndGet();
@@ -496,7 +502,7 @@ public class Mantle {
e.printStackTrace();
burst.complete();
} finally {
ioTectonicUnload.set(false);
ioTectonicUnload.release(LOCK_SIZE);
}
return i.get();
}
@@ -512,30 +518,39 @@ public class Mantle {
*/
@RegionCoordinates
private TectonicPlate get(int x, int z) {
if (ioTrim.get() || ioTectonicUnload.get()) {
boolean trim = ioTrim.tryAcquire();
boolean unload = ioTectonicUnload.tryAcquire();
try {
if (!trim || !unload) {
try {
return getSafe(x, z).get();
} catch (InterruptedException e) {
e.printStackTrace();
} catch (ExecutionException e) {
e.printStackTrace();
}
}
Long key = key(x, z);
TectonicPlate p = loadedRegions.get(key);
if (p != null && !p.isClosed()) {
use(key);
return p;
}
try {
return getSafe(x, z).get();
} catch (InterruptedException e) {
e.printStackTrace();
Iris.warn("Failed to get Tectonic Plate " + x + " " + z + " Due to a thread intterruption (hotload?)");
Iris.reportError(e);
} catch (ExecutionException e) {
e.printStackTrace();
Iris.warn("Failed to get Tectonic Plate " + x + " " + z + " Due to a thread execution exception (engine close?)");
Iris.reportError(e);
}
}
TectonicPlate p = loadedRegions.get(key(x, z));
if (p != null) {
return p;
}
try {
return getSafe(x, z).get();
} catch (InterruptedException e) {
Iris.warn("Failed to get Tectonic Plate " + x + " " + z + " Due to a thread intterruption (hotload?)");
Iris.reportError(e);
} catch (ExecutionException e) {
Iris.warn("Failed to get Tectonic Plate " + x + " " + z + " Due to a thread execution exception (engine close?)");
Iris.reportError(e);
} finally {
if (trim) ioTrim.release();
if (unload) ioTectonicUnload.release();
}
Iris.warn("Retrying to get " + x + " " + z + " Mantle Region");
@@ -552,19 +567,12 @@ public class Mantle {
*/
@RegionCoordinates
private Future<TectonicPlate> getSafe(int x, int z) {
Long k = key(x, z);
TectonicPlate p = loadedRegions.get(k);
if (p != null) {
lastUse.put(k, M.ms());
return CompletableFuture.completedFuture(p);
}
return ioBurst.completeValue(() -> hyperLock.withResult(x, z, () -> {
lastUse.put(k, M.ms());
Long k = key(x, z);
use(k);
TectonicPlate region = loadedRegions.get(k);
if (region != null) {
if (region != null && !region.isClosed()) {
return region;
}
@@ -592,16 +600,23 @@ public class Mantle {
Iris.debug("Created new Tectonic Plate (Due to Load Failure) " + C.DARK_GREEN + x + " " + z);
}
use(k);
return region;
}
region = new TectonicPlate(worldHeight, x, z);
loadedRegions.put(k, region);
Iris.debug("Created new Tectonic Plate " + C.DARK_GREEN + x + " " + z);
use(k);
return region;
}));
}
private void use(Long key) {
lastUse.put(key, M.ms());
toUnload.remove(key);
}
public void saveAll() {
}
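
The Mantle changes above replace the ioTrim/ioTectonicUnload AtomicBoolean flags with fair Semaphores: the trim and tectonic-unload paths acquire all LOCK_SIZE permits for exclusive access, while get(x, z) tries a single permit and falls back to getSafe(x, z) when maintenance holds them. A minimal standalone sketch of that permit pattern, using a hypothetical PermitGate class that is not part of this commit:

import java.util.concurrent.Semaphore;
import java.util.function.Supplier;

// Sketch only: readers take one permit, exclusive maintenance takes all of them.
public class PermitGate {
    private static final int LOCK_SIZE = Short.MAX_VALUE;
    private final Semaphore permits = new Semaphore(LOCK_SIZE, true);

    // Exclusive section: blocks until every reader permit has been returned.
    public void maintain(Runnable task) {
        permits.acquireUninterruptibly(LOCK_SIZE);
        try {
            task.run();
        } finally {
            permits.release(LOCK_SIZE);
        }
    }

    // Fast path while no maintenance is running, fallback otherwise.
    public <T> T read(Supplier<T> fast, Supplier<T> fallback) {
        if (!permits.tryAcquire()) {
            return fallback.get();
        }
        try {
            return fast.get();
        } finally {
            permits.release();
        }
    }
}

Because readers only tryAcquire, they never block behind maintenance; they simply take the slower safe route, which mirrors how get(x, z) defers to getSafe(x, z) in the diff above.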

View File

@@ -39,6 +39,7 @@ import java.nio.channels.FileChannel;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReferenceArray;
/**
@@ -52,6 +53,7 @@ public class TectonicPlate {
private final int sectionHeight;
private final AtomicReferenceArray<MantleChunk> chunks;
private final AtomicBoolean closed;
@Getter
private final int x;
@@ -67,6 +69,7 @@ public class TectonicPlate {
public TectonicPlate(int worldHeight, int x, int z) {
this.sectionHeight = worldHeight >> 4;
this.chunks = new AtomicReferenceArray<>(1024);
this.closed = new AtomicBoolean(false);
this.x = x;
this.z = z;
}
@@ -143,6 +146,7 @@ public class TectonicPlate {
}
public void close() throws InterruptedException {
closed.set(true);
for (int i = 0; i < chunks.length(); i++) {
MantleChunk chunk = chunks.get(i);
if (chunk != null) {
@@ -151,6 +155,10 @@ public class TectonicPlate {
}
}
public boolean isClosed() {
return closed.get();
}
/**
* Check if a chunk exists in this plate or not (same as get(x, z) != null)
*

View File

@@ -23,9 +23,11 @@ import org.bstats.charts.SimplePie;
import org.bstats.charts.SingleLineChart;
import org.bukkit.Bukkit;
import org.bukkit.command.CommandSender;
import org.bukkit.configuration.file.YamlConfiguration;
import org.bukkit.entity.Player;
import oshi.SystemInfo;
import java.io.InputStreamReader;
import java.math.RoundingMode;
import java.text.NumberFormat;
import java.util.HashMap;
@@ -45,6 +47,9 @@ public class Bindings {
if (settings.disableAutoReporting || Sentry.isEnabled() || Boolean.getBoolean("iris.suppressReporting")) return;
Iris.info("Enabling Sentry for anonymous error reporting. You can disable this in the settings.");
Iris.info("Your server ID is: " + ServerID.ID);
var resource = Iris.instance.getResource("plugin.yml");
YamlConfiguration desc = resource != null ? YamlConfiguration.loadConfiguration(new InputStreamReader(resource)) : new YamlConfiguration();
Sentry.init(options -> {
options.setDsn("https://b16ecc222e9c1e0c48faecacb906fd89@o4509451052646400.ingest.de.sentry.io/4509452722765904");
if (settings.debug) {
@@ -55,6 +60,7 @@ public class Bindings {
options.setAttachServerName(false);
options.setEnableUncaughtExceptionHandler(false);
options.setRelease(Iris.instance.getDescription().getVersion());
options.setEnvironment(desc.getString("environment", "production"));
options.setBeforeSend((event, hint) -> {
if (suppress(event.getThrowable())) return null;
event.setTag("iris.safeguard", IrisSafeguard.mode());
@@ -71,6 +77,7 @@ public class Bindings {
scope.setTag("server", Bukkit.getVersion());
scope.setTag("server.type", Bukkit.getName());
scope.setTag("server.api", Bukkit.getBukkitVersion());
scope.setTag("iris.commit", desc.getString("commit", "unknown"));
});
Runtime.getRuntime().addShutdownHook(new Thread(Sentry::close));
}

View File

@@ -1,20 +1,35 @@
package com.volmit.iris.util.misc;
import com.volmit.iris.Iris;
import com.volmit.iris.core.nms.container.Pair;
import io.github.slimjar.app.builder.ApplicationBuilder;
import io.github.slimjar.exceptions.InjectorException;
import io.github.slimjar.injector.loader.Injectable;
import io.github.slimjar.injector.loader.InjectableFactory;
import io.github.slimjar.injector.loader.IsolatedInjectableClassLoader;
import io.github.slimjar.logging.ProcessLogger;
import io.github.slimjar.resolver.data.Repository;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Path;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.ReentrantLock;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.logging.Level;
import java.util.logging.Logger;
public class SlimJar {
private static final Logger LOGGER = Logger.getLogger("Iris");
private static final String NAME = "Iris";
private static final Logger LOGGER = Logger.getLogger(NAME);
private static final ReentrantLock lock = new ReentrantLock();
private static final AtomicBoolean loaded = new AtomicBoolean();
private static final boolean DISABLE_REMAPPER = Boolean.getBoolean("iris.disable-remapper");
public static void debug(boolean debug) {
LOGGER.setLevel(debug ? Level.FINE : Level.INFO);
@@ -31,28 +46,99 @@ public class SlimJar {
}
LOGGER.info("Loading libraries...");
ApplicationBuilder.appending("Iris")
.downloadDirectoryPath(localRepository.toPath())
.logger(new ProcessLogger() {
@Override
public void info(@NotNull String message, @Nullable Object... args) {
LOGGER.fine(message.formatted(args));
}
load(localRepository.toPath(), new ProcessLogger() {
@Override
public void info(@NotNull String message, @Nullable Object... args) {
LOGGER.fine(message.formatted(args));
}
@Override
public void error(@NotNull String message, @Nullable Object... args) {
LOGGER.severe(message.formatted(args));
}
@Override
public void error(@NotNull String message, @Nullable Object... args) {
LOGGER.severe(message.formatted(args));
}
@Override
public void debug(@NotNull String message, @Nullable Object... args) {
LOGGER.fine(message.formatted(args));
}
})
.build();
@Override
public void debug(@NotNull String message, @Nullable Object... args) {
LOGGER.fine(message.formatted(args));
}
});
LOGGER.info("Libraries loaded successfully!");
} finally {
lock.unlock();
}
}
private static void load(Path downloadPath, ProcessLogger logger) {
try {
loadSpigot(downloadPath, logger);
} catch (Throwable e) {
Iris.warn("Failed to inject the library loader, falling back to application builder");
ApplicationBuilder.appending(NAME)
.downloadDirectoryPath(downloadPath)
.logger(logger)
.build();
}
}
private static void loadSpigot(Path downloadPath, ProcessLogger logger) throws Throwable {
var current = SlimJar.class.getClassLoader();
var libraryLoader = current.getClass().getDeclaredField("libraryLoader");
libraryLoader.setAccessible(true);
if (!ClassLoader.class.isAssignableFrom(libraryLoader.getType())) throw new IllegalStateException("Failed to find library loader");
final var pair = findRemapper();
final var remapper = pair.getA();
final var factory = pair.getB();
final var libraries = factory.apply(new URL[0], current.getParent());
final var injecting = InjectableFactory.create(downloadPath, List.of(Repository.central()), libraries);
ApplicationBuilder.injecting(NAME, new Injectable() {
@Override
public void inject(@NotNull URL url) throws InjectorException {
try {
final List<Path> mapped;
synchronized (remapper) {
mapped = remapper.apply(List.of(Path.of(url.toURI())));
}
for (final Path path : mapped) {
injecting.inject(path.toUri().toURL());
}
} catch (Throwable e) {
throw new InjectorException("Failed to inject " + url, e);
}
}
@Override
public boolean isThreadSafe() {
return injecting.isThreadSafe();
}
})
.downloadDirectoryPath(downloadPath)
.logger(logger)
.build();
libraryLoader.set(current, libraries);
}
private static Pair<Function<List<Path>, List<Path>>, BiFunction<URL[], ClassLoader, URLClassLoader>> findRemapper() {
Function<List<Path>, List<Path>> mapper = null;
BiFunction<URL[], ClassLoader, URLClassLoader> factory = null;
if (!DISABLE_REMAPPER) {
try {
var libraryLoader = Class.forName("org.bukkit.plugin.java.LibraryLoader");
var mapperField = libraryLoader.getDeclaredField("REMAPPER");
var factoryField = libraryLoader.getDeclaredField("LIBRARY_LOADER_FACTORY");
mapperField.setAccessible(true);
factoryField.setAccessible(true);
mapper = (Function<List<Path>, List<Path>>) mapperField.get(null);
factory = (BiFunction<URL[], ClassLoader, URLClassLoader>) factoryField.get(null);
} catch (Throwable ignored) {}
}
if (mapper == null) mapper = Function.identity();
if (factory == null) factory = (urls, parent) -> new IsolatedInjectableClassLoader(urls, List.of(), parent);
return new Pair<>(mapper, factory);
}
}

View File

@@ -5,6 +5,8 @@ load: STARTUP
authors: [ cyberpwn, NextdoorPsycho, Vatuu ]
website: volmit.com
description: More than a Dimension!
environment: '${environment}'
commit: '${commit}'
commands:
iris:
aliases: [ ir, irs ]

View File

@@ -9,6 +9,7 @@ slimjar = "2.0.8" # https://plugins.gradle.org/plugin/de.crazydev22.slimjar
download = "5.6.0" # https://plugins.gradle.org/plugin/de.undercouch.download
runPaper = "2.3.1" # https://plugins.gradle.org/plugin/xyz.jpenilla.run-paper
sentryPlugin = "5.8.0" # https://github.com/getsentry/sentry-android-gradle-plugin
grgit = "5.3.2" # https://github.com/ajoberstar/grgit
# Core Libraries
lombok = "1.18.38"
@@ -102,3 +103,4 @@ slimjar = { id = "de.crazydev22.slimjar", version.ref = "slimjar" }
download = { id = "de.undercouch.download", version.ref = "download" }
runPaper = { id = "xyz.jpenilla.run-paper", version.ref = "runPaper" }
sentry = { id = "io.sentry.jvm.gradle", version.ref = "sentryPlugin" }
grgit = { id = "org.ajoberstar.grgit", version.ref = "grgit" }