This commit is contained in:
Brian Neumann-Fopiano
2026-02-21 03:37:44 -05:00
parent 72c891ce5b
commit 1d09cd5f0f
25 changed files with 3158 additions and 701 deletions

View File

@@ -528,7 +528,7 @@ public class Iris extends VolmitPlugin implements Listener {
J.ar(this::checkConfigHotload, 60);
J.sr(this::tickQueue, 0);
J.s(this::setupPapi);
J.a(ServerConfigurator::configure, 20);
J.a(ServerConfigurator::configureIfDeferred, 20);
autoStartStudio();
if (!J.isFolia()) {

View File

@@ -34,6 +34,8 @@ import art.arcane.iris.util.common.plugin.VolmitSender;
import art.arcane.iris.util.common.scheduling.J;
import lombok.NonNull;
import org.bukkit.Bukkit;
import org.bukkit.NamespacedKey;
import org.bukkit.block.Biome;
import org.bukkit.configuration.InvalidConfigurationException;
import org.bukkit.configuration.file.FileConfiguration;
import org.bukkit.configuration.file.YamlConfiguration;
@@ -42,13 +44,22 @@ import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.io.IOException;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.Locale;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicIntegerArray;
import java.util.stream.Stream;
public class ServerConfigurator {
private static volatile boolean deferredInstallPending = false;
public static void configure() {
IrisSettings.IrisSettingsAutoconfiguration s = IrisSettings.get().getAutoConfiguration();
if (s.isConfigureSpigotTimeoutTime()) {
@@ -59,9 +70,26 @@ public class ServerConfigurator {
J.attempt(ServerConfigurator::increasePaperWatchdog);
}
if (shouldDeferInstallUntilWorldsReady()) {
deferredInstallPending = true;
return;
}
deferredInstallPending = false;
installDataPacks(true);
}
public static void configureIfDeferred() {
if (!deferredInstallPending) {
return;
}
configure();
if (deferredInstallPending) {
J.a(ServerConfigurator::configureIfDeferred, 20);
}
}
private static void increaseKeepAliveSpigot() throws IOException, InvalidConfigurationException {
File spigotConfig = new File("spigot.yml");
FileConfiguration f = new YamlConfiguration();
@@ -103,24 +131,38 @@ public class ServerConfigurator {
}
public static boolean installDataPacks(boolean fullInstall) {
return installDataPacks(fullInstall, true);
}
public static boolean installDataPacks(boolean fullInstall, boolean includeExternal) {
IDataFixer fixer = DataVersion.getDefault();
if (fixer == null) {
DataVersion fallback = DataVersion.getLatest();
Iris.warn("Primary datapack fixer was null, forcing latest fixer: " + fallback.getVersion());
fixer = fallback.get();
}
return installDataPacks(fixer, fullInstall);
return installDataPacks(fixer, fullInstall, includeExternal);
}
public static boolean installDataPacks(IDataFixer fixer, boolean fullInstall) {
return installDataPacks(fixer, fullInstall, true);
}
public static boolean installDataPacks(IDataFixer fixer, boolean fullInstall, boolean includeExternal) {
if (fixer == null) {
Iris.error("Unable to install datapacks, fixer is null!");
return false;
}
Iris.info("Checking Data Packs...");
if (fullInstall || includeExternal) {
Iris.info("Checking Data Packs...");
} else {
Iris.verbose("Checking Data Packs...");
}
DimensionHeight height = new DimensionHeight(fixer);
KList<File> folders = getDatapacksFolder();
installExternalDataPacks(folders);
if (includeExternal) {
installExternalDataPacks(folders);
}
KMap<String, KSet<String>> biomes = new KMap<>();
try (Stream<IrisData> stream = allPacks()) {
@@ -133,7 +175,11 @@ public class ServerConfigurator {
});
}
IrisDimension.writeShared(folders, height);
Iris.info("Data Packs Setup!");
if (fullInstall || includeExternal) {
Iris.info("Data Packs Setup!");
} else {
Iris.verbose("Data Packs Setup!");
}
return fullInstall && verifyDataPacksPost(IrisSettings.get().getAutoConfiguration().isAutoRestartOnCustomBiomeInstall());
}
@@ -147,93 +193,684 @@ public class ServerConfigurator {
KMap<String, KList<File>> worldDatapackFoldersByPack = collectWorldDatapackFoldersByPack(folders);
ExternalDataPackPipeline.PipelineSummary summary = ExternalDataPackPipeline.processDatapacks(requests, worldDatapackFoldersByPack);
if (summary.getLegacyDownloadRemovals() > 0) {
Iris.info("Removed " + summary.getLegacyDownloadRemovals() + " legacy global datapack downloads.");
Iris.verbose("Removed " + summary.getLegacyDownloadRemovals() + " legacy global datapack downloads.");
}
if (summary.getLegacyWorldCopyRemovals() > 0) {
Iris.info("Removed " + summary.getLegacyWorldCopyRemovals() + " legacy managed world datapack copies.");
}
if (summary.getRequests() > 0 || summary.getImportedSources() > 0 || summary.getWorldDatapacksInstalled() > 0) {
Iris.info("External datapack sync/import/install: requests=" + summary.getRequests()
+ ", synced=" + summary.getSyncedRequests()
+ ", restored=" + summary.getRestoredRequests()
+ ", importedSources=" + summary.getImportedSources()
+ ", cachedSources=" + summary.getCachedSources()
+ ", converted=" + summary.getConvertedStructures()
+ ", failedConversions=" + summary.getFailedConversions()
+ ", worldDatapacks=" + summary.getWorldDatapacksInstalled()
+ ", worldAssets=" + summary.getWorldAssetsInstalled()
+ ", optionalFailures=" + summary.getOptionalFailures()
+ ", requiredFailures=" + summary.getRequiredFailures());
Iris.verbose("Removed " + summary.getLegacyWorldCopyRemovals() + " legacy managed world datapack copies.");
}
int loadedDatapackCount = Math.max(0, summary.getRequests() - summary.getOptionalFailures() - summary.getRequiredFailures());
Iris.info("Loaded Datapacks into Iris: " + loadedDatapackCount + "!");
if (summary.getRequiredFailures() > 0) {
throw new IllegalStateException("Required external datapack setup failed for " + summary.getRequiredFailures() + " request(s).");
}
}
private static boolean shouldDeferInstallUntilWorldsReady() {
String forcedMainWorld = IrisSettings.get().getGeneral().forceMainWorld;
if (forcedMainWorld != null && !forcedMainWorld.isBlank()) {
return false;
}
return Bukkit.getServer().getWorlds().isEmpty();
}
private static KList<ExternalDataPackPipeline.DatapackRequest> collectExternalDatapackRequests() {
KMap<String, ExternalDataPackPipeline.DatapackRequest> deduplicated = new KMap<>();
try (Stream<IrisData> stream = allPacks()) {
stream.forEach(data -> {
ResourceLoader<IrisDimension> loader = data.getDimensionLoader();
if (loader == null) {
return;
}
KList<IrisDimension> dimensions = loader.loadAll(loader.getPossibleKeys());
for (IrisDimension dimension : dimensions) {
if (dimension == null || dimension.getExternalDatapacks() == null || dimension.getExternalDatapacks().isEmpty()) {
continue;
}
String targetPack = sanitizePackName(dimension.getLoadKey());
if (targetPack.isBlank()) {
targetPack = sanitizePackName(data.getDataFolder().getName());
}
String environment = ExternalDataPackPipeline.normalizeEnvironmentValue(dimension.getEnvironment() == null ? null : dimension.getEnvironment().name());
for (IrisExternalDatapack externalDatapack : dimension.getExternalDatapacks()) {
if (externalDatapack == null || !externalDatapack.isEnabled()) {
continue;
}
String url = externalDatapack.getUrl() == null ? "" : externalDatapack.getUrl().trim();
if (url.isBlank()) {
continue;
}
String requestId = externalDatapack.getId() == null ? "" : externalDatapack.getId().trim();
if (requestId.isBlank()) {
requestId = url;
}
IrisExternalDatapackReplaceTargets replaceTargets = externalDatapack.getReplaceTargets();
ExternalDataPackPipeline.DatapackRequest request = new ExternalDataPackPipeline.DatapackRequest(
requestId,
url,
targetPack,
environment,
externalDatapack.isRequired(),
externalDatapack.isReplaceVanilla(),
replaceTargets,
externalDatapack.getStructurePatches()
);
String dedupeKey = request.getDedupeKey();
ExternalDataPackPipeline.DatapackRequest existing = deduplicated.get(dedupeKey);
if (existing == null) {
deduplicated.put(dedupeKey, request);
continue;
}
deduplicated.put(dedupeKey, existing.merge(request));
}
}
});
stream.forEach(data -> collectExternalDatapackRequestsForPack(data, deduplicated));
}
return new KList<>(deduplicated.v());
}
private static void collectExternalDatapackRequestsForPack(IrisData data, KMap<String, ExternalDataPackPipeline.DatapackRequest> deduplicated) {
ResourceLoader<IrisDimension> loader = data.getDimensionLoader();
if (loader == null) {
Iris.warn("Skipping external datapack request discovery for pack " + data.getDataFolder().getName() + " because dimension loader is unavailable.");
return;
}
String[] possibleKeys = loader.getPossibleKeys();
if (possibleKeys == null || possibleKeys.length == 0) {
File dimensionsFolder = new File(data.getDataFolder(), "dimensions");
File[] dimensionFiles = dimensionsFolder.listFiles((dir, name) -> name != null && name.toLowerCase().endsWith(".json"));
int dimensionFileCount = dimensionFiles == null ? 0 : dimensionFiles.length;
Iris.warn("Pack " + data.getDataFolder().getName() + " has no loadable dimension keys. Dimension folder json files=" + dimensionFileCount + ". External datapacks in this pack cannot be discovered.");
return;
}
KList<IrisDimension> dimensions = loader.loadAll(possibleKeys);
int scannedDimensions = 0;
int dimensionsWithExternalEntries = 0;
int enabledEntries = 0;
int disabledEntries = 0;
int skippedBlankUrl = 0;
int scopedRequests = 0;
int unscopedRequests = 0;
int dedupeMerges = 0;
for (IrisDimension dimension : dimensions) {
if (dimension == null) {
continue;
}
scannedDimensions++;
KList<IrisExternalDatapack> externalDatapacks = dimension.getExternalDatapacks();
if (externalDatapacks == null || externalDatapacks.isEmpty()) {
continue;
}
dimensionsWithExternalEntries++;
String targetPack = sanitizePackName(dimension.getLoadKey());
if (targetPack.isBlank()) {
targetPack = sanitizePackName(data.getDataFolder().getName());
}
String environment = ExternalDataPackPipeline.normalizeEnvironmentValue(dimension.getEnvironment() == null ? null : dimension.getEnvironment().name());
LinkedHashMap<String, IrisExternalDatapack> definitionsById = new LinkedHashMap<>();
for (IrisExternalDatapack externalDatapack : externalDatapacks) {
if (externalDatapack == null) {
disabledEntries++;
continue;
}
if (!externalDatapack.isEnabled()) {
disabledEntries++;
continue;
}
String url = externalDatapack.getUrl() == null ? "" : externalDatapack.getUrl().trim();
if (url.isBlank()) {
skippedBlankUrl++;
continue;
}
enabledEntries++;
String requestId = normalizeExternalDatapackId(externalDatapack.getId(), url);
IrisExternalDatapack existingDefinition = definitionsById.put(requestId, externalDatapack);
if (existingDefinition != null) {
Iris.warn("Duplicate external datapack id '" + requestId + "' in dimension " + dimension.getLoadKey() + ". Latest entry wins.");
}
}
if (definitionsById.isEmpty()) {
continue;
}
KMap<String, KList<ScopedBindingGroup>> scopedGroups = resolveScopedBindingGroups(data, dimension, definitionsById);
for (Map.Entry<String, IrisExternalDatapack> entry : definitionsById.entrySet()) {
String requestId = entry.getKey();
IrisExternalDatapack definition = entry.getValue();
String url = definition.getUrl() == null ? "" : definition.getUrl().trim();
if (url.isBlank()) {
continue;
}
KList<ScopedBindingGroup> groups = scopedGroups.get(requestId);
if (groups == null || groups.isEmpty()) {
String scopeKey = buildRootScopeKey(dimension.getLoadKey(), requestId);
ExternalDataPackPipeline.DatapackRequest request = new ExternalDataPackPipeline.DatapackRequest(
requestId,
url,
targetPack,
environment,
definition.isRequired(),
definition.isReplaceVanilla(),
definition.getReplaceTargets(),
definition.getStructurePatches(),
Set.of(),
scopeKey,
!definition.isReplaceVanilla(),
Set.of()
);
dedupeMerges += mergeDeduplicatedRequest(deduplicated, request);
unscopedRequests++;
Iris.verbose("External datapack scope resolved: id=" + requestId
+ ", targetPack=" + targetPack
+ ", dimension=" + dimension.getLoadKey()
+ ", scope=dimension-root"
+ ", forcedBiomes=0"
+ ", replaceVanilla=" + definition.isReplaceVanilla()
+ ", alongsideMode=" + (!definition.isReplaceVanilla())
+ ", required=" + definition.isRequired());
continue;
}
for (ScopedBindingGroup group : groups) {
ExternalDataPackPipeline.DatapackRequest request = new ExternalDataPackPipeline.DatapackRequest(
requestId,
url,
targetPack,
environment,
group.required(),
group.replaceVanilla(),
definition.getReplaceTargets(),
definition.getStructurePatches(),
group.forcedBiomeKeys(),
group.scopeKey(),
!group.replaceVanilla(),
Set.of()
);
dedupeMerges += mergeDeduplicatedRequest(deduplicated, request);
scopedRequests++;
Iris.verbose("External datapack scope resolved: id=" + requestId
+ ", targetPack=" + targetPack
+ ", dimension=" + dimension.getLoadKey()
+ ", scope=" + group.source()
+ ", forcedBiomes=" + group.forcedBiomeKeys().size()
+ ", replaceVanilla=" + group.replaceVanilla()
+ ", alongsideMode=" + (!group.replaceVanilla())
+ ", required=" + group.required());
}
}
}
if (scannedDimensions == 0) {
Iris.warn("Pack " + data.getDataFolder().getName() + " did not resolve any dimensions during external datapack discovery.");
return;
}
if (dimensionsWithExternalEntries > 0 || enabledEntries > 0 || disabledEntries > 0 || skippedBlankUrl > 0) {
Iris.verbose("External datapack discovery for pack " + data.getDataFolder().getName()
+ ": dimensions=" + scannedDimensions
+ ", withEntries=" + dimensionsWithExternalEntries
+ ", enabled=" + enabledEntries
+ ", disabled=" + disabledEntries
+ ", skippedBlankUrl=" + skippedBlankUrl
+ ", scopedRequests=" + scopedRequests
+ ", unscopedRequests=" + unscopedRequests
+ ", dedupeMerges=" + dedupeMerges);
}
}
private static KMap<String, KList<ScopedBindingGroup>> resolveScopedBindingGroups(
IrisData data,
IrisDimension dimension,
Map<String, IrisExternalDatapack> definitionsById
) {
KMap<String, KList<ScopedBindingGroup>> groupedRequestsById = new KMap<>();
if (definitionsById == null || definitionsById.isEmpty()) {
return groupedRequestsById;
}
ResourceLoader<IrisRegion> regionLoader = data.getRegionLoader();
ResourceLoader<IrisBiome> biomeLoader = data.getBiomeLoader();
if (regionLoader == null || biomeLoader == null) {
return groupedRequestsById;
}
String biomeNamespace = resolveBiomeNamespace(dimension);
LinkedHashMap<String, IrisBiome> biomeCache = new LinkedHashMap<>();
LinkedHashMap<String, IrisRegion> regions = new LinkedHashMap<>();
KList<String> dimensionRegions = dimension.getRegions();
if (dimensionRegions != null) {
for (String regionKey : dimensionRegions) {
String normalizedRegion = normalizeResourceReference(regionKey);
if (normalizedRegion.isBlank()) {
continue;
}
IrisRegion region = regionLoader.load(normalizedRegion, false);
if (region != null) {
regions.put(normalizedRegion, region);
}
}
}
LinkedHashMap<String, KList<ScopedBindingCandidate>> candidatesById = new LinkedHashMap<>();
LinkedHashSet<String> discoveryBiomeKeys = new LinkedHashSet<>();
for (IrisRegion region : regions.values()) {
Set<String> expandedRegionBiomes = collectRegionBiomeKeys(region, true, biomeLoader, biomeCache);
discoveryBiomeKeys.addAll(expandedRegionBiomes);
KList<IrisExternalDatapackBinding> bindings = region.getExternalDatapacks();
if (bindings == null || bindings.isEmpty()) {
continue;
}
for (IrisExternalDatapackBinding binding : bindings) {
if (binding == null || !binding.isEnabled()) {
continue;
}
String id = normalizeExternalDatapackId(binding.getId(), "");
if (id.isBlank()) {
continue;
}
IrisExternalDatapack definition = definitionsById.get(id);
if (definition == null) {
Iris.warn("Ignoring region external datapack binding id '" + id + "' in " + region.getLoadKey() + " because no matching dimension externalDatapacks entry exists.");
continue;
}
boolean replaceVanilla = binding.getReplaceVanillaOverride() == null
? definition.isReplaceVanilla()
: binding.getReplaceVanillaOverride();
boolean required = binding.getRequiredOverride() == null
? definition.isRequired()
: binding.getRequiredOverride();
Set<String> regionBiomeKeys = collectRegionBiomeKeys(region, binding.isIncludeChildren(), biomeLoader, biomeCache);
Set<String> runtimeBiomeKeys = resolveRuntimeBiomeKeys(regionBiomeKeys, biomeNamespace, biomeLoader, biomeCache);
if (runtimeBiomeKeys.isEmpty()) {
continue;
}
KList<ScopedBindingCandidate> candidates = candidatesById.computeIfAbsent(id, key -> new KList<>());
candidates.add(new ScopedBindingCandidate("region", region.getLoadKey(), 1, replaceVanilla, required, runtimeBiomeKeys));
}
}
for (String biomeKey : discoveryBiomeKeys) {
IrisBiome biome = loadBiomeFromCache(biomeKey, biomeLoader, biomeCache);
if (biome == null) {
continue;
}
KList<IrisExternalDatapackBinding> bindings = biome.getExternalDatapacks();
if (bindings == null || bindings.isEmpty()) {
continue;
}
for (IrisExternalDatapackBinding binding : bindings) {
if (binding == null || !binding.isEnabled()) {
continue;
}
String id = normalizeExternalDatapackId(binding.getId(), "");
if (id.isBlank()) {
continue;
}
IrisExternalDatapack definition = definitionsById.get(id);
if (definition == null) {
Iris.warn("Ignoring biome external datapack binding id '" + id + "' in " + biome.getLoadKey() + " because no matching dimension externalDatapacks entry exists.");
continue;
}
boolean replaceVanilla = binding.getReplaceVanillaOverride() == null
? definition.isReplaceVanilla()
: binding.getReplaceVanillaOverride();
boolean required = binding.getRequiredOverride() == null
? definition.isRequired()
: binding.getRequiredOverride();
Set<String> biomeSelection = collectBiomeKeys(biome.getLoadKey(), binding.isIncludeChildren(), biomeLoader, biomeCache);
Set<String> runtimeBiomeKeys = resolveRuntimeBiomeKeys(biomeSelection, biomeNamespace, biomeLoader, biomeCache);
if (runtimeBiomeKeys.isEmpty()) {
continue;
}
KList<ScopedBindingCandidate> candidates = candidatesById.computeIfAbsent(id, key -> new KList<>());
candidates.add(new ScopedBindingCandidate("biome", biome.getLoadKey(), 2, replaceVanilla, required, runtimeBiomeKeys));
}
}
for (Map.Entry<String, KList<ScopedBindingCandidate>> entry : candidatesById.entrySet()) {
String id = entry.getKey();
KList<ScopedBindingCandidate> candidates = entry.getValue();
if (candidates == null || candidates.isEmpty()) {
continue;
}
LinkedHashMap<String, ScopedBindingSelection> selectedByBiome = new LinkedHashMap<>();
for (ScopedBindingCandidate candidate : candidates) {
if (candidate == null || candidate.forcedBiomeKeys() == null || candidate.forcedBiomeKeys().isEmpty()) {
continue;
}
ArrayList<String> sortedBiomeKeys = new ArrayList<>(candidate.forcedBiomeKeys());
sortedBiomeKeys.sort(String::compareTo);
for (String runtimeBiomeKey : sortedBiomeKeys) {
ScopedBindingSelection selected = selectedByBiome.get(runtimeBiomeKey);
if (selected == null) {
selectedByBiome.put(runtimeBiomeKey, new ScopedBindingSelection(
candidate.priority(),
candidate.replaceVanilla(),
candidate.required(),
candidate.sourceType(),
candidate.sourceKey()
));
continue;
}
if (candidate.priority() > selected.priority()) {
selectedByBiome.put(runtimeBiomeKey, new ScopedBindingSelection(
candidate.priority(),
candidate.replaceVanilla(),
candidate.required(),
candidate.sourceType(),
candidate.sourceKey()
));
continue;
}
if (candidate.priority() == selected.priority()
&& (candidate.replaceVanilla() != selected.replaceVanilla() || candidate.required() != selected.required())) {
Iris.warn("External datapack scope conflict for id=" + id
+ ", biomeKey=" + runtimeBiomeKey
+ ", kept=" + selected.sourceType() + "/" + selected.sourceKey()
+ ", ignored=" + candidate.sourceType() + "/" + candidate.sourceKey());
}
}
}
LinkedHashMap<String, LinkedHashSet<String>> groupedBiomes = new LinkedHashMap<>();
LinkedHashMap<String, ScopedBindingSelection> groupedSelection = new LinkedHashMap<>();
for (Map.Entry<String, ScopedBindingSelection> selectedEntry : selectedByBiome.entrySet()) {
String runtimeBiomeKey = selectedEntry.getKey();
ScopedBindingSelection selection = selectedEntry.getValue();
String groupKey = selection.replaceVanilla() + "|" + selection.required();
groupedBiomes.computeIfAbsent(groupKey, key -> new LinkedHashSet<>()).add(runtimeBiomeKey);
groupedSelection.putIfAbsent(groupKey, selection);
}
for (Map.Entry<String, LinkedHashSet<String>> groupedEntry : groupedBiomes.entrySet()) {
LinkedHashSet<String> runtimeBiomeKeys = groupedEntry.getValue();
if (runtimeBiomeKeys == null || runtimeBiomeKeys.isEmpty()) {
continue;
}
ScopedBindingSelection selection = groupedSelection.get(groupedEntry.getKey());
if (selection == null) {
continue;
}
Set<String> forcedBiomeKeys = Set.copyOf(runtimeBiomeKeys);
String scopeKey = buildScopedScopeKey(dimension.getLoadKey(), id, selection.sourceType(), selection.sourceKey(), forcedBiomeKeys);
String source = selection.sourceType() + ":" + selection.sourceKey();
KList<ScopedBindingGroup> groups = groupedRequestsById.computeIfAbsent(id, key -> new KList<>());
groups.add(new ScopedBindingGroup(selection.replaceVanilla(), selection.required(), forcedBiomeKeys, scopeKey, source));
}
}
return groupedRequestsById;
}
private static Set<String> collectRegionBiomeKeys(
IrisRegion region,
boolean includeChildren,
ResourceLoader<IrisBiome> biomeLoader,
Map<String, IrisBiome> biomeCache
) {
LinkedHashSet<String> regionBiomeKeys = new LinkedHashSet<>();
if (region == null) {
return regionBiomeKeys;
}
addAllResourceReferences(regionBiomeKeys, region.getLandBiomes());
addAllResourceReferences(regionBiomeKeys, region.getSeaBiomes());
addAllResourceReferences(regionBiomeKeys, region.getShoreBiomes());
addAllResourceReferences(regionBiomeKeys, region.getCaveBiomes());
if (!includeChildren) {
return regionBiomeKeys;
}
LinkedHashSet<String> expanded = new LinkedHashSet<>();
for (String biomeKey : regionBiomeKeys) {
expanded.addAll(collectBiomeKeys(biomeKey, true, biomeLoader, biomeCache));
}
return expanded;
}
private static Set<String> collectBiomeKeys(
String biomeKey,
boolean includeChildren,
ResourceLoader<IrisBiome> biomeLoader,
Map<String, IrisBiome> biomeCache
) {
LinkedHashSet<String> resolved = new LinkedHashSet<>();
String normalizedBiomeKey = normalizeResourceReference(biomeKey);
if (normalizedBiomeKey.isBlank()) {
return resolved;
}
if (!includeChildren) {
resolved.add(normalizedBiomeKey);
return resolved;
}
ArrayDeque<String> queue = new ArrayDeque<>();
queue.add(normalizedBiomeKey);
while (!queue.isEmpty()) {
String next = normalizeResourceReference(queue.removeFirst());
if (next.isBlank() || !resolved.add(next)) {
continue;
}
IrisBiome biome = loadBiomeFromCache(next, biomeLoader, biomeCache);
if (biome == null) {
continue;
}
addQueueResourceReferences(queue, biome.getChildren());
}
return resolved;
}
private static Set<String> resolveRuntimeBiomeKeys(
Set<String> irisBiomeKeys,
String biomeNamespace,
ResourceLoader<IrisBiome> biomeLoader,
Map<String, IrisBiome> biomeCache
) {
LinkedHashSet<String> resolved = new LinkedHashSet<>();
if (irisBiomeKeys == null || irisBiomeKeys.isEmpty()) {
return resolved;
}
for (String irisBiomeKey : irisBiomeKeys) {
String normalizedBiomeKey = normalizeResourceReference(irisBiomeKey);
if (normalizedBiomeKey.isBlank()) {
continue;
}
IrisBiome biome = loadBiomeFromCache(normalizedBiomeKey, biomeLoader, biomeCache);
if (biome == null) {
continue;
}
if (biome.isCustom() && biome.getCustomDerivitives() != null && !biome.getCustomDerivitives().isEmpty()) {
for (IrisBiomeCustom customDerivative : biome.getCustomDerivitives()) {
if (customDerivative == null) {
continue;
}
String customId = normalizeResourceReference(customDerivative.getId());
if (customId.isBlank()) {
continue;
}
resolved.add((biomeNamespace + ":" + customId).toLowerCase(Locale.ROOT));
}
continue;
}
Biome vanillaDerivative = biome.getVanillaDerivative();
NamespacedKey vanillaKey = vanillaDerivative == null ? null : vanillaDerivative.getKey();
if (vanillaKey != null) {
resolved.add(vanillaKey.toString().toLowerCase(Locale.ROOT));
}
}
return resolved;
}
private static String resolveBiomeNamespace(IrisDimension dimension) {
if (dimension == null) {
return "iris";
}
String namespace = dimension.getLoadKey() == null ? "" : dimension.getLoadKey().trim().toLowerCase(Locale.ROOT);
namespace = namespace.replaceAll("[^a-z0-9_\\-.]", "_");
namespace = namespace.replaceAll("_+", "_");
namespace = namespace.replaceAll("^_+", "");
namespace = namespace.replaceAll("_+$", "");
if (namespace.isBlank()) {
return "iris";
}
return namespace;
}
private static IrisBiome loadBiomeFromCache(
String biomeKey,
ResourceLoader<IrisBiome> biomeLoader,
Map<String, IrisBiome> biomeCache
) {
if (biomeLoader == null) {
return null;
}
String normalizedBiomeKey = normalizeResourceReference(biomeKey);
if (normalizedBiomeKey.isBlank()) {
return null;
}
if (biomeCache.containsKey(normalizedBiomeKey)) {
return biomeCache.get(normalizedBiomeKey);
}
IrisBiome biome = biomeLoader.load(normalizedBiomeKey, false);
if (biome != null) {
biomeCache.put(normalizedBiomeKey, biome);
}
return biome;
}
private static void addAllResourceReferences(Set<String> destination, KList<String> references) {
if (destination == null || references == null || references.isEmpty()) {
return;
}
for (String reference : references) {
String normalized = normalizeResourceReference(reference);
if (!normalized.isBlank()) {
destination.add(normalized);
}
}
}
private static void addQueueResourceReferences(ArrayDeque<String> queue, KList<String> references) {
if (queue == null || references == null || references.isEmpty()) {
return;
}
for (String reference : references) {
String normalized = normalizeResourceReference(reference);
if (!normalized.isBlank()) {
queue.addLast(normalized);
}
}
}
private static String normalizeResourceReference(String reference) {
if (reference == null) {
return "";
}
String normalized = reference.trim().replace('\\', '/');
normalized = normalized.replaceAll("/+", "/");
normalized = normalized.replaceAll("^/+", "");
normalized = normalized.replaceAll("/+$", "");
return normalized;
}
private static int mergeDeduplicatedRequest(
KMap<String, ExternalDataPackPipeline.DatapackRequest> deduplicated,
ExternalDataPackPipeline.DatapackRequest request
) {
if (request == null) {
return 0;
}
String dedupeKey = request.getDedupeKey();
ExternalDataPackPipeline.DatapackRequest existing = deduplicated.get(dedupeKey);
if (existing == null) {
deduplicated.put(dedupeKey, request);
return 0;
}
deduplicated.put(dedupeKey, existing.merge(request));
return 1;
}
private static String normalizeExternalDatapackId(String id, String fallbackUrl) {
String normalized = id == null ? "" : id.trim();
if (!normalized.isBlank()) {
return normalized.toLowerCase(Locale.ROOT);
}
String fallback = fallbackUrl == null ? "" : fallbackUrl.trim();
if (fallback.isBlank()) {
return "";
}
return fallback.toLowerCase(Locale.ROOT);
}
private static String buildRootScopeKey(String dimensionKey, String id) {
String normalizedDimension = ExternalDataPackPipeline.sanitizePackNameValue(dimensionKey);
if (normalizedDimension.isBlank()) {
normalizedDimension = "dimension";
}
String normalizedId = ExternalDataPackPipeline.sanitizePackNameValue(id);
if (normalizedId.isBlank()) {
normalizedId = "external";
}
return "root-" + normalizedDimension + "-" + normalizedId;
}
private static String buildScopedScopeKey(String dimensionKey, String id, String sourceType, String sourceKey, Set<String> forcedBiomeKeys) {
ArrayList<String> sortedBiomes = new ArrayList<>();
if (forcedBiomeKeys != null) {
sortedBiomes.addAll(forcedBiomeKeys);
}
sortedBiomes.sort(String::compareTo);
String biomeFingerprint = Integer.toHexString(String.join(",", sortedBiomes).hashCode());
String normalizedDimension = ExternalDataPackPipeline.sanitizePackNameValue(dimensionKey);
if (normalizedDimension.isBlank()) {
normalizedDimension = "dimension";
}
String normalizedId = ExternalDataPackPipeline.sanitizePackNameValue(id);
if (normalizedId.isBlank()) {
normalizedId = "external";
}
String normalizedSourceType = ExternalDataPackPipeline.sanitizePackNameValue(sourceType);
if (normalizedSourceType.isBlank()) {
normalizedSourceType = "scope";
}
String normalizedSourceKey = ExternalDataPackPipeline.sanitizePackNameValue(sourceKey);
if (normalizedSourceKey.isBlank()) {
normalizedSourceKey = "entry";
}
return normalizedDimension + "-" + normalizedId + "-" + normalizedSourceType + "-" + normalizedSourceKey + "-" + biomeFingerprint;
}
private record ScopedBindingCandidate(
String sourceType,
String sourceKey,
int priority,
boolean replaceVanilla,
boolean required,
Set<String> forcedBiomeKeys
) {
}
private record ScopedBindingSelection(
int priority,
boolean replaceVanilla,
boolean required,
String sourceType,
String sourceKey
) {
}
private record ScopedBindingGroup(
boolean replaceVanilla,
boolean required,
Set<String> forcedBiomeKeys,
String scopeKey,
String source
) {
}
private static KMap<String, KList<File>> collectWorldDatapackFoldersByPack(KList<File> fallbackFolders) {
KMap<String, KList<File>> foldersByPack = new KMap<>();
KMap<String, String> mappedWorlds = IrisWorlds.get().getWorlds();

View File

@@ -442,7 +442,9 @@ public class CommandDeveloper implements DirectorExecutor {
orchestrator.setDaemon(true);
try {
orchestrator.start();
Iris.info("Delete-chunk worker dispatched on dedicated thread=" + orchestrator.getName() + " id=" + runId + ".");
if (IrisSettings.get().getGeneral().isDebug()) {
Iris.info("Delete-chunk worker dispatched on dedicated thread=" + orchestrator.getName() + " id=" + runId + ".");
}
} catch (Throwable e) {
ACTIVE_DELETE_CHUNK_WORLDS.remove(worldKey);
sender.sendMessage(C.RED + "Failed to start delete-chunk worker thread. See console.");
@@ -519,7 +521,9 @@ public class CommandDeveloper implements DirectorExecutor {
watchdog.interrupt();
IrisToolbelt.endWorldMaintenance(world, "delete-chunk");
ACTIVE_DELETE_CHUNK_WORLDS.remove(worldKey);
Iris.info("Delete-chunk run closed: id=" + runId + " world=" + world.getName() + " totalMs=" + (System.currentTimeMillis() - runStart));
if (IrisSettings.get().getGeneral().isDebug()) {
Iris.info("Delete-chunk run closed: id=" + runId + " world=" + world.getName() + " totalMs=" + (System.currentTimeMillis() - runStart));
}
}
}
@@ -773,7 +777,9 @@ public class CommandDeveloper implements DirectorExecutor {
) {
phase.set(next);
phaseSince.set(System.currentTimeMillis());
Iris.info("Delete-chunk phase: id=" + runId + " phase=" + next + " world=" + world.getName());
if (IrisSettings.get().getGeneral().isDebug()) {
Iris.info("Delete-chunk phase: id=" + runId + " phase=" + next + " world=" + world.getName());
}
}
private String formatDeleteChunkFailedPreview(List<Position2> failedChunks) {

View File

@@ -30,6 +30,8 @@ import art.arcane.iris.engine.IrisNoisemapPrebakePipeline;
import art.arcane.iris.engine.framework.Engine;
import art.arcane.iris.engine.framework.SeedManager;
import art.arcane.iris.engine.object.*;
import art.arcane.iris.engine.platform.ChunkReplacementListener;
import art.arcane.iris.engine.platform.ChunkReplacementOptions;
import art.arcane.iris.engine.platform.PlatformChunkGenerator;
import art.arcane.volmlib.util.collection.KList;
import art.arcane.volmlib.util.collection.KMap;
@@ -227,6 +229,7 @@ public class CommandStudio implements DirectorExecutor {
sender.sendMessage(C.YELLOW + "Folia fast regen: skipping outer mantle preservation stage.");
}
final String runId = "studio-regen-" + world.getName() + "-" + System.currentTimeMillis();
ParallelRadiusJob job = new ParallelRadiusJob(threadCount, service) {
@Override
@@ -234,7 +237,14 @@ public class CommandStudio implements DirectorExecutor {
if (foliaFastRegen) {
Iris.verbose("Folia fast studio regen skipping mantle delete for " + x + "," + z + ".");
}
plat.injectChunkReplacement(world, x, z, executor);
plat.injectChunkReplacement(
world,
x,
z,
executor,
ChunkReplacementOptions.terrain(runId, IrisSettings.get().getGeneral().isDebug()),
ChunkReplacementListener.NO_OP
);
}
@Override

View File

@@ -161,7 +161,9 @@ public class IrisCreator {
.seed(seed)
.studio(studio)
.create();
if (ServerConfigurator.installDataPacks(true)) {
boolean verifyDataPacks = !studio();
boolean includeExternalDataPacks = !studio();
if (ServerConfigurator.installDataPacks(verifyDataPacks, includeExternalDataPacks)) {
throw new IrisException("Datapacks were missing!");
}

View File

@@ -433,7 +433,11 @@ public class IrisToolbelt {
if (bypassMantleStages) {
worldMaintenanceMantleBypassDepth.computeIfAbsent(name, k -> new AtomicInteger()).incrementAndGet();
}
Iris.info("World maintenance enter: " + name + " reason=" + reason + " depth=" + depth + " bypassMantle=" + bypassMantleStages);
if (IrisSettings.get().getGeneral().isDebug()) {
Iris.info("World maintenance enter: " + name + " reason=" + reason + " depth=" + depth + " bypassMantle=" + bypassMantleStages);
} else {
Iris.verbose("World maintenance enter: " + name + " reason=" + reason + " depth=" + depth + " bypassMantle=" + bypassMantleStages);
}
}
public static void endWorldMaintenance(World world, String reason) {
@@ -463,7 +467,11 @@ public class IrisToolbelt {
}
}
Iris.info("World maintenance exit: " + name + " reason=" + reason + " depth=" + depth + " bypassMantleDepth=" + bypassDepth);
if (IrisSettings.get().getGeneral().isDebug()) {
Iris.info("World maintenance exit: " + name + " reason=" + reason + " depth=" + depth + " bypassMantleDepth=" + bypassDepth);
} else {
Iris.verbose("World maintenance exit: " + name + " reason=" + reason + " depth=" + depth + " bypassMantleDepth=" + bypassDepth);
}
}
public static boolean isWorldMaintenanceActive(World world) {

View File

@@ -307,6 +307,10 @@ public class IrisEngine implements Engine {
return;
}
if (studio) {
return;
}
if (!noisemapPrebakeRunning.compareAndSet(false, true)) {
return;
}
@@ -369,7 +373,7 @@ public class IrisEngine implements Engine {
setupEngine();
J.a(() -> {
synchronized (ServerConfigurator.class) {
ServerConfigurator.installDataPacks(false);
ServerConfigurator.installDataPacks(false, false);
}
});
}

View File

@@ -73,7 +73,7 @@ public interface EngineMode extends Staged {
default void generate(int x, int z, Hunk<BlockData> blocks, Hunk<Biome> biomes, boolean multicore) {
boolean cacheContext = true;
if (J.isFolia()) {
var world = getEngine().getWorld().realWorld();
org.bukkit.World world = getEngine().getWorld().realWorld();
if (world != null && IrisToolbelt.isWorldMaintenanceActive(world)) {
cacheContext = false;
}
@@ -81,7 +81,8 @@ public interface EngineMode extends Staged {
ChunkContext ctx = new ChunkContext(x, z, getComplex(), cacheContext);
IrisContext.getOr(getEngine()).setChunkContext(ctx);
for (EngineStage i : getStages()) {
EngineStage[] stages = getStages().toArray(new EngineStage[0]);
for (EngineStage i : stages) {
i.generate(x, z, blocks, biomes, multicore, ctx);
}
}

View File

@@ -45,6 +45,7 @@ import art.arcane.iris.util.common.scheduling.J;
import org.bukkit.util.BlockVector;
import java.io.IOException;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
@@ -313,10 +314,11 @@ public class MantleObjectComponent extends IrisMantleComponent {
continue;
}
int id = rng.i(0, Integer.MAX_VALUE);
IrisObjectPlacement effectivePlacement = resolveEffectivePlacement(objectPlacement, v);
try {
int result = v.place(xx, -1, zz, writer, objectPlacement, rng, (b, data) -> {
int result = v.place(xx, -1, zz, writer, effectivePlacement, rng, (b, data) -> {
writer.setData(b.getX(), b.getY(), b.getZ(), v.getLoadKey() + "@" + id);
if (objectPlacement.isDolphinTarget() && objectPlacement.isUnderwater() && B.isStorageChest(data)) {
if (effectivePlacement.isDolphinTarget() && effectivePlacement.isUnderwater() && B.isStorageChest(data)) {
writer.setData(b.getX(), b.getY(), b.getZ(), MatterStructurePOI.BURIED_TREASURE);
}
}, null, getData());
@@ -417,11 +419,12 @@ public class MantleObjectComponent extends IrisMantleComponent {
}
int id = rng.i(0, Integer.MAX_VALUE);
IrisObjectPlacement effectivePlacement = resolveEffectivePlacement(objectPlacement, object);
try {
int result = object.place(x, y, z, writer, objectPlacement, rng, (b, data) -> {
int result = object.place(x, y, z, writer, effectivePlacement, rng, (b, data) -> {
writer.setData(b.getX(), b.getY(), b.getZ(), object.getLoadKey() + "@" + id);
if (objectPlacement.isDolphinTarget() && objectPlacement.isUnderwater() && B.isStorageChest(data)) {
if (effectivePlacement.isDolphinTarget() && effectivePlacement.isUnderwater() && B.isStorageChest(data)) {
writer.setData(b.getX(), b.getY(), b.getZ(), MatterStructurePOI.BURIED_TREASURE);
}
}, null, getData());
@@ -458,6 +461,38 @@ public class MantleObjectComponent extends IrisMantleComponent {
return new ObjectPlacementResult(attempts, placed, rejected, nullObjects, errors);
}
/**
 * Chooses the placement configuration to apply for {@code object}.
 * <p>
 * Objects whose load key lives under an "imports/" path are forced onto
 * {@code ObjectPlaceMode.FAST_MIN_STILT} (unless they already use a
 * stilt-family mode) so imported structures anchor to the terrain surface.
 * All other objects keep the configured placement unchanged.
 *
 * @param objectPlacement the configured placement; may be {@code null}
 * @param object          the object being placed; may be {@code null}
 * @return the placement to use: the original instance when no override is
 *         needed, otherwise a copy with the mode overridden
 */
private static IrisObjectPlacement resolveEffectivePlacement(IrisObjectPlacement objectPlacement, IrisObject object) {
    if (objectPlacement == null || object == null) {
        return objectPlacement;
    }
    String loadKey = object.getLoadKey();
    if (loadKey == null || loadKey.isBlank()) {
        return objectPlacement;
    }
    // A single contains("imports/") subsumes the startsWith("imports/") and
    // contains("/imports/") checks previously tested separately (dead conditions).
    // NOTE(review): this also matches keys such as "reimports/x" — confirm that breadth is intended.
    boolean imported = loadKey.toLowerCase(Locale.ROOT).contains("imports/");
    if (!imported) {
        return objectPlacement;
    }
    ObjectPlaceMode mode = objectPlacement.getMode();
    if (mode == ObjectPlaceMode.STILT
            || mode == ObjectPlaceMode.FAST_STILT
            || mode == ObjectPlaceMode.MIN_STILT
            || mode == ObjectPlaceMode.FAST_MIN_STILT
            || mode == ObjectPlaceMode.CENTER_STILT) {
        // Already a stilt-family mode; no override required.
        return objectPlacement;
    }
    // toPlacement(...) produces a copy, so the shared configuration instance is not mutated.
    IrisObjectPlacement effectivePlacement = objectPlacement.toPlacement(loadKey);
    effectivePlacement.setMode(ObjectPlaceMode.FAST_MIN_STILT);
    return effectivePlacement;
}
private int findCaveAnchorY(MantleWriter writer, RNG rng, int x, int z, IrisCaveAnchorMode anchorMode, int anchorScanStep, int objectMinDepthBelowSurface, KMap<Long, KList<Integer>> anchorCache) {
long key = Cache.key(x, z);
KList<Integer> anchors = anchorCache.computeIfAbsent(key, (k) -> scanCaveAnchorColumn(writer, anchorMode, anchorScanStep, objectMinDepthBelowSurface, x, z));

View File

@@ -104,6 +104,9 @@ public class IrisBiome extends IrisRegistrant implements IRare {
private IrisCaveProfile caveProfile = new IrisCaveProfile();
@Desc("Configuration of fluid bodies such as rivers & lakes")
private IrisFluidBodies fluidBodies = new IrisFluidBodies();
@ArrayType(type = IrisExternalDatapackBinding.class, min = 1)
@Desc("Scoped external datapack bindings for this biome")
private KList<IrisExternalDatapackBinding> externalDatapacks = new KList<>();
@MinNumber(1)
@MaxNumber(512)
@Desc("The rarity of this biome (integer)")

View File

@@ -197,55 +197,56 @@ public class IrisEffect {
return;
}
if (sound != null) {
Location part = p.getLocation().clone().add(RNG.r.i(-soundDistance, soundDistance), RNG.r.i(-soundDistance, soundDistance), RNG.r.i(-soundDistance, soundDistance));
J.s(() -> p.playSound(part, getSound(), (float) volume, (float) RNG.r.d(minPitch, maxPitch)));
}
if (particleEffect != null) {
Location part = p.getLocation().clone().add(p.getLocation().getDirection().clone().multiply(RNG.r.i(particleDistance) + particleAway)).clone().add(p.getLocation().getDirection().clone().rotateAroundY(Math.toRadians(90)).multiply(RNG.r.d(-particleDistanceWidth, particleDistanceWidth)));
part.setY(Math.round(g.getHeight(part.getBlockX(), part.getBlockZ())) + 1);
part.add(RNG.r.d(), 0, RNG.r.d());
int offset = p.getWorld().getMinHeight();
if (extra != 0) {
J.s(() -> p.spawnParticle(particleEffect, part.getX(), part.getY() + offset + RNG.r.i(particleOffset),
part.getZ(),
particleCount,
randomAltX ? RNG.r.d(-particleAltX, particleAltX) : particleAltX,
randomAltY ? RNG.r.d(-particleAltY, particleAltY) : particleAltY,
randomAltZ ? RNG.r.d(-particleAltZ, particleAltZ) : particleAltZ,
extra));
} else {
J.s(() -> p.spawnParticle(particleEffect, part.getX(), part.getY() + offset + RNG.r.i(particleOffset), part.getZ(),
particleCount,
randomAltX ? RNG.r.d(-particleAltX, particleAltX) : particleAltX,
randomAltY ? RNG.r.d(-particleAltY, particleAltY) : particleAltY,
randomAltZ ? RNG.r.d(-particleAltZ, particleAltZ) : particleAltZ));
J.runEntity(p, () -> {
if (sound != null) {
Location part = p.getLocation().clone().add(RNG.r.i(-soundDistance, soundDistance), RNG.r.i(-soundDistance, soundDistance), RNG.r.i(-soundDistance, soundDistance));
p.playSound(part, getSound(), (float) volume, (float) RNG.r.d(minPitch, maxPitch));
}
}
if (commandRegistry != null) {
commandRegistry.run(p);
}
if (particleEffect != null) {
Location part = p.getLocation().clone().add(p.getLocation().getDirection().clone().multiply(RNG.r.i(particleDistance) + particleAway)).clone().add(p.getLocation().getDirection().clone().rotateAroundY(Math.toRadians(90)).multiply(RNG.r.d(-particleDistanceWidth, particleDistanceWidth)));
if (potionStrength > -1) {
if (p.hasPotionEffect(getRealType())) {
PotionEffect e = p.getPotionEffect(getRealType());
if (e.getAmplifier() > getPotionStrength()) {
return;
part.setY(Math.round(g.getHeight(part.getBlockX(), part.getBlockZ())) + 1);
part.add(RNG.r.d(), 0, RNG.r.d());
int offset = p.getWorld().getMinHeight();
if (extra != 0) {
p.spawnParticle(particleEffect, part.getX(), part.getY() + offset + RNG.r.i(particleOffset),
part.getZ(),
particleCount,
randomAltX ? RNG.r.d(-particleAltX, particleAltX) : particleAltX,
randomAltY ? RNG.r.d(-particleAltY, particleAltY) : particleAltY,
randomAltZ ? RNG.r.d(-particleAltZ, particleAltZ) : particleAltZ,
extra);
} else {
p.spawnParticle(particleEffect, part.getX(), part.getY() + offset + RNG.r.i(particleOffset), part.getZ(),
particleCount,
randomAltX ? RNG.r.d(-particleAltX, particleAltX) : particleAltX,
randomAltY ? RNG.r.d(-particleAltY, particleAltY) : particleAltY,
randomAltZ ? RNG.r.d(-particleAltZ, particleAltZ) : particleAltZ);
}
}
if (commandRegistry != null) {
commandRegistry.run(p);
}
if (potionStrength > -1) {
if (p.hasPotionEffect(getRealType())) {
PotionEffect e = p.getPotionEffect(getRealType());
if (e != null && e.getAmplifier() > getPotionStrength()) {
return;
}
p.removePotionEffect(getRealType());
}
J.s(() -> p.removePotionEffect(getRealType()));
p.addPotionEffect(new PotionEffect(getRealType(),
RNG.r.i(Math.min(potionTicksMax, potionTicksMin),
Math.max(potionTicksMax, potionTicksMin)),
getPotionStrength(),
true, false, false));
}
J.s(() -> p.addPotionEffect(new PotionEffect(getRealType(),
RNG.r.i(Math.min(potionTicksMax, potionTicksMin),
Math.max(potionTicksMax, potionTicksMin)),
getPotionStrength(),
true, false, false)));
}
});
}
public void apply(Entity p) {
@@ -257,31 +258,32 @@ public class IrisEffect {
return;
}
if (sound != null) {
Location part = p.getLocation().clone().add(RNG.r.i(-soundDistance, soundDistance), RNG.r.i(-soundDistance, soundDistance), RNG.r.i(-soundDistance, soundDistance));
J.s(() -> p.getWorld().playSound(part, getSound(), (float) volume, (float) RNG.r.d(minPitch, maxPitch)));
}
if (particleEffect != null) {
Location part = p.getLocation().clone().add(0, 0.25, 0).add(new Vector(1, 1, 1).multiply(RNG.r.d())).subtract(new Vector(1, 1, 1).multiply(RNG.r.d()));
part.add(RNG.r.d(), 0, RNG.r.d());
int offset = p.getWorld().getMinHeight();
if (extra != 0) {
J.s(() -> p.getWorld().spawnParticle(particleEffect, part.getX(), part.getY() + offset + RNG.r.i(particleOffset),
part.getZ(),
particleCount,
randomAltX ? RNG.r.d(-particleAltX, particleAltX) : particleAltX,
randomAltY ? RNG.r.d(-particleAltY, particleAltY) : particleAltY,
randomAltZ ? RNG.r.d(-particleAltZ, particleAltZ) : particleAltZ,
extra));
} else {
J.s(() -> p.getWorld().spawnParticle(particleEffect, part.getX(), part.getY() + offset + RNG.r.i(particleOffset), part.getZ(),
particleCount,
randomAltX ? RNG.r.d(-particleAltX, particleAltX) : particleAltX,
randomAltY ? RNG.r.d(-particleAltY, particleAltY) : particleAltY,
randomAltZ ? RNG.r.d(-particleAltZ, particleAltZ) : particleAltZ));
J.runEntity(p, () -> {
if (sound != null) {
Location part = p.getLocation().clone().add(RNG.r.i(-soundDistance, soundDistance), RNG.r.i(-soundDistance, soundDistance), RNG.r.i(-soundDistance, soundDistance));
p.getWorld().playSound(part, getSound(), (float) volume, (float) RNG.r.d(minPitch, maxPitch));
}
}
if (particleEffect != null) {
Location part = p.getLocation().clone().add(0, 0.25, 0).add(new Vector(1, 1, 1).multiply(RNG.r.d())).subtract(new Vector(1, 1, 1).multiply(RNG.r.d()));
part.add(RNG.r.d(), 0, RNG.r.d());
int offset = p.getWorld().getMinHeight();
if (extra != 0) {
p.getWorld().spawnParticle(particleEffect, part.getX(), part.getY() + offset + RNG.r.i(particleOffset),
part.getZ(),
particleCount,
randomAltX ? RNG.r.d(-particleAltX, particleAltX) : particleAltX,
randomAltY ? RNG.r.d(-particleAltY, particleAltY) : particleAltY,
randomAltZ ? RNG.r.d(-particleAltZ, particleAltZ) : particleAltZ,
extra);
} else {
p.getWorld().spawnParticle(particleEffect, part.getX(), part.getY() + offset + RNG.r.i(particleOffset), part.getZ(),
particleCount,
randomAltX ? RNG.r.d(-particleAltX, particleAltX) : particleAltX,
randomAltY ? RNG.r.d(-particleAltY, particleAltY) : particleAltY,
randomAltZ ? RNG.r.d(-particleAltZ, particleAltZ) : particleAltZ);
}
}
});
}
}

View File

@@ -0,0 +1,29 @@
package art.arcane.iris.engine.object;
import art.arcane.iris.engine.object.annotations.Desc;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
/**
 * Scoped binding that attaches a dimension-level external datapack id to a
 * biome or region, optionally overriding the dimension defaults for that scope.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
@Accessors(chain = true)
@Desc("Scoped binding to a dimension external datapack id")
public class IrisExternalDatapackBinding {
    @Desc("Target external datapack id defined on the dimension")
    private String id = "";

    @Desc("Enable or disable this scoped binding")
    private boolean enabled = true;

    // Boxed Boolean on purpose: null means "inherit the dimension default".
    @Desc("Override replaceVanilla behavior for this scoped binding (null keeps dimension default)")
    private Boolean replaceVanillaOverride = null;

    @Desc("Include child biomes recursively when collecting scoped biome boundaries")
    private boolean includeChildren = true;

    // Boxed Boolean on purpose: null means "inherit the dimension default".
    @Desc("Override required behavior for this scoped binding (null keeps dimension default)")
    private Boolean requiredOverride = null;
}

View File

@@ -821,8 +821,11 @@ public class IrisObject extends IrisRegistrant {
if (yv >= 0 && config.isBottom()) {
y += Math.floorDiv(h, 2);
if (!config.isForcePlace()) {
bail = shouldBailForCarvingAnchor(placer, config, x, y, z);
CarvingMode carvingMode = config.getCarvingSupport();
if (!config.isForcePlace() && !carvingMode.equals(CarvingMode.CARVING_ONLY)) {
if (shouldBailForCarvingAnchor(placer, config, x, y, z)) {
bail = true;
}
}
}

View File

@@ -29,6 +29,7 @@ import art.arcane.iris.util.common.data.DataProvider;
import art.arcane.volmlib.util.data.WeightedRandom;
import art.arcane.volmlib.util.math.RNG;
import art.arcane.iris.util.project.noise.CNG;
import com.google.gson.annotations.SerializedName;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.EqualsAndHashCode;
@@ -150,6 +151,7 @@ public class IrisObjectPlacement {
@Desc("List of objects to this object is forbidden to collied with")
private KList<String> forbiddenCollisions = new KList<>();
@Desc("Ignore any placement restrictions for this object")
@SerializedName(value = "forcePlace", alternate = {"force"})
private boolean forcePlace = false;
private transient AtomicCache<TableCache> cache = new AtomicCache<>();
@@ -178,6 +180,7 @@ public class IrisObjectPlacement {
p.setClamp(clamp);
p.setRotation(rotation);
p.setLoot(loot);
p.setForcePlace(forcePlace);
return p;
}

View File

@@ -116,6 +116,9 @@ public class IrisRegion extends IrisRegistrant implements IRare {
private IrisCaveProfile caveProfile = new IrisCaveProfile();
@Desc("Configuration of fluid bodies such as rivers & lakes")
private IrisFluidBodies fluidBodies = new IrisFluidBodies();
@ArrayType(type = IrisExternalDatapackBinding.class, min = 1)
@Desc("Scoped external datapack bindings for this region")
private KList<IrisExternalDatapackBinding> externalDatapacks = new KList<>();
@RegistryListResource(IrisBiome.class)
@Required
@ArrayType(min = 1, type = String.class)

View File

@@ -34,10 +34,14 @@ import art.arcane.iris.engine.object.IrisDimension;
import art.arcane.iris.engine.object.IrisWorld;
import art.arcane.iris.engine.object.StudioMode;
import art.arcane.iris.engine.platform.studio.StudioGenerator;
import art.arcane.iris.util.project.matter.TileWrapper;
import art.arcane.volmlib.util.collection.KList;
import art.arcane.iris.util.project.hunk.Hunk;
import art.arcane.iris.util.project.hunk.view.ChunkDataHunkHolder;
import art.arcane.volmlib.util.io.ReactiveFolder;
import art.arcane.volmlib.util.mantle.flag.MantleFlag;
import art.arcane.volmlib.util.mantle.runtime.MantleChunk;
import art.arcane.volmlib.util.matter.Matter;
import art.arcane.volmlib.util.scheduling.ChronoLatch;
import art.arcane.iris.util.common.scheduling.J;
import art.arcane.volmlib.util.scheduling.Looper;
@@ -47,6 +51,7 @@ import lombok.EqualsAndHashCode;
import lombok.Setter;
import org.bukkit.*;
import org.bukkit.block.Biome;
import org.bukkit.block.data.BlockData;
import org.bukkit.entity.Entity;
import org.bukkit.entity.Player;
import org.bukkit.event.EventHandler;
@@ -62,11 +67,13 @@ import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.util.List;
import java.util.Objects;
import java.util.Random;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReentrantLock;
@EqualsAndHashCode(callSuper = true)
@@ -205,15 +212,25 @@ public class BukkitChunkGenerator extends ChunkGenerator implements PlatformChun
}
@Override
public void injectChunkReplacement(World world, int x, int z, Executor syncExecutor) {
public void injectChunkReplacement(
World world,
int x,
int z,
Executor syncExecutor,
ChunkReplacementOptions options,
ChunkReplacementListener listener
) {
boolean acquired = false;
String phase = "start";
ChunkReplacementOptions effectiveOptions = Objects.requireNonNull(options, "options");
ChunkReplacementListener effectiveListener = Objects.requireNonNull(listener, "listener");
AtomicReference<String> phaseRef = new AtomicReference<>("start");
try {
phase = "acquire-load-lock";
setChunkReplacementPhase(phaseRef, effectiveListener, "acquire-load-lock", x, z);
long acquireStart = System.currentTimeMillis();
while (!loadLock.tryAcquire(5, TimeUnit.SECONDS)) {
Iris.warn("Chunk replacement waiting for load lock at " + x + "," + z
+ " for " + (System.currentTimeMillis() - acquireStart) + "ms.");
effectiveListener.onPhase(phaseRef.get(), x, z, System.currentTimeMillis());
}
acquired = true;
long acquireWait = System.currentTimeMillis() - acquireStart;
@@ -223,7 +240,12 @@ public class BukkitChunkGenerator extends ChunkGenerator implements PlatformChun
TerrainChunk tc = TerrainChunk.create(world);
this.world.bind(world);
phase = "engine-generate";
if (effectiveOptions.isFullMode()) {
setChunkReplacementPhase(phaseRef, effectiveListener, "reset-mantle", x, z);
resetMantleChunkForFullRegen(x, z);
}
setChunkReplacementPhase(phaseRef, effectiveListener, "generate", x, z);
long generateStart = System.currentTimeMillis();
boolean useMulticore = IrisSettings.get().getGenerator().useMulticore && !J.isFolia();
AtomicBoolean generateDone = new AtomicBoolean(false);
@@ -242,6 +264,7 @@ public class BukkitChunkGenerator extends ChunkGenerator implements PlatformChun
+ " for " + (System.currentTimeMillis() - generationWatchdogStart.get()) + "ms"
+ " thread=" + generateThread.getName()
+ " state=" + generateThread.getState());
effectiveListener.onPhase(phaseRef.get(), x, z, System.currentTimeMillis());
}
});
try {
@@ -255,12 +278,13 @@ public class BukkitChunkGenerator extends ChunkGenerator implements PlatformChun
}
if (J.isFolia()) {
phase = "folia-run-region";
setChunkReplacementPhase(phaseRef, effectiveListener, "folia-run-region", x, z);
CountDownLatch latch = new CountDownLatch(1);
Throwable[] failure = new Throwable[1];
long regionScheduleStart = System.currentTimeMillis();
if (!J.runRegion(world, x, z, () -> {
try {
setChunkReplacementPhase(phaseRef, effectiveListener, "apply-terrain", x, z);
phaseUnsafeSet("folia-region-run", x, z);
Chunk c = world.getChunkAt(x, z);
Iris.tickets.addTicket(c);
@@ -288,7 +312,15 @@ public class BukkitChunkGenerator extends ChunkGenerator implements PlatformChun
}
}
if (effectiveOptions.isFullMode()) {
setChunkReplacementPhase(phaseRef, effectiveListener, "overlay", x, z);
OverlayMetrics overlayMetrics = applyMantleOverlay(c, world, x, z);
effectiveListener.onOverlay(x, z, overlayMetrics.appliedBlocks(), overlayMetrics.objectKeys(), System.currentTimeMillis());
}
setChunkReplacementPhase(phaseRef, effectiveListener, "structures", x, z);
INMS.get().placeStructures(c);
setChunkReplacementPhase(phaseRef, effectiveListener, "chunk-load-callback", x, z);
engine.getWorldManager().onChunkLoad(c, true);
} finally {
Iris.tickets.removeTicket(c);
@@ -310,16 +342,18 @@ public class BukkitChunkGenerator extends ChunkGenerator implements PlatformChun
while (!latch.await(5, TimeUnit.SECONDS)) {
Iris.warn("Chunk replacement waiting on region task at " + x + "," + z
+ " for " + (System.currentTimeMillis() - regionWaitStart) + "ms.");
effectiveListener.onPhase(phaseRef.get(), x, z, System.currentTimeMillis());
}
long regionWaitTook = System.currentTimeMillis() - regionWaitStart;
if (regionWaitTook >= 5000L) {
Iris.warn("Chunk replacement region task completed after " + regionWaitTook + "ms at " + x + "," + z + ".");
}
if (failure[0] != null) {
effectiveListener.onFailurePhase(phaseRef.get(), x, z, failure[0], System.currentTimeMillis());
throw failure[0];
}
} else {
phase = "paperlib-async-load";
setChunkReplacementPhase(phaseRef, effectiveListener, "paperlib-async-load", x, z);
long loadChunkStart = System.currentTimeMillis();
Chunk c = PaperLib.getChunkAtAsync(world, x, z).get();
long loadChunkTook = System.currentTimeMillis() - loadChunkStart;
@@ -327,53 +361,66 @@ public class BukkitChunkGenerator extends ChunkGenerator implements PlatformChun
Iris.warn("Chunk replacement chunk load took " + loadChunkTook + "ms at " + x + "," + z + ".");
}
phase = "non-folia-apply";
setChunkReplacementPhase(phaseRef, effectiveListener, "apply-terrain", x, z);
Iris.tickets.addTicket(c);
CompletableFuture.runAsync(() -> {
for (Entity ee : c.getEntities()) {
if (ee instanceof Player) {
continue;
try {
CompletableFuture.runAsync(() -> {
for (Entity ee : c.getEntities()) {
if (ee instanceof Player) {
continue;
}
ee.remove();
}
}, syncExecutor).get();
ee.remove();
}
}, syncExecutor).get();
KList<CompletableFuture<?>> futures = new KList<>(1 + getEngine().getHeight() >> 4);
for (int i = getEngine().getHeight() >> 4; i >= 0; i--) {
int finalI = i << 4;
futures.add(CompletableFuture.runAsync(() -> {
for (int xx = 0; xx < 16; xx++) {
for (int yy = 0; yy < 16; yy++) {
for (int zz = 0; zz < 16; zz++) {
if (yy + finalI >= engine.getHeight() || yy + finalI < 0) {
continue;
KList<CompletableFuture<?>> futures = new KList<>(1 + getEngine().getHeight() >> 4);
for (int i = getEngine().getHeight() >> 4; i >= 0; i--) {
int finalI = i << 4;
futures.add(CompletableFuture.runAsync(() -> {
for (int xx = 0; xx < 16; xx++) {
for (int yy = 0; yy < 16; yy++) {
for (int zz = 0; zz < 16; zz++) {
if (yy + finalI >= engine.getHeight() || yy + finalI < 0) {
continue;
}
int y = yy + finalI + world.getMinHeight();
c.getBlock(xx, y, zz).setBlockData(tc.getBlockData(xx, y, zz), false);
}
int y = yy + finalI + world.getMinHeight();
c.getBlock(xx, y, zz).setBlockData(tc.getBlockData(xx, y, zz), false);
}
}
}
}, syncExecutor));
}, syncExecutor));
}
CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).get();
if (effectiveOptions.isFullMode()) {
CompletableFuture.runAsync(() -> {
setChunkReplacementPhase(phaseRef, effectiveListener, "overlay", x, z);
OverlayMetrics overlayMetrics = applyMantleOverlay(c, world, x, z);
effectiveListener.onOverlay(x, z, overlayMetrics.appliedBlocks(), overlayMetrics.objectKeys(), System.currentTimeMillis());
}, syncExecutor).get();
}
CompletableFuture.runAsync(() -> {
setChunkReplacementPhase(phaseRef, effectiveListener, "structures", x, z);
INMS.get().placeStructures(c);
}, syncExecutor).get();
CompletableFuture.runAsync(() -> {
setChunkReplacementPhase(phaseRef, effectiveListener, "chunk-load-callback", x, z);
engine.getWorldManager().onChunkLoad(c, true);
}, syncExecutor).get();
} finally {
Iris.tickets.removeTicket(c);
}
futures.add(CompletableFuture.runAsync(() -> INMS.get().placeStructures(c), syncExecutor));
CompletableFuture.allOf(futures.toArray(new CompletableFuture[0]))
.thenRunAsync(() -> {
Iris.tickets.removeTicket(c);
engine.getWorldManager().onChunkLoad(c, true);
}, syncExecutor)
.get();
}
Iris.debug("Regenerated " + x + " " + z);
} catch (Throwable e) {
effectiveListener.onFailurePhase(phaseRef.get(), x, z, e, System.currentTimeMillis());
Iris.error("======================================");
Iris.error("Chunk replacement failed at phase=" + phase + " chunk=" + x + "," + z);
Iris.error("Chunk replacement failed at phase=" + phaseRef.get() + " chunk=" + x + "," + z);
e.printStackTrace();
Iris.reportErrorChunk(x, z, e, "CHUNK");
Iris.error("======================================");
throw new IllegalStateException("Chunk replacement failed at phase=" + phase + " chunk=" + x + "," + z, e);
throw new IllegalStateException("Chunk replacement failed at phase=" + phaseRef.get() + " chunk=" + x + "," + z, e);
} finally {
if (acquired) {
loadLock.release();
@@ -385,6 +432,63 @@ public class BukkitChunkGenerator extends ChunkGenerator implements PlatformChun
Iris.verbose("Chunk replacement phase=" + phase + " chunk=" + x + "," + z);
}
/**
 * Updates the shared phase marker and notifies the listener of the transition.
 *
 * @param phaseRef shared holder read by failure/logging paths to report the
 *                 most recently entered phase
 * @param listener receiver of the phase event
 * @param phase    name of the phase being entered
 * @param x        chunk X coordinate
 * @param z        chunk Z coordinate
 */
private static void setChunkReplacementPhase(
        AtomicReference<String> phaseRef,
        ChunkReplacementListener listener,
        String phase,
        int x,
        int z
) {
    // Set the marker before notifying so error handlers always see the latest phase.
    phaseRef.set(phase);
    listener.onPhase(phase, x, z, System.currentTimeMillis());
}
/**
 * Clears per-chunk mantle state so a full regeneration re-plans the chunk
 * from scratch.
 * <p>
 * Deletes the block, object-key (String) and tile slices for the chunk and
 * clears the PLANNED/OBJECT/REAL flags. The chunk is pinned with {@code use()}
 * and released in the finally block so the mantle cannot drop it mid-reset.
 */
private void resetMantleChunkForFullRegen(int chunkX, int chunkZ) {
    MantleChunk<Matter> mantleChunk = getEngine().getMantle().getMantle().getChunk(chunkX, chunkZ).use();
    try {
        mantleChunk.deleteSlices(BlockData.class);
        mantleChunk.deleteSlices(String.class);
        mantleChunk.deleteSlices(TileWrapper.class);
        // NOTE(review): clearing these flags presumably forces the mantle to re-run the
        // planning/object stages on the next generation pass — confirm against MantleFlag semantics.
        mantleChunk.flag(MantleFlag.PLANNED, false);
        mantleChunk.flag(MantleFlag.OBJECT, false);
        mantleChunk.flag(MantleFlag.REAL, false);
    } finally {
        mantleChunk.release();
    }
}
/**
 * Re-applies mantle-held block data onto a freshly generated chunk and counts
 * what was applied.
 *
 * @param chunk  the live Bukkit chunk to write into
 * @param world  the world the chunk belongs to (used for height bounds)
 * @param chunkX chunk X coordinate in the mantle
 * @param chunkZ chunk Z coordinate in the mantle
 * @return metrics with the number of blocks written and the number of
 *         object-key markers observed in the mantle for this chunk
 */
private OverlayMetrics applyMantleOverlay(Chunk chunk, World world, int chunkX, int chunkZ) {
    int minWorldY = world.getMinHeight();
    int maxWorldY = world.getMaxHeight();
    AtomicInteger appliedBlocks = new AtomicInteger();
    AtomicInteger objectKeys = new AtomicInteger();
    MantleChunk<Matter> mantleChunk = getEngine().getMantle().getMantle().getChunk(chunkX, chunkZ).use();
    try {
        // Count object markers: "loadKey@id" strings written by object placement.
        mantleChunk.iterate(String.class, (x, y, z, value) -> {
            if (value != null && !value.isEmpty() && value.indexOf('@') > 0) {
                objectKeys.incrementAndGet();
            }
        });
        mantleChunk.iterate(BlockData.class, (x, y, z, blockData) -> {
            if (blockData == null) {
                return;
            }
            // Mantle y is stored relative to the world minimum; translate and bounds-check.
            int worldY = y + minWorldY;
            if (worldY < minWorldY || worldY >= maxWorldY) {
                return;
            }
            // x/z masked to chunk-local coordinates.
            // NOTE(review): assumes iterate may yield world-relative x/z — confirm the
            // MantleChunk.iterate coordinate contract.
            chunk.getBlock(x & 15, worldY, z & 15).setBlockData(blockData, false);
            appliedBlocks.incrementAndGet();
        });
    } finally {
        mantleChunk.release();
    }
    return new OverlayMetrics(appliedBlocks.get(), objectKeys.get());
}
// Value pair returned by applyMantleOverlay: blocks written + object markers seen.
private record OverlayMetrics(int appliedBlocks, int objectKeys) {
}
private Engine getEngine(WorldInfo world) {
if (setup.get()) {
return getEngine();

View File

@@ -0,0 +1,33 @@
/*
* Iris is a World Generator for Minecraft Bukkit Servers
* Copyright (c) 2022 Arcane Arts (Volmit Software)
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package art.arcane.iris.engine.platform;
/**
 * Callback hooks invoked during a chunk replacement run.
 * <p>
 * All methods have empty default bodies so implementors only override the
 * events they care about; {@link #NO_OP} is a shared do-nothing instance.
 */
public interface ChunkReplacementListener {
    /** Shared listener that ignores every event. */
    ChunkReplacementListener NO_OP = new ChunkReplacementListener() {
    };

    /**
     * Called whenever the replacement enters a new phase, and repeated while a
     * phase stalls waiting on locks or region tasks.
     *
     * @param phase       name of the phase just entered (or still active)
     * @param chunkX      chunk X coordinate
     * @param chunkZ      chunk Z coordinate
     * @param timestampMs wall-clock time of the event in epoch milliseconds
     */
    default void onPhase(String phase, int chunkX, int chunkZ, long timestampMs) {
    }

    /**
     * Called after the mantle overlay has been applied (full mode only).
     *
     * @param appliedBlocks number of mantle block entries written into the chunk
     * @param objectKeys    number of object-key markers observed in the mantle
     * @param timestampMs   wall-clock time of the event in epoch milliseconds
     */
    default void onOverlay(int chunkX, int chunkZ, int appliedBlocks, int objectKeys, long timestampMs) {
    }

    /**
     * Called when the replacement fails, before the error is rethrown.
     *
     * @param phase the phase that was active when the failure occurred
     * @param error the failure cause
     */
    default void onFailurePhase(String phase, int chunkX, int chunkZ, Throwable error, long timestampMs) {
    }
}

View File

@@ -0,0 +1,51 @@
/*
* Iris is a World Generator for Minecraft Bukkit Servers
* Copyright (c) 2022 Arcane Arts (Volmit Software)
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
package art.arcane.iris.engine.platform;
/**
 * Immutable options describing how a single chunk-replacement run behaves.
 * <p>
 * Instances are created through the {@link #terrain(String, boolean)} and
 * {@link #full(String, boolean)} factories; a {@code null} run id is
 * normalized to {@code "unknown"}.
 */
public final class ChunkReplacementOptions {
    // Fallback run id used when callers pass null.
    private static final String UNKNOWN_RUN_ID = "unknown";

    private final String runId;
    private final boolean fullMode;
    private final boolean diagnostics;

    private ChunkReplacementOptions(String runId, boolean fullMode, boolean diagnostics) {
        this.runId = runId != null ? runId : UNKNOWN_RUN_ID;
        this.fullMode = fullMode;
        this.diagnostics = diagnostics;
    }

    /** Creates options for a terrain-only replacement (mantle reset/overlay skipped). */
    public static ChunkReplacementOptions terrain(String runId, boolean diagnostics) {
        return new ChunkReplacementOptions(runId, false, diagnostics);
    }

    /** Creates options for a full replacement including mantle reset and overlay. */
    public static ChunkReplacementOptions full(String runId, boolean diagnostics) {
        return new ChunkReplacementOptions(runId, true, diagnostics);
    }

    /** @return the identifier of the run this replacement belongs to (never null) */
    public String runId() {
        return runId;
    }

    /** @return whether the full pipeline (mantle reset + overlay) should run */
    public boolean isFullMode() {
        return fullMode;
    }

    /** @return whether diagnostic reporting was requested */
    public boolean diagnostics() {
        return diagnostics;
    }
}

View File

@@ -42,7 +42,14 @@ public interface PlatformChunkGenerator extends Hotloadable, DataProvider {
@NotNull
EngineTarget getTarget();
void injectChunkReplacement(World world, int x, int z, Executor syncExecutor);
void injectChunkReplacement(
World world,
int x,
int z,
Executor syncExecutor,
ChunkReplacementOptions options,
ChunkReplacementListener listener
);
void close();

View File

@@ -0,0 +1,112 @@
package art.arcane.iris.util.common.director.specialhandlers;
import art.arcane.iris.core.ExternalDataPackPipeline;
import art.arcane.volmlib.util.collection.KList;
import art.arcane.iris.util.common.director.DirectorParameterHandler;
import art.arcane.volmlib.util.director.exceptions.DirectorParsingException;
import java.util.LinkedHashSet;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
/**
 * Tab-completion and parsing handler for comma-separated external datapack ids
 * and structure ids.
 * <p>
 * Possibilities are sourced from {@code ExternalDataPackPipeline}'s snapshot of
 * locate-structure ids; parsing only validates that the input is non-blank.
 */
public class ExternalDatapackLocateHandler implements DirectorParameterHandler<String> {
    /**
     * Collects every known datapack id plus every structure id it exposes,
     * preserving first-seen order and dropping null/blank entries.
     */
    @Override
    public KList<String> getPossibilities() {
        LinkedHashSet<String> tokens = new LinkedHashSet<>();
        Map<String, Set<String>> locateById = ExternalDataPackPipeline.snapshotLocateStructuresById();
        for (Map.Entry<String, Set<String>> entry : locateById.entrySet()) {
            if (entry == null) {
                continue;
            }
            String id = entry.getKey();
            if (id != null && !id.isBlank()) {
                tokens.add(id);
            }
            Set<String> structures = entry.getValue();
            if (structures == null || structures.isEmpty()) {
                continue;
            }
            for (String structure : structures) {
                if (structure != null && !structure.isBlank()) {
                    tokens.add(structure);
                }
            }
        }
        KList<String> possibilities = new KList<>();
        // NOTE(review): assumes KList.add(Collection) appends all elements
        // (addAll semantics) — confirm against the KList API.
        possibilities.add(tokens);
        return possibilities;
    }

    /**
     * Completes the LAST comma-separated token of {@code input}, re-prefixing
     * the already-completed tokens onto each suggestion.
     */
    @Override
    public KList<String> getPossibilities(String input) {
        String rawInput = input == null ? "" : input;
        // limit -1 keeps trailing empty tokens, so "a," completes an empty last token.
        String[] split = rawInput.split(",", -1);
        String partial = split.length == 0 ? "" : split[split.length - 1].trim().toLowerCase(Locale.ROOT);
        StringBuilder prefixBuilder = new StringBuilder();
        if (split.length > 1) {
            // Rebuild the completed tokens (all but the last), skipping blanks.
            for (int index = 0; index < split.length - 1; index++) {
                String value = split[index] == null ? "" : split[index].trim();
                if (value.isBlank()) {
                    continue;
                }
                if (!prefixBuilder.isEmpty()) {
                    prefixBuilder.append(',');
                }
                prefixBuilder.append(value);
            }
        }
        String prefix = prefixBuilder.toString();
        LinkedHashSet<String> completions = new LinkedHashSet<>();
        for (String possibility : getPossibilities()) {
            if (possibility == null || possibility.isBlank()) {
                continue;
            }
            // Case-insensitive prefix match against the partial last token.
            String normalized = possibility.toLowerCase(Locale.ROOT);
            if (!partial.isBlank() && !normalized.startsWith(partial)) {
                continue;
            }
            if (prefix.isBlank()) {
                completions.add(possibility);
            } else {
                completions.add(prefix + "," + possibility);
            }
        }
        KList<String> results = new KList<>();
        // NOTE(review): same KList.add(Collection) assumption as above.
        results.add(completions);
        return results;
    }

    /** Identity formatting; null becomes the empty string. */
    @Override
    public String toString(String value) {
        return value == null ? "" : value;
    }

    /**
     * Accepts any non-blank input as-is (trimmed); validation of the ids
     * themselves happens downstream.
     *
     * @throws DirectorParsingException when the input is null or blank
     */
    @Override
    public String parse(String in, boolean force) throws DirectorParsingException {
        if (in == null || in.trim().isBlank()) {
            throw new DirectorParsingException("You must provide at least one external datapack id or structure id.");
        }
        return in.trim();
    }

    /** Handles plain String parameters only. */
    @Override
    public boolean supports(Class<?> type) {
        return type.equals(String.class);
    }

    /** Picks a random known id, or a placeholder when nothing is registered. */
    @Override
    public String getRandomDefault() {
        KList<String> possibilities = getPossibilities();
        String random = possibilities.getRandom();
        return random == null ? "external-datapack-id" : random;
    }
}

View File

@@ -205,13 +205,12 @@ public class CustomBiomeSource extends BiomeSource {
int blockZ = z << 2;
int blockY = y << 2;
int worldMinHeight = engine.getWorld().minHeight();
int surfaceInternalY = engine.getComplex().getHeightStream().get(blockX, blockZ).intValue();
int surfaceWorldY = surfaceInternalY + worldMinHeight;
int caveSwitchWorldY = Math.min(-8, worldMinHeight + 40);
boolean deepUnderground = blockY <= caveSwitchWorldY;
boolean belowSurface = blockY <= surfaceWorldY - 8;
boolean underground = deepUnderground && belowSurface;
int internalY = blockY - worldMinHeight;
int surfaceInternalY = engine.getComplex().getHeightStream().get(blockX, blockZ).intValue();
int caveSwitchInternalY = Math.max(-8 - worldMinHeight, 40);
boolean deepUnderground = internalY <= caveSwitchInternalY;
boolean belowSurface = internalY <= surfaceInternalY - 8;
boolean underground = deepUnderground && belowSurface;
IrisBiome irisBiome = underground
? engine.getCaveBiome(blockX, internalY, blockZ)
: engine.getComplex().getTrueBiomeStream().get(blockX, blockZ);

View File

@@ -3,6 +3,7 @@ package art.arcane.iris.core.nms.v1_21_R7;
import com.mojang.datafixers.util.Pair;
import com.mojang.serialization.MapCodec;
import art.arcane.iris.Iris;
import art.arcane.iris.core.ExternalDataPackPipeline;
import art.arcane.iris.core.IrisSettings;
import art.arcane.iris.engine.framework.Engine;
import art.arcane.iris.util.common.reflect.WrappedField;
@@ -49,6 +50,7 @@ import java.util.function.Supplier;
public class IrisChunkGenerator extends CustomChunkGenerator {
private static final WrappedField<ChunkGenerator, BiomeSource> BIOME_SOURCE;
private static final WrappedReturningMethod<Heightmap, Object> SET_HEIGHT;
private static final int EXTERNAL_FOUNDATION_MAX_DEPTH = 96;
private final ChunkGenerator delegate;
private final Engine engine;
private volatile Registry<Structure> cachedStructureRegistry;
@@ -199,6 +201,7 @@ public class IrisChunkGenerator extends CustomChunkGenerator {
List<StructureStart> starts = new ArrayList<>(structureManager.startsForStructure(chunkAccess.getPos(), structure -> true));
starts.sort(Comparator.comparingInt(start -> structureOrder.getOrDefault(start.getStructure(), Integer.MAX_VALUE)));
Set<String> externalLocateStructures = ExternalDataPackPipeline.snapshotLocateStructureKeys();
int seededStructureIndex = Integer.MIN_VALUE;
for (int j = 0; j < starts.size(); j++) {
@@ -210,10 +213,19 @@ public class IrisChunkGenerator extends CustomChunkGenerator {
seededStructureIndex = structureIndex;
}
Supplier<String> supplier = () -> structureRegistry.getResourceKey(structure).map(Object::toString).orElseGet(structure::toString);
String structureKey = supplier.get().toLowerCase(Locale.ROOT);
boolean isExternalLocateStructure = externalLocateStructures.contains(structureKey);
BitSet[] beforeSolidColumns = null;
if (isExternalLocateStructure) {
beforeSolidColumns = snapshotChunkSolidColumns(level, chunkAccess);
}
try {
level.setCurrentlyGenerating(supplier);
start.placeInChunk(level, structureManager, this, random, getWritableArea(chunkAccess), chunkAccess.getPos());
if (isExternalLocateStructure && beforeSolidColumns != null) {
applyExternalStructureFoundations(level, chunkAccess, beforeSolidColumns, EXTERNAL_FOUNDATION_MAX_DEPTH);
}
} catch (Exception exception) {
CrashReport crashReport = CrashReport.forThrowable(exception, "Feature placement");
CrashReportCategory category = crashReport.addCategory("Feature");
@@ -235,6 +247,120 @@ public class IrisChunkGenerator extends CustomChunkGenerator {
return new BoundingBox(minX, minY, minZ, minX + 15, maxY, minZ + 15);
}
/**
 * Records which Y levels of every block column in the chunk currently hold a
 * foundation-solid block (see {@code isFoundationSolid}). The result is indexed
 * by {@code (localX << 4) | localZ}; bit {@code y - minY} is set when the block
 * at world height {@code y} is solid. Returns an empty array when the level has
 * no vertical span.
 */
private static BitSet[] snapshotChunkSolidColumns(WorldGenLevel level, ChunkAccess chunkAccess) {
    int floorY = level.getMinY();
    int ceilingY = level.getMaxY();
    int height = ceilingY - floorY;
    if (height <= 0) {
        return new BitSet[0];
    }
    ChunkPos pos = chunkAccess.getPos();
    int baseX = pos.getMinBlockX();
    int baseZ = pos.getMinBlockZ();
    BitSet[] columns = new BitSet[16 * 16];
    BlockPos.MutableBlockPos cursor = new BlockPos.MutableBlockPos();
    for (int dx = 0; dx < 16; dx++) {
        for (int dz = 0; dz < 16; dz++) {
            BitSet solids = new BitSet(height);
            int worldX = baseX + dx;
            int worldZ = baseZ + dz;
            for (int y = floorY; y < ceilingY; y++) {
                cursor.set(worldX, y, worldZ);
                if (isFoundationSolid(level.getBlockState(cursor))) {
                    solids.set(y - floorY);
                }
            }
            columns[(dx << 4) | dz] = solids;
        }
    }
    return columns;
}
/**
 * After an external structure is placed, anchors it to the ground: for each
 * column, finds the lowest block that is solid now but was not solid in the
 * pre-placement snapshot, then clones that block state downward (setBlock flag
 * 2) until an existing solid block is hit or {@code maxDepth} blocks were
 * filled.
 *
 * Fix: the original re-read the block state at {@code lowestNewSolidY} and
 * re-checked {@code isFoundationSolid} after the scan loop had already
 * established both — with no intervening world writes, that second read and
 * guard were redundant dead code. The state is now captured during the scan.
 *
 * @param beforeSolidColumns per-column solidity snapshot from
 *                           {@code snapshotChunkSolidColumns}, indexed by
 *                           {@code (localX << 4) | localZ}
 * @param maxDepth           maximum number of blocks to fill below each new
 *                           foundation block; non-positive disables filling
 */
private static void applyExternalStructureFoundations(
    WorldGenLevel level,
    ChunkAccess chunkAccess,
    BitSet[] beforeSolidColumns,
    int maxDepth
) {
    if (beforeSolidColumns == null || beforeSolidColumns.length == 0 || maxDepth <= 0) {
        return;
    }
    int minY = level.getMinY();
    int maxY = level.getMaxY();
    if (maxY - minY <= 0) {
        return;
    }
    ChunkPos chunkPos = chunkAccess.getPos();
    int minX = chunkPos.getMinBlockX();
    int minZ = chunkPos.getMinBlockZ();
    BlockPos.MutableBlockPos mutablePos = new BlockPos.MutableBlockPos();
    for (int localX = 0; localX < 16; localX++) {
        for (int localZ = 0; localZ < 16; localZ++) {
            BitSet before = beforeSolidColumns[(localX << 4) | localZ];
            if (before == null) {
                continue;
            }
            int worldX = minX + localX;
            int worldZ = minZ + localZ;
            // Scan bottom-up for the lowest solid block the structure introduced,
            // capturing its state to reuse as the fill material.
            int lowestNewSolidY = Integer.MIN_VALUE;
            BlockState foundationState = null;
            for (int y = minY; y < maxY; y++) {
                mutablePos.set(worldX, y, worldZ);
                BlockState state = level.getBlockState(mutablePos);
                if (isFoundationSolid(state) && !before.get(y - minY)) {
                    lowestNewSolidY = y;
                    foundationState = state;
                    break;
                }
            }
            if (foundationState == null) {
                continue;
            }
            // Fill downward until we meet existing solid ground or run out of budget.
            int depth = 0;
            for (int y = lowestNewSolidY - 1; y >= minY && depth < maxDepth; y--) {
                mutablePos.set(worldX, y, worldZ);
                if (isFoundationSolid(level.getBlockState(mutablePos))) {
                    break;
                }
                level.setBlock(mutablePos, foundationState, 2);
                depth++;
            }
        }
    }
}
/**
 * True when the state counts as solid ground for foundation purposes: non-null,
 * not air, no fluid, and opaque per the MOTION_BLOCKING_NO_LEAVES heightmap rule.
 */
private static boolean isFoundationSolid(BlockState state) {
    return state != null
        && !state.isAir()
        && state.getFluidState().isEmpty()
        && Heightmap.Types.MOTION_BLOCKING_NO_LEAVES.isOpaque().test(state);
}
private Map<Structure, Integer> getStructureOrder(Registry<Structure> structureRegistry) {
Map<Structure, Integer> localOrder = cachedStructureOrder;
Registry<Structure> localRegistry = cachedStructureRegistry;

View File

@@ -832,7 +832,10 @@ public class NMSBinding implements INMSBinding {
public void placeStructures(Chunk chunk) {
var craft = ((CraftChunk) chunk);
var level = craft.getCraftWorld().getHandle();
var access = ((CraftChunk) chunk).getHandle(ChunkStatus.FULL);
var access = craft.getHandle(ChunkStatus.FEATURES);
if (access instanceof LevelChunk) {
return;
}
level.getChunkSource().getGenerator().applyBiomeDecoration(level, access, level.structureManager());
}