Merge branch 'master' into dev/1.21.11

Zoe Gidiere
2025-12-18 20:07:58 -07:00
49 changed files with 992 additions and 168 deletions

Jenkinsfile

@@ -0,0 +1,149 @@
pipeline {
agent any
tools {
jdk "Temurin Java 21"
}
triggers {
githubPush()
}
environment {
DISCORD_WEBHOOK_URL = credentials('polydev-discord-webhook-url')
}
stages {
stage('Checkout') {
steps {
scmSkip(deleteBuild: true)
}
}
stage('Setup Gradle') {
steps {
sh 'chmod +x gradlew'
}
}
stage('Build') {
steps {
withGradle {
sh './gradlew build --rerun-tasks -x check'
sh './gradlew javadoc'
}
}
post {
success {
archiveArtifacts artifacts: 'platforms/fabric/build/libs/Terra-fabric*.jar,platforms/bukkit/build/libs/Terra-bukkit*-shaded.jar,platforms/allay/build/libs/Terra-allay*.jar,platforms/minestom/build/libs/Terra-minestom*.jar', fingerprint: true, onlyIfSuccessful: true
javadoc javadocDir: 'common/api/build/docs/javadoc', keepAll: true
}
}
}
stage('Tests') {
steps {
withGradle {
sh './gradlew test --rerun-tasks'
}
}
}
// stage('Deploy to snapshots repositories') {
// when {
// allOf {
// not { buildingTag() }
// not { expression { env.TAG_NAME != null && env.TAG_NAME.matches('v\\d+\\.\\d+\\.\\d+') } }
// }
// }
//
// steps {
// withCredentials([
// string(credentialsId: 'maven-signing-key', variable: 'ORG_GRADLE_PROJECT_signingKey'),
// string(credentialsId: 'maven-signing-key-password', variable: 'ORG_GRADLE_PROJECT_signingPassword'),
// usernamePassword(
// credentialsId: 'solo-studios-maven',
// passwordVariable: 'ORG_GRADLE_PROJECT_SoloStudiosSnapshotsPassword',
// usernameVariable: 'ORG_GRADLE_PROJECT_SoloStudiosSnapshotsUsername'
// )
// ]) {
// withGradle {
// sh './gradlew publishAllPublicationsToSoloStudiosSnapshotsRepository'
// }
// }
// }
// }
stage('Deploy to releases repositories') {
// when {
// allOf {
// buildingTag()
// expression { env.TAG_NAME != null && env.TAG_NAME.matches('v\\d+\\.\\d+\\.\\d+') }
// }
// }
steps {
withCredentials([
string(credentialsId: 'maven-signing-key', variable: 'ORG_GRADLE_PROJECT_signingKey'),
string(credentialsId: 'maven-signing-key-password', variable: 'ORG_GRADLE_PROJECT_signingPassword'),
usernamePassword(
credentialsId: 'solo-studios-maven',
passwordVariable: 'ORG_GRADLE_PROJECT_SoloStudiosReleasesPassword',
usernameVariable: 'ORG_GRADLE_PROJECT_SoloStudiosReleasesUsername'
),
// TODO: does not yet exist (uncomment once added)
// usernamePassword(
// credentialsId: 'sonatype-maven-credentials',
// passwordVariable: 'ORG_GRADLE_PROJECT_SonatypePassword',
// usernameVariable: 'ORG_GRADLE_PROJECT_SonatypeUsername'
// ),
// usernamePassword(
// credentialsId: 'codemc-maven-credentials',
// passwordVariable: 'ORG_GRADLE_PROJECT_CodeMCPassword',
// usernameVariable: 'ORG_GRADLE_PROJECT_CodeMCUsername'
// )
]) {
withGradle {
sh './gradlew publish'
//sh './gradlew publishAllPublicationsToSoloStudiosReleasesRepository'
// sh './gradlew publishAllPublicationsToSonatypeRepository'
// sh './gradlew publishAllPublicationsToCodeMCRepository'
}
}
}
}
}
post {
always {
discoverReferenceBuild()
// junit testResults: '**/build/test-results/*/TEST-*.xml'
recordIssues(
aggregatingResults: true,
enabledForFailure: true,
minimumSeverity: 'ERROR',
sourceCodeEncoding: 'UTF-8',
checksAnnotationScope: 'ALL',
sourceCodeRetention: 'LAST_BUILD',
tools: [java(), javaDoc()]
)
discordSend(
title: env.JOB_NAME + ' ' + env.BUILD_DISPLAY_NAME,
showChangeset: true,
enableArtifactsList: true,
link: env.BUILD_URL,
result: currentBuild.currentResult,
customAvatarUrl: 'https://github.com/PolyhedralDev.png',
customUsername: 'Solo Studios Jenkins',
webhookURL: env.DISCORD_WEBHOOK_URL
)
cleanWs()
}
}
}

View File

@@ -13,6 +13,7 @@ import org.gradle.kotlin.dsl.getByName
import org.gradle.kotlin.dsl.register
import org.gradle.kotlin.dsl.withType
import org.gradle.language.jvm.tasks.ProcessResources
import org.gradle.plugins.ide.idea.model.IdeaModel
fun Project.configureCompilation() {
apply(plugin = "maven-publish")
@@ -21,6 +22,13 @@ fun Project.configureCompilation() {
apply(plugin = "idea")
apply<TectonicDocPlugin>()
configure<IdeaModel> {
module {
isDownloadJavadoc = true
isDownloadSources = true
}
}
configure<JavaPluginExtension> {
sourceCompatibility = JavaVersion.VERSION_21
targetCompatibility = JavaVersion.VERSION_21

View File

@@ -16,16 +16,17 @@ fun Project.configurePublishing() {
}
repositories {
val mavenUrl = "https://repo.codemc.io/repository/maven-releases/"
val mavenUrl = "https://maven.solo-studios.ca/releases/"
//val mavenSnapshotUrl = "https://repo.codemc.io/repository/maven-snapshots/"
maven(mavenUrl) {
val mavenUsername: String? by project
val mavenPassword: String? by project
if (mavenUsername != null && mavenPassword != null) {
val SoloStudiosReleasesUsername: String? by project
val SoloStudiosReleasesPassword: String? by project
if (SoloStudiosReleasesUsername != null && SoloStudiosReleasesPassword != null) {
credentials {
username = mavenUsername
password = mavenPassword
username = SoloStudiosReleasesUsername
password = SoloStudiosReleasesPassword
}
}
}

View File

@@ -10,7 +10,7 @@ object Versions {
const val tectonic = "4.3.1"
const val paralithic = "2.0.1"
const val strata = "1.3.2"
const val seismic = "2.3.0"
const val seismic = "2.5.7"
const val cloud = "2.0.0"

View File

@@ -46,6 +46,6 @@ class BaseBiomeColumn implements Column<Biome> {
@Override
public Biome get(int y) {
return biomeProvider.extrude(base, x, y, z, seed);
return biomeProvider.pipeline.extrude(base, x, y, z, seed);
}
}

View File

@@ -6,37 +6,33 @@ import java.util.Set;
import java.util.stream.Collectors;
import com.dfsek.terra.addons.biome.extrusion.api.Extrusion;
import com.dfsek.terra.addons.biome.extrusion.utils.ExtrusionPipeline;
import com.dfsek.terra.addons.biome.extrusion.utils.ExtrusionPipelineFactory;
import com.dfsek.terra.api.util.Column;
import com.dfsek.terra.api.world.biome.Biome;
import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
public class BiomeExtrusionProvider implements BiomeProvider {
public final ExtrusionPipeline pipeline;
private final BiomeProvider delegate;
private final Set<Biome> biomes;
private final Extrusion[] extrusions;
private final int resolution;
public BiomeExtrusionProvider(BiomeProvider delegate, List<Extrusion> extrusions, int resolution) {
this.delegate = delegate;
this.biomes = delegate.stream().collect(Collectors.toSet());
extrusions.forEach(e -> biomes.addAll(e.getBiomes()));
this.extrusions = extrusions.toArray(new Extrusion[0]);
this.pipeline = ExtrusionPipelineFactory.create(extrusions);
this.resolution = resolution;
}
@Override
public Biome getBiome(int x, int y, int z, long seed) {
Biome delegated = delegate.getBiome(x, y, z, seed);
return extrude(delegated, x, y, z, seed);
}
public Biome extrude(Biome original, int x, int y, int z, long seed) {
for(int i = 0; i < extrusions.length; i++) {
original = extrusions[i].extrude(original, x, y, z, seed);
}
return original;
return pipeline.extrude(delegated, x, y, z, seed);
}
@Override
@@ -64,4 +60,4 @@ public class BiomeExtrusionProvider implements BiomeProvider {
public BiomeProvider getDelegate() {
return delegate;
}
}
}

View File

@@ -10,6 +10,7 @@ import com.dfsek.terra.addons.biome.extrusion.api.Extrusion;
import com.dfsek.terra.addons.biome.extrusion.api.ReplaceableBiome;
import com.dfsek.terra.addons.biome.query.api.BiomeQueries;
import com.dfsek.terra.api.util.collection.ProbabilityCollection;
import com.dfsek.terra.api.util.collection.TriStateIntCache;
import com.dfsek.terra.api.util.range.Range;
import com.dfsek.terra.api.world.biome.Biome;
@@ -19,25 +20,41 @@ import com.dfsek.terra.api.world.biome.Biome;
*/
public class ReplaceExtrusion implements Extrusion {
private final Sampler sampler;
private final Range range;
private final ProbabilityCollection<ReplaceableBiome> biomes;
private final Predicate<Biome> hasTag;
private final TriStateIntCache cache;
public ReplaceExtrusion(Sampler sampler, Range range, ProbabilityCollection<ReplaceableBiome> biomes, String tag) {
this.sampler = sampler;
this.range = range;
this.biomes = biomes;
this.hasTag = BiomeQueries.has(tag);
this.cache = new TriStateIntCache(Biome.INT_ID_COUNTER.get());
}
@Override
public Biome extrude(Biome original, int x, int y, int z, long seed) {
if(hasTag.test(original)) {
return range.ifInRange(y, () -> biomes.get(sampler, x, y, z, seed).get(original), original);
int id = original.getIntID();
long state = cache.get(id);
boolean passes;
if(state == TriStateIntCache.STATE_UNSET) {
// Only run the test if unset in cache
passes = hasTag.test(original);
cache.set(id, passes);
} else {
// Read the primitive long directly
passes = (state == TriStateIntCache.STATE_TRUE);
}
if(passes) {
if(range.isInRange(y)) {
return biomes.get(sampler, x, y, z, seed).get(original);
}
}
return original;
}

View File

@@ -0,0 +1,8 @@
package com.dfsek.terra.addons.biome.extrusion.utils;
import com.dfsek.terra.api.world.biome.Biome;
public interface ExtrusionPipeline {
Biome extrude(Biome original, int x, int y, int z, long seed);
}

View File

@@ -0,0 +1,158 @@
package com.dfsek.terra.addons.biome.extrusion.utils;
import org.objectweb.asm.ClassWriter;
import org.objectweb.asm.FieldVisitor;
import org.objectweb.asm.MethodVisitor;
import org.objectweb.asm.Type;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import com.dfsek.terra.addons.biome.extrusion.api.Extrusion;
import com.dfsek.terra.api.world.biome.Biome;
import static org.objectweb.asm.Opcodes.AALOAD;
import static org.objectweb.asm.Opcodes.ACC_FINAL;
import static org.objectweb.asm.Opcodes.ACC_PRIVATE;
import static org.objectweb.asm.Opcodes.ACC_PUBLIC;
import static org.objectweb.asm.Opcodes.ALOAD;
import static org.objectweb.asm.Opcodes.ARETURN;
import static org.objectweb.asm.Opcodes.GETFIELD;
import static org.objectweb.asm.Opcodes.ILOAD;
import static org.objectweb.asm.Opcodes.INVOKEINTERFACE;
import static org.objectweb.asm.Opcodes.INVOKESPECIAL;
import static org.objectweb.asm.Opcodes.LLOAD;
import static org.objectweb.asm.Opcodes.PUTFIELD;
import static org.objectweb.asm.Opcodes.RETURN;
import static org.objectweb.asm.Opcodes.SIPUSH;
import static org.objectweb.asm.Opcodes.SWAP;
import static org.objectweb.asm.Opcodes.V1_8;
public class ExtrusionPipelineFactory {
private static final AtomicInteger ID_COUNTER = new AtomicInteger(0);
// Type Descriptors
private static final String EXTRUSION_TYPE = Type.getInternalName(Extrusion.class);
private static final String EXTRUSION_DESC = Type.getDescriptor(Extrusion.class);
private static final String BIOME_DESC = Type.getDescriptor(Biome.class);
private static final String PIPELINE_INTERFACE = Type.getInternalName(ExtrusionPipeline.class);
// Method Signature: (Biome, int, int, int, long) -> Biome
private static final String EXTRUDE_SIG = "(" + BIOME_DESC + "IIIJ)" + BIOME_DESC;
public static ExtrusionPipeline create(List<Extrusion> extrusions) {
// Optimization: If empty, return identity
if(extrusions.isEmpty()) {
return (original, x, y, z, seed) -> original;
}
String className = "com/dfsek/terra/addons/biome/extrusion/GeneratedExtrusionPipeline_" + ID_COUNTER.getAndIncrement();
ClassWriter cw = new ClassWriter(ClassWriter.COMPUTE_FRAMES | ClassWriter.COMPUTE_MAXS);
// 1. Define Class
cw.visit(V1_8, ACC_PUBLIC | ACC_FINAL, className, null, "java/lang/Object", new String[]{ PIPELINE_INTERFACE });
// 2. Define Fields (e0, e1, e2...)
for(int i = 0; i < extrusions.size(); i++) {
FieldVisitor fv = cw.visitField(ACC_PRIVATE | ACC_FINAL, "e" + i, EXTRUSION_DESC, null, null);
fv.visitEnd();
}
// 3. Generate Constructor(Extrusion[])
generateConstructor(cw, className, extrusions.size());
// 4. Generate extrude() method
generateExtrudeMethod(cw, className, extrusions.size());
cw.visitEnd();
// 5. Load and Instantiate
byte[] bytecode = cw.toByteArray();
try {
Class<?> generatedClass = new PipelineClassLoader(ExtrusionPipelineFactory.class.getClassLoader())
.defineClass(className.replace('/', '.'), bytecode);
return (ExtrusionPipeline) generatedClass.getConstructor(Extrusion[].class)
.newInstance((Object) extrusions.toArray(new Extrusion[0]));
} catch(Exception e) {
throw new RuntimeException("Failed to generate ExtrusionPipeline", e);
}
}
private static void generateConstructor(ClassWriter cw, String className, int count) {
MethodVisitor mv = cw.visitMethod(ACC_PUBLIC, "<init>", "([L" + EXTRUSION_TYPE + ";)V", null, null);
mv.visitCode();
// super()
mv.visitVarInsn(ALOAD, 0);
mv.visitMethodInsn(INVOKESPECIAL, "java/lang/Object", "<init>", "()V", false);
// Assign array elements to fields
for(int i = 0; i < count; i++) {
mv.visitVarInsn(ALOAD, 0); // Load this
mv.visitVarInsn(ALOAD, 1); // Load array argument
mv.visitIntInsn(SIPUSH, i); // Load index
mv.visitInsn(AALOAD); // Load array[i]
mv.visitFieldInsn(PUTFIELD, className, "e" + i, EXTRUSION_DESC);
}
mv.visitInsn(RETURN);
mv.visitMaxs(0, 0); // Computed automatically
mv.visitEnd();
}
private static void generateExtrudeMethod(ClassWriter cw, String className, int count) {
MethodVisitor mv = cw.visitMethod(ACC_PUBLIC, "extrude", EXTRUDE_SIG, null, null);
mv.visitCode();
// Helper var indices:
// 0: this
// 1: Biome original (We will update this or chain it on stack)
// 2: int x
// 3: int y
// 4: int z
// 5: long seed
mv.visitVarInsn(ALOAD, 1); // Load 'original' Biome onto stack initially
for(int i = 0; i < count; i++) {
// Stack contains: [CurrentBiome]
mv.visitVarInsn(ALOAD, 0); // Load 'this'
mv.visitFieldInsn(GETFIELD, className, "e" + i, EXTRUSION_DESC); // Load Extrusion field
// Stack: [CurrentBiome, Extrusion]
// We need: [Extrusion, CurrentBiome, x, y, z, seed]
mv.visitInsn(SWAP); // Swap to get [Extrusion, CurrentBiome]
mv.visitVarInsn(ILOAD, 2); // x
mv.visitVarInsn(ILOAD, 3); // y
mv.visitVarInsn(ILOAD, 4); // z
mv.visitVarInsn(LLOAD, 5); // seed
// Invoke Extrusion.extrude(Biome, x, y, z, seed)
mv.visitMethodInsn(INVOKEINTERFACE, EXTRUSION_TYPE, "extrude", EXTRUDE_SIG, true);
// Stack now contains: [NewBiome]
// Loop continues using this result as input for the next one
}
mv.visitInsn(ARETURN); // Return the final Biome
mv.visitMaxs(0, 0);
mv.visitEnd();
}
// Custom ClassLoader to inject the bytes
private static class PipelineClassLoader extends ClassLoader {
public PipelineClassLoader(ClassLoader parent) {
super(parent);
}
public Class<?> defineClass(String name, byte[] b) {
return defineClass(name, b, 0, b.length);
}
}
}
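
For reference, the class emitted above is behaviorally equivalent to the following hand-written Java. This is an illustrative sketch for count == 3 (the real class only ever exists as runtime bytecode); field names e0..e2 mirror the generated fields.

public final class GeneratedExtrusionPipeline_0 implements ExtrusionPipeline {
    private final Extrusion e0;
    private final Extrusion e1;
    private final Extrusion e2;

    public GeneratedExtrusionPipeline_0(Extrusion[] extrusions) {
        // Matches the generated constructor: unpack the array into final fields.
        this.e0 = extrusions[0];
        this.e1 = extrusions[1];
        this.e2 = extrusions[2];
    }

    @Override
    public Biome extrude(Biome original, int x, int y, int z, long seed) {
        // Matches the SWAP-based stack shuffle: each result feeds the next call.
        original = e0.extrude(original, x, y, z, seed);
        original = e1.extrude(original, x, y, z, seed);
        return e2.extrude(original, x, y, z, seed);
    }
}

Unrolling into one final field per stage, rather than looping over an Extrusion[], presumably gives the JIT a fixed chain of call sites it can profile and inline individually, which is the point of generating a class per pipeline.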

View File

@@ -73,12 +73,13 @@ public class BiomeChunkImpl implements BiomeChunk {
lookupArray = tempArray;
// Apply stage to working grid
ViewPoint viewPoint = new ViewPoint(this, gridInterval, lookupArray, size);
for(int gridZ = 0; gridZ < gridSize; gridZ = gridZ + 1) {
for(int gridX = 0; gridX < gridSize; gridX = gridX + 1) {
int xIndex = gridOrigin + gridX * gridInterval;
int zIndex = gridOrigin + gridZ * gridInterval;
biomes[(xIndex * size) + zIndex] = stage.apply(
new ViewPoint(this, gridInterval, gridX, gridZ, xIndex, zIndex, lookupArray, size));
viewPoint.set(gridX, gridZ, xIndex, zIndex);
biomes[(xIndex * size) + zIndex] = stage.apply(viewPoint);
}
}
}
@@ -157,25 +158,32 @@ public class BiomeChunkImpl implements BiomeChunk {
*/
public static class ViewPoint {
private final BiomeChunkImpl chunk;
private final PipelineBiome biome;
private PipelineBiome biome;
private final int gridInterval;
private final int gridX;
private final int gridZ;
private final int xIndex;
private final int zIndex;
private int gridX;
private int gridZ;
private int xIndex;
private int zIndex;
private final PipelineBiome[] lookupArray;
private final int size;
private ViewPoint(BiomeChunkImpl chunk, int gridInterval, int gridX, int gridZ, int xIndex, int zIndex,
private ViewPoint(BiomeChunkImpl chunk, int gridInterval,
PipelineBiome[] lookupArray, int size) {
this.chunk = chunk;
this.gridInterval = gridInterval;
this.gridX = 0;
this.gridZ = 0;
this.xIndex = 0;
this.zIndex = 0;
this.lookupArray = lookupArray;
this.size = size;
}
public void set(int gridX, int gridZ, int xIndex, int zIndex) {
this.gridX = gridX;
this.gridZ = gridZ;
this.xIndex = xIndex;
this.zIndex = zIndex;
this.lookupArray = lookupArray;
this.size = size;
this.biome = lookupArray[(this.xIndex * this.size) + this.zIndex];
}

View File

@@ -0,0 +1,5 @@
version = version("1.0.0")
dependencies {
compileOnlyApi(project(":common:addons:manifest-addon-loader"))
}

View File

@@ -0,0 +1,131 @@
package com.dfsek.terra.addons.commands.locate;
import com.dfsek.seismic.type.vector.Vector2Int;
import com.dfsek.seismic.type.vector.Vector3Int;
import com.dfsek.terra.api.util.generic.either.Either;
import com.dfsek.terra.api.world.biome.Biome;
import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
import com.dfsek.terra.api.world.info.WorldProperties;
import org.jetbrains.annotations.NotNull;
import java.util.Optional;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.stream.IntStream;
import java.util.stream.Stream;
public class BiomeLocator {
/**
* Locates the nearest biome matching the given predicate using a parallelized square spiral search.
*
* @param provider The BiomeProvider to search in.
* @param properties The world properties (needed for seed and height bounds).
* @param originX Starting X coordinate.
* @param originZ Starting Z coordinate.
* @param radius The maximum radius (in blocks) to search.
* @param step The search step/increment. Higher values are faster but less accurate.
* @param filter The condition to match the biome.
* @param search3D If true, searches the entire vertical column at each step. If false, only checks originY.
* @return An Optional containing the location of the found biome, or empty if not found.
*/
public static Optional<Either<Vector3Int, Vector2Int>> search(
@NotNull BiomeProvider provider,
@NotNull WorldProperties properties,
int originX,
int originZ,
int radius,
int step,
@NotNull Predicate<Biome> filter,
boolean search3D
) {
long seed = properties.getSeed();
int minHeight = properties.getMinHeight();
int maxHeight = properties.getMaxHeight();
// 1. Check the exact center first
Optional<Either<Vector3Int, Vector2Int>> centerResult = check(provider, seed, originX, originZ, step, filter, search3D, minHeight, maxHeight);
if (centerResult.isPresent()) {
return centerResult;
}
// 2. Begin Parallel Square Spiral Search
// We iterate rings sequentially to guarantee finding the *nearest* result.
// However, we process all points within a specific ring in parallel.
for (int r = step; r <= radius; r += step) {
final int currentRadius = r;
final int minX = -currentRadius;
final int maxX = currentRadius;
final int minZ = -currentRadius;
final int maxZ = currentRadius;
Stream<int[]> northSide = IntStream.iterate(minX, n -> n < maxX, n -> n + step)
.mapToObj(x -> new int[]{x, minZ}); // Fixed Z (min), varying X
Stream<int[]> eastSide = IntStream.iterate(minZ, n -> n < maxZ, n -> n + step)
.mapToObj(z -> new int[]{maxX, z}); // Fixed X (max), varying Z
Stream<int[]> southSide = IntStream.iterate(maxX, n -> n > minX, n -> n - step)
.mapToObj(x -> new int[]{x, maxZ}); // Fixed Z (max), varying X
Stream<int[]> westSide = IntStream.iterate(maxZ, n -> n > minZ, n -> n - step)
.mapToObj(z -> new int[]{minX, z}); // Fixed X (min), varying Z
Optional<Either<Vector3Int, Vector2Int>> ringResult = Stream.of(northSide, eastSide, southSide, westSide)
.flatMap(Function.identity())
.parallel()
.map(coords -> check(
provider,
seed,
originX + coords[0],
originZ + coords[1],
step,
filter,
search3D,
minHeight,
maxHeight
))
.filter(Optional::isPresent)
.map(Optional::get)
.findFirst(); // findFirst() respects encounter order (North -> East -> South -> West)
if (ringResult.isPresent()) {
return ringResult;
}
}
return Optional.empty();
}
/**
* Helper to check a specific coordinate column or point.
* This logic is executed inside the worker threads.
*/
private static Optional<Either<Vector3Int, Vector2Int>> check(
BiomeProvider provider,
long seed,
int x,
int z,
int step,
Predicate<Biome> filter,
boolean search3D,
int minHeight,
int maxHeight
) {
if (search3D) {
// Iterate from bottom to top of the world using the step
for (int y = minHeight; y < maxHeight; y += step) {
if (filter.test(provider.getBiome(x, y, z, seed))) {
return Optional.of(Either.left(Vector3Int.of(x, y, z)));
}
}
return Optional.empty();
} else {
// 2D Mode: Check only the base biome.
// Filter and map the Optional directly instead of unwrapping it.
return provider.getBaseBiome(x, z, seed)
.filter(filter)
.map(b -> Either.right(Vector2Int.of(x, z)));
}
}
}
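
A minimal caller sketch for the search method above, assuming a BiomeProvider and WorldProperties are already in hand; the "OCEAN" tag predicate and the coordinates are illustrative, and the result handling mirrors the locate command below.

static void findNearestOcean(BiomeProvider provider, WorldProperties properties) {
    Optional<Either<Vector3Int, Vector2Int>> hit = BiomeLocator.search(
        provider, properties,
        0, 0,      // search origin (x, z)
        5000,      // maximum radius in blocks
        16,        // step: sample every 16th block
        biome -> biome.getTags().contains("OCEAN"), // illustrative filter
        false      // 2D: check base biomes only
    );
    hit.ifPresent(location -> {
        if(location.hasLeft()) { // 3D result
            Vector3Int vec = location.getLeft().get();
            System.out.printf("Found at %d, %d, %d%n", vec.getX(), vec.getY(), vec.getZ());
        } else { // 2D result
            Vector2Int vec = location.getRight().get();
            System.out.printf("Found at %d, ~, %d%n", vec.getX(), vec.getZ());
        }
    });
}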

View File

@@ -0,0 +1,142 @@
package com.dfsek.terra.addons.commands.locate;
import com.dfsek.seismic.type.vector.Vector2Int;
import com.dfsek.seismic.type.vector.Vector3Int;
import org.incendo.cloud.CommandManager;
import org.incendo.cloud.component.DefaultValue;
import org.incendo.cloud.context.CommandContext;
import org.incendo.cloud.description.Description;
import org.incendo.cloud.parser.standard.IntegerParser;
import java.util.Optional;
import com.dfsek.terra.addons.manifest.api.AddonInitializer;
import com.dfsek.terra.api.Platform;
import com.dfsek.terra.api.addon.BaseAddon;
import com.dfsek.terra.api.command.CommandSender;
import com.dfsek.terra.api.command.arguments.RegistryArgument;
import com.dfsek.terra.api.entity.Entity;
import com.dfsek.terra.api.event.events.platform.CommandRegistrationEvent;
import com.dfsek.terra.api.event.functional.FunctionalEventHandler;
import com.dfsek.terra.api.inject.annotations.Inject;
import com.dfsek.terra.api.registry.Registry;
import com.dfsek.terra.api.util.generic.either.Either;
import com.dfsek.terra.api.util.reflection.TypeKey;
import com.dfsek.terra.api.world.World;
import com.dfsek.terra.api.world.biome.Biome;
public class LocateCommandAddon implements AddonInitializer {
@Inject
private Platform platform;
@Inject
private BaseAddon addon;
private static Registry<Biome> getBiomeRegistry(CommandContext<CommandSender> sender) {
return sender.sender().getEntity().orElseThrow().world().getPack().getRegistry(Biome.class);
}
@Override
public void initialize() {
platform.getEventManager()
.getHandler(FunctionalEventHandler.class)
.register(addon, CommandRegistrationEvent.class)
.then(event -> {
CommandManager<CommandSender> manager = event.getCommandManager();
manager.command(
manager.commandBuilder("search", Description.of("Locate things in the world"))
.literal("biome")
// Argument 1: The Biome to search for
.argument(RegistryArgument.builder("biome",
LocateCommandAddon::getBiomeRegistry,
TypeKey.of(Biome.class)))
// Argument 2: Radius (Optional, default 5000)
.optional("radius", IntegerParser.integerParser(100), DefaultValue.constant(5000))
// Argument 3: Step/Resolution (Optional, default 16)
.optional("step", IntegerParser.integerParser(1), DefaultValue.constant(16))
// Flag: Toggle 3D search (e.g., --3d or -3)
.flag(manager.flagBuilder("3d").withAliases("3").build())
// Flag: Auto resolution mode (e.g., --auto or -a)
.flag(manager.flagBuilder("auto").withAliases("a").build())
.handler(context -> {
// 1. Gather Context & Arguments
Biome targetBiome = context.get("biome");
Entity sender = context.sender().getEntity().orElseThrow(
() -> new Error("Only entities can run this command."));
World world = sender.world();
// Fetch properties needed for the locator
int radius = context.get("radius");
boolean search3D = context.flags().hasFlag("3d");
boolean autoMode = context.flags().hasFlag("auto");
// 2. Determine Initial Step
// If Auto: Start at the largest power of two below the radius (very coarse check).
// If Manual: Use provided step.
int stepArg = context.get("step");
int currentStep = autoMode ? Integer.highestOneBit(radius - 1) : stepArg;
// Notify player
String modeMsg = autoMode ? " (Auto Mode)" : " (Step: " + currentStep + ")";
context.sender().sendMessage(
"Searching for " + targetBiome.getID() + " within " + radius + " blocks" + modeMsg + "...");
Optional<Either<Vector3Int, Vector2Int>> result;
// 3. Execute Search Loop
while(true) {
result = BiomeLocator.search(
world.getBiomeProvider(),
world,
sender.position().getFloorX(),
sender.position().getFloorZ(),
radius,
currentStep,
found -> found.equals(targetBiome), // Match specific biome instance
search3D
);
// Exit Conditions:
// 1. Found a result
if(result.isPresent()) {
break;
}
// 2. Not in auto mode (only run once)
if(!autoMode) {
break;
}
// 3. We just ran a search at step arg and failed (lowest resolution)
if(currentStep <= stepArg) {
break;
}
// Reduce step for next iteration (Adaptive Search)
currentStep /= 2;
context.sender().sendMessage("No result found, refining search (Step: " + currentStep + ")...");
}
// 4. Handle Result
if(result.isPresent()) {
Either<Vector3Int, Vector2Int> location = result.get();
String coords;
if(location.hasLeft()) { // 3D Result
Vector3Int vec = location.getLeft().get();
coords = String.format("%d, %d, %d", vec.getX(), vec.getY(), vec.getZ());
} else { // 2D Result
Vector2Int vec = location.getRight().get();
coords = String.format("%d, ~, %d", vec.getX(), vec.getZ());
}
context.sender().sendMessage("Found " + targetBiome.getID() + " at [" + coords + "]");
} else {
context.sender().sendMessage("Could not find " + targetBiome.getID() + " within " + radius + " blocks.");
}
})
.permission("terra.locate.biome")
);
});
}
}
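
Worked example of the auto-mode schedule above: with the default radius of 5000 and step of 16, Integer.highestOneBit(radius - 1) yields 4096, so the search runs at steps 4096, 2048, 1024, 512, 256, 128, 64, 32 and finally 16, stopping early as soon as a ring search succeeds; the currentStep <= stepArg guard ends the loop after the 16-block pass.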

View File

@@ -0,0 +1,12 @@
schema-version: 1
contributors:
- Terra contributors
id: command-locate
version: @VERSION@
entrypoints:
- "com.dfsek.terra.addons.commands.locate.LocateCommandAddon"
website:
issues: https://github.com/PolyhedralDev/Terra/issues
source: https://github.com/PolyhedralDev/Terra
docs: https://terra.polydev.org
license: MIT License

View File

@@ -8,6 +8,7 @@
package com.dfsek.terra.addons.biome;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import com.dfsek.terra.api.properties.Context;
import com.dfsek.terra.api.world.biome.Biome;
@@ -25,6 +26,7 @@ public class UserDefinedBiome implements Biome {
private final Set<String> tags;
private final Context context = new Context();
private final int intID;
public UserDefinedBiome(PlatformBiome vanilla, BiomeTemplate config) {
this.vanilla = vanilla;
@@ -32,6 +34,7 @@ public class UserDefinedBiome implements Biome {
this.config = config;
this.color = config.getColor();
this.tags = config.getTags();
this.intID = INT_ID_COUNTER.getAndIncrement();
tags.add("BIOME:" + id);
tags.add("ALL");
}
@@ -61,6 +64,11 @@ public class UserDefinedBiome implements Biome {
return tags;
}
@Override
public int getIntID() {
return intID;
}
@Override
public String getID() {
return id;

View File

@@ -2,18 +2,11 @@ package com.dfsek.terra.addons.feature.distributor.distributors;
import com.dfsek.seismic.algorithms.hashing.HashingFunctions;
import com.dfsek.seismic.math.integer.IntegerFunctions;
import java.util.random.RandomGenerator;
import java.util.random.RandomGeneratorFactory;
import com.dfsek.terra.api.structure.feature.Distributor;
public class PaddedGridDistributor implements Distributor {
private final int width;
private final int cellWidth;
private final int salt;
public PaddedGridDistributor(int width, int padding, int salt) {
@@ -27,12 +20,26 @@ public class PaddedGridDistributor implements Distributor {
int cellX = Math.floorDiv(x, cellWidth);
int cellZ = Math.floorDiv(z, cellWidth);
RandomGenerator random = RandomGeneratorFactory.<RandomGenerator.SplittableGenerator>of("Xoroshiro128PlusPlus").create(
(HashingFunctions.murmur64(IntegerFunctions.squash(cellX, cellZ)) ^ seed) + salt);
int localX = x - (cellX * cellWidth);
int localZ = z - (cellZ * cellWidth);
int pointX = random.nextInt(width) + cellX * cellWidth;
int pointZ = random.nextInt(width) + cellZ * cellWidth;
if (localX >= width || localZ >= width) {
return false;
}
return x == pointX && z == pointZ;
long hash = HashingFunctions.murmur64(IntegerFunctions.squash(cellX, cellZ)) ^ seed;
hash += salt;
hash = HashingFunctions.splitMix64(hash);
int targetX = (int) ((hash & 0x7FFFFFFFFFFFFFFFL) % width);
if (localX != targetX) {
return false;
}
hash = HashingFunctions.splitMix64(hash);
int targetZ = (int) ((hash & 0x7FFFFFFFFFFFFFFFL) % width);
return localZ == targetZ;
}
}
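
The replacement above derives the in-cell feature point from a splitmix hash chain instead of constructing a Xoroshiro generator per query. A rough standalone restatement of that chain, assuming (as in the diff) that Seismic's murmur64 and splitMix64 each mix a long into a new long:

// Illustrative restatement, not the actual Terra source: each cell deterministically
// maps to one (targetX, targetZ) inside the un-padded width*width area.
static boolean isFeaturePoint(int x, int z, int cellWidth, int width, int salt, long seed) {
    int cellX = Math.floorDiv(x, cellWidth);
    int cellZ = Math.floorDiv(z, cellWidth);
    int localX = x - cellX * cellWidth;
    int localZ = z - cellZ * cellWidth;
    if(localX >= width || localZ >= width) return false; // inside the padding band

    long hash = HashingFunctions.murmur64(IntegerFunctions.squash(cellX, cellZ)) ^ seed;
    hash += salt;
    hash = HashingFunctions.splitMix64(hash);
    if(localX != (int) ((hash & 0x7FFFFFFFFFFFFFFFL) % width)) return false;
    hash = HashingFunctions.splitMix64(hash); // second draw for the Z axis
    return localZ == (int) ((hash & 0x7FFFFFFFFFFFFFFFL) % width);
}

This drops the per-query RandomGeneratorFactory lookup and generator allocation while keeping the result a pure function of (cell, seed, salt).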

View File

@@ -7,11 +7,11 @@
package com.dfsek.terra.addons.terrascript.parser.lang.functions;
import java.util.List;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
import java.util.List;
public interface FunctionBuilder<T extends Function<?>> {
T build(List<Returnable<?>> argumentList, Position position);

View File

@@ -0,0 +1,67 @@
package com.dfsek.terra.api.util.collection;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.VarHandle;
import com.dfsek.seismic.util.UnsafeUtils;
public class TriStateIntCache {
public static final long STATE_UNSET = 0L;
public static final long STATE_FALSE = 1L;
public static final long STATE_TRUE = 2L;
private static final long BIT_MASK = 3L;
private final long[] data;
private static final VarHandle ARRAY_HANDLE = MethodHandles.arrayElementVarHandle(long[].class);
private static int getOptimalMaxKeys(int requestedKeys) {
// 192 keys fill the first cache line exactly (along with the 16-byte header)
if (requestedKeys <= 192) {
return 192;
}
// For every additional line, we fit 256 keys (64 bytes * 4 keys/byte)
// We calculate the overflow beyond 192, round up to the nearest 256, and add it back.
int overflow = requestedKeys - 192;
int chunks = (overflow + 255) >>> 8; // Fast ceil division by 256
return 192 + (chunks << 8); // chunks * 256
}
public TriStateIntCache(int maxKeySize) {
this.data = new long[(getOptimalMaxKeys(maxKeySize) + 31) >>> 5];
}
/**
* Checks the cache state without any allocation.
*
* @return STATE_UNSET (0), STATE_FALSE (1), or STATE_TRUE (2)
*/
public long get(int key) {
long offset = UnsafeUtils.LONG_ARRAY_BASE + ((long)(key >>> 5) << UnsafeUtils.LONG_ARRAY_SHIFT);
long currentWord = UnsafeUtils.UNSAFE.getLong(data, offset);
return (currentWord >>> ((key << 1) & 63)) & BIT_MASK;
}
/**
* Sets the value safely. Handles race conditions internally.
*/
public void set(int key, boolean value) {
int index = key >>> 5;
int shift = (key << 1) & 63;
long targetWord = (value ? STATE_TRUE : STATE_FALSE) << shift;
long current;
do {
current = (long) ARRAY_HANDLE.getVolatile(data, index);
if (((current >>> shift) & BIT_MASK) != STATE_UNSET) {
return;
}
} while (!ARRAY_HANDLE.compareAndSet(data, index, current, current | targetWord));
}
}
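
A usage sketch matching the ReplaceExtrusion change earlier in this commit: memoizing an expensive boolean predicate keyed by a dense int ID, where the third state distinguishes "never computed" from a cached false. The wrapper class and expensivePredicate are illustrative stand-ins.

class CachedBiomePredicate {
    private final Predicate<Biome> expensivePredicate;
    private final TriStateIntCache cache = new TriStateIntCache(Biome.INT_ID_COUNTER.get());

    CachedBiomePredicate(Predicate<Biome> expensivePredicate) {
        this.expensivePredicate = expensivePredicate;
    }

    boolean test(Biome biome) {
        int id = biome.getIntID();
        long state = cache.get(id);
        if(state == TriStateIntCache.STATE_UNSET) { // never computed for this ID
            boolean result = expensivePredicate.test(biome);
            cache.set(id, result); // for a pure predicate, racing writers agree
            return result;
        }
        return state == TriStateIntCache.STATE_TRUE; // primitive read, no boxing
    }
}

Layout note: each long packs 32 two-bit entries, so key 37 lands in word 37 >>> 5 = 1 at bit offset (37 << 1) & 63 = 10.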

View File

@@ -9,6 +9,7 @@ package com.dfsek.terra.api.world.biome;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import com.dfsek.terra.api.properties.PropertyHolder;
import com.dfsek.terra.api.registry.key.StringIdentifiable;
@@ -18,6 +19,7 @@ import com.dfsek.terra.api.registry.key.StringIdentifiable;
* Represents a Terra biome
*/
public interface Biome extends PropertyHolder, StringIdentifiable {
AtomicInteger INT_ID_COUNTER = new AtomicInteger(0);
/**
* Gets the platform biome this custom biome delegates to.
@@ -39,4 +41,12 @@ public interface Biome extends PropertyHolder, StringIdentifiable {
* @return A {@link Set} of String tags this biome holds.
*/
Set<String> getTags();
/**
* Get the numeric ID of this biome, generated at registration time
*
* @return The numeric ID.
*/
int getIntID();
}
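
Taken together with UserDefinedBiome above, every biome receives a dense, zero-based int ID at construction time; TriStateIntCache elsewhere in this commit relies on that density, sizing its flat two-bit-per-entry array from Biome.INT_ID_COUNTER.get().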

View File

@@ -52,7 +52,8 @@ import com.dfsek.terra.api.util.reflection.TypeKey;
public class OpenRegistryImpl<T> implements OpenRegistry<T> {
private static final Entry<?> NULL = new Entry<>(null);
private final Map<RegistryKey, Entry<T>> objects;
private final ListMultimap<String, Pair<RegistryKey, Entry<T>>> objectIDs = Multimaps.newListMultimap(new ConcurrentHashMap<>(), ArrayList::new);
private final ListMultimap<String, Pair<RegistryKey, Entry<T>>> objectIDs = Multimaps.newListMultimap(new ConcurrentHashMap<>(),
ArrayList::new);
private final TypeKey<T> typeKey;
public OpenRegistryImpl(TypeKey<T> typeKey) {

View File

@@ -21,7 +21,6 @@ import java.io.IOException;
import java.io.Serial;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.stream.Stream;

View File

@@ -19,7 +19,7 @@ public class JeBlockState {
private JeBlockState(String data) {
// TODO: support block state with nbt (identifier[properties]{nbt}), for now we just ignore it
int braceIndex = data.indexOf('{');
if (braceIndex != -1) {
if(braceIndex != -1) {
data = data.substring(0, braceIndex);
}

View File

@@ -13,7 +13,6 @@ import org.allaymc.api.block.type.BlockStateGetter;
import org.allaymc.api.block.type.BlockTypes;
import org.allaymc.api.item.type.ItemType;
import org.allaymc.api.item.type.ItemTypeGetter;
import org.allaymc.api.world.data.DimensionInfo;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
@@ -83,7 +82,7 @@ public final class Mapping {
}
public static String dimensionIdBeToJe(String beDimensionId) {
return switch (beDimensionId) {
return switch(beDimensionId) {
case "overworld" -> "minecraft:overworld";
case "nether" -> "minecraft:the_nether";
case "the_end" -> "minecraft:the_end";

View File

@@ -1,12 +1,11 @@
package com.dfsek.terra.allay.delegate;
import com.dfsek.seismic.type.vector.Vector3;
import org.allaymc.api.blockentity.BlockEntity;
import com.dfsek.terra.allay.Mapping;
import com.dfsek.terra.api.block.state.BlockState;
import org.allaymc.api.blockentity.BlockEntity;
/**
* @author daoge_cmd

View File

@@ -22,9 +22,9 @@ public record AllayChunk(ServerWorld world, Chunk allayChunk) implements com.dfs
@Override
public void setBlock(int x, int y, int z, BlockState data, boolean physics) {
var dimensionInfo = allayChunk.getDimensionInfo();
if (x < 0 || x > 15 ||
z < 0 || z > 15 ||
y < dimensionInfo.minHeight() || y > dimensionInfo.maxHeight()) {
if(x < 0 || x > 15 ||
z < 0 || z > 15 ||
y < dimensionInfo.minHeight() || y > dimensionInfo.maxHeight()) {
return;
}

View File

@@ -1,7 +1,7 @@
package com.dfsek.terra.allay.delegate;
import org.allaymc.api.block.property.type.BlockPropertyTypes;
import org.allaymc.api.block.data.BlockTags;
import org.allaymc.api.block.property.type.BlockPropertyTypes;
import org.allaymc.api.block.type.BlockTypes;
import org.allaymc.api.world.chunk.UnsafeChunk;
import org.jetbrains.annotations.NotNull;
@@ -27,9 +27,9 @@ public record AllayProtoChunk(UnsafeChunk allayChunk) implements ProtoChunk {
@Override
public void setBlock(int x, int y, int z, @NotNull BlockState blockState) {
var dimensionInfo = allayChunk.getDimensionInfo();
if (x < 0 || x > 15 ||
z < 0 || z > 15 ||
y < dimensionInfo.minHeight() || y > dimensionInfo.maxHeight()) {
if(x < 0 || x > 15 ||
z < 0 || z > 15 ||
y < dimensionInfo.minHeight() || y > dimensionInfo.maxHeight()) {
return;
}

View File

@@ -1,8 +1,8 @@
package com.dfsek.terra.allay.delegate;
import com.dfsek.seismic.type.vector.Vector3;
import org.allaymc.api.block.property.type.BlockPropertyTypes;
import org.allaymc.api.block.data.BlockTags;
import org.allaymc.api.block.property.type.BlockPropertyTypes;
import org.allaymc.api.block.type.BlockTypes;
import org.allaymc.api.world.generator.context.OtherChunkAccessibleContext;
@@ -26,6 +26,23 @@ public record AllayProtoWorld(AllayServerWorld allayServerWorld, OtherChunkAcces
private static final org.allaymc.api.block.type.BlockState WATER = BlockTypes.WATER.ofState(
BlockPropertyTypes.LIQUID_DEPTH.createValue(0));
// TODO: use method in OtherChunkAccessibleContext directly after bumped allay-api version to 0.14.0
private static org.allaymc.api.blockentity.BlockEntity getBlockEntity(OtherChunkAccessibleContext context, int x, int y, int z) {
var currentChunk = context.getCurrentChunk();
var currentChunkX = currentChunk.getX();
var currentChunkZ = currentChunk.getZ();
var dimInfo = currentChunk.getDimensionInfo();
if(x >= currentChunkX * 16 && x < currentChunkX * 16 + 16 &&
z >= currentChunkZ * 16 && z < currentChunkZ * 16 + 16 &&
y >= dimInfo.minHeight() && y <= dimInfo.maxHeight()) {
return currentChunk.getBlockEntity(x & 15, y, z & 15);
} else {
var chunk = context.getChunkSource().getChunk(x >> 4, z >> 4);
return chunk == null ? null : chunk.getBlockEntity(x & 15, y, z & 15);
}
}
@Override
public int centerChunkX() {
return context.getCurrentChunk().getX();
@@ -44,7 +61,7 @@ public record AllayProtoWorld(AllayServerWorld allayServerWorld, OtherChunkAcces
@Override
public void setBlockState(int x, int y, int z, BlockState data, boolean physics) {
var dimensionInfo = allayServerWorld.allayDimension().getDimensionInfo();
if (y < dimensionInfo.minHeight() || y > dimensionInfo.maxHeight()) {
if(y < dimensionInfo.minHeight() || y > dimensionInfo.maxHeight()) {
return;
}
@@ -71,23 +88,6 @@ public record AllayProtoWorld(AllayServerWorld allayServerWorld, OtherChunkAcces
return new AllayBlockEntity(getBlockEntity(context, x, y, z));
}
// TODO: use method in OtherChunkAccessibleContext directly after bumped allay-api version to 0.14.0
private static org.allaymc.api.blockentity.BlockEntity getBlockEntity(OtherChunkAccessibleContext context, int x, int y, int z) {
var currentChunk = context.getCurrentChunk();
var currentChunkX = currentChunk.getX();
var currentChunkZ = currentChunk.getZ();
var dimInfo = currentChunk.getDimensionInfo();
if (x >= currentChunkX * 16 && x < currentChunkX * 16 + 16 &&
z >= currentChunkZ * 16 && z < currentChunkZ * 16 + 16 &&
y >= dimInfo.minHeight() && y <= dimInfo.maxHeight()) {
return currentChunk.getBlockEntity(x & 15, y, z & 15);
} else {
var chunk = context.getChunkSource().getChunk(x >> 4, z >> 4);
return chunk == null ? null : chunk.getBlockEntity(x & 15, y, z & 15);
}
}
@Override
public ChunkGenerator getGenerator() {
return allayServerWorld.getGenerator();

View File

@@ -28,7 +28,7 @@ public record AllayServerWorld(AllayGeneratorWrapper allayGeneratorWrapper, Dime
@Override
public void setBlockState(int x, int y, int z, BlockState data, boolean physics) {
var dimensionInfo = allayDimension.getDimensionInfo();
if (y < dimensionInfo.minHeight() || y > dimensionInfo.maxHeight()) {
if(y < dimensionInfo.minHeight() || y > dimensionInfo.maxHeight()) {
return;
}

View File

@@ -1,9 +1,9 @@
package com.dfsek.terra.allay.delegate;
import com.dfsek.terra.api.world.info.WorldProperties;
import org.allaymc.api.world.data.DimensionInfo;
import com.dfsek.terra.api.world.info.WorldProperties;
/**
* @author daoge_cmd

View File

@@ -1,8 +1,5 @@
package com.dfsek.terra.allay.generator;
import com.dfsek.terra.allay.Mapping;
import com.dfsek.terra.allay.delegate.AllayWorldProperties;
import com.google.common.base.Preconditions;
import org.allaymc.api.utils.AllayStringUtils;
import org.allaymc.api.world.biome.BiomeType;
@@ -14,10 +11,12 @@ import org.allaymc.api.world.generator.context.PopulateContext;
import org.allaymc.api.world.generator.function.Noiser;
import org.allaymc.api.world.generator.function.Populator;
import com.dfsek.terra.allay.Mapping;
import com.dfsek.terra.allay.TerraAllayPlugin;
import com.dfsek.terra.allay.delegate.AllayProtoChunk;
import com.dfsek.terra.allay.delegate.AllayProtoWorld;
import com.dfsek.terra.allay.delegate.AllayServerWorld;
import com.dfsek.terra.allay.delegate.AllayWorldProperties;
import com.dfsek.terra.api.config.ConfigPack;
import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
import com.dfsek.terra.api.world.chunk.generation.ChunkGenerator;
@@ -59,7 +58,7 @@ public class AllayGeneratorWrapper implements GeneratorWrapper {
this.worldProperties = new AllayWorldProperties(this.seed, dimension.getDimensionInfo());
var metaPackName = options.get(OPTION_META_PACK_NAME);
if (metaPackName != null) {
if(metaPackName != null) {
setConfigPack(getConfigPackByMeta(metaPackName, dimension.getDimensionInfo()));
return;
}

View File

@@ -1,6 +1,7 @@
package com.dfsek.terra.allay.handle;
import org.allaymc.api.registry.Registries;
import org.allaymc.api.utils.identifier.Identifier;
import java.util.Set;
import java.util.stream.Collectors;
@@ -12,8 +13,6 @@ import com.dfsek.terra.api.handle.ItemHandle;
import com.dfsek.terra.api.inventory.Item;
import com.dfsek.terra.api.inventory.item.Enchantment;
import org.allaymc.api.utils.identifier.Identifier;
/**
* @author daoge_cmd

View File

@@ -3,10 +3,10 @@ package com.dfsek.terra.bukkit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.dfsek.terra.bukkit.util.VersionUtil;
import java.util.List;
import com.dfsek.terra.bukkit.util.VersionUtil;
public interface NMSInitializer {
List<String> SUPPORTED_VERSIONS = List.of("v1.21.11");
@@ -16,8 +16,9 @@ public interface NMSInitializer {
static PlatformImpl init(TerraBukkitPlugin plugin) {
Logger logger = LoggerFactory.getLogger(NMSInitializer.class);
if (!SUPPORTED_VERSIONS.contains(MINECRAFT_VERSION)) {
logger.error("You are running your server on Minecraft version {} which is not supported by this version of Terra.", MINECRAFT_VERSION);
if(!SUPPORTED_VERSIONS.contains(MINECRAFT_VERSION)) {
logger.error("You are running your server on Minecraft version {} which is not supported by this version of Terra.",
MINECRAFT_VERSION);
String bypassKey = "IKnowThereAreNoNMSBindingsFor" + MINECRAFT_VERSION.replace(".", "_") + "ButIWillProceedAnyway";
if(System.getProperty(bypassKey) == null) {

View File

@@ -93,7 +93,7 @@ public class TerraBukkitPlugin extends JavaPlugin {
BukkitAdapter::adapt,
BukkitAdapter::adapt
))
.executionCoordinator(ExecutionCoordinator.simpleCoordinator())
.executionCoordinator(ExecutionCoordinator.asyncCoordinator())
.buildOnEnable(this);
commandManager.brigadierManager().setNativeNumberSuggestions(false);

View File

@@ -17,12 +17,12 @@
package com.dfsek.terra.bukkit.util;
import com.dfsek.terra.bukkit.TerraBukkitPlugin;
import io.papermc.lib.PaperLib;
import java.util.concurrent.TimeUnit;
import com.dfsek.terra.bukkit.TerraBukkitPlugin;
import static io.papermc.lib.PaperLib.suggestPaper;

View File

@@ -27,6 +27,8 @@ public class TerraMinestomExample {
private TerraMinestomWorld world;
public static void main(String[] args) {
System.setProperty("minestom.registry.unsafe-ops", "true");
TerraMinestomExample example = new TerraMinestomExample();
example.createNewInstance();
example.attachTerra();

View File

@@ -2,12 +2,6 @@ package com.dfsek.terra.minestom;
import com.dfsek.tectonic.api.TypeRegistry;
import com.dfsek.tectonic.api.loader.type.TypeLoader;
import com.dfsek.terra.minestom.api.BiomeFactory;
import com.dfsek.terra.minestom.biome.MinestomUserDefinedBiomeFactory;
import com.dfsek.terra.minestom.biome.MinestomUserDefinedBiomePool;
import net.kyori.adventure.key.Key;
import net.kyori.adventure.util.RGBLike;
import net.minestom.server.MinecraftServer;
@@ -32,8 +26,11 @@ import com.dfsek.terra.api.handle.ItemHandle;
import com.dfsek.terra.api.handle.WorldHandle;
import com.dfsek.terra.api.world.biome.PlatformBiome;
import com.dfsek.terra.minestom.addon.MinestomAddon;
import com.dfsek.terra.minestom.api.BiomeFactory;
import com.dfsek.terra.minestom.api.TerraMinestomWorldBuilder;
import com.dfsek.terra.minestom.biome.MinestomBiomeLoader;
import com.dfsek.terra.minestom.biome.MinestomUserDefinedBiomeFactory;
import com.dfsek.terra.minestom.biome.MinestomUserDefinedBiomePool;
import com.dfsek.terra.minestom.config.BiomeAdditionsSoundTemplate;
import com.dfsek.terra.minestom.config.BiomeMoodSoundTemplate;
import com.dfsek.terra.minestom.config.BiomeParticleConfigTemplate;
@@ -70,6 +67,10 @@ public final class TerraMinestomPlatform extends AbstractPlatform {
this(new MinestomWorldHandle(), new MinestomItemHandle(), new MinestomBiomeLoader(), new MinestomUserDefinedBiomeFactory());
}
public static Builder builder() {
return new Builder();
}
@Override
public void register(TypeRegistry registry) {
super.register(registry);
@@ -146,16 +147,13 @@ public final class TerraMinestomPlatform extends AbstractPlatform {
return worldBuilder(MinecraftServer.getInstanceManager().createInstanceContainer());
}
public static Builder builder() {
return new Builder();
}
public static class Builder {
private final List<BaseAddon> platformAddons = new ArrayList<>();
private @Nullable WorldHandle worldHandle;
private @Nullable ItemHandle itemHandle;
private @Nullable TypeLoader<PlatformBiome> biomeTypeLoader;
private @Nullable BiomeFactory biomeFactory;
private final List<BaseAddon> platformAddons = new ArrayList<>();
public Builder worldHandle(@Nullable WorldHandle worldHandle) {
this.worldHandle = worldHandle;

View File

@@ -1,6 +1,7 @@
package com.dfsek.terra.minestom.api;
import net.kyori.adventure.nbt.CompoundBinaryTag;
import net.minestom.server.entity.Entity;
import net.minestom.server.entity.EntityType;
@@ -9,5 +10,22 @@ import net.minestom.server.entity.EntityType;
* Allows adding AI to generated entities using custom entity types
*/
public interface EntityFactory {
/**
* Creates a new entity of the specified type.
*
* @param type the type of the entity to be created
* @return the created entity instance
*/
Entity createEntity(EntityType type);
/**
* Creates a new entity of the specified type with additional data.
*
* @param type the type of the entity to be created
* @param data the additional data for the entity, represented as a CompoundBinaryTag
* @return the created entity instance
*/
default Entity createEntity(EntityType type, CompoundBinaryTag data) {
return createEntity(type);
}
}

View File

@@ -1,8 +1,9 @@
package com.dfsek.terra.minestom.api;
import com.dfsek.terra.minestom.biome.MinestomUserDefinedBiomePool;
import net.minestom.server.instance.Instance;
import net.minestom.server.registry.RegistryKey;
import net.minestom.server.world.DimensionType;
import org.jspecify.annotations.NonNull;
import java.util.Random;
import java.util.function.Function;
@@ -10,14 +11,11 @@ import java.util.function.Function;
import com.dfsek.terra.api.config.ConfigPack;
import com.dfsek.terra.api.registry.CheckedRegistry;
import com.dfsek.terra.minestom.TerraMinestomPlatform;
import com.dfsek.terra.minestom.biome.MinestomUserDefinedBiomePool;
import com.dfsek.terra.minestom.block.DefaultBlockEntityFactory;
import com.dfsek.terra.minestom.entity.DefaultEntityFactory;
import com.dfsek.terra.minestom.world.TerraMinestomWorld;
import net.minestom.server.registry.RegistryKey;
import net.minestom.server.world.DimensionType;
import org.jspecify.annotations.NonNull;
public class TerraMinestomWorldBuilder {
private final TerraMinestomPlatform platform;

View File

@@ -4,9 +4,6 @@ import com.dfsek.tectonic.api.depth.DepthTracker;
import com.dfsek.tectonic.api.exception.LoadException;
import com.dfsek.tectonic.api.loader.ConfigLoader;
import com.dfsek.tectonic.api.loader.type.TypeLoader;
import com.dfsek.terra.api.world.biome.PlatformBiome;
import net.kyori.adventure.key.Key;
import net.minestom.server.registry.RegistryKey;
import org.intellij.lang.annotations.Subst;
@@ -14,6 +11,8 @@ import org.jetbrains.annotations.NotNull;
import java.lang.reflect.AnnotatedType;
import com.dfsek.terra.api.world.biome.PlatformBiome;
public class MinestomBiomeLoader implements TypeLoader<PlatformBiome> {
@Override

View File

@@ -1,7 +1,10 @@
package com.dfsek.terra.minestom.block;
import net.kyori.adventure.nbt.CompoundBinaryTag;
import net.kyori.adventure.nbt.TagStringIO;
import net.minestom.server.instance.block.Block;
import java.io.IOException;
import java.util.HashMap;
import java.util.Objects;
import java.util.stream.Collectors;
@@ -10,38 +13,84 @@ import com.dfsek.terra.api.block.BlockType;
import com.dfsek.terra.api.block.state.BlockState;
import com.dfsek.terra.api.block.state.properties.Property;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class MinestomBlockState implements BlockState {
private final Block block;
public record MinestomBlockState(Block block) implements BlockState {
private static final Logger LOGGER = LoggerFactory.getLogger(MinestomBlockState.class);
public static final MinestomBlockState AIR = new MinestomBlockState(Block.AIR);
private static final TagStringIO tagStringIO = TagStringIO.tagStringIO();
public MinestomBlockState(Block block) {
if(block == null) {
this.block = Block.AIR;
} else {
this.block = block;
}
public MinestomBlockState {
block = Objects.requireNonNullElse(block, Block.AIR);
}
public MinestomBlockState(String data) {
if(!data.contains("[")) {
block = Block.fromKey(data);
return;
public static MinestomBlockState fromStateId(String data) {
CompoundBinaryTag nbt = CompoundBinaryTag.empty();
int splitIndex = data.indexOf('{');
if(splitIndex != -1) {
String fullId = data;
data = data.substring(0, splitIndex);
String dataString = fullId.substring(splitIndex);
try {
nbt = tagStringIO.asCompound(dataString);
} catch(IOException exception) {
LOGGER.warn("Invalid entity data, will be ignored: {}", dataString);
}
}
String[] split = data.split("\\[");
String namespaceId = split[0];
String properties = split[1].substring(0, split[1].length() - 1);
int openBracketIndex = data.indexOf('[');
int closeBracketIndex = data.indexOf(']');
if(openBracketIndex == -1 || closeBracketIndex == -1 || closeBracketIndex < openBracketIndex) {
// no or invalid properties
Block block = Block.fromKey(data);
if(block != null && !nbt.isEmpty()) {
block = block.withNbt(nbt);
}
return new MinestomBlockState(block);
}
String namespaceId = data.substring(0, openBracketIndex);
String propertiesContent = data.substring(openBracketIndex + 1, closeBracketIndex);
Block block = Block.fromKey(namespaceId);
HashMap<String, String> propertiesMap = new HashMap<>();
for(String property : properties.split(",")) {
String[] kv = property.split("=");
propertiesMap.put(kv[0].strip(), kv[1].strip());
if (block == null) {
LOGGER.error("Invalid block ID found during parsing: {}", namespaceId);
return new MinestomBlockState(Block.AIR);
}
assert block != null;
this.block = block.withProperties(propertiesMap);
HashMap<String, String> propertiesMap = new HashMap<>();
int current = 0;
while (current < propertiesContent.length()) {
int nextComma = propertiesContent.indexOf(',', current);
String property;
if (nextComma == -1) {
property = propertiesContent.substring(current);
current = propertiesContent.length();
} else {
property = propertiesContent.substring(current, nextComma);
current = nextComma + 1;
}
int equalsIndex = property.indexOf('=');
if (equalsIndex == -1) {
LOGGER.warn("Invalid block property syntax (missing '=') in string: {}", property);
continue;
}
String key = property.substring(0, equalsIndex).strip();
String value = property.substring(equalsIndex + 1).strip();
propertiesMap.put(key, value);
}
if(!nbt.isEmpty()) {
block = block.withNbt(nbt);
}
return new MinestomBlockState(block.withProperties(propertiesMap));
}
@Override
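
To make the new parsing concrete, two hypothetical inputs (the block ID and property values are illustrative):

// No '[' in the input: straight Block.fromKey lookup, empty NBT.
MinestomBlockState plain = MinestomBlockState.fromStateId("minecraft:stone");

// Bracketed properties: namespaceId = "minecraft:oak_stairs", and the
// indexOf-based scan yields propertiesMap = {facing=north, half=top}.
MinestomBlockState stairs = MinestomBlockState.fromStateId(
    "minecraft:oak_stairs[facing=north,half=top]");

Compared with the old split("\\[")/split("=") version, the index-based scan tolerates malformed property pairs (logging and skipping them) instead of throwing ArrayIndexOutOfBoundsException.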

View File

@@ -15,26 +15,27 @@ import com.dfsek.terra.minestom.block.MinestomBlockState;
public class CachedChunk implements ProtoChunk {
private final int minHeight;
private final int maxHeight;
private final Block[] blocks;
private final MinestomBlockState[] blocks;
public CachedChunk(int minHeight, int maxHeight) {
this.minHeight = minHeight;
this.maxHeight = maxHeight;
this.blocks = new Block[16 * (maxHeight - minHeight + 1) * 16];
Arrays.fill(blocks, Block.AIR);
this.blocks = new MinestomBlockState[16 * (maxHeight - minHeight + 1) * 16];
Arrays.fill(blocks, MinestomBlockState.AIR);
}
public void writeRelative(UnitModifier modifier) {
modifier.setAllRelative((x, y, z) -> blocks[getIndex(x, y + minHeight, z)]);
modifier.setAllRelative((x, y, z) -> blocks[getIndex(x, y + minHeight, z)].block());
}
@Override
public void setBlock(int x, int y, int z, @NotNull BlockState blockState) {
Block block = (Block) blockState.getHandle();
MinestomBlockState minestomBlockState = (MinestomBlockState) blockState;
Block block = minestomBlockState.block();
if(block == null) return;
int index = getIndex(x, y, z);
if (index > blocks.length || index < 0) return;
blocks[index] = block;
if(index >= blocks.length || index < 0) return;
blocks[index] = minestomBlockState;
}
private int getIndex(int x, int y, int z) {
@@ -45,8 +46,8 @@ public class CachedChunk implements ProtoChunk {
@Override
public @NotNull BlockState getBlock(int x, int y, int z) {
int index = getIndex(x, y, z);
if (index > blocks.length || index < 0) return MinestomBlockState.AIR;
return new MinestomBlockState(blocks[index]);
if(index >= blocks.length || index < 0) return MinestomBlockState.AIR;
return blocks[index];
}
@Override

View File

@@ -4,6 +4,7 @@ import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.LoadingCache;
import com.github.benmanes.caffeine.cache.stats.CacheStats;
import net.minestom.server.world.DimensionType;
import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -15,7 +16,7 @@ import com.dfsek.terra.api.world.chunk.generation.ChunkGenerator;
public class GeneratedChunkCache {
private static final Logger log = LoggerFactory.getLogger(GeneratedChunkCache.class);
private final LoadingCache<Pair<Integer, Integer>, CachedChunk> cache;
private final LoadingCache<@NotNull Long, CachedChunk> cache;
private final DimensionType dimensionType;
private final ChunkGenerator generator;
private final ServerWorld world;
@@ -29,7 +30,7 @@ public class GeneratedChunkCache {
this.cache = Caffeine.newBuilder()
.maximumSize(128)
.recordStats()
.build((Pair<Integer, Integer> key) -> generateChunk(key.getLeft(), key.getRight()));
.build((Long key) -> generateChunk(unpackX(key), unpackZ(key)));
}
private CachedChunk generateChunk(int x, int z) {
@@ -50,6 +51,18 @@ public class GeneratedChunkCache {
}
public CachedChunk at(int x, int z) {
return cache.get(Pair.of(x, z));
return cache.get(pack(x, z));
}
private long pack(final int x, final int z) {
return ((long) x) << 32 | z & 0xFFFFFFFFL;
}
private int unpackX(long key) {
return (int) (key >>> 32);
}
private int unpackZ(long key) {
return (int) key;
}
}
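
Round-trip sanity check for the key packing above, since sign handling is the subtle part: the & 0xFFFFFFFFL mask keeps a negative z from smearing set bits across the x half of the long.

// pack(-3, 7) == 0xFFFFFFFD00000007L -> unpackX == -3, unpackZ == 7
// pack(7, -3) == 0x00000007FFFFFFFDL -> unpackX == 7,  unpackZ == -3

Swapping the Pair<Integer, Integer> key for a primitive-packed long gives the cache a cheaper hashCode/equals and drops the per-lookup Pair allocation.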

View File

@@ -3,19 +3,10 @@ package com.dfsek.terra.minestom.config;
import com.dfsek.tectonic.api.config.template.annotations.Default;
import com.dfsek.tectonic.api.config.template.annotations.Value;
import com.dfsek.tectonic.api.config.template.object.ObjectTemplate;
import net.kyori.adventure.nbt.BinaryTagIO;
import net.kyori.adventure.nbt.CompoundBinaryTag;
import net.kyori.adventure.nbt.TagStringIO;
import net.minestom.server.MinecraftServer;
import net.minestom.server.command.builder.arguments.Argument;
import net.minestom.server.command.builder.arguments.minecraft.registry.ArgumentParticle;
import net.minestom.server.particle.Particle;
import net.minestom.server.world.biome.BiomeEffects;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
public class BiomeParticleConfigTemplate implements ObjectTemplate<BiomeEffects.Particle> {
@Value("particle")
@@ -34,8 +25,9 @@ public class BiomeParticleConfigTemplate implements ObjectTemplate<BiomeEffects.
String[] parts = particle.split("\\{");
Particle parsedParticle = Particle.fromKey(parts[0]);
if (parts.length > 1) {
LoggerFactory.getLogger(BiomeParticleConfigTemplate.class).warn("Particle {} has additional data, particle will be ignored.", particle);
if(parts.length > 1) {
LoggerFactory.getLogger(BiomeParticleConfigTemplate.class).warn("Particle {} has additional data, particle will be ignored.",
particle);
return null;
}

View File

@@ -1,10 +1,10 @@
package com.dfsek.terra.minestom.entity;
import com.dfsek.terra.minestom.api.EntityFactory;
import net.minestom.server.entity.Entity;
import net.minestom.server.entity.EntityType;
import com.dfsek.terra.minestom.api.EntityFactory;
public class DefaultEntityFactory implements EntityFactory {
@Override

View File

@@ -22,7 +22,8 @@ public class MinestomEntity implements com.dfsek.terra.api.entity.Entity {
public static MinestomEntity spawn(double x, double y, double z, EntityType type, TerraMinestomWorld world) {
Instance instance = world.getHandle();
Entity entity = world.getEntityFactory().createEntity(((MinestomEntityType) type).getHandle());
MinestomEntityType entityType = (MinestomEntityType) type;
Entity entity = world.getEntityFactory().createEntity(entityType.getHandle(), entityType.getData());
entity.setInstance(instance, new Pos(x, y, z));
return new MinestomEntity(entity, world);
}

View File

@@ -1,12 +1,38 @@
package com.dfsek.terra.minestom.entity;
import net.kyori.adventure.nbt.CompoundBinaryTag;
import net.kyori.adventure.nbt.TagStringIO;
import net.minestom.server.entity.EntityType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
public class MinestomEntityType implements com.dfsek.terra.api.entity.EntityType {
private static final Logger LOGGER = LoggerFactory.getLogger(MinestomEntityType.class);
private static final TagStringIO tagStringIO = TagStringIO.tagStringIO();
private final EntityType delegate;
private final CompoundBinaryTag data;
public MinestomEntityType(String id) {
int splitIndex = id.indexOf('{');
if(splitIndex != -1) {
String fullId = id;
id = id.substring(0, splitIndex);
String dataString = fullId.substring(splitIndex);
CompoundBinaryTag data;
try {
data = tagStringIO.asCompound(dataString);
} catch(IOException exception) {
LOGGER.warn("Invalid entity data, will be ignored: {}", dataString);
data = CompoundBinaryTag.empty();
}
this.data = data;
} else {
this.data = CompoundBinaryTag.empty();
}
delegate = EntityType.fromKey(id);
}
@@ -14,4 +40,8 @@ public class MinestomEntityType implements com.dfsek.terra.api.entity.EntityType
public EntityType getHandle() {
return delegate;
}
public CompoundBinaryTag getData() {
return data;
}
}
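
A brief usage sketch for the constructor above; the entity ID and SNBT payload are illustrative:

// Plain ID: no '{' present, so data stays CompoundBinaryTag.empty().
MinestomEntityType zombie = new MinestomEntityType("minecraft:zombie");

// ID with SNBT: id becomes "minecraft:zombie", data is parsed from "{IsBaby:1b}".
MinestomEntityType baby = new MinestomEntityType("minecraft:zombie{IsBaby:1b}");

MinestomEntity.spawn(...) in this commit now forwards getData() to EntityFactory.createEntity(type, data), so factories that override the two-argument variant can apply the NBT.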

View File

@@ -15,7 +15,7 @@ public class MinestomWorldHandle implements WorldHandle {
@Override
public @NotNull BlockState createBlockState(@NotNull String data) {
return new MinestomBlockState(data);
return MinestomBlockState.fromStateId(data);
}
@Override

View File

@@ -20,7 +20,6 @@ import com.dfsek.terra.api.world.chunk.Chunk;
import com.dfsek.terra.api.world.chunk.generation.ChunkGenerator;
import com.dfsek.terra.api.world.info.WorldProperties;
import com.dfsek.terra.minestom.TerraMinestomPlatform;
import com.dfsek.terra.minestom.api.BiomeFactory;
import com.dfsek.terra.minestom.api.BlockEntityFactory;
import com.dfsek.terra.minestom.api.EntityFactory;
import com.dfsek.terra.minestom.biome.MinestomUserDefinedBiomePool;

View File

@@ -18,7 +18,7 @@ public final class LifecycleEntryPoint {
logger.info("Initializing Terra {} mod...", modName);
FabricServerCommandManager<CommandSender> manager = new FabricServerCommandManager<>(
ExecutionCoordinator.simpleCoordinator(),
ExecutionCoordinator.asyncCoordinator(),
SenderMapper.create(
serverCommandSource -> (CommandSender) serverCommandSource,
commandSender -> (ServerCommandSource) commandSender)