Merge pull request #329 from PolyhedralDev/ver/6.2.0

Minor version 6.2.0
dfsek
2022-07-06 19:43:00 -07:00
committed by GitHub
285 changed files with 5678 additions and 2448 deletions

.gitignore vendored
View File

@@ -245,4 +245,7 @@ nbdist/
/run/
**/testDir/
**/testDir/
platforms/**/run/**

View File

@@ -63,7 +63,8 @@ to [Terra global moderation team](CODE_OF_CONDUCT.md#Reporting).
## I don't want to read this whole thing I just have a question!!!
> **Note:** Please don't file an issue to ask a question. You'll get faster results by using the resources below.
> **Note:** Please don't file an issue to ask a question. You'll get faster
> results by using the resources below.
We have an official discord server where you can request help from various users
@@ -103,7 +104,9 @@ you don't need to create one. When you are creating a bug report,
please [include as many details as possible](#how-do-i-submit-a-good-bug-report)
.
> **Note:** If you find a **Closed** issue that seems like it is the same thing that you're experiencing, open a new issue and include a link to the original issue in the body of your new one.
> **Note:** If you find a **Closed** issue that seems like it is the same thing
> that you're experiencing, open a new issue and include a link to the original
> issue in the body of your new one.
#### Before Submitting A Bug Report

View File

@@ -1,8 +1,8 @@
preRelease(true)
versionProjects(":common:api", version("6.1.2"))
versionProjects(":common:implementation", version("6.1.2"))
versionProjects(":platforms", version("6.1.2"))
versionProjects(":common:api", version("6.2.0"))
versionProjects(":common:implementation", version("6.2.0"))
versionProjects(":platforms", version("6.2.0"))
allprojects {

View File

@@ -15,7 +15,9 @@ buildscript {
repositories {
mavenCentral()
gradlePluginPortal()
maven { url = uri("https://repo.codemc.org/repository/maven-public") }
maven("https://repo.codemc.org/repository/maven-public") {
name = "CodeMC"
}
maven("https://papermc.io/repo/repository/maven-public/") {
name = "PaperMC"
}

View File

@@ -12,30 +12,30 @@ import kotlin.streams.asStream
*/
fun Project.addonDir(dir: File, task: Task) {
val moveAddons = tasks.register("moveAddons" + task.name) {
dependsOn("compileAddons")
doLast {
dir.parentFile.mkdirs()
matchingAddons(dir) {
it.name.startsWith("Terra-") // Assume everything that starts with Terra- is a core addon.
}.forEach {
println("Deleting old addon: " + it.absolutePath)
it.delete()
}
forSubProjects(":common:addons") {
val jar = tasks.named("shadowJar").get() as ShadowJar
val boot = if (extra.has("bootstrap") && extra.get("bootstrap") as Boolean) "bootstrap/" else ""
val target = File(dir, boot + jar.archiveFileName.get())
val base = "${jar.archiveBaseName.get()}-${version}"
println("Copying addon ${jar.archiveFileName.get()} to ${target.absolutePath}. Base name: $base")
jar.archiveFile.orNull?.asFile?.copyTo(target)
}
dependsOn("compileAddons")
doLast {
dir.parentFile.mkdirs()
matchingAddons(dir) {
it.name.startsWith("Terra-") // Assume everything that starts with Terra- is a core addon.
}.forEach {
println("Deleting old addon: " + it.absolutePath)
it.delete()
}
forSubProjects(":common:addons") {
val jar = tasks.named("shadowJar").get() as ShadowJar
val boot = if (extra.has("bootstrap") && extra.get("bootstrap") as Boolean) "bootstrap/" else ""
val target = File(dir, boot + jar.archiveFileName.get())
val base = "${jar.archiveBaseName.get()}-${version}"
println("Copying addon ${jar.archiveFileName.get()} to ${target.absolutePath}. Base name: $base")
jar.archiveFile.orNull?.asFile?.copyTo(target)
}
}
}
task.dependsOn(moveAddons)
}

View File

@@ -30,10 +30,24 @@ fun Project.configureDependencies() {
repositories {
mavenCentral()
gradlePluginPortal()
maven("https://maven.fabricmc.net/")
maven("https://repo.codemc.org/repository/maven-public")
maven("https://repo.codemc.io/repository/nms/")
maven("https://papermc.io/repo/repository/maven-public/")
maven("https://maven.fabricmc.net/") {
name = "FabricMC"
}
maven("https://repo.codemc.org/repository/maven-public") {
name = "CodeMC"
}
maven("https://papermc.io/repo/repository/maven-public/") {
name = "PaperMC"
}
maven("https://files.minecraftforge.net/maven/") {
name = "Forge"
}
maven("https://maven.quiltmc.org/repository/release/") {
name = "Quilt"
}
maven("https://jitpack.io") {
name = "JitPack"
}
}
dependencies {

View File

@@ -7,12 +7,9 @@ import java.nio.file.Files
import java.nio.file.StandardCopyOption
import org.gradle.api.DefaultTask
import org.gradle.api.Project
import org.gradle.api.Task
import org.gradle.api.plugins.BasePluginExtension
import org.gradle.jvm.tasks.Jar
import org.gradle.kotlin.dsl.apply
import org.gradle.kotlin.dsl.configure
import org.gradle.kotlin.dsl.creating
import org.gradle.kotlin.dsl.extra
import org.gradle.kotlin.dsl.get
import org.gradle.kotlin.dsl.named
@@ -55,7 +52,7 @@ fun Project.configureDistribution() {
println("Packaging addon ${jar.archiveFileName.get()} to $dest. size: ${jar.archiveFile.get().asFile.length() / 1024}KB")
val boot = if (extra.has("bootstrap") && extra.get("bootstrap") as Boolean) "bootstrap/" else ""
val addonPath = fs.getPath("/addons/$boot${jar.archiveFileName.get()}");
val addonPath = fs.getPath("/addons/$boot${jar.archiveFileName.get()}")
if (!Files.exists(addonPath)) {
Files.createDirectories(addonPath.parent)

View File

@@ -4,7 +4,7 @@ object Versions {
const val paralithic = "0.7.0"
const val strata = "1.1.1"
const val cloud = "1.7.0-SNAPSHOT"
const val cloud = "1.7.0"
const val slf4j = "1.7.36"
const val log4j_slf4j_impl = "2.14.1"
@@ -18,18 +18,40 @@ object Versions {
}
object Fabric {
const val fabricLoader = "0.14.2"
const val fabricAPI = "0.55.1+1.19"
const val fabricLoader = "0.14.8"
const val fabricAPI = "0.57.0+1.19"
}
object Quilt {
const val quiltLoader = "0.17.0"
const val fabricApi = "2.0.0-beta.4+0.57.0-1.19"
}
object Mod {
const val mixin = "0.11.2+mixin.0.8.5"
const val minecraft = "1.19"
const val yarn = "$minecraft+build.1"
const val permissionsAPI = "0.1-SNAPSHOT"
const val mixin = "0.11.2+mixin.0.8.5"
const val loom = "0.11-SNAPSHOT"
const val fabricLoader = "0.14.2"
const val architecuryLoom = "0.12.0-SNAPSHOT"
const val architecturyPlugin = "3.4-SNAPSHOT"
const val loomQuiltflower = "1.7.1"
const val lazyDfu = "0.1.2"
}
object Forge {
const val forge = "${Mod.minecraft}-41.0.63"
const val burningwave = "12.53.0"
}
object Bukkit {
const val paper = "1.18-R0.1-SNAPSHOT"
const val paper = "1.18.2-R0.1-SNAPSHOT"
const val paperLib = "1.0.5"
const val minecraft = "1.19"
const val reflectionRemapper = "0.1.0-SNAPSHOT"
}
object Sponge {
@@ -37,4 +59,11 @@ object Versions {
const val mixin = "0.8.2"
const val minecraft = "1.17.1"
}
object CLI {
const val nbt = "6.1"
const val logback = "1.2.9"
const val commonsIO = "2.7"
const val guava = "31.0.1-jre"
}
}

View File

@@ -0,0 +1,6 @@
version = version("1.0.0")
dependencies {
compileOnlyApi(project(":common:addons:manifest-addon-loader"))
compileOnlyApi(project(":common:addons:biome-query-api"))
}

View File

@@ -0,0 +1,51 @@
package com.dfsek.terra.addons.biome.extrusion;
import com.dfsek.terra.api.util.Column;
import com.dfsek.terra.api.world.biome.Biome;
class BaseBiomeColumn implements Column<Biome> {
private final BiomeExtrusionProvider biomeProvider;
private final Biome base;
private final int min;
private final int max;
private final int x;
private final int z;
private final long seed;
protected BaseBiomeColumn(BiomeExtrusionProvider biomeProvider, Biome base, int min, int max, int x, int z, long seed) {
this.biomeProvider = biomeProvider;
this.base = base;
this.min = min;
this.max = max;
this.x = x;
this.z = z;
this.seed = seed;
}
@Override
public int getMinY() {
return min;
}
@Override
public int getMaxY() {
return max;
}
@Override
public int getX() {
return x;
}
@Override
public int getZ() {
return z;
}
@Override
public Biome get(int y) {
return biomeProvider.extrude(base, x, y, z, seed);
}
}

View File

@@ -0,0 +1,67 @@
package com.dfsek.terra.addons.biome.extrusion;
import com.dfsek.tectonic.api.config.template.object.ObjectTemplate;
import java.util.function.Supplier;
import com.dfsek.terra.addons.biome.extrusion.api.Extrusion;
import com.dfsek.terra.addons.biome.extrusion.api.ReplaceableBiome;
import com.dfsek.terra.addons.biome.extrusion.config.BiomeExtrusionTemplate;
import com.dfsek.terra.addons.biome.extrusion.config.ReplaceableBiomeLoader;
import com.dfsek.terra.addons.biome.extrusion.config.extrusions.ReplaceExtrusionTemplate;
import com.dfsek.terra.addons.biome.extrusion.config.extrusions.SetExtrusionTemplate;
import com.dfsek.terra.addons.manifest.api.AddonInitializer;
import com.dfsek.terra.api.Platform;
import com.dfsek.terra.api.addon.BaseAddon;
import com.dfsek.terra.api.event.events.config.pack.ConfigPackPostLoadEvent;
import com.dfsek.terra.api.event.events.config.pack.ConfigPackPreLoadEvent;
import com.dfsek.terra.api.event.functional.FunctionalEventHandler;
import com.dfsek.terra.api.inject.annotations.Inject;
import com.dfsek.terra.api.registry.CheckedRegistry;
import com.dfsek.terra.api.registry.Registry;
import com.dfsek.terra.api.util.reflection.TypeKey;
import com.dfsek.terra.api.world.biome.Biome;
import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
public class BiomeExtrusionAddon implements AddonInitializer {
public static final TypeKey<Supplier<ObjectTemplate<Extrusion>>> EXTRUSION_REGISTRY_KEY = new TypeKey<>() {
};
public static final TypeKey<Supplier<ObjectTemplate<BiomeProvider>>> PROVIDER_REGISTRY_KEY = new TypeKey<>() {
};
@Inject
private Platform platform;
@Inject
private BaseAddon addon;
@Override
public void initialize() {
platform.getEventManager()
.getHandler(FunctionalEventHandler.class)
.register(addon, ConfigPackPreLoadEvent.class)
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<BiomeProvider>>> providerRegistry =
event.getPack()
.getOrCreateRegistry(PROVIDER_REGISTRY_KEY);
providerRegistry.register(addon.key("EXTRUSION"), BiomeExtrusionTemplate::new);
})
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<Extrusion>>> extrusionRegistry = event.getPack().getOrCreateRegistry(
EXTRUSION_REGISTRY_KEY);
extrusionRegistry.register(addon.key("SET"), SetExtrusionTemplate::new);
extrusionRegistry.register(addon.key("REPLACE"), ReplaceExtrusionTemplate::new);
})
.failThrough();
platform.getEventManager()
.getHandler(FunctionalEventHandler.class)
.register(addon, ConfigPackPostLoadEvent.class)
.then(event -> {
Registry<Biome> biomeRegistry = event.getPack().getRegistry(Biome.class);
event.getPack().applyLoader(ReplaceableBiome.class, new ReplaceableBiomeLoader(biomeRegistry));
});
}
}

View File

@@ -0,0 +1,67 @@
package com.dfsek.terra.addons.biome.extrusion;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import com.dfsek.terra.addons.biome.extrusion.api.Extrusion;
import com.dfsek.terra.api.util.Column;
import com.dfsek.terra.api.world.biome.Biome;
import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
public class BiomeExtrusionProvider implements BiomeProvider {
private final BiomeProvider delegate;
private final Set<Biome> biomes;
private final List<Extrusion> extrusions;
private final int resolution;
public BiomeExtrusionProvider(BiomeProvider delegate, List<Extrusion> extrusions, int resolution) {
this.delegate = delegate;
this.biomes = delegate.stream().collect(Collectors.toSet());
extrusions.forEach(e -> biomes.addAll(e.getBiomes()));
this.extrusions = extrusions;
this.resolution = resolution;
}
@Override
public Biome getBiome(int x, int y, int z, long seed) {
Biome delegated = delegate.getBiome(x, y, z, seed);
return extrude(delegated, x, y, z, seed);
}
public Biome extrude(Biome original, int x, int y, int z, long seed) {
for(Extrusion extrusion : extrusions) {
original = extrusion.extrude(original, x, y, z, seed);
}
return original;
}
@Override
public Column<Biome> getColumn(int x, int z, long seed, int min, int max) {
return delegate.getBaseBiome(x, z, seed)
.map(base -> (Column<Biome>) new BaseBiomeColumn(this, base, min, max, x, z, seed))
.orElseGet(() -> BiomeProvider.super.getColumn(x, z, seed, min, max));
}
@Override
public Optional<Biome> getBaseBiome(int x, int z, long seed) {
return delegate.getBaseBiome(x, z, seed);
}
@Override
public Iterable<Biome> getBiomes() {
return biomes;
}
@Override
public int resolution() {
return resolution;
}
public BiomeProvider getDelegate() {
return delegate;
}
}

View File

@@ -0,0 +1,12 @@
package com.dfsek.terra.addons.biome.extrusion.api;
import java.util.Collection;
import com.dfsek.terra.api.world.biome.Biome;
public interface Extrusion {
Biome extrude(Biome original, int x, int y, int z, long seed);
Collection<Biome> getBiomes();
}

View File

@@ -0,0 +1,23 @@
package com.dfsek.terra.addons.biome.extrusion.api;
import com.dfsek.terra.api.world.biome.Biome;
final class PresentBiome implements ReplaceableBiome {
private final Biome biome;
PresentBiome(Biome biome) {
this.biome = biome;
}
@Override
public Biome get(Biome existing) {
return biome;
}
@Override
public boolean isSelf() {
return false;
}
}

View File

@@ -0,0 +1,31 @@
package com.dfsek.terra.addons.biome.extrusion.api;
import java.util.Optional;
import com.dfsek.terra.api.world.biome.Biome;
/**
* Basically just a specialised implementation of {@link Optional} for biomes where a biome may be a "self" reference.
*/
public sealed interface ReplaceableBiome permits PresentBiome, SelfBiome {
static ReplaceableBiome of(Biome biome) {
return new PresentBiome(biome);
}
static ReplaceableBiome self() {
return SelfBiome.INSTANCE;
}
Biome get(Biome existing);
default Biome get() {
if(isSelf()) {
throw new IllegalStateException("Cannot get() self biome!");
}
return get(null);
}
boolean isSelf();
}
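
For reference, a minimal usage sketch of this contract (not part of the diff; plainsBiome and existing are hypothetical Biome instances standing in for real registry entries):

import com.dfsek.terra.addons.biome.extrusion.api.ReplaceableBiome;
import com.dfsek.terra.api.world.biome.Biome;

// Hypothetical sketch: demonstrates how the two permitted implementations resolve.
final class ReplaceableBiomeSketch {
    static void demo(Biome plainsBiome, Biome existing) {
        ReplaceableBiome present = ReplaceableBiome.of(plainsBiome); // backed by PresentBiome
        ReplaceableBiome self = ReplaceableBiome.self();             // the shared SelfBiome.INSTANCE

        Biome a = present.get(existing); // always plainsBiome; the existing biome is ignored
        Biome b = self.get(existing);    // the existing biome itself (must be non-null)
        boolean s = self.isSelf();       // true; calling self.get() with no argument throws IllegalStateException
    }
}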

View File

@@ -0,0 +1,21 @@
package com.dfsek.terra.addons.biome.extrusion.api;
import java.util.Objects;
import com.dfsek.terra.api.world.biome.Biome;
final class SelfBiome implements ReplaceableBiome {
public static final SelfBiome INSTANCE = new SelfBiome();
@Override
public Biome get(Biome existing) {
return Objects.requireNonNull(existing);
}
@Override
public boolean isSelf() {
return true;
}
}

View File

@@ -0,0 +1,30 @@
package com.dfsek.terra.addons.biome.extrusion.config;
import com.dfsek.tectonic.api.config.template.annotations.Default;
import com.dfsek.tectonic.api.config.template.annotations.Value;
import com.dfsek.tectonic.api.config.template.object.ObjectTemplate;
import java.util.List;
import com.dfsek.terra.addons.biome.extrusion.BiomeExtrusionProvider;
import com.dfsek.terra.addons.biome.extrusion.api.Extrusion;
import com.dfsek.terra.api.config.meta.Meta;
import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
public class BiomeExtrusionTemplate implements ObjectTemplate<BiomeProvider> {
@Value("provider")
private @Meta BiomeProvider provider;
@Value("resolution")
@Default
private @Meta int resolution = 4;
@Value("extrusions")
private @Meta List<@Meta Extrusion> extrusions;
@Override
public BiomeProvider get() {
return new BiomeExtrusionProvider(provider, extrusions, resolution);
}
}

View File

@@ -0,0 +1,32 @@
package com.dfsek.terra.addons.biome.extrusion.config;
import com.dfsek.tectonic.api.depth.DepthTracker;
import com.dfsek.tectonic.api.exception.LoadException;
import com.dfsek.tectonic.api.loader.ConfigLoader;
import com.dfsek.tectonic.api.loader.type.TypeLoader;
import org.jetbrains.annotations.NotNull;
import java.lang.reflect.AnnotatedType;
import com.dfsek.terra.addons.biome.extrusion.api.ReplaceableBiome;
import com.dfsek.terra.api.registry.Registry;
import com.dfsek.terra.api.world.biome.Biome;
public class ReplaceableBiomeLoader implements TypeLoader<ReplaceableBiome> {
private final Registry<Biome> biomeRegistry;
public ReplaceableBiomeLoader(Registry<Biome> biomeRegistry) {
this.biomeRegistry = biomeRegistry;
}
@Override
public ReplaceableBiome load(@NotNull AnnotatedType t, @NotNull Object c, @NotNull ConfigLoader loader, DepthTracker depthTracker)
throws LoadException {
if(c.equals("SELF")) return ReplaceableBiome.self();
return biomeRegistry
.getByID((String) c)
.map(ReplaceableBiome::of)
.orElseThrow(() -> new LoadException("No such biome: " + c, depthTracker));
}
}

View File

@@ -0,0 +1,23 @@
package com.dfsek.terra.addons.biome.extrusion.config.extrusions;
import com.dfsek.tectonic.api.config.template.annotations.Value;
import com.dfsek.terra.addons.biome.extrusion.api.Extrusion;
import com.dfsek.terra.addons.biome.extrusion.api.ReplaceableBiome;
import com.dfsek.terra.addons.biome.extrusion.extrusions.ReplaceExtrusion;
import com.dfsek.terra.api.config.meta.Meta;
import com.dfsek.terra.api.util.collection.ProbabilityCollection;
public class ReplaceExtrusionTemplate extends SamplerExtrusionTemplate {
@Value("to")
private @Meta ProbabilityCollection<@Meta ReplaceableBiome> biomes;
@Value("from")
private @Meta String fromTag;
@Override
public Extrusion get() {
return new ReplaceExtrusion(sampler, range, biomes, fromTag);
}
}

View File

@@ -0,0 +1,18 @@
package com.dfsek.terra.addons.biome.extrusion.config.extrusions;
import com.dfsek.tectonic.api.config.template.annotations.Value;
import com.dfsek.tectonic.api.config.template.object.ObjectTemplate;
import com.dfsek.terra.addons.biome.extrusion.api.Extrusion;
import com.dfsek.terra.api.config.meta.Meta;
import com.dfsek.terra.api.noise.NoiseSampler;
import com.dfsek.terra.api.util.Range;
public abstract class SamplerExtrusionTemplate implements ObjectTemplate<Extrusion> {
@Value("sampler")
protected @Meta NoiseSampler sampler;
@Value("range")
protected @Meta Range range;
}

View File

@@ -0,0 +1,20 @@
package com.dfsek.terra.addons.biome.extrusion.config.extrusions;
import com.dfsek.tectonic.api.config.template.annotations.Value;
import com.dfsek.terra.addons.biome.extrusion.api.Extrusion;
import com.dfsek.terra.addons.biome.extrusion.api.ReplaceableBiome;
import com.dfsek.terra.addons.biome.extrusion.extrusions.SetExtrusion;
import com.dfsek.terra.api.config.meta.Meta;
import com.dfsek.terra.api.util.collection.ProbabilityCollection;
public class SetExtrusionTemplate extends SamplerExtrusionTemplate {
@Value("to")
private @Meta ProbabilityCollection<@Meta ReplaceableBiome> biomes;
@Override
public Extrusion get() {
return new SetExtrusion(sampler, range, biomes);
}
}

View File

@@ -0,0 +1,52 @@
package com.dfsek.terra.addons.biome.extrusion.extrusions;
import java.util.Collection;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import com.dfsek.terra.addons.biome.extrusion.api.Extrusion;
import com.dfsek.terra.addons.biome.extrusion.api.ReplaceableBiome;
import com.dfsek.terra.addons.biome.query.api.BiomeQueries;
import com.dfsek.terra.api.noise.NoiseSampler;
import com.dfsek.terra.api.util.Range;
import com.dfsek.terra.api.util.collection.ProbabilityCollection;
import com.dfsek.terra.api.world.biome.Biome;
/**
* Replaces biomes carrying a given tag, at locations selected by a sampler.
*/
public class ReplaceExtrusion implements Extrusion {
private final NoiseSampler sampler;
private final Range range;
private final ProbabilityCollection<ReplaceableBiome> biomes;
private final Predicate<Biome> hasTag;
public ReplaceExtrusion(NoiseSampler sampler, Range range, ProbabilityCollection<ReplaceableBiome> biomes, String tag) {
this.sampler = sampler;
this.range = range;
this.biomes = biomes;
this.hasTag = BiomeQueries.has(tag);
}
@Override
public Biome extrude(Biome original, int x, int y, int z, long seed) {
if(hasTag.test(original)) {
return range.ifInRange(y, () -> biomes.get(sampler, x, y, z, seed).get(original), original);
}
return original;
}
@Override
public Collection<Biome> getBiomes() {
return biomes
.getContents()
.stream()
.filter(Predicate.not(ReplaceableBiome::isSelf))
.map(ReplaceableBiome::get)
.collect(Collectors.toSet());
}
}

View File

@@ -0,0 +1,45 @@
package com.dfsek.terra.addons.biome.extrusion.extrusions;
import java.util.Collection;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import com.dfsek.terra.addons.biome.extrusion.api.Extrusion;
import com.dfsek.terra.addons.biome.extrusion.api.ReplaceableBiome;
import com.dfsek.terra.api.noise.NoiseSampler;
import com.dfsek.terra.api.util.Range;
import com.dfsek.terra.api.util.collection.ProbabilityCollection;
import com.dfsek.terra.api.world.biome.Biome;
/**
* Sets biomes at locations based on a sampler.
*/
public class SetExtrusion implements Extrusion {
private final NoiseSampler sampler;
private final Range range;
private final ProbabilityCollection<ReplaceableBiome> biomes;
public SetExtrusion(NoiseSampler sampler, Range range, ProbabilityCollection<ReplaceableBiome> biomes) {
this.sampler = sampler;
this.range = range;
this.biomes = biomes;
}
@Override
public Biome extrude(Biome original, int x, int y, int z, long seed) {
return range.ifInRange(y, () -> biomes.get(sampler, x, y, z, seed).get(original), original);
}
@Override
public Collection<Biome> getBiomes() {
return biomes
.getContents()
.stream()
.filter(Predicate.not(ReplaceableBiome::isSelf))
.map(ReplaceableBiome::get)
.collect(Collectors.toSet());
}
}

View File

@@ -0,0 +1,14 @@
schema-version: 1
contributors:
- Terra contributors
id: biome-provider-extrusion
version: @VERSION@
entrypoints:
- "com.dfsek.terra.addons.biome.extrusion.BiomeExtrusionAddon"
website:
issues: https://github.com/PolyhedralDev/Terra/issues
source: https://github.com/PolyhedralDev/Terra
docs: https://terra.polydev.org
license: MIT License
depends:
biome-query-api: "1.+"

View File

@@ -13,6 +13,7 @@ import java.awt.Color;
import java.awt.image.BufferedImage;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import com.dfsek.terra.api.world.biome.Biome;
@@ -38,6 +39,10 @@ public class ImageBiomeProvider implements BiomeProvider {
@Override
public Biome getBiome(int x, int y, int z, long seed) {
return getBiome(x, z);
}
public Biome getBiome(int x, int z) {
x /= resolution;
z /= resolution;
Color color = align.getColor(image, x, z);
@@ -51,6 +56,11 @@ public class ImageBiomeProvider implements BiomeProvider {
}));
}
@Override
public Optional<Biome> getBaseBiome(int x, int z, long seed) {
return Optional.of(getBiome(x, z));
}
@Override
public Iterable<Biome> getBiomes() {
return colorBiomeMap.values();

View File

@@ -1,14 +1,12 @@
version = version("1.0.0")
version = version("1.0.1")
dependencies {
compileOnlyApi(project(":common:addons:manifest-addon-loader"))
implementation("com.github.ben-manes.caffeine:caffeine:3.1.0")
implementation("net.jafama", "jafama", Versions.Libraries.Internal.jafama)
testImplementation("net.jafama", "jafama", Versions.Libraries.Internal.jafama)
}
tasks.named<com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar>("shadowJar") {
relocate("com.github.benmanes.caffeine", "com.dfsek.terra.addons.biome.pipeline.lib.caffeine")
relocate("net.jafama", "com.dfsek.terra.addons.biome.pipeline.lib.jafama")
}

View File

@@ -0,0 +1,71 @@
package com.dfsek.terra.addons.biome.pipeline;
import java.util.function.Consumer;
import com.dfsek.terra.api.util.Column;
import com.dfsek.terra.api.util.function.IntIntObjConsumer;
import com.dfsek.terra.api.util.function.IntObjConsumer;
import com.dfsek.terra.api.world.biome.Biome;
import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
class BiomePipelineColumn implements Column<Biome> {
private final int min;
private final int max;
private final int x;
private final int z;
private final Biome biome;
protected BiomePipelineColumn(BiomeProvider biomeProvider, int min, int max, int x, int z, long seed) {
this.min = min;
this.max = max;
this.x = x;
this.z = z;
this.biome = biomeProvider.getBiome(x, 0, z, seed);
}
@Override
public int getMinY() {
return min;
}
@Override
public int getMaxY() {
return max;
}
@Override
public int getX() {
return x;
}
@Override
public int getZ() {
return z;
}
@Override
public Biome get(int y) {
return biome;
}
@Override
public void forRanges(int resolution, IntIntObjConsumer<Biome> consumer) {
consumer.accept(min, max, biome);
}
@Override
public void forEach(Consumer<Biome> consumer) {
for(int y = min; y < max; y++) {
consumer.accept(biome);
}
}
@Override
public void forEach(IntObjConsumer<Biome> consumer) {
for(int y = min; y < max; y++) {
consumer.accept(y, biome);
}
}
}

View File

@@ -13,6 +13,7 @@ import net.jafama.FastMath;
import java.util.Comparator;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
import java.util.stream.StreamSupport;
@@ -21,6 +22,7 @@ import com.dfsek.terra.addons.biome.pipeline.api.delegate.BiomeDelegate;
import com.dfsek.terra.addons.biome.pipeline.api.stage.Stage;
import com.dfsek.terra.api.noise.NoiseSampler;
import com.dfsek.terra.api.registry.key.StringIdentifiable;
import com.dfsek.terra.api.util.Column;
import com.dfsek.terra.api.world.biome.Biome;
import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
@@ -73,18 +75,26 @@ public class BiomePipelineProvider implements BiomeProvider {
@Override
public Biome getBiome(int x, int y, int z, long seed) {
return getBiome(x, z, seed);
}
public Biome getBiome(int x, int z, long seed) {
x += mutator.noise(seed + 1, x, z) * noiseAmp;
z += mutator.noise(seed + 2, x, z) * noiseAmp;
x = FastMath.floorToInt(FastMath.floorDiv(x, resolution));
z = FastMath.floorToInt(FastMath.floorDiv(z, resolution));
x /= resolution;
z /= resolution;
int fdX = FastMath.floorDiv(x, pipeline.getSize());
int fdZ = FastMath.floorDiv(z, pipeline.getSize());
return holderCache.get(new SeededVector(fdX, fdZ, seed)).getBiome(x - fdX * pipeline.getSize(),
z - fdZ * pipeline.getSize()).getBiome();
z - fdZ * pipeline.getSize()).getBiome();
}
@Override
public Optional<Biome> getBaseBiome(int x, int z, long seed) {
return Optional.of(getBiome(x, z, seed));
}
@Override
@@ -92,6 +102,30 @@ public class BiomePipelineProvider implements BiomeProvider {
return biomes;
}
@Override
public Column<Biome> getColumn(int x, int z, long seed, int min, int max) {
return new BiomePipelineColumn(this, min, max, x, z, seed);
}
@Override
public int resolution() {
return resolution;
}
private record SeededVector(int x, int z, long seed) {
@Override
public boolean equals(Object obj) {
if(obj instanceof SeededVector that) {
return this.z == that.z && this.x == that.x && this.seed == that.seed;
}
return false;
}
@Override
public int hashCode() {
int code = x;
code = 31 * code + z;
return 31 * code + ((int) (seed ^ (seed >>> 32)));
}
}
}

View File

@@ -22,7 +22,7 @@ public abstract class BiomeProviderTemplate implements ObjectTemplate<BiomeProvi
@Default
@Description("""
The resolution at which to sample biomes.
Larger values are quadratically faster, but produce lower quality results.
For example, a value of 3 would sample every 3 blocks.""")
protected @Meta int resolution = 1;
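
A back-of-the-envelope sketch of the quadratic trade-off described above (illustrative only, not code from this PR; the exact sampling grid depends on the provider):

// Approximate number of horizontal biome samples per 16x16 chunk column at a given resolution.
// resolution 1 -> 256 samples, 2 -> 64, 4 -> 16: doubling the resolution roughly quarters the work,
// which is the "quadratically faster" trade-off the description refers to.
final class ResolutionMath {
    static int approxSamplesPerChunk(int resolution) {
        int perAxis = (int) Math.ceil(16.0 / resolution);
        return perAxis * perAxis;
    }
}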

View File

@@ -8,6 +8,7 @@
package com.dfsek.terra.addons.biome.single;
import java.util.Collections;
import java.util.Optional;
import com.dfsek.terra.api.world.biome.Biome;
import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
@@ -25,6 +26,11 @@ public class SingleBiomeProvider implements BiomeProvider {
return biome;
}
@Override
public Optional<Biome> getBaseBiome(int x, int z, long seed) {
return Optional.of(biome);
}
@Override
public Iterable<Biome> getBiomes() {
return Collections.singleton(biome);

View File

@@ -0,0 +1,4 @@
# Biome Query API
This addon contains an API that allows other addons to quickly query
Biome data by baking queries and using Contexts on biomes.
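
For example, a consuming addon might bake a query once and reuse the resulting predicate (a hedged sketch; the "OCEAN" tag and the OceanCheck class are hypothetical, only BiomeQueries.has comes from this addon):

import java.util.function.Predicate;

import com.dfsek.terra.addons.biome.query.api.BiomeQueries;
import com.dfsek.terra.api.world.biome.Biome;

final class OceanCheck {
    // Bake the query once and reuse it; the tag's flattened index is resolved lazily on first test.
    private static final Predicate<Biome> IS_OCEAN = BiomeQueries.has("OCEAN"); // "OCEAN" is a hypothetical tag

    static boolean isOcean(Biome biome) {
        return IS_OCEAN.test(biome); // reads the baked tag flag from the biome's context
    }
}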

View File

@@ -0,0 +1,5 @@
version = version("1.0.0")
dependencies {
compileOnlyApi(project(":common:addons:manifest-addon-loader"))
}

View File

@@ -0,0 +1,46 @@
package com.dfsek.terra.addons.biome.query;
import java.util.Collection;
import com.dfsek.terra.addons.biome.query.impl.BiomeTagFlattener;
import com.dfsek.terra.addons.biome.query.impl.BiomeTagHolder;
import com.dfsek.terra.addons.manifest.api.AddonInitializer;
import com.dfsek.terra.api.Platform;
import com.dfsek.terra.api.addon.BaseAddon;
import com.dfsek.terra.api.event.events.config.pack.ConfigPackPostLoadEvent;
import com.dfsek.terra.api.event.functional.FunctionalEventHandler;
import com.dfsek.terra.api.inject.annotations.Inject;
import com.dfsek.terra.api.properties.Context;
import com.dfsek.terra.api.properties.PropertyKey;
import com.dfsek.terra.api.world.biome.Biome;
public class BiomeQueryAPIAddon implements AddonInitializer {
public static PropertyKey<BiomeTagHolder> BIOME_TAG_KEY = Context.create(BiomeTagHolder.class);
@Inject
private Platform platform;
@Inject
private BaseAddon addon;
@Override
public void initialize() {
platform.getEventManager()
.getHandler(FunctionalEventHandler.class)
.register(addon, ConfigPackPostLoadEvent.class)
.then(event -> {
Collection<Biome> biomes = event
.getPack()
.getRegistry(Biome.class)
.entries();
BiomeTagFlattener flattener = new BiomeTagFlattener(biomes
.stream()
.flatMap(biome -> biome.getTags().stream())
.toList());
biomes.forEach(biome -> biome.getContext().put(BIOME_TAG_KEY, new BiomeTagHolder(biome, flattener)));
})
.global();
}
}

View File

@@ -0,0 +1,17 @@
package com.dfsek.terra.addons.biome.query.api;
import java.util.function.Predicate;
import com.dfsek.terra.addons.biome.query.impl.SingleTagQuery;
import com.dfsek.terra.api.world.biome.Biome;
public final class BiomeQueries {
private BiomeQueries() {
}
public static Predicate<Biome> has(String tag) {
return new SingleTagQuery(tag);
}
}

View File

@@ -0,0 +1,20 @@
package com.dfsek.terra.addons.biome.query.impl;
import java.util.List;
public class BiomeTagFlattener {
private final List<String> tags;
public BiomeTagFlattener(List<String> tags) {
this.tags = tags;
}
public int index(String tag) {
return tags.indexOf(tag);
}
public int size() {
return tags.size();
}
}

View File

@@ -0,0 +1,26 @@
package com.dfsek.terra.addons.biome.query.impl;
import com.dfsek.terra.api.properties.Properties;
import com.dfsek.terra.api.world.biome.Biome;
public class BiomeTagHolder implements Properties {
private final boolean[] tags;
private final BiomeTagFlattener flattener;
public BiomeTagHolder(Biome biome, BiomeTagFlattener flattener) {
this.tags = new boolean[flattener.size()];
this.flattener = flattener;
for(String tag : biome.getTags()) {
tags[flattener.index(tag)] = true;
}
}
boolean get(int index) {
return tags[index];
}
public BiomeTagFlattener getFlattener() {
return flattener;
}
}

View File

@@ -0,0 +1,31 @@
package com.dfsek.terra.addons.biome.query.impl;
import java.util.function.Predicate;
import com.dfsek.terra.addons.biome.query.BiomeQueryAPIAddon;
import com.dfsek.terra.api.world.biome.Biome;
public class SingleTagQuery implements Predicate<Biome> {
private final String tag;
private int tagIndex = -1;
public SingleTagQuery(String tag) {
this.tag = tag;
}
@Override
public boolean test(Biome biome) {
if(tagIndex < 0) {
tagIndex = biome
.getContext()
.get(BiomeQueryAPIAddon.BIOME_TAG_KEY)
.getFlattener()
.index(tag);
}
return biome
.getContext()
.get(BiomeQueryAPIAddon.BIOME_TAG_KEY)
.get(tagIndex);
}
}

View File

@@ -0,0 +1,12 @@
schema-version: 1
contributors:
- Terra contributors
id: biome-query-api
version: @VERSION@
entrypoints:
- "com.dfsek.terra.addons.biome.query.BiomeQueryAPIAddon"
website:
issues: https://github.com/PolyhedralDev/Terra/issues
source: https://github.com/PolyhedralDev/Terra
docs: https://terra.polydev.org
license: MIT License

View File

@@ -1,4 +1,4 @@
version = version("1.0.0")
version = version("1.1.0")
dependencies {
compileOnlyApi(project(":common:addons:manifest-addon-loader"))

View File

@@ -9,7 +9,9 @@ package com.dfsek.terra.addons.chunkgenerator;
import com.dfsek.terra.addons.chunkgenerator.config.NoiseChunkGeneratorPackConfigTemplate;
import com.dfsek.terra.addons.chunkgenerator.config.noise.BiomeNoiseConfigTemplate;
import com.dfsek.terra.addons.chunkgenerator.config.noise.BiomeNoiseProperties;
import com.dfsek.terra.addons.chunkgenerator.config.palette.BiomePaletteTemplate;
import com.dfsek.terra.addons.chunkgenerator.config.palette.PaletteInfo;
import com.dfsek.terra.addons.chunkgenerator.config.palette.SlantLayer;
import com.dfsek.terra.addons.chunkgenerator.generation.NoiseChunkGenerator3D;
import com.dfsek.terra.addons.manifest.api.AddonInitializer;
@@ -19,6 +21,8 @@ import com.dfsek.terra.api.event.events.config.ConfigurationLoadEvent;
import com.dfsek.terra.api.event.events.config.pack.ConfigPackPreLoadEvent;
import com.dfsek.terra.api.event.functional.FunctionalEventHandler;
import com.dfsek.terra.api.inject.annotations.Inject;
import com.dfsek.terra.api.properties.Context;
import com.dfsek.terra.api.properties.PropertyKey;
import com.dfsek.terra.api.world.biome.Biome;
import com.dfsek.terra.api.world.chunk.generation.util.provider.ChunkGeneratorProvider;
@@ -32,17 +36,22 @@ public class NoiseChunkGenerator3DAddon implements AddonInitializer {
@Override
public void initialize() {
PropertyKey<PaletteInfo> paletteInfoPropertyKey = Context.create(PaletteInfo.class);
PropertyKey<BiomeNoiseProperties> noisePropertiesPropertyKey = Context.create(BiomeNoiseProperties.class);
platform.getEventManager()
.getHandler(FunctionalEventHandler.class)
.register(addon, ConfigPackPreLoadEvent.class)
.priority(1000)
.then(event -> {
NoiseChunkGeneratorPackConfigTemplate config = event.loadTemplate(new NoiseChunkGeneratorPackConfigTemplate());
event.getPack()
.getOrCreateRegistry(ChunkGeneratorProvider.class)
.register(addon.key("NOISE_3D"),
pack -> new NoiseChunkGenerator3D(platform, config.getElevationBlend(), config.getHorizontalRes(),
config.getVerticalRes()));
pack -> new NoiseChunkGenerator3D(pack, platform, config.getElevationBlend(),
config.getHorizontalRes(),
config.getVerticalRes(), noisePropertiesPropertyKey,
paletteInfoPropertyKey));
event.getPack()
.applyLoader(SlantLayer.class, SlantLayer::new);
})
@@ -53,8 +62,10 @@ public class NoiseChunkGenerator3DAddon implements AddonInitializer {
.register(addon, ConfigurationLoadEvent.class)
.then(event -> {
if(event.is(Biome.class)) {
event.getLoadedObject(Biome.class).getContext().put(event.load(new BiomePaletteTemplate(platform)).get());
event.getLoadedObject(Biome.class).getContext().put(event.load(new BiomeNoiseConfigTemplate()).get());
event.getLoadedObject(Biome.class).getContext().put(paletteInfoPropertyKey,
event.load(new BiomePaletteTemplate(platform)).get());
event.getLoadedObject(Biome.class).getContext().put(noisePropertiesPropertyKey,
event.load(new BiomeNoiseConfigTemplate()).get());
}
})
.failThrough();

View File

@@ -39,6 +39,6 @@ public class BiomeNoiseConfigTemplate implements ObjectTemplate<BiomeNoiseProper
@Override
public BiomeNoiseProperties get() {
return new BiomeNoiseProperties(baseSampler, elevationSampler, carvingSampler, blendDistance, blendStep, blendWeight,
elevationWeight);
elevationWeight, new ThreadLocalNoiseHolder());
}
}

View File

@@ -10,6 +10,6 @@ public record BiomeNoiseProperties(NoiseSampler base,
int blendDistance,
int blendStep,
double blendWeight,
double elevationWeight) implements Properties {
double elevationWeight,
ThreadLocalNoiseHolder noiseHolder) implements Properties {
}

View File

@@ -0,0 +1,32 @@
package com.dfsek.terra.addons.chunkgenerator.config.noise;
import com.dfsek.terra.api.noise.NoiseSampler;
public class ThreadLocalNoiseHolder {
private final ThreadLocal<Holder> holder = ThreadLocal.withInitial(Holder::new);
public double getNoise(NoiseSampler sampler, int x, int y, int z, long seed) {
Holder holder = this.holder.get();
if(holder.init && holder.y == y && holder.z == z && holder.x == x && holder.seed == seed) {
return holder.noise;
}
double noise = sampler.noise(seed, x, y, z);
holder.noise = noise;
holder.x = x;
holder.y = y;
holder.z = z;
holder.seed = seed;
holder.init = true;
return noise;
}
private static final class Holder {
int x, y, z;
boolean init = false;
long seed;
double noise;
}
}

View File

@@ -59,6 +59,10 @@ public class BiomePaletteTemplate implements ObjectTemplate<PaletteInfo> {
}
};
@Value("carving.update-palette")
@Default
private @Meta boolean updatePalette = false;
public BiomePaletteTemplate(Platform platform) { this.platform = platform; }
@Override
@@ -79,6 +83,7 @@ public class BiomePaletteTemplate implements ObjectTemplate<PaletteInfo> {
slantLayers.put(threshold, layer.getPalette());
}
return new PaletteInfo(builder.build(), SlantHolder.of(slantLayers, minThreshold), oceanPalette, seaLevel, slantDepth);
return new PaletteInfo(builder.build(), SlantHolder.of(slantLayers, minThreshold), oceanPalette, seaLevel, slantDepth,
updatePalette);
}
}

View File

@@ -17,5 +17,6 @@ public record PaletteInfo(PaletteHolder paletteHolder,
SlantHolder slantHolder,
Palette ocean,
int seaLevel,
int maxSlantDepth) implements Properties {
int maxSlantDepth,
boolean updatePaletteWhenCarving) implements Properties {
}

View File

@@ -11,6 +11,7 @@ package com.dfsek.terra.addons.chunkgenerator.generation;
import net.jafama.FastMath;
import org.jetbrains.annotations.NotNull;
import com.dfsek.terra.addons.chunkgenerator.config.noise.BiomeNoiseProperties;
import com.dfsek.terra.addons.chunkgenerator.config.palette.PaletteInfo;
import com.dfsek.terra.addons.chunkgenerator.generation.math.PaletteUtil;
import com.dfsek.terra.addons.chunkgenerator.generation.math.interpolation.LazilyEvaluatedInterpolator;
@@ -18,6 +19,9 @@ import com.dfsek.terra.addons.chunkgenerator.generation.math.samplers.Sampler3D;
import com.dfsek.terra.addons.chunkgenerator.generation.math.samplers.SamplerProvider;
import com.dfsek.terra.api.Platform;
import com.dfsek.terra.api.block.state.BlockState;
import com.dfsek.terra.api.config.ConfigPack;
import com.dfsek.terra.api.properties.PropertyKey;
import com.dfsek.terra.api.util.Column;
import com.dfsek.terra.api.world.biome.Biome;
import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
import com.dfsek.terra.api.world.chunk.generation.ChunkGenerator;
@@ -36,13 +40,28 @@ public class NoiseChunkGenerator3D implements ChunkGenerator {
private final int carverHorizontalResolution;
private final int carverVerticalResolution;
public NoiseChunkGenerator3D(Platform platform, int elevationBlend, int carverHorizontalResolution,
int carverVerticalResolution) {
private final PropertyKey<PaletteInfo> paletteInfoPropertyKey;
private final PropertyKey<BiomeNoiseProperties> noisePropertiesKey;
public NoiseChunkGenerator3D(ConfigPack pack, Platform platform, int elevationBlend, int carverHorizontalResolution,
int carverVerticalResolution,
PropertyKey<BiomeNoiseProperties> noisePropertiesKey,
PropertyKey<PaletteInfo> paletteInfoPropertyKey) {
this.platform = platform;
this.air = platform.getWorldHandle().air();
this.carverHorizontalResolution = carverHorizontalResolution;
this.carverVerticalResolution = carverVerticalResolution;
this.samplerCache = new SamplerProvider(platform, elevationBlend);
this.paletteInfoPropertyKey = paletteInfoPropertyKey;
this.noisePropertiesKey = noisePropertiesKey;
int maxBlend = pack
.getBiomeProvider()
.stream()
.map(biome -> biome.getContext().get(noisePropertiesKey))
.mapToInt(properties -> properties.blendDistance() * properties.blendStep())
.max()
.orElse(0);
this.samplerCache = new SamplerProvider(platform, elevationBlend, noisePropertiesKey, maxBlend);
}
@Override
@@ -62,7 +81,7 @@ public class NoiseChunkGenerator3D implements ChunkGenerator {
chunkX,
chunkZ,
world.getMaxHeight(),
world.getMinHeight(),
noisePropertiesKey, world.getMinHeight(),
carverHorizontalResolution,
carverVerticalResolution,
seed);
@@ -73,23 +92,28 @@ public class NoiseChunkGenerator3D implements ChunkGenerator {
int cx = xOrig + x;
int cz = zOrig + z;
Biome biome = biomeProvider.getBiome(cx, 0, cz, seed);
PaletteInfo paletteInfo = biome.getContext().get(PaletteInfo.class);
int sea = paletteInfo.seaLevel();
Palette seaPalette = paletteInfo.ocean();
BlockState data;
Column<Biome> biomeColumn = biomeProvider.getColumn(cx, cz, world);
for(int y = world.getMaxHeight() - 1; y >= world.getMinHeight(); y--) {
Biome biome = biomeColumn.get(y);
PaletteInfo paletteInfo = biome.getContext().get(paletteInfoPropertyKey);
int sea = paletteInfo.seaLevel();
Palette seaPalette = paletteInfo.ocean();
if(sampler.sample(x, y, z) > 0) {
if(carver.sample(x, y, z) <= 0) {
data = PaletteUtil.getPalette(x, y, z, sampler, paletteInfo, paletteLevel).get(paletteLevel, cx, y, cz,
seed);
data = PaletteUtil
.getPalette(x, y, z, sampler, paletteInfo, paletteLevel)
.get(paletteLevel, cx, y, cz, seed);
chunk.setBlock(x, y, z, data);
paletteLevel++;
} else if(paletteInfo.updatePaletteWhenCarving()) {
paletteLevel = 0;
} else {
paletteLevel++;
}
paletteLevel++;
} else if(y <= sea) {
chunk.setBlock(x, y, z, seaPalette.get(sea - y, x + xOrig, y, z + zOrig, seed));
paletteLevel = 0;
@@ -107,7 +131,7 @@ public class NoiseChunkGenerator3D implements ChunkGenerator {
Biome biome = biomeProvider.getBiome(x, y, z, world.getSeed());
Sampler3D sampler = samplerCache.get(x, z, world, biomeProvider);
PaletteInfo paletteInfo = biome.getContext().get(PaletteInfo.class);
PaletteInfo paletteInfo = biome.getContext().get(paletteInfoPropertyKey);
int fdX = FastMath.floorMod(x, 16);
int fdZ = FastMath.floorMod(z, 16);
@@ -128,7 +152,7 @@ public class NoiseChunkGenerator3D implements ChunkGenerator {
@Override
public Palette getPalette(int x, int y, int z, WorldProperties world, BiomeProvider biomeProvider) {
return biomeProvider.getBiome(x, y, z, world.getSeed()).getContext().get(PaletteInfo.class).paletteHolder().getPalette(y);
return biomeProvider.getBiome(x, y, z, world.getSeed()).getContext().get(paletteInfoPropertyKey).paletteHolder().getPalette(y);
}
public SamplerProvider samplerProvider() {

View File

@@ -9,11 +9,10 @@ package com.dfsek.terra.addons.chunkgenerator.generation.math.interpolation;
import net.jafama.FastMath;
import java.util.HashMap;
import java.util.Map;
import com.dfsek.terra.addons.chunkgenerator.config.noise.BiomeNoiseProperties;
import com.dfsek.terra.api.util.mutable.MutableInteger;
import com.dfsek.terra.api.properties.PropertyKey;
import com.dfsek.terra.api.util.Column;
import com.dfsek.terra.api.world.biome.Biome;
import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
@@ -23,7 +22,6 @@ import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
*/
public class ChunkInterpolator {
private final Interpolator3[][][] interpGrid;
private final long seed;
private final int min;
private final int max;
@@ -37,10 +35,10 @@ public class ChunkInterpolator {
* @param min
* @param max
*/
public ChunkInterpolator(long seed, int chunkX, int chunkZ, BiomeProvider provider, int min, int max) {
public ChunkInterpolator(long seed, int chunkX, int chunkZ, BiomeProvider provider, int min, int max,
PropertyKey<BiomeNoiseProperties> noisePropertiesKey, int maxBlend) {
this.min = min;
this.max = max;
this.seed = seed;
int xOrigin = chunkX << 4;
int zOrigin = chunkZ << 4;
@@ -53,28 +51,67 @@ public class ChunkInterpolator {
double[][][] noiseStorage = new double[5][5][size + 1];
int maxBlendAndChunk = 17 + 2 * maxBlend;
@SuppressWarnings("unchecked")
Column<Biome>[] columns = new Column[maxBlendAndChunk * maxBlendAndChunk];
for(int x = 0; x < 5; x++) {
int scaledX = x << 2;
int absoluteX = xOrigin + scaledX;
for(int z = 0; z < 5; z++) {
BiomeNoiseProperties generationSettings = provider.getBiome(xOrigin + (x << 2), 0, zOrigin + (z << 2), seed)
.getContext()
.get(BiomeNoiseProperties.class);
Map<BiomeNoiseProperties, MutableInteger> genMap = new HashMap<>();
int scaledZ = z << 2;
int absoluteZ = zOrigin + scaledZ;
int step = generationSettings.blendStep();
int blend = generationSettings.blendDistance();
int index = (scaledX + maxBlend) + maxBlendAndChunk * (scaledZ + maxBlend);
Column<Biome> biomeColumn = columns[index];
for(int xi = -blend; xi <= blend; xi++) {
for(int zi = -blend; zi <= blend; zi++) {
genMap.computeIfAbsent(
provider.getBiome(xOrigin + (x << 2) + (xi * step), 0, zOrigin + (z << 2) + (zi * step), seed)
.getContext()
.get(BiomeNoiseProperties.class),
g -> new MutableInteger(0)).increment(); // Increment by 1
}
if(biomeColumn == null) {
biomeColumn = provider.getColumn(absoluteX, absoluteZ, seed, min, max);
columns[index] = biomeColumn;
}
for(int y = 0; y < size + 1; y++) {
noiseStorage[x][z][y] = computeNoise(genMap, (x << 2) + xOrigin, (y << 2) + this.min, (z << 2) + zOrigin);
for(int y = 0; y < size; y++) {
int scaledY = (y << 2) + min;
BiomeNoiseProperties generationSettings = biomeColumn.get(scaledY)
.getContext()
.get(noisePropertiesKey);
int step = generationSettings.blendStep();
int blend = generationSettings.blendDistance();
double runningNoise = 0;
double runningDiv = 0;
for(int xi = -blend; xi <= blend; xi++) {
for(int zi = -blend; zi <= blend; zi++) {
int blendX = (xi * step);
int blendZ = (zi * step);
int localIndex = (scaledX + maxBlend + blendX) + maxBlendAndChunk * (scaledZ + maxBlend + blendZ);
Column<Biome> column = columns[localIndex];
if(column == null) {
column = provider.getColumn(absoluteX + blendX, absoluteZ + blendZ, seed, min, max);
columns[localIndex] = column;
}
BiomeNoiseProperties properties = column
.get(scaledY)
.getContext()
.get(noisePropertiesKey);
double sample = properties.noiseHolder().getNoise(properties.base(), absoluteX, scaledY, absoluteZ, seed);
runningNoise += sample * properties.blendWeight();
runningDiv += properties.blendWeight();
}
}
double noise = runningNoise / runningDiv;
noiseStorage[x][z][y] = noise;
if(y == size - 1) {
noiseStorage[x][z][size] = noise;
}
}
}
}
@@ -100,24 +137,6 @@ public class ChunkInterpolator {
return FastMath.max(FastMath.min(value, high), 0);
}
public double computeNoise(BiomeNoiseProperties generationSettings, double x, double y, double z) {
return generationSettings.base().noise(seed, x, y, z);
}
public double computeNoise(Map<BiomeNoiseProperties, MutableInteger> gens, double x, double y, double z) {
double n = 0;
double div = 0;
for(Map.Entry<BiomeNoiseProperties, MutableInteger> entry : gens.entrySet()) {
BiomeNoiseProperties gen = entry.getKey();
int weight = entry.getValue().get();
double noise = computeNoise(gen, x, y, z);
n += noise * weight;
div += gen.blendWeight() * weight;
}
return n / div;
}
/**
* Gets the noise at a pair of internal chunk coordinates.
*

View File

@@ -8,13 +8,15 @@
package com.dfsek.terra.addons.chunkgenerator.generation.math.interpolation;
import com.dfsek.terra.addons.chunkgenerator.config.noise.BiomeNoiseProperties;
import com.dfsek.terra.api.properties.PropertyKey;
import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
public class ElevationInterpolator {
private final double[][] values = new double[18][18];
public ElevationInterpolator(long seed, int chunkX, int chunkZ, BiomeProvider provider, int smooth) {
public ElevationInterpolator(long seed, int chunkX, int chunkZ, BiomeProvider provider, int smooth,
PropertyKey<BiomeNoiseProperties> noisePropertiesKey) {
int xOrigin = chunkX << 4;
int zOrigin = chunkZ << 4;
@@ -23,8 +25,14 @@ public class ElevationInterpolator {
// Precompute generators.
for(int x = -1 - smooth; x <= 16 + smooth; x++) {
for(int z = -1 - smooth; z <= 16 + smooth; z++) {
gens[x + 1 + smooth][z + 1 + smooth] = provider.getBiome(xOrigin + x, 0, zOrigin + z, seed).getContext().get(
BiomeNoiseProperties.class);
int bx = xOrigin + x;
int bz = zOrigin + z;
gens[x + 1 + smooth][z + 1 + smooth] =
provider
.getBaseBiome(bx, bz, seed)
.orElseGet(() -> provider.getBiome(bx, 0, bz, seed)) // kind of a hack
.getContext()
.get(noisePropertiesKey);
}
}

View File

@@ -3,16 +3,14 @@ package com.dfsek.terra.addons.chunkgenerator.generation.math.interpolation;
import net.jafama.FastMath;
import com.dfsek.terra.addons.chunkgenerator.config.noise.BiomeNoiseProperties;
import com.dfsek.terra.api.noise.NoiseSampler;
import com.dfsek.terra.api.properties.PropertyKey;
import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
import static com.dfsek.terra.addons.chunkgenerator.generation.math.interpolation.Interpolator.lerp;
public class LazilyEvaluatedInterpolator {
private final Double[][][] samples;
private final NoiseSampler[][] samplers;
private final Double[] samples; //
private final int chunkX;
private final int chunkZ;
@@ -21,16 +19,22 @@ public class LazilyEvaluatedInterpolator {
private final int verticalRes;
private final BiomeProvider biomeProvider;
private final PropertyKey<BiomeNoiseProperties> noisePropertiesKey;
private final long seed;
private final int min;
private final int min, max;
public LazilyEvaluatedInterpolator(BiomeProvider biomeProvider, int cx, int cz, int max, int min, int horizontalRes, int verticalRes,
private final int zMul, yMul;
public LazilyEvaluatedInterpolator(BiomeProvider biomeProvider, int cx, int cz, int max,
PropertyKey<BiomeNoiseProperties> noisePropertiesKey, int min, int horizontalRes, int verticalRes,
long seed) {
this.noisePropertiesKey = noisePropertiesKey;
int hSamples = FastMath.ceilToInt(16.0 / horizontalRes);
int vSamples = FastMath.ceilToInt((double) (max - min) / verticalRes);
samples = new Double[hSamples + 1][vSamples + 1][hSamples + 1];
samplers = new NoiseSampler[hSamples + 1][hSamples + 1];
this.zMul = (hSamples + 1);
this.yMul = zMul * zMul;
samples = new Double[yMul * (vSamples + 1)];
this.chunkX = cx << 4;
this.chunkZ = cz << 4;
this.horizontalRes = horizontalRes;
@@ -38,22 +42,25 @@ public class LazilyEvaluatedInterpolator {
this.biomeProvider = biomeProvider;
this.seed = seed;
this.min = min;
this.max = max - 1;
}
private double sample(int x, int y, int z, int ox, int oy, int oz) {
Double sample = samples[x][y][z];
private double sample(int xIndex, int yIndex, int zIndex, int ox, int oy, int oz) {
int index = xIndex + (zIndex * zMul) + (yIndex * yMul);
Double sample = samples[index];
if(sample == null) {
int xi = ox + chunkX;
int zi = oz + chunkZ;
NoiseSampler sampler = samplers[x][z];
if(sampler == null) {
sampler = biomeProvider.getBiome(xi, y, zi, seed).getContext().get(BiomeNoiseProperties.class).carving();
samplers[x][z] = sampler;
}
int y = FastMath.min(max, oy);
sample = sampler.noise(seed, xi, oy, zi);
samples[x][y][z] = sample;
sample = biomeProvider
.getBiome(xi, y, zi, seed)
.getContext()
.get(noisePropertiesKey)
.carving()
.noise(seed, xi, y, zi);
samples[index] = sample;
}
return sample;
}

View File

@@ -9,8 +9,10 @@ package com.dfsek.terra.addons.chunkgenerator.generation.math.samplers;
import net.jafama.FastMath;
import com.dfsek.terra.addons.chunkgenerator.config.noise.BiomeNoiseProperties;
import com.dfsek.terra.addons.chunkgenerator.generation.math.interpolation.ChunkInterpolator;
import com.dfsek.terra.addons.chunkgenerator.generation.math.interpolation.ElevationInterpolator;
import com.dfsek.terra.api.properties.PropertyKey;
import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
@@ -18,10 +20,11 @@ public class Sampler3D {
private final ChunkInterpolator interpolator;
private final ElevationInterpolator elevationInterpolator;
public Sampler3D(int x, int z, long seed, int minHeight, int maxHeight, BiomeProvider provider, int elevationSmooth) {
public Sampler3D(int x, int z, long seed, int minHeight, int maxHeight, BiomeProvider provider, int elevationSmooth,
PropertyKey<BiomeNoiseProperties> noisePropertiesKey, int maxBlend) {
this.interpolator = new ChunkInterpolator(seed, x, z, provider,
minHeight, maxHeight);
this.elevationInterpolator = new ElevationInterpolator(seed, x, z, provider, elevationSmooth);
minHeight, maxHeight, noisePropertiesKey, maxBlend);
this.elevationInterpolator = new ElevationInterpolator(seed, x, z, provider, elevationSmooth, noisePropertiesKey);
}
public double sample(double x, double y, double z) {

View File

@@ -17,13 +17,13 @@
package com.dfsek.terra.addons.chunkgenerator.generation.math.samplers;
import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import net.jafama.FastMath;
import java.util.concurrent.ExecutionException;
import com.dfsek.terra.addons.chunkgenerator.config.noise.BiomeNoiseProperties;
import com.dfsek.terra.api.Platform;
import com.dfsek.terra.api.properties.PropertyKey;
import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
import com.dfsek.terra.api.world.info.WorldProperties;
@@ -31,10 +31,17 @@ import com.dfsek.terra.api.world.info.WorldProperties;
public class SamplerProvider {
private final Cache<WorldContext, Sampler3D> cache;
private final int elevationSmooth;
private final PropertyKey<BiomeNoiseProperties> noisePropertiesKey;
private final int maxBlend;
public SamplerProvider(Platform platform, int elevationSmooth) {
public SamplerProvider(Platform platform, int elevationSmooth, PropertyKey<BiomeNoiseProperties> noisePropertiesKey, int maxBlend) {
cache = Caffeine
.newBuilder()
.maximumSize(platform.getTerraConfig().getSamplerCache())
.build();
this.elevationSmooth = elevationSmooth;
cache = CacheBuilder.newBuilder().maximumSize(platform.getTerraConfig().getSamplerCache()).build();
this.noisePropertiesKey = noisePropertiesKey;
this.maxBlend = maxBlend;
}
public Sampler3D get(int x, int z, WorldProperties world, BiomeProvider provider) {
@@ -45,13 +52,8 @@ public class SamplerProvider {
public Sampler3D getChunk(int cx, int cz, WorldProperties world, BiomeProvider provider) {
WorldContext context = new WorldContext(cx, cz, world.getSeed(), world.getMinHeight(), world.getMaxHeight());
try {
return cache.get(context,
() -> new Sampler3D(context.cx, context.cz, context.seed, context.minHeight, context.maxHeight, provider,
elevationSmooth));
} catch(ExecutionException e) {
throw new RuntimeException(e);
}
return cache.get(context, c -> new Sampler3D(c.cx, c.cz, c.seed, c.minHeight, c.maxHeight, provider,
elevationSmooth, noisePropertiesKey, maxBlend));
}
private record WorldContext(int cx, int cz, long seed, int minHeight, int maxHeight) {

View File

@@ -1,4 +1,4 @@
version = version("1.0.0")
version = version("1.1.0")
dependencies {
compileOnlyApi(project(":common:addons:manifest-addon-loader"))

View File

@@ -11,6 +11,7 @@ import com.dfsek.terra.addons.feature.locator.patterns.Pattern;
import com.dfsek.terra.api.structure.feature.BinaryColumn;
import com.dfsek.terra.api.structure.feature.Locator;
import com.dfsek.terra.api.util.Range;
import com.dfsek.terra.api.world.WritableWorld;
import com.dfsek.terra.api.world.chunk.generation.util.Column;
@@ -31,16 +32,19 @@ public class AdjacentPatternLocator implements Locator {
}
private boolean isValid(int y, Column<?> column) {
WritableWorld world = column.getWorld();
int x = column.getX();
int z = column.getZ();
if(matchAll) {
return pattern.matches(y, column.adjacent(0, -1)) &&
pattern.matches(y, column.adjacent(0, 1)) &&
pattern.matches(y, column.adjacent(-1, 0)) &&
pattern.matches(y, column.adjacent(1, 0));
return pattern.matches(world, x, y, z - 1) &&
pattern.matches(world, x, y, z + 1) &&
pattern.matches(world, x - 1, y, z) &&
pattern.matches(world, x + 1, y, z);
} else {
return pattern.matches(y, column.adjacent(0, -1)) ||
pattern.matches(y, column.adjacent(0, 1)) ||
pattern.matches(y, column.adjacent(-1, 0)) ||
pattern.matches(y, column.adjacent(1, 0));
return pattern.matches(world, x, y, z - 1) ||
pattern.matches(world, x, y, z + 1) ||
pattern.matches(world, x - 1, y, z) ||
pattern.matches(world, x + 1, y, z);
}
}
}

View File

@@ -7,6 +7,8 @@
package com.dfsek.terra.addons.feature.locator.locators;
import net.jafama.FastMath;
import com.dfsek.terra.addons.feature.locator.patterns.Pattern;
import com.dfsek.terra.api.structure.feature.BinaryColumn;
import com.dfsek.terra.api.structure.feature.Locator;
@@ -25,6 +27,9 @@ public class PatternLocator implements Locator {
@Override
public BinaryColumn getSuitableCoordinates(Column<?> column) {
return new BinaryColumn(search, y -> pattern.matches(y, column));
int min = FastMath.max(column.getMinY(), search.getMin());
int max = FastMath.min(column.getMaxY(), search.getMax());
if(min >= max) return BinaryColumn.getNull();
return new BinaryColumn(min, max, y -> pattern.matches(y, column));
}
}

View File

@@ -23,6 +23,19 @@ public class SamplerLocator implements Locator {
this.samplers = samplers;
}
private static int floorToInt(double value) {
int valueInt = (int) value;
if(value < 0.0) {
if(value == (double) valueInt) {
return valueInt;
} else {
return valueInt == Integer.MIN_VALUE ? valueInt : valueInt - 1;
}
} else {
return valueInt;
}
}
@Override
public BinaryColumn getSuitableCoordinates(Column<?> column) {
BinaryColumnBuilder results = column.newBinaryColumn();
@@ -36,17 +49,4 @@ public class SamplerLocator implements Locator {
return results.build();
}
private static int floorToInt(double value) {
int valueInt = (int)value;
if (value < 0.0) {
if (value == (double)valueInt) {
return valueInt;
} else {
return valueInt == Integer.MIN_VALUE ? valueInt : valueInt - 1;
}
} else {
return valueInt;
}
}
}
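floorToInt reproduces Math.floor semantics for negative inputs without going through a double return value; a plain (int) cast truncates toward zero instead. A condensed but equivalent version, with a couple of illustrative values:

    public class FloorToIntDemo {
        static int floorToInt(double value) {
            int valueInt = (int) value;
            if(value < 0.0 && value != (double) valueInt) {
                // Negative non-integral values round toward negative infinity.
                return valueInt == Integer.MIN_VALUE ? valueInt : valueInt - 1;
            }
            return valueInt;
        }

        public static void main(String[] args) {
            System.out.println((int) -2.5);        // -2: cast truncates toward zero
            System.out.println(floorToInt(-2.5));  // -3: floors like Math.floor
            System.out.println(floorToInt(7.9));   //  7: same as the cast for positives
        }
    }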

View File

@@ -7,6 +7,8 @@
package com.dfsek.terra.addons.feature.locator.locators;
import net.jafama.FastMath;
import com.dfsek.terra.api.structure.feature.BinaryColumn;
import com.dfsek.terra.api.structure.feature.Locator;
import com.dfsek.terra.api.util.Range;
@@ -24,7 +26,10 @@ public class SurfaceLocator implements Locator {
@Override
public BinaryColumn getSuitableCoordinates(Column<?> column) {
BinaryColumnBuilder builder = column.newBinaryColumn();
for(int y : search) {
int max = FastMath.min(search.getMax(), column.getMaxY());
int min = FastMath.max(search.getMin(), column.getMinY());
if(min >= max) return builder.build();
for(int y = min; y < max; y++) {
if(column.getBlock(y).isAir() && !column.getBlock(y - 1).isAir()) {
builder.set(y);
}

View File

@@ -7,10 +7,13 @@
package com.dfsek.terra.addons.feature.locator.patterns;
import net.jafama.FastMath;
import java.util.function.Predicate;
import com.dfsek.terra.api.block.state.BlockState;
import com.dfsek.terra.api.util.Range;
import com.dfsek.terra.api.world.WritableWorld;
import com.dfsek.terra.api.world.chunk.generation.util.Column;
@@ -25,8 +28,22 @@ public class MatchPattern implements Pattern {
@Override
public boolean matches(int y, Column<?> column) {
for(int i : range) {
if(!matches.test(column.getBlock(y + i))) return false;
int min = FastMath.max(column.getMinY(), range.getMin() + y);
int max = FastMath.min(column.getMaxY(), range.getMax() + y);
if(max <= min) return false;
for(int i = min; i < max; i++) {
if(!matches.test(column.getBlock(i))) return false;
}
return true;
}
@Override
public boolean matches(WritableWorld world, int x, int y, int z) {
int min = FastMath.max(world.getMinHeight(), range.getMin() + y);
int max = FastMath.min(world.getMaxHeight(), range.getMax() + y);
if(max <= min) return false;
for(int i = min; i < max; i++) {
if(!matches.test(world.getBlockState(x, i, z))) return false;
}
return true;
}

View File

@@ -7,12 +7,18 @@
package com.dfsek.terra.addons.feature.locator.patterns;
import com.dfsek.terra.api.world.WritableWorld;
import com.dfsek.terra.api.world.chunk.generation.util.Column;
public interface Pattern {
boolean matches(int y, Column<?> column);
default boolean matches(WritableWorld world, int x, int y, int z) {
return matches(y, world.column(x, z));
}
default Pattern and(Pattern that) {
return (y, column) -> this.matches(y, column) && that.matches(y, column);
}
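With the new default method, a Pattern written against a Column can also answer world-space queries at (x, y, z). A sketch of that delegation shape using simplified stand-in interfaces rather than the real Terra types:

    public class PatternSketch {
        // Simplified stand-ins; the real Column/WritableWorld carry much more state.
        interface Column { boolean solidAt(int y); }
        interface World { Column column(int x, int z); }

        interface Pattern {
            boolean matches(int y, Column column);

            // World-space overload defaults to building a column view at (x, z),
            // mirroring the default added to the real Pattern interface.
            default boolean matches(World world, int x, int y, int z) {
                return matches(y, world.column(x, z));
            }
        }

        public static void main(String[] args) {
            Pattern solidBelow = (y, column) -> column.solidAt(y - 1);
            World world = (x, z) -> y -> y < 64;                      // solid below y = 64
            System.out.println(solidBelow.matches(world, 0, 64, 0));  // true
            System.out.println(solidBelow.matches(world, 0, 200, 0)); // false
        }
    }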

View File

@@ -23,11 +23,10 @@ import com.dfsek.terra.addons.noise.config.templates.FunctionTemplate;
public class UserDefinedFunction implements DynamicFunction {
private static final Map<FunctionTemplate, UserDefinedFunction> CACHE = new HashMap<>();
private final Expression expression;
private final int args;
private static final Map<FunctionTemplate, UserDefinedFunction> CACHE = new HashMap<>();
protected UserDefinedFunction(Expression expression, int args) {
this.expression = expression;
this.args = args;
@@ -38,17 +37,17 @@ public class UserDefinedFunction implements DynamicFunction {
if(function == null) {
Parser parser = new Parser();
Scope parent = new Scope();
Scope functionScope = new Scope().withParent(parent);
template.getArgs().forEach(functionScope::addInvocationVariable);
for(Entry<String, FunctionTemplate> entry : template.getFunctions().entrySet()) {
String id = entry.getKey();
FunctionTemplate nest = entry.getValue();
parser.registerFunction(id, newInstance(nest));
}
function = new UserDefinedFunction(parser.parse(template.getFunction(), functionScope), template.getArgs().size());
CACHE.put(template, function);
}

View File

@@ -12,20 +12,21 @@ import com.dfsek.tectonic.api.config.template.annotations.Value;
import com.dfsek.tectonic.api.config.template.object.ObjectTemplate;
import com.dfsek.terra.api.block.state.BlockState;
import com.dfsek.terra.api.config.meta.Meta;
import com.dfsek.terra.api.noise.NoiseSampler;
import com.dfsek.terra.api.util.collection.ProbabilityCollection;
public class PaletteLayerLoader implements ObjectTemplate<PaletteLayerHolder> {
@Value("materials")
private ProbabilityCollection<BlockState> collection;
private @Meta ProbabilityCollection<@Meta BlockState> collection;
@Value("sampler")
@Default
private NoiseSampler sampler = null;
private @Meta NoiseSampler sampler = null;
@Value("layers")
private int layers;
private @Meta int layers;
@Override
public PaletteLayerHolder get() {

View File

@@ -1,4 +1,4 @@
version = version("1.0.0")
version = version("1.1.0")
dependencies {
compileOnlyApi(project(":common:addons:manifest-addon-loader"))

View File

@@ -28,6 +28,8 @@ import com.dfsek.terra.api.event.events.config.ConfigurationLoadEvent;
import com.dfsek.terra.api.event.events.config.pack.ConfigPackPreLoadEvent;
import com.dfsek.terra.api.event.functional.FunctionalEventHandler;
import com.dfsek.terra.api.inject.annotations.Inject;
import com.dfsek.terra.api.properties.Context;
import com.dfsek.terra.api.properties.PropertyKey;
import com.dfsek.terra.api.structure.feature.Feature;
import com.dfsek.terra.api.util.reflection.TypeKey;
import com.dfsek.terra.api.world.biome.Biome;
@@ -49,12 +51,13 @@ public class FeatureGenerationAddon implements AddonInitializer {
@SuppressWarnings("unchecked")
@Override
public void initialize() {
PropertyKey<BiomeFeatures> biomeFeaturesKey = Context.create(BiomeFeatures.class);
platform.getEventManager()
.getHandler(FunctionalEventHandler.class)
.register(addon, ConfigPackPreLoadEvent.class)
.then(event -> event.getPack()
.getOrCreateRegistry(STAGE_TYPE_KEY)
.register(addon.key("FEATURE"), () -> new FeatureStageTemplate(platform)))
.register(addon.key("FEATURE"), () -> new FeatureStageTemplate(platform, biomeFeaturesKey)))
.failThrough();
platform.getEventManager()
@@ -84,7 +87,7 @@ public class FeatureGenerationAddon implements AddonInitializer {
featureGenerationStages.forEach(stage -> features.put(stage, template.get(stage.getID(), List.class)));
event.getLoadedObject(Biome.class).getContext().put(new BiomeFeatures(features));
event.getLoadedObject(Biome.class).getContext().put(biomeFeaturesKey, new BiomeFeatures(features));
}
})
.failThrough();

View File

@@ -12,6 +12,7 @@ import java.util.Random;
import com.dfsek.terra.addons.generation.feature.config.BiomeFeatures;
import com.dfsek.terra.api.Platform;
import com.dfsek.terra.api.properties.PropertyKey;
import com.dfsek.terra.api.registry.key.StringIdentifiable;
import com.dfsek.terra.api.util.Rotation;
import com.dfsek.terra.api.util.vector.Vector3Int;
@@ -28,10 +29,15 @@ public class FeatureGenerationStage implements GenerationStage, StringIdentifiab
private final String profile;
public FeatureGenerationStage(Platform platform, String id) {
private final int resolution;
private final PropertyKey<BiomeFeatures> biomeFeaturesKey;
public FeatureGenerationStage(Platform platform, String id, int resolution, PropertyKey<BiomeFeatures> biomeFeaturesKey) {
this.platform = platform;
this.id = id;
this.profile = "feature_stage:" + id;
this.resolution = resolution;
this.biomeFeaturesKey = biomeFeaturesKey;
}
@Override
@@ -41,33 +47,39 @@ public class FeatureGenerationStage implements GenerationStage, StringIdentifiab
int cx = world.centerChunkX() << 4;
int cz = world.centerChunkZ() << 4;
long seed = world.getSeed();
for(int x = 0; x < 16; x++) {
for(int z = 0; z < 16; z++) {
int tx = cx + x;
int tz = cz + z;
Column<WritableWorld> column = world.column(tx, tz);
long coordinateSeed = (seed * 31 + tx) * 31 + tz;
for(int chunkX = 0; chunkX < 16; chunkX += resolution) {
for(int chunkZ = 0; chunkZ < 16; chunkZ += resolution) {
int tx = cx + chunkX;
int tz = cz + chunkZ;
world.getBiomeProvider()
.getBiome(tx, 0, tz, seed)
.getContext()
.get(BiomeFeatures.class)
.getFeatures()
.getOrDefault(this, Collections.emptyList())
.forEach(feature -> {
platform.getProfiler().push(feature.getID());
if(feature.getDistributor().matches(tx, tz, seed)) {
feature.getLocator()
.getSuitableCoordinates(column)
.forEach(y ->
feature.getStructure(world, tx, y, tz)
.generate(Vector3Int.of(tx, y, tz),
world,
new Random(coordinateSeed * 31 + y),
Rotation.NONE)
);
.getColumn(tx, tz, world)
.forRanges(resolution, (min, max, biome) -> {
for(int subChunkX = 0; subChunkX < resolution; subChunkX++) {
for(int subChunkZ = 0; subChunkZ < resolution; subChunkZ++) {
int x = subChunkX + tx;
int z = subChunkZ + tz;
long coordinateSeed = (seed * 31 + x) * 31 + z;
Column<WritableWorld> column = world.column(x, z);
biome.getContext()
.get(biomeFeaturesKey)
.getFeatures()
.getOrDefault(this, Collections.emptyList())
.forEach(feature -> {
platform.getProfiler().push(feature.getID());
if(feature.getDistributor().matches(x, z, seed)) {
feature.getLocator()
.getSuitableCoordinates(column.clamp(min, max))
.forEach(y -> feature.getStructure(world, x, y, z)
.generate(Vector3Int.of(x, y, z),
world,
new Random(coordinateSeed * 31 + y),
Rotation.NONE)
);
}
platform.getProfiler().pop(feature.getID());
});
}
}
platform.getProfiler().pop(feature.getID());
});
}
}
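The reworked stage walks each 16x16 chunk in resolution-sized cells, resolving the biome (and its feature list) once per cell and then visiting the blocks inside that cell. A stripped-down sketch of that loop structure, counting lookups instead of placing features:

    public class ResolutionLoopSketch {
        public static void main(String[] args) {
            int resolution = 4; // one of 1, 2, 4, 8, 16

            int biomeLookups = 0;
            int blockVisits = 0;
            for(int chunkX = 0; chunkX < 16; chunkX += resolution) {
                for(int chunkZ = 0; chunkZ < 16; chunkZ += resolution) {
                    biomeLookups++;                      // one biome sample per cell
                    for(int subX = 0; subX < resolution; subX++) {
                        for(int subZ = 0; subZ < resolution; subZ++) {
                            blockVisits++;               // features still consider every block
                        }
                    }
                }
            }
            System.out.println(biomeLookups + " biome lookups"); // 16 at resolution 4
            System.out.println(blockVisits + " block visits");   // always 256
        }
    }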

View File

@@ -1,23 +1,46 @@
package com.dfsek.terra.addons.generation.feature.config;
import com.dfsek.tectonic.api.config.template.ValidatedConfigTemplate;
import com.dfsek.tectonic.api.config.template.annotations.Default;
import com.dfsek.tectonic.api.config.template.annotations.Value;
import com.dfsek.tectonic.api.config.template.object.ObjectTemplate;
import com.dfsek.tectonic.api.exception.ValidationException;
import com.dfsek.terra.addons.generation.feature.FeatureGenerationStage;
import com.dfsek.terra.api.Platform;
import com.dfsek.terra.api.properties.PropertyKey;
import com.dfsek.terra.api.world.chunk.generation.stage.GenerationStage;
public class FeatureStageTemplate implements ObjectTemplate<GenerationStage> {
public class FeatureStageTemplate implements ObjectTemplate<GenerationStage>, ValidatedConfigTemplate {
private final Platform platform;
private final PropertyKey<BiomeFeatures> biomeFeaturesKey;
@Value("id")
private String id;
public FeatureStageTemplate(Platform platform) { this.platform = platform; }
@Value("resolution")
@Default
private int resolution = 4;
public FeatureStageTemplate(Platform platform, PropertyKey<BiomeFeatures> biomeFeaturesKey) {
this.platform = platform;
this.biomeFeaturesKey = biomeFeaturesKey;
}
@Override
public FeatureGenerationStage get() {
return new FeatureGenerationStage(platform, id);
return new FeatureGenerationStage(platform, id, resolution, biomeFeaturesKey);
}
@Override
public boolean validate() throws ValidationException {
if(!(resolution == 1
|| resolution == 2
|| resolution == 4
|| resolution == 8
|| resolution == 16)) throw new ValidationException(
"Resolution must be power of 2 less than or equal to 16 (1, 2, 4, 8, 16), got: " + resolution);
return true;
}
}
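validate() whitelists the five legal resolutions explicitly. For comparison, the same constraint can be expressed with the usual power-of-two bit trick plus the range bound; this is only an illustrative alternative, not what the template does:

    public class ResolutionCheckSketch {
        // True for 1, 2, 4, 8, 16 and false for everything else.
        static boolean isValidResolution(int resolution) {
            return resolution >= 1
                   && resolution <= 16
                   && (resolution & (resolution - 1)) == 0;
        }

        public static void main(String[] args) {
            for(int r = 0; r <= 17; r++) {
                if(isValidResolution(r)) System.out.println(r + " is valid");
            }
            // Prints: 1, 2, 4, 8, 16
        }
    }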

View File

@@ -2,10 +2,6 @@ import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar
version = version("1.0.0")
repositories {
maven { url = uri("https://jitpack.io/") }
}
dependencies {
api("commons-io:commons-io:2.7")
api("com.github.Querz:NBT:6.1")

View File

@@ -1,6 +1,6 @@
import com.github.jengelman.gradle.plugins.shadow.tasks.ShadowJar
version = version("1.0.1")
version = version("1.1.0")
dependencies {
api("commons-io:commons-io:2.7")

View File

@@ -15,9 +15,12 @@ import java.util.Map;
import com.dfsek.terra.addons.terrascript.parser.exceptions.ParseException;
import com.dfsek.terra.addons.terrascript.parser.lang.Block;
import com.dfsek.terra.addons.terrascript.parser.lang.Executable;
import com.dfsek.terra.addons.terrascript.parser.lang.Item;
import com.dfsek.terra.addons.terrascript.parser.lang.Keyword;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable.ReturnType;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope.ScopeBuilder;
import com.dfsek.terra.addons.terrascript.parser.lang.constants.BooleanConstant;
import com.dfsek.terra.addons.terrascript.parser.lang.constants.ConstantExpression;
import com.dfsek.terra.addons.terrascript.parser.lang.constants.NumericConstant;
@@ -48,12 +51,17 @@ import com.dfsek.terra.addons.terrascript.parser.lang.operations.statements.Grea
import com.dfsek.terra.addons.terrascript.parser.lang.operations.statements.LessThanOrEqualsStatement;
import com.dfsek.terra.addons.terrascript.parser.lang.operations.statements.LessThanStatement;
import com.dfsek.terra.addons.terrascript.parser.lang.operations.statements.NotEqualsStatement;
import com.dfsek.terra.addons.terrascript.parser.lang.variables.VariableAssignmentNode;
import com.dfsek.terra.addons.terrascript.parser.lang.variables.VariableDeclarationNode;
import com.dfsek.terra.addons.terrascript.parser.lang.variables.VariableReferenceNode;
import com.dfsek.terra.addons.terrascript.parser.lang.variables.assign.BoolAssignmentNode;
import com.dfsek.terra.addons.terrascript.parser.lang.variables.assign.NumAssignmentNode;
import com.dfsek.terra.addons.terrascript.parser.lang.variables.assign.StrAssignmentNode;
import com.dfsek.terra.addons.terrascript.parser.lang.variables.assign.VariableAssignmentNode;
import com.dfsek.terra.addons.terrascript.parser.lang.variables.reference.BoolVariableReferenceNode;
import com.dfsek.terra.addons.terrascript.parser.lang.variables.reference.NumVariableReferenceNode;
import com.dfsek.terra.addons.terrascript.parser.lang.variables.reference.StrVariableReferenceNode;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
import com.dfsek.terra.addons.terrascript.tokenizer.Token;
import com.dfsek.terra.addons.terrascript.tokenizer.Tokenizer;
import com.dfsek.terra.api.util.generic.pair.Pair;
@SuppressWarnings("unchecked")
@@ -83,11 +91,12 @@ public class Parser {
*
* @throws ParseException If parsing fails.
*/
public Block parse() {
return parseBlock(new Tokenizer(data), new HashMap<>(), false);
public Executable parse() {
ScopeBuilder scopeBuilder = new ScopeBuilder();
return new Executable(parseBlock(new Tokenizer(data), false, scopeBuilder), scopeBuilder);
}
private Keyword<?> parseLoopLike(Tokenizer tokens, Map<String, Returnable.ReturnType> variableMap, boolean loop) throws ParseException {
private Keyword<?> parseLoopLike(Tokenizer tokens, boolean loop, ScopeBuilder scopeBuilder) throws ParseException {
Token identifier = tokens.consume();
ParserUtil.checkType(identifier, Token.Type.IF_STATEMENT, Token.Type.WHILE_LOOP, Token.Type.FOR_LOOP);
@@ -95,43 +104,43 @@ public class Parser {
ParserUtil.checkType(tokens.consume(), Token.Type.GROUP_BEGIN);
return switch(identifier.getType()) {
case FOR_LOOP -> parseForLoop(tokens, variableMap, identifier.getPosition());
case IF_STATEMENT -> parseIfStatement(tokens, variableMap, identifier.getPosition(), loop);
case WHILE_LOOP -> parseWhileLoop(tokens, variableMap, identifier.getPosition());
case FOR_LOOP -> parseForLoop(tokens, identifier.getPosition(), scopeBuilder);
case IF_STATEMENT -> parseIfStatement(tokens, identifier.getPosition(), loop, scopeBuilder);
case WHILE_LOOP -> parseWhileLoop(tokens, identifier.getPosition(), scopeBuilder);
default -> throw new UnsupportedOperationException(
"Unknown keyword " + identifier.getContent() + ": " + identifier.getPosition());
};
}
private WhileKeyword parseWhileLoop(Tokenizer tokens, Map<String, Returnable.ReturnType> variableMap, Position start) {
Returnable<?> first = parseExpression(tokens, true, variableMap);
private WhileKeyword parseWhileLoop(Tokenizer tokens, Position start, ScopeBuilder scopeBuilder) {
Returnable<?> first = parseExpression(tokens, true, scopeBuilder);
ParserUtil.checkReturnType(first, Returnable.ReturnType.BOOLEAN);
ParserUtil.checkType(tokens.consume(), Token.Type.GROUP_END);
return new WhileKeyword(parseStatementBlock(tokens, variableMap, true), (Returnable<Boolean>) first, start); // While loop
return new WhileKeyword(parseStatementBlock(tokens, true, scopeBuilder), (Returnable<Boolean>) first, start); // While loop
}
private IfKeyword parseIfStatement(Tokenizer tokens, Map<String, Returnable.ReturnType> variableMap, Position start, boolean loop) {
Returnable<?> condition = parseExpression(tokens, true, variableMap);
private IfKeyword parseIfStatement(Tokenizer tokens, Position start, boolean loop, ScopeBuilder scopeBuilder) {
Returnable<?> condition = parseExpression(tokens, true, scopeBuilder);
ParserUtil.checkReturnType(condition, Returnable.ReturnType.BOOLEAN);
ParserUtil.checkType(tokens.consume(), Token.Type.GROUP_END);
Block elseBlock = null;
Block statement = parseStatementBlock(tokens, variableMap, loop);
Block statement = parseStatementBlock(tokens, loop, scopeBuilder);
List<IfKeyword.Pair<Returnable<Boolean>, Block>> elseIf = new ArrayList<>();
List<Pair<Returnable<Boolean>, Block>> elseIf = new ArrayList<>();
while(tokens.hasNext() && tokens.get().getType().equals(Token.Type.ELSE)) {
tokens.consume(); // Consume else.
if(tokens.get().getType().equals(Token.Type.IF_STATEMENT)) {
tokens.consume(); // Consume if.
Returnable<?> elseCondition = parseExpression(tokens, true, variableMap);
Returnable<?> elseCondition = parseExpression(tokens, true, scopeBuilder);
ParserUtil.checkReturnType(elseCondition, Returnable.ReturnType.BOOLEAN);
elseIf.add(new IfKeyword.Pair<>((Returnable<Boolean>) elseCondition, parseStatementBlock(tokens, variableMap, loop)));
elseIf.add(Pair.of((Returnable<Boolean>) elseCondition, parseStatementBlock(tokens, loop, scopeBuilder)));
} else {
elseBlock = parseStatementBlock(tokens, variableMap, loop);
elseBlock = parseStatementBlock(tokens, loop, scopeBuilder);
break; // Else must be last.
}
}
@@ -139,51 +148,51 @@ public class Parser {
return new IfKeyword(statement, (Returnable<Boolean>) condition, elseIf, elseBlock, start); // If statement
}
private Block parseStatementBlock(Tokenizer tokens, Map<String, Returnable.ReturnType> variableMap, boolean loop) {
private Block parseStatementBlock(Tokenizer tokens, boolean loop, ScopeBuilder scopeBuilder) {
if(tokens.get().getType().equals(Token.Type.BLOCK_BEGIN)) {
ParserUtil.checkType(tokens.consume(), Token.Type.BLOCK_BEGIN);
Block block = parseBlock(tokens, variableMap, loop);
Block block = parseBlock(tokens, loop, scopeBuilder);
ParserUtil.checkType(tokens.consume(), Token.Type.BLOCK_END);
return block;
} else {
Position position = tokens.get().getPosition();
Block block = new Block(Collections.singletonList(parseItem(tokens, variableMap, loop)), position);
Block block = new Block(Collections.singletonList(parseItem(tokens, loop, scopeBuilder)), position);
ParserUtil.checkType(tokens.consume(), Token.Type.STATEMENT_END);
return block;
}
}
private ForKeyword parseForLoop(Tokenizer tokens, Map<String, Returnable.ReturnType> old, Position start) {
Map<String, Returnable.ReturnType> variableMap = new HashMap<>(old); // New scope
private ForKeyword parseForLoop(Tokenizer tokens, Position start, ScopeBuilder scopeBuilder) {
scopeBuilder = scopeBuilder.sub(); // new scope
Token f = tokens.get();
ParserUtil.checkType(f, Token.Type.NUMBER_VARIABLE, Token.Type.STRING_VARIABLE, Token.Type.BOOLEAN_VARIABLE, Token.Type.IDENTIFIER);
Item<?> initializer;
if(f.isVariableDeclaration()) {
VariableDeclarationNode<?> forVar = parseVariableDeclaration(tokens, variableMap);
VariableAssignmentNode<?> forVar = parseVariableDeclaration(tokens, scopeBuilder);
Token name = tokens.get();
if(functions.containsKey(name.getContent()) || variableMap.containsKey(name.getContent()))
if(functions.containsKey(name.getContent()) || scopeBuilder.contains(name.getContent()))
throw new ParseException(name.getContent() + " is already defined in this scope", name.getPosition());
initializer = forVar;
} else initializer = parseExpression(tokens, true, variableMap);
} else initializer = parseExpression(tokens, true, scopeBuilder);
ParserUtil.checkType(tokens.consume(), Token.Type.STATEMENT_END);
Returnable<?> conditional = parseExpression(tokens, true, variableMap);
Returnable<?> conditional = parseExpression(tokens, true, scopeBuilder);
ParserUtil.checkReturnType(conditional, Returnable.ReturnType.BOOLEAN);
ParserUtil.checkType(tokens.consume(), Token.Type.STATEMENT_END);
Item<?> incrementer;
Token token = tokens.get();
if(variableMap.containsKey(token.getContent())) { // Assume variable assignment
incrementer = parseAssignment(tokens, variableMap);
} else incrementer = parseFunction(tokens, true, variableMap);
if(scopeBuilder.contains(token.getContent())) { // Assume variable assignment
incrementer = parseAssignment(tokens, scopeBuilder);
} else incrementer = parseFunction(tokens, true, scopeBuilder);
ParserUtil.checkType(tokens.consume(), Token.Type.GROUP_END);
return new ForKeyword(parseStatementBlock(tokens, variableMap, true), initializer, (Returnable<Boolean>) conditional, incrementer,
return new ForKeyword(parseStatementBlock(tokens, true, scopeBuilder), initializer, (Returnable<Boolean>) conditional, incrementer,
start);
}
private Returnable<?> parseExpression(Tokenizer tokens, boolean full, Map<String, Returnable.ReturnType> variableMap) {
private Returnable<?> parseExpression(Tokenizer tokens, boolean full, ScopeBuilder scopeBuilder) {
boolean booleanInverted = false; // Check for boolean not operator
boolean negate = false;
if(tokens.get().getType().equals(Token.Type.BOOLEAN_NOT)) {
@@ -202,13 +211,21 @@ public class Parser {
if(id.isConstant()) {
expression = parseConstantExpression(tokens);
} else if(id.getType().equals(Token.Type.GROUP_BEGIN)) { // Parse grouped expression
expression = parseGroup(tokens, variableMap);
expression = parseGroup(tokens, scopeBuilder);
} else {
if(functions.containsKey(id.getContent()))
expression = parseFunction(tokens, false, variableMap);
else if(variableMap.containsKey(id.getContent())) {
expression = parseFunction(tokens, false, scopeBuilder);
else if(scopeBuilder.contains(id.getContent())) {
ParserUtil.checkType(tokens.consume(), Token.Type.IDENTIFIER);
expression = new VariableReferenceNode(id.getContent(), id.getPosition(), variableMap.get(id.getContent()));
String varId = id.getContent();
ReturnType varType = scopeBuilder.getType(varId);
expression = switch(varType) {
case NUMBER -> new NumVariableReferenceNode(id.getPosition(), varType, scopeBuilder.getIndex(varId));
case STRING -> new StrVariableReferenceNode(id.getPosition(), varType, scopeBuilder.getIndex(varId));
case BOOLEAN -> new BoolVariableReferenceNode(id.getPosition(), varType, scopeBuilder.getIndex(varId));
default -> throw new ParseException("Illegal type for variable reference: " + varType, id.getPosition());
};
} else throw new ParseException("Unexpected token \" " + id.getContent() + "\"", id.getPosition());
}
@@ -221,7 +238,7 @@ public class Parser {
}
if(full && tokens.get().isBinaryOperator()) { // Parse binary operations
return parseBinaryOperation(expression, tokens, variableMap);
return parseBinaryOperation(expression, tokens, scopeBuilder);
}
return expression;
}
@@ -243,25 +260,25 @@ public class Parser {
}
}
private Returnable<?> parseGroup(Tokenizer tokens, Map<String, Returnable.ReturnType> variableMap) {
private Returnable<?> parseGroup(Tokenizer tokens, ScopeBuilder scopeBuilder) {
ParserUtil.checkType(tokens.consume(), Token.Type.GROUP_BEGIN);
Returnable<?> expression = parseExpression(tokens, true, variableMap); // Parse inside of group as a separate expression
Returnable<?> expression = parseExpression(tokens, true, scopeBuilder); // Parse inside of group as a separate expression
ParserUtil.checkType(tokens.consume(), Token.Type.GROUP_END);
return expression;
}
private BinaryOperation<?, ?> parseBinaryOperation(Returnable<?> left, Tokenizer tokens,
Map<String, Returnable.ReturnType> variableMap) {
ScopeBuilder scopeBuilder) {
Token binaryOperator = tokens.consume();
ParserUtil.checkBinaryOperator(binaryOperator);
Returnable<?> right = parseExpression(tokens, false, variableMap);
Returnable<?> right = parseExpression(tokens, false, scopeBuilder);
Token other = tokens.get();
if(ParserUtil.hasPrecedence(binaryOperator.getType(), other.getType())) {
return assemble(left, parseBinaryOperation(right, tokens, variableMap), binaryOperator);
return assemble(left, parseBinaryOperation(right, tokens, scopeBuilder), binaryOperator);
} else if(other.isBinaryOperator()) {
return parseBinaryOperation(assemble(left, right, binaryOperator), tokens, variableMap);
return parseBinaryOperation(assemble(left, right, binaryOperator), tokens, scopeBuilder);
}
return assemble(left, right, binaryOperator);
}
@@ -306,7 +323,7 @@ public class Parser {
}
}
private VariableDeclarationNode<?> parseVariableDeclaration(Tokenizer tokens, Map<String, Returnable.ReturnType> variableMap) {
private VariableAssignmentNode<?> parseVariableDeclaration(Tokenizer tokens, ScopeBuilder scopeBuilder) {
Token type = tokens.consume();
ParserUtil.checkType(type, Token.Type.STRING_VARIABLE, Token.Type.BOOLEAN_VARIABLE, Token.Type.NUMBER_VARIABLE);
@@ -315,30 +332,34 @@ public class Parser {
ParserUtil.checkVarType(type, returnType); // Check for type mismatch
Token identifier = tokens.consume();
ParserUtil.checkType(identifier, Token.Type.IDENTIFIER);
if(functions.containsKey(identifier.getContent()) || variableMap.containsKey(identifier.getContent()))
if(functions.containsKey(identifier.getContent()) || scopeBuilder.contains(identifier.getContent()))
throw new ParseException(identifier.getContent() + " is already defined in this scope", identifier.getPosition());
ParserUtil.checkType(tokens.consume(), Token.Type.ASSIGNMENT);
Returnable<?> value = parseExpression(tokens, true, variableMap);
Returnable<?> value = parseExpression(tokens, true, scopeBuilder);
ParserUtil.checkReturnType(value, returnType);
variableMap.put(identifier.getContent(), returnType);
return new VariableDeclarationNode<>(tokens.get().getPosition(), identifier.getContent(), value, returnType);
String id = identifier.getContent();
return switch(value.returnType()) {
case NUMBER -> new NumAssignmentNode((Returnable<Number>) value, identifier.getPosition(), scopeBuilder.num(id));
case STRING -> new StrAssignmentNode((Returnable<String>) value, identifier.getPosition(), scopeBuilder.str(id));
case BOOLEAN -> new BoolAssignmentNode((Returnable<Boolean>) value, identifier.getPosition(), scopeBuilder.bool(id));
default -> throw new ParseException("Illegal type for variable declaration: " + type, value.getPosition());
};
}
private Block parseBlock(Tokenizer tokens, Map<String, Returnable.ReturnType> superVars, boolean loop) {
private Block parseBlock(Tokenizer tokens, boolean loop, ScopeBuilder scopeBuilder) {
List<Item<?>> parsedItems = new ArrayList<>();
Map<String, Returnable.ReturnType> parsedVariables = new HashMap<>(
superVars); // New hashmap as to not mutate parent scope's declarations.
scopeBuilder = scopeBuilder.sub();
Token first = tokens.get();
while(tokens.hasNext()) {
Token token = tokens.get();
if(token.getType().equals(Token.Type.BLOCK_END)) break; // Stop parsing at block end.
Item<?> parsedItem = parseItem(tokens, parsedVariables, loop);
Item<?> parsedItem = parseItem(tokens, loop, scopeBuilder);
if(parsedItem != Function.NULL) {
parsedItems.add(parsedItem);
}
@@ -347,7 +368,7 @@ public class Parser {
return new Block(parsedItems, first.getPosition());
}
private Item<?> parseItem(Tokenizer tokens, Map<String, Returnable.ReturnType> variableMap, boolean loop) {
private Item<?> parseItem(Tokenizer tokens, boolean loop, ScopeBuilder scopeBuilder) {
Token token = tokens.get();
if(loop) ParserUtil.checkType(token, Token.Type.IDENTIFIER, Token.Type.IF_STATEMENT, Token.Type.WHILE_LOOP, Token.Type.FOR_LOOP,
Token.Type.NUMBER_VARIABLE, Token.Type.STRING_VARIABLE, Token.Type.BOOLEAN_VARIABLE,
@@ -357,14 +378,14 @@ public class Parser {
Token.Type.FAIL);
if(token.isLoopLike()) { // Parse loop-like tokens (if, while, etc)
return parseLoopLike(tokens, variableMap, loop);
return parseLoopLike(tokens, loop, scopeBuilder);
} else if(token.isIdentifier()) { // Parse identifiers
if(variableMap.containsKey(token.getContent())) { // Assume variable assignment
return parseAssignment(tokens, variableMap);
} else return parseFunction(tokens, true, variableMap);
if(scopeBuilder.contains(token.getContent())) { // Assume variable assignment
return parseAssignment(tokens, scopeBuilder);
} else return parseFunction(tokens, true, scopeBuilder);
} else if(token.isVariableDeclaration()) {
return parseVariableDeclaration(tokens, variableMap);
return parseVariableDeclaration(tokens, scopeBuilder);
} else if(token.getType().equals(Token.Type.RETURN)) return new ReturnKeyword(tokens.consume().getPosition());
else if(token.getType().equals(Token.Type.BREAK)) return new BreakKeyword(tokens.consume().getPosition());
@@ -373,21 +394,30 @@ public class Parser {
else throw new UnsupportedOperationException("Unexpected token " + token.getType() + ": " + token.getPosition());
}
private VariableAssignmentNode<?> parseAssignment(Tokenizer tokens, Map<String, Returnable.ReturnType> variableMap) {
private VariableAssignmentNode<?> parseAssignment(Tokenizer tokens, ScopeBuilder scopeBuilder) {
Token identifier = tokens.consume();
ParserUtil.checkType(identifier, Token.Type.IDENTIFIER);
ParserUtil.checkType(tokens.consume(), Token.Type.ASSIGNMENT);
Returnable<?> value = parseExpression(tokens, true, variableMap);
Returnable<?> value = parseExpression(tokens, true, scopeBuilder);
ParserUtil.checkReturnType(value, variableMap.get(identifier.getContent()));
String id = identifier.getContent();
return new VariableAssignmentNode<>(value, identifier.getContent(), identifier.getPosition());
ParserUtil.checkReturnType(value, scopeBuilder.getType(id));
ReturnType type = value.returnType();
return switch(type) {
case NUMBER -> new NumAssignmentNode((Returnable<Number>) value, identifier.getPosition(), scopeBuilder.getIndex(id));
case STRING -> new StrAssignmentNode((Returnable<String>) value, identifier.getPosition(), scopeBuilder.getIndex(id));
case BOOLEAN -> new BoolAssignmentNode((Returnable<Boolean>) value, identifier.getPosition(), scopeBuilder.getIndex(id));
default -> throw new ParseException("Illegal type for variable assignment: " + type, value.getPosition());
};
}
private Function<?> parseFunction(Tokenizer tokens, boolean fullStatement, Map<String, Returnable.ReturnType> variableMap) {
private Function<?> parseFunction(Tokenizer tokens, boolean fullStatement, ScopeBuilder scopeBuilder) {
Token identifier = tokens.consume();
ParserUtil.checkType(identifier, Token.Type.IDENTIFIER); // First token must be identifier
@@ -397,7 +427,7 @@ public class Parser {
ParserUtil.checkType(tokens.consume(), Token.Type.GROUP_BEGIN); // Second is body begin
List<Returnable<?>> args = getArgs(tokens, variableMap); // Extract arguments, consume the rest.
List<Returnable<?>> args = getArgs(tokens, scopeBuilder); // Extract arguments, consume the rest.
ParserUtil.checkType(tokens.consume(), Token.Type.GROUP_END); // Remove body end
@@ -425,11 +455,11 @@ public class Parser {
throw new UnsupportedOperationException("Unsupported function: " + identifier.getContent());
}
private List<Returnable<?>> getArgs(Tokenizer tokens, Map<String, Returnable.ReturnType> variableMap) {
private List<Returnable<?>> getArgs(Tokenizer tokens, ScopeBuilder scopeBuilder) {
List<Returnable<?>> args = new ArrayList<>();
while(!tokens.get().getType().equals(Token.Type.GROUP_END)) {
args.add(parseExpression(tokens, true, variableMap));
args.add(parseExpression(tokens, true, scopeBuilder));
ParserUtil.checkType(tokens.get(), Token.Type.SEPARATOR, Token.Type.GROUP_END);
if(tokens.get().getType().equals(Token.Type.SEPARATOR)) tokens.consume();
}

View File

@@ -21,23 +21,8 @@ public class Block implements Item<Block.ReturnInfo<?>> {
this.position = position;
}
public ReturnInfo<?> apply(ImplementationArguments implementationArguments) {
return apply(implementationArguments, new Scope());
}
@Override
public ReturnInfo<?> apply(ImplementationArguments implementationArguments, Scope scope) {
Scope sub = scope.sub();
for(Item<?> item : items) {
Object result = item.apply(implementationArguments, sub);
if(result instanceof ReturnInfo<?> level) {
if(!level.getLevel().equals(ReturnLevel.NONE)) return level;
}
}
return new ReturnInfo<>(ReturnLevel.NONE, null);
}
public ReturnInfo<?> applyNoNewScope(ImplementationArguments implementationArguments, Scope scope) {
for(Item<?> item : items) {
Object result = item.apply(implementationArguments, scope);
if(result instanceof ReturnInfo<?> level) {
@@ -52,10 +37,6 @@ public class Block implements Item<Block.ReturnInfo<?>> {
return position;
}
public List<Item<?>> getItems() {
return items;
}
public enum ReturnLevel {
NONE(false),
BREAK(false),

View File

@@ -0,0 +1,19 @@
package com.dfsek.terra.addons.terrascript.parser.lang;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope.ScopeBuilder;
public class Executable {
private final Block script;
private final ThreadLocal<Scope> scope;
public Executable(Block script, ScopeBuilder scopeBuilder) {
this.script = script;
this.scope = ThreadLocal.withInitial(scopeBuilder::build);
}
public boolean execute(ImplementationArguments arguments) {
return script.apply(arguments, scope.get()).getLevel() != Block.ReturnLevel.FAIL;
}
}
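Executable builds one Scope per thread via ThreadLocal, so concurrent structure generation on different threads never shares (or races on) the same variable slots. A tiny sketch of why the per-thread copy matters, using a plain array as a stand-in for the flattened Scope:

    public class ThreadLocalScopeSketch {
        // Stand-in for Scope: a fixed block of variable slots.
        record Slots(double[] num) {}

        public static void main(String[] args) throws InterruptedException {
            // Each thread lazily gets its own Slots, like ThreadLocal.withInitial(scopeBuilder::build).
            ThreadLocal<Slots> scope = ThreadLocal.withInitial(() -> new Slots(new double[4]));

            Runnable script = () -> {
                Slots s = scope.get();
                s.num()[0] += 1;   // writes only this thread's copy
                System.out.println(Thread.currentThread().getName() + " -> " + s.num()[0]);
            };

            Thread a = new Thread(script, "worker-a");
            Thread b = new Thread(script, "worker-b");
            a.start(); b.start();
            a.join(); b.join();    // both print 1.0: no shared state
        }
    }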

View File

@@ -13,5 +13,13 @@ import com.dfsek.terra.addons.terrascript.tokenizer.Position;
public interface Item<T> {
T apply(ImplementationArguments implementationArguments, Scope scope);
default double applyDouble(ImplementationArguments implementationArguments, Scope scope) {
throw new UnsupportedOperationException("Cannot apply " + this + " as double");
}
default boolean applyBoolean(ImplementationArguments implementationArguments, Scope scope) {
throw new UnsupportedOperationException("Cannot apply " + this + " as double");
}
Position getPosition();
}

View File

@@ -1,46 +1,138 @@
package com.dfsek.terra.addons.terrascript.parser.lang;
import net.jafama.FastMath;
import java.util.HashMap;
import java.util.Map;
import com.dfsek.terra.addons.terrascript.parser.lang.variables.Variable;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable.ReturnType;
import com.dfsek.terra.api.util.generic.pair.Pair;
public class Scope {
private static final Scope NULL = new Scope() {
@Override
public Variable<?> get(String id) {
throw new IllegalStateException("Cannot get variable from null scope: " + id);
private final double[] num;
private final boolean[] bool;
private final String[] str;
private Scope(int numSize, int boolSize, int strSize) {
this.num = new double[numSize];
this.bool = new boolean[boolSize];
this.str = new String[strSize];
}
public double getNum(int index) {
return num[index];
}
public boolean getBool(int index) {
return bool[index];
}
public String getStr(int index) {
return str[index];
}
public void setNum(int index, double value) {
num[index] = value;
}
public void setBool(int index, boolean value) {
bool[index] = value;
}
public void setStr(int index, String value) {
str[index] = value;
}
public static final class ScopeBuilder {
private final Map<String, Pair<Integer, ReturnType>> indices;
private int numSize, boolSize, strSize = 0;
private ScopeBuilder parent;
public ScopeBuilder() {
this.indices = new HashMap<>();
}
@Override
public void put(String id, Variable<?> variable) {
throw new IllegalStateException("Cannot set variable in null scope: " + id);
private ScopeBuilder(ScopeBuilder parent) {
this.parent = parent;
this.numSize = parent.numSize;
this.boolSize = parent.boolSize;
this.strSize = parent.strSize;
this.indices = new HashMap<>(parent.indices);
}
public Scope build() {
return new Scope(numSize, boolSize, strSize);
}
public ScopeBuilder sub() {
return new ScopeBuilder(this);
}
private String check(String id) {
if(indices.containsKey(id)) {
throw new IllegalArgumentException("Variable with ID " + id + " already registered.");
}
return id;
}
public int num(String id) {
int num = numSize;
indices.put(check(id), Pair.of(num, ReturnType.NUMBER));
numSize++;
updateNumSize(numSize);
return num;
}
public int str(String id) {
int str = strSize;
indices.put(check(id), Pair.of(str, ReturnType.STRING));
strSize++;
updateStrSize(strSize);
return str;
}
public int bool(String id) {
int bool = boolSize;
indices.put(check(id), Pair.of(bool, ReturnType.BOOLEAN));
boolSize++;
updateBoolSize(boolSize);
return bool;
}
private void updateBoolSize(int size) {
this.boolSize = FastMath.max(boolSize, size);
if(parent != null) {
parent.updateBoolSize(size);
}
}
private void updateNumSize(int size) {
this.numSize = FastMath.max(numSize, size);
if(parent != null) {
parent.updateNumSize(size);
}
}
private void updateStrSize(int size) {
this.strSize = FastMath.max(strSize, size);
if(parent != null) {
parent.updateStrSize(size);
}
}
public int getIndex(String id) {
return indices.get(id).getLeft();
}
public ReturnType getType(String id) {
return indices.get(id).getRight();
}
public boolean contains(String id) {
return indices.containsKey(id);
}
};
private final Scope parent;
private final Map<String, Variable<?>> variableMap = new HashMap<>();
public Scope(Scope parent) {
this.parent = parent;
}
public Scope() {
this.parent = NULL;
}
public Variable<?> get(String id) {
Variable<?> var = variableMap.get(id);
return var == null ? parent.get(id) : var;
}
public void put(String id, Variable<?> variable) {
variableMap.put(id, variable);
}
public Scope sub() {
return new Scope(this);
}
}
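The rewritten Scope is no longer a map of Variable objects but three flat primitive arrays sized at parse time by ScopeBuilder, which hands each declared variable a type and an index. A toy version of that two-phase arrangement (resolve names to indices up front, then read and write by index at run time); the real builder also tracks nested scopes and propagates sizes to its parent:

    import java.util.HashMap;
    import java.util.Map;

    public class IndexedScopeSketch {
        // Parse time: names are resolved to slot indices once.
        static final class Builder {
            private final Map<String, Integer> numIndices = new HashMap<>();
            int declareNum(String id) {
                int index = numIndices.size();
                numIndices.put(id, index);
                return index;
            }
            double[] build() { return new double[numIndices.size()]; }
        }

        public static void main(String[] args) {
            Builder builder = new Builder();
            int x = builder.declareNum("x");   // slot 0
            int y = builder.declareNum("y");   // slot 1

            // Run time: no string lookups, just array reads and writes.
            double[] numSlots = builder.build();
            numSlots[x] = 2.5;
            numSlots[y] = numSlots[x] * 4;
            System.out.println(numSlots[y]);   // 10.0
        }
    }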

View File

@@ -7,12 +7,22 @@
package com.dfsek.terra.addons.terrascript.parser.lang.constants;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
public class BooleanConstant extends ConstantExpression<Boolean> {
private final boolean constant;
public BooleanConstant(Boolean constant, Position position) {
super(constant, position);
this.constant = constant;
}
@Override
public boolean applyBoolean(ImplementationArguments implementationArguments, Scope scope) {
return constant;
}
@Override

View File

@@ -7,13 +7,23 @@
package com.dfsek.terra.addons.terrascript.parser.lang.constants;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
public class NumericConstant extends ConstantExpression<Number> {
private final double constant;
public NumericConstant(Number constant, Position position) {
super(constant, position);
this.constant = constant.doubleValue();
}
@Override
public double applyDouble(ImplementationArguments implementationArguments, Scope scope) {
return constant;
}
@Override

View File

@@ -19,15 +19,25 @@ public interface Function<T> extends Returnable<T> {
public ReturnType returnType() {
return null;
}
@Override
public Object apply(ImplementationArguments implementationArguments, Scope scope) {
return null;
}
@Override
public Position getPosition() {
return null;
}
};
@Override
default double applyDouble(ImplementationArguments implementationArguments, Scope scope) {
return ((Number) apply(implementationArguments, scope)).doubleValue();
}
@Override
default boolean applyBoolean(ImplementationArguments implementationArguments, Scope scope) {
return (Boolean) apply(implementationArguments, scope);
}
}
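The applyDouble/applyBoolean defaults give every expression a primitive fast path while keeping the boxed apply(...) as the fallback, which is what the specialized operations below override. A minimal sketch of the two paths:

    public class PrimitiveDispatchSketch {
        interface Expr {
            Object apply();                   // general, boxed path
            default double applyDouble() {    // default mirrors the diff: unbox the boxed result
                return ((Number) apply()).doubleValue();
            }
        }

        public static void main(String[] args) {
            Expr boxedTwo = () -> 2.0;        // applyDouble() unboxes a Double here
            Expr primitiveTwo = new Expr() {
                @Override public Object apply() { return 2.0; }
                @Override public double applyDouble() { return 2.0; } // stays primitive
            };
            System.out.println(boxedTwo.applyDouble() + primitiveTwo.applyDouble()); // 4.0
        }
    }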

View File

@@ -33,11 +33,10 @@ public class ForKeyword implements Keyword<Block.ReturnInfo<?>> {
@Override
public Block.ReturnInfo<?> apply(ImplementationArguments implementationArguments, Scope scope) {
Scope sub = scope.sub();
for(initializer.apply(implementationArguments, sub);
statement.apply(implementationArguments, sub);
incrementer.apply(implementationArguments, sub)) {
Block.ReturnInfo<?> level = conditional.applyNoNewScope(implementationArguments, sub);
for(initializer.apply(implementationArguments, scope);
statement.apply(implementationArguments, scope);
incrementer.apply(implementationArguments, scope)) {
Block.ReturnInfo<?> level = conditional.apply(implementationArguments, scope);
if(level.getLevel().equals(Block.ReturnLevel.BREAK)) break;
if(level.getLevel().isReturnFast()) return level;
}

View File

@@ -17,6 +17,7 @@ import com.dfsek.terra.addons.terrascript.parser.lang.Keyword;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
import com.dfsek.terra.api.util.generic.pair.Pair;
public class IfKeyword implements Keyword<Block.ReturnInfo<?>> {
@@ -58,23 +59,4 @@ public class IfKeyword implements Keyword<Block.ReturnInfo<?>> {
public ReturnType returnType() {
return ReturnType.VOID;
}
public static class Pair<L, R> {
private final L left;
private final R right;
public Pair(L left, R right) {
this.left = left;
this.right = right;
}
public L getLeft() {
return left;
}
public R getRight() {
return right;
}
}
}

View File

@@ -7,17 +7,13 @@
package com.dfsek.terra.addons.terrascript.parser.lang.operations;
import java.util.function.Supplier;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
public abstract class BinaryOperation<I, O> implements Returnable<O> {
private final Returnable<I> left;
private final Returnable<I> right;
protected final Returnable<I> left;
protected final Returnable<I> right;
private final Position start;
public BinaryOperation(Returnable<I> left, Returnable<I> right, Position start) {
@@ -26,13 +22,6 @@ public abstract class BinaryOperation<I, O> implements Returnable<O> {
this.start = start;
}
public abstract O apply(Supplier<I> left, Supplier<I> right);
@Override
public O apply(ImplementationArguments implementationArguments, Scope scope) {
return apply(() -> left.apply(implementationArguments, scope), () -> right.apply(implementationArguments, scope));
}
@Override
public Position getPosition() {
return start;

View File

@@ -7,9 +7,9 @@
package com.dfsek.terra.addons.terrascript.parser.lang.operations;
import java.util.function.Supplier;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
@@ -18,13 +18,18 @@ public class BooleanAndOperation extends BinaryOperation<Boolean, Boolean> {
super(left, right, start);
}
@Override
public Boolean apply(Supplier<Boolean> left, Supplier<Boolean> right) {
return left.get() && right.get();
}
@Override
public ReturnType returnType() {
return ReturnType.BOOLEAN;
}
@Override
public Boolean apply(ImplementationArguments implementationArguments, Scope scope) {
return applyBoolean(implementationArguments, scope);
}
@Override
public boolean applyBoolean(ImplementationArguments implementationArguments, Scope scope) {
return left.applyBoolean(implementationArguments, scope) && right.applyBoolean(implementationArguments, scope);
}
}

View File

@@ -7,7 +7,9 @@
package com.dfsek.terra.addons.terrascript.parser.lang.operations;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
@@ -17,8 +19,13 @@ public class BooleanNotOperation extends UnaryOperation<Boolean> {
}
@Override
public Boolean apply(Boolean input) {
return !input;
public Boolean apply(ImplementationArguments implementationArguments, Scope scope) {
return applyBoolean(implementationArguments, scope);
}
@Override
public boolean applyBoolean(ImplementationArguments implementationArguments, Scope scope) {
return !input.applyBoolean(implementationArguments, scope);
}
@Override

View File

@@ -7,9 +7,9 @@
package com.dfsek.terra.addons.terrascript.parser.lang.operations;
import java.util.function.Supplier;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
@@ -19,10 +19,15 @@ public class BooleanOrOperation extends BinaryOperation<Boolean, Boolean> {
}
@Override
public Boolean apply(Supplier<Boolean> left, Supplier<Boolean> right) {
return left.get() || right.get();
public Boolean apply(ImplementationArguments implementationArguments, Scope scope) {
return applyBoolean(implementationArguments, scope);
}
@Override
public boolean applyBoolean(ImplementationArguments implementationArguments, Scope scope) {
return left.applyBoolean(implementationArguments, scope) || right.applyBoolean(implementationArguments, scope);
}
@Override
public ReturnType returnType() {
return ReturnType.BOOLEAN;

View File

@@ -7,9 +7,9 @@
package com.dfsek.terra.addons.terrascript.parser.lang.operations;
import java.util.function.Supplier;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
@@ -18,13 +18,24 @@ public class ConcatenationOperation extends BinaryOperation<Object, Object> {
super(left, right, position);
}
@Override
public String apply(Supplier<Object> left, Supplier<Object> right) {
return left.get().toString() + right.get().toString();
private static String toString(Object object) {
String s = object.toString();
if(object instanceof Double) {
int l = s.length();
if(s.charAt(l - 2) == '.' && s.charAt(l - 1) == '0') {
s = s.substring(0, s.length() - 2);
}
}
return s;
}
@Override
public Returnable.ReturnType returnType() {
return Returnable.ReturnType.STRING;
}
@Override
public Object apply(ImplementationArguments implementationArguments, Scope scope) {
return toString(left.apply(implementationArguments, scope)) + toString(right.apply(implementationArguments, scope));
}
}
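toString(...) trims a trailing ".0" from doubles so whole numbers concatenate without the decimal part. A slightly simplified but equivalent rendering, with a couple of examples:

    public class ConcatToStringDemo {
        static String render(Object object) {
            String s = object.toString();
            // Double.toString always yields at least "0.0", so this matches the charAt checks.
            if(object instanceof Double && s.endsWith(".0")) {
                s = s.substring(0, s.length() - 2);
            }
            return s;
        }

        public static void main(String[] args) {
            System.out.println(render(2.0) + " apples");       // "2 apples"
            System.out.println(render(2.5) + " apples");       // "2.5 apples"
            System.out.println(render("x = ") + render(3.0));  // "x = 3"
        }
    }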

View File

@@ -7,9 +7,9 @@
package com.dfsek.terra.addons.terrascript.parser.lang.operations;
import java.util.function.Supplier;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
@@ -18,13 +18,18 @@ public class DivisionOperation extends BinaryOperation<Number, Number> {
super(left, right, position);
}
@Override
public Number apply(Supplier<Number> left, Supplier<Number> right) {
return left.get().doubleValue() / right.get().doubleValue();
}
@Override
public Returnable.ReturnType returnType() {
return Returnable.ReturnType.NUMBER;
}
@Override
public Number apply(ImplementationArguments implementationArguments, Scope scope) {
return applyDouble(implementationArguments, scope);
}
@Override
public double applyDouble(ImplementationArguments implementationArguments, Scope scope) {
return left.applyDouble(implementationArguments, scope) / right.applyDouble(implementationArguments, scope);
}
}

View File

@@ -7,9 +7,9 @@
package com.dfsek.terra.addons.terrascript.parser.lang.operations;
import java.util.function.Supplier;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
@@ -19,10 +19,15 @@ public class ModuloOperation extends BinaryOperation<Number, Number> {
}
@Override
public Number apply(Supplier<Number> left, Supplier<Number> right) {
return left.get().doubleValue() % right.get().doubleValue();
public Number apply(ImplementationArguments implementationArguments, Scope scope) {
return applyDouble(implementationArguments, scope);
}
@Override
public double applyDouble(ImplementationArguments implementationArguments, Scope scope) {
return left.applyDouble(implementationArguments, scope) % right.applyDouble(implementationArguments, scope);
}
@Override
public ReturnType returnType() {
return ReturnType.NUMBER;

View File

@@ -7,9 +7,9 @@
package com.dfsek.terra.addons.terrascript.parser.lang.operations;
import java.util.function.Supplier;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
@@ -19,10 +19,15 @@ public class MultiplicationOperation extends BinaryOperation<Number, Number> {
}
@Override
public Number apply(Supplier<Number> left, Supplier<Number> right) {
return left.get().doubleValue() * right.get().doubleValue();
public Number apply(ImplementationArguments implementationArguments, Scope scope) {
return applyDouble(implementationArguments, scope);
}
@Override
public double applyDouble(ImplementationArguments implementationArguments, Scope scope) {
return left.applyDouble(implementationArguments, scope) * right.applyDouble(implementationArguments, scope);
}
@Override
public ReturnType returnType() {
return ReturnType.NUMBER;

View File

@@ -7,7 +7,9 @@
package com.dfsek.terra.addons.terrascript.parser.lang.operations;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
@@ -16,13 +18,18 @@ public class NegationOperation extends UnaryOperation<Number> {
super(input, position);
}
@Override
public Number apply(Number input) {
return -input.doubleValue();
}
@Override
public ReturnType returnType() {
return ReturnType.NUMBER;
}
@Override
public Number apply(ImplementationArguments implementationArguments, Scope scope) {
return applyDouble(implementationArguments, scope);
}
@Override
public double applyDouble(ImplementationArguments implementationArguments, Scope scope) {
return -input.applyDouble(implementationArguments, scope);
}
}

View File

@@ -7,9 +7,9 @@
package com.dfsek.terra.addons.terrascript.parser.lang.operations;
import java.util.function.Supplier;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
@@ -19,10 +19,15 @@ public class NumberAdditionOperation extends BinaryOperation<Number, Number> {
}
@Override
public Number apply(Supplier<Number> left, Supplier<Number> right) {
return left.get().doubleValue() + right.get().doubleValue();
public Number apply(ImplementationArguments implementationArguments, Scope scope) {
return applyDouble(implementationArguments, scope);
}
@Override
public double applyDouble(ImplementationArguments implementationArguments, Scope scope) {
return left.applyDouble(implementationArguments, scope) + right.applyDouble(implementationArguments, scope);
}
@Override
public ReturnType returnType() {
return ReturnType.NUMBER;

View File

@@ -7,9 +7,9 @@
package com.dfsek.terra.addons.terrascript.parser.lang.operations;
import java.util.function.Supplier;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
@@ -19,10 +19,15 @@ public class SubtractionOperation extends BinaryOperation<Number, Number> {
}
@Override
public Number apply(Supplier<Number> left, Supplier<Number> right) {
return left.get().doubleValue() - right.get().doubleValue();
public Number apply(ImplementationArguments implementationArguments, Scope scope) {
return applyDouble(implementationArguments, scope);
}
@Override
public double applyDouble(ImplementationArguments implementationArguments, Scope scope) {
return left.applyDouble(implementationArguments, scope) - right.applyDouble(implementationArguments, scope);
}
@Override
public ReturnType returnType() {
return ReturnType.NUMBER;

View File

@@ -7,14 +7,12 @@
package com.dfsek.terra.addons.terrascript.parser.lang.operations;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
public abstract class UnaryOperation<T> implements Returnable<T> {
private final Returnable<T> input;
protected final Returnable<T> input;
private final Position position;
public UnaryOperation(Returnable<T> input, Position position) {
@@ -22,13 +20,6 @@ public abstract class UnaryOperation<T> implements Returnable<T> {
this.position = position;
}
public abstract T apply(T input);
@Override
public T apply(ImplementationArguments implementationArguments, Scope scope) {
return apply(input.apply(implementationArguments, scope));
}
@Override
public Position getPosition() {
return position;

View File

@@ -9,9 +9,9 @@ package com.dfsek.terra.addons.terrascript.parser.lang.operations.statements;
import net.jafama.FastMath;
import java.util.function.Supplier;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.parser.lang.operations.BinaryOperation;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
@@ -24,20 +24,25 @@ public class EqualsStatement extends BinaryOperation<Object, Boolean> {
super(left, right, position);
}
@Override
public Boolean apply(Supplier<Object> left, Supplier<Object> right) {
Object leftUnwrapped = left.get();
Object rightUnwrapped = right.get();
if(leftUnwrapped instanceof Number l && rightUnwrapped instanceof Number r) {
return FastMath.abs(l.doubleValue() - r.doubleValue()) <= EPSILON;
}
return leftUnwrapped.equals(rightUnwrapped);
}
@Override
public Returnable.ReturnType returnType() {
return Returnable.ReturnType.BOOLEAN;
}
@Override
public Boolean apply(ImplementationArguments implementationArguments, Scope scope) {
return applyBoolean(implementationArguments, scope);
}
@Override
public boolean applyBoolean(ImplementationArguments implementationArguments, Scope scope) {
Object leftValue = left.apply(implementationArguments, scope);
Object rightValue = right.apply(implementationArguments, scope);
if(leftValue instanceof Number l && rightValue instanceof Number r) {
return FastMath.abs(l.doubleValue() - r.doubleValue()) <= EPSILON;
}
return leftValue.equals(rightValue);
}
}
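Numeric operands are compared with an epsilon tolerance rather than Object.equals, since the engine now moves values around as primitive doubles and exact equality would be brittle after arithmetic. A small illustration (EPSILON is assumed here to be a tiny constant such as 1e-9; the actual value lives in the real class):

    public class EpsilonEqualsDemo {
        private static final double EPSILON = 1e-9; // assumed magnitude, for illustration only

        static boolean numbersEqual(double a, double b) {
            return Math.abs(a - b) <= EPSILON;
        }

        public static void main(String[] args) {
            double sum = 0.1 + 0.2;
            System.out.println(sum == 0.3);              // false: floating-point drift
            System.out.println(numbersEqual(sum, 0.3));  // true: within tolerance
        }
    }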

View File

@@ -7,9 +7,9 @@
package com.dfsek.terra.addons.terrascript.parser.lang.operations.statements;
import java.util.function.Supplier;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.parser.lang.operations.BinaryOperation;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
@@ -19,14 +19,18 @@ public class GreaterOrEqualsThanStatement extends BinaryOperation<Number, Boolea
super(left, right, position);
}
@Override
public Boolean apply(Supplier<Number> left, Supplier<Number> right) {
return left.get().doubleValue() >= right.get().doubleValue();
}
@Override
public Returnable.ReturnType returnType() {
return Returnable.ReturnType.BOOLEAN;
}
@Override
public Boolean apply(ImplementationArguments implementationArguments, Scope scope) {
return applyBoolean(implementationArguments, scope);
}
@Override
public boolean applyBoolean(ImplementationArguments implementationArguments, Scope scope) {
return left.applyDouble(implementationArguments, scope) >= right.applyDouble(implementationArguments, scope);
}
}

View File

@@ -7,9 +7,9 @@
package com.dfsek.terra.addons.terrascript.parser.lang.operations.statements;
import java.util.function.Supplier;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.parser.lang.operations.BinaryOperation;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
@@ -19,11 +19,16 @@ public class GreaterThanStatement extends BinaryOperation<Number, Boolean> {
super(left, right, position);
}
@Override
public Boolean apply(Supplier<Number> left, Supplier<Number> right) {
return left.get().doubleValue() > right.get().doubleValue();
public Boolean apply(ImplementationArguments implementationArguments, Scope scope) {
return applyBoolean(implementationArguments, scope);
}
@Override
public boolean applyBoolean(ImplementationArguments implementationArguments, Scope scope) {
return left.applyDouble(implementationArguments, scope) > right.applyDouble(implementationArguments, scope);
}
@Override
public Returnable.ReturnType returnType() {

View File

@@ -7,9 +7,9 @@
package com.dfsek.terra.addons.terrascript.parser.lang.operations.statements;
import java.util.function.Supplier;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.parser.lang.operations.BinaryOperation;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
@@ -19,12 +19,17 @@ public class LessThanOrEqualsStatement extends BinaryOperation<Number, Boolean>
super(left, right, position);
}
@Override
public Boolean apply(Supplier<Number> left, Supplier<Number> right) {
return left.get().doubleValue() <= right.get().doubleValue();
public Boolean apply(ImplementationArguments implementationArguments, Scope scope) {
return applyBoolean(implementationArguments, scope);
}
@Override
public boolean applyBoolean(ImplementationArguments implementationArguments, Scope scope) {
return left.applyDouble(implementationArguments, scope) <= right.applyDouble(implementationArguments, scope);
}
@Override
public Returnable.ReturnType returnType() {
return Returnable.ReturnType.BOOLEAN;

View File

@@ -7,9 +7,9 @@
package com.dfsek.terra.addons.terrascript.parser.lang.operations.statements;
import java.util.function.Supplier;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.parser.lang.operations.BinaryOperation;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
@@ -19,12 +19,17 @@ public class LessThanStatement extends BinaryOperation<Number, Boolean> {
super(left, right, position);
}
@Override
public Boolean apply(Supplier<Number> left, Supplier<Number> right) {
return left.get().doubleValue() < right.get().doubleValue();
public Boolean apply(ImplementationArguments implementationArguments, Scope scope) {
return applyBoolean(implementationArguments, scope);
}
@Override
public boolean applyBoolean(ImplementationArguments implementationArguments, Scope scope) {
return left.applyDouble(implementationArguments, scope) < right.applyDouble(implementationArguments, scope);
}
@Override
public Returnable.ReturnType returnType() {
return Returnable.ReturnType.BOOLEAN;

View File

@@ -9,9 +9,9 @@ package com.dfsek.terra.addons.terrascript.parser.lang.operations.statements;
import net.jafama.FastMath;
import java.util.function.Supplier;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.parser.lang.operations.BinaryOperation;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
@@ -24,16 +24,20 @@ public class NotEqualsStatement extends BinaryOperation<Object, Boolean> {
}
@Override
public Boolean apply(Supplier<Object> left, Supplier<Object> right) {
Object leftUnwrapped = left.get();
Object rightUnwrapped = right.get();
if(leftUnwrapped instanceof Number l && rightUnwrapped instanceof Number r) {
return FastMath.abs(l.doubleValue() - r.doubleValue()) > EPSILON;
}
return !leftUnwrapped.equals(rightUnwrapped);
public Boolean apply(ImplementationArguments implementationArguments, Scope scope) {
return applyBoolean(implementationArguments, scope);
}
@Override
public boolean applyBoolean(ImplementationArguments implementationArguments, Scope scope) {
Object leftValue = left.apply(implementationArguments, scope);
Object rightValue = right.apply(implementationArguments, scope);
if(leftValue instanceof Number l && rightValue instanceof Number r) {
return FastMath.abs(l.doubleValue() - r.doubleValue()) > EPSILON;
}
return !leftValue.equals(rightValue);
}
@Override
public Returnable.ReturnType returnType() {

View File

@@ -1,40 +0,0 @@
/*
* Copyright (c) 2020-2021 Polyhedral Development
*
* The Terra Core Addons are licensed under the terms of the MIT License. For more details,
* reference the LICENSE file in this module's root directory.
*/
package com.dfsek.terra.addons.terrascript.parser.lang.variables;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Item;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
public class VariableAssignmentNode<T> implements Item<T> {
private final Returnable<T> value;
private final Position position;
private final String identifier;
public VariableAssignmentNode(Returnable<T> value, String identifier, Position position) {
this.value = value;
this.identifier = identifier;
this.position = position;
}
@SuppressWarnings("unchecked")
@Override
public synchronized T apply(ImplementationArguments implementationArguments, Scope scope) {
T val = value.apply(implementationArguments, scope);
((Variable<T>) scope.get(identifier)).setValue(val);
return val;
}
@Override
public Position getPosition() {
return position;
}
}

View File

@@ -1,62 +0,0 @@
/*
* Copyright (c) 2020-2021 Polyhedral Development
*
* The Terra Core Addons are licensed under the terms of the MIT License. For more details,
* reference the LICENSE file in this module's root directory.
*/
package com.dfsek.terra.addons.terrascript.parser.lang.variables;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Item;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
public class VariableDeclarationNode<T> implements Item<T> {
private final Position position;
private final String identifier;
private final Returnable<T> value;
private final Returnable.ReturnType type;
public VariableDeclarationNode(Position position, String identifier, Returnable<T> value, Returnable.ReturnType type) {
switch(type) {
case STRING:
case BOOLEAN:
case NUMBER:
break;
default:
throw new IllegalArgumentException("Invalid variable type: " + type);
}
this.position = position;
this.identifier = identifier;
this.value = value;
this.type = type;
}
@Override
public T apply(ImplementationArguments implementationArguments, Scope scope) {
T result = value.apply(implementationArguments, scope);
scope.put(identifier, switch(type) {
case NUMBER -> new NumberVariable((Number) result, position);
case BOOLEAN -> new BooleanVariable((Boolean) result, position);
case STRING -> new StringVariable((String) result, position);
default -> throw new IllegalStateException("Unexpected value: " + type);
});
return result;
}
@Override
public Position getPosition() {
return position;
}
public Returnable.ReturnType getType() {
return type;
}
public String getIdentifier() {
return identifier;
}
}
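The deleted declaration node above resolved variables by name and boxed every value into a NumberVariable, BooleanVariable or StringVariable. The replacement machinery added later in this diff instead writes through a per-variable slot index via typed assignment nodes. One plausible way a parser could map a declared type onto those nodes is sketched below; this dispatch is an illustration only, and the actual replacement code is not among the files shown here:

// Illustration: dispatch a declaration's initializer to the new index-based assignment nodes.
@SuppressWarnings("unchecked")
static VariableAssignmentNode<?> declarationNode(Returnable.ReturnType type, Returnable<?> initializer,
                                                 Position position, int index) {
    return switch(type) {
        case NUMBER -> new NumAssignmentNode((Returnable<Number>) initializer, position, index);
        case BOOLEAN -> new BoolAssignmentNode((Returnable<Boolean>) initializer, position, index);
        case STRING -> new StrAssignmentNode((Returnable<String>) initializer, position, index);
        default -> throw new IllegalArgumentException("Invalid variable type: " + type);
    };
}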

View File

@@ -0,0 +1,25 @@
package com.dfsek.terra.addons.terrascript.parser.lang.variables.assign;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
public class BoolAssignmentNode extends VariableAssignmentNode<Boolean> {
public BoolAssignmentNode(Returnable<Boolean> value, Position position, int index) {
super(value, position, index);
}
@Override
public Boolean apply(ImplementationArguments implementationArguments, Scope scope) {
return applyBoolean(implementationArguments, scope);
}
@Override
public boolean applyBoolean(ImplementationArguments implementationArguments, Scope scope) {
boolean val = value.applyBoolean(implementationArguments, scope);
scope.setBool(index, val);
return val;
}
}

View File

@@ -0,0 +1,25 @@
package com.dfsek.terra.addons.terrascript.parser.lang.variables.assign;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
public class NumAssignmentNode extends VariableAssignmentNode<Number> {
public NumAssignmentNode(Returnable<Number> value, Position position, int index) {
super(value, position, index);
}
@Override
public Number apply(ImplementationArguments implementationArguments, Scope scope) {
return applyDouble(implementationArguments, scope);
}
@Override
public double applyDouble(ImplementationArguments implementationArguments, Scope scope) {
double val = value.applyDouble(implementationArguments, scope);
scope.setNum(index, val);
return val;
}
}

View File

@@ -0,0 +1,21 @@
package com.dfsek.terra.addons.terrascript.parser.lang.variables.assign;
import com.dfsek.terra.addons.terrascript.parser.lang.ImplementationArguments;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.parser.lang.Scope;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
public class StrAssignmentNode extends VariableAssignmentNode<String> {
public StrAssignmentNode(Returnable<String> value, Position position, int index) {
super(value, position, index);
}
@Override
public String apply(ImplementationArguments implementationArguments, Scope scope) {
String val = value.apply(implementationArguments, scope);
scope.setStr(index, val);
return val;
}
}

View File

@@ -0,0 +1,31 @@
/*
* Copyright (c) 2020-2021 Polyhedral Development
*
* The Terra Core Addons are licensed under the terms of the MIT License. For more details,
* reference the LICENSE file in this module's root directory.
*/
package com.dfsek.terra.addons.terrascript.parser.lang.variables.assign;
import com.dfsek.terra.addons.terrascript.parser.lang.Item;
import com.dfsek.terra.addons.terrascript.parser.lang.Returnable;
import com.dfsek.terra.addons.terrascript.tokenizer.Position;
public abstract class VariableAssignmentNode<T> implements Item<T> {
protected final Returnable<T> value;
protected final int index;
private final Position position;
public VariableAssignmentNode(Returnable<T> value, Position position, int index) {
this.value = value;
this.index = index;
this.position = position;
}
@Override
public Position getPosition() {
return position;
}
}
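The typed subclasses above write through scope.setNum, scope.setBool and scope.setStr using a slot index fixed at parse time, rather than the old name-keyed scope.get(identifier) lookup; strings have no primitive specialization, so StrAssignmentNode keeps the boxed apply path. A minimal sketch of what such an index-based scope could look like (the field layout and the getter names are assumptions; Terra's actual Scope is not among the files shown in this diff):

// Hypothetical flat-array scope: each variable type gets its own slot array and every
// declared variable is assigned a fixed index by the parser before execution.
final class IndexedScope {
    private final double[] nums;
    private final boolean[] bools;
    private final String[] strs;

    IndexedScope(int numSlots, int boolSlots, int strSlots) {
        this.nums = new double[numSlots];
        this.bools = new boolean[boolSlots];
        this.strs = new String[strSlots];
    }

    void setNum(int index, double value) { nums[index] = value; }

    double getNum(int index) { return nums[index]; }

    void setBool(int index, boolean value) { bools[index] = value; }

    boolean getBool(int index) { return bools[index]; }

    void setStr(int index, String value) { strs[index] = value; }

    String getStr(int index) { return strs[index]; }
}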

Some files were not shown because too many files have changed in this diff.