Change Java whitespace handling in .editorconfig (#425)

* Change whitespace handling in .editorconfig

* Reformat code

* Fix format error

* Reformat code

---------

Co-authored-by: Zoë Gidiere <duplexsys@protonmail.com>
Astrashh 2023-11-13 11:57:01 +11:00 committed by GitHub
parent a73fda7d04
commit defd775f13
793 changed files with 7579 additions and 7577 deletions


@ -8,7 +8,7 @@ indent_style = space
insert_final_newline = false
max_line_length = 140
tab_width = 4
ij_continuation_indent_size = 8
ij_continuation_indent_size = 4
ij_formatter_off_tag = @formatter:off
ij_formatter_on_tag = @formatter:on
ij_formatter_tags_enabled = false
@ -25,12 +25,12 @@ ij_java_align_multiline_annotation_parameters = true
ij_java_align_multiline_array_initializer_expression = true
ij_java_align_multiline_assignment = true
ij_java_align_multiline_binary_operation = true
ij_java_align_multiline_chained_methods = true
ij_java_align_multiline_chained_methods = false
ij_java_align_multiline_extends_list = true
ij_java_align_multiline_for = true
ij_java_align_multiline_method_parentheses = true
ij_java_align_multiline_method_parentheses = false
ij_java_align_multiline_parameters = true
ij_java_align_multiline_parameters_in_calls = true
ij_java_align_multiline_parameters_in_calls = false
ij_java_align_multiline_parenthesized_expression = true
ij_java_align_multiline_records = true
ij_java_align_multiline_resources = true
@ -127,7 +127,7 @@ ij_java_keep_blank_lines_in_declarations = 2
ij_java_keep_builder_methods_indents = true
ij_java_keep_control_statement_in_one_line = true
ij_java_keep_first_column_comment = false
ij_java_keep_indents_on_empty_lines = true
ij_java_keep_indents_on_empty_lines = false
ij_java_keep_line_breaks = true
ij_java_keep_multiple_expressions_in_one_line = false
ij_java_keep_simple_blocks_in_one_line = true
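
The settings changed above are: ij_continuation_indent_size 8 -> 4, ij_java_align_multiline_chained_methods true -> false, ij_java_align_multiline_method_parentheses true -> false, ij_java_align_multiline_parameters_in_calls true -> false, and ij_java_keep_indents_on_empty_lines true -> false. In practice, wrapped continuation lines now indent by 4 spaces instead of 8, chained calls and call arguments are no longer aligned with the first call or the opening parenthesis, and blank lines no longer keep indentation. A rough sketch of the effect is below (a hypothetical class, not code from this commit; the exact old alignment depended on how the chain was wrapped):

// Illustrative only: approximate before/after output of the IntelliJ formatter under these settings.
public class FormattingExample {
    public static void main(String[] args) {
        // Old style (continuation indent 8, chained calls aligned) wrapped roughly like:
        //     String old = new StringBuilder("Hello")
        //             .append(", world")
        //             .toString();
        // New style (continuation indent 4, alignment disabled) indents each wrapped call by 4 spaces:
        String greeting = new StringBuilder("Hello")
            .append(", world")
            .toString();
        System.out.println(greeting);
    }
}

The re-indented platform.getEventManager() chains throughout the diffs below are this same change applied across the codebase.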


@ -8,17 +8,17 @@ import com.dfsek.terra.api.addon.BaseAddon;
public class ApiAddon implements BaseAddon {
private final Version version;
private final String id;
public ApiAddon(Version version, String id) {
this.version = version;
this.id = id;
}
@Override
public Version getVersion() {
return version;
}
@Override
public String getID() {
return id;


@ -15,7 +15,7 @@ public class ApiAddonClassLoader extends URLClassLoader {
static {
ClassLoader.registerAsParallelCapable();
}
public ApiAddonClassLoader(URL[] urls, ClassLoader parent) {
super(urls, parent);
}


@ -20,18 +20,18 @@ import com.dfsek.terra.api.addon.bootstrap.BootstrapBaseAddon;
public class ApiAddonLoader implements BootstrapBaseAddon<BaseAddon> {
private static final Version VERSION = Versions.getVersion(1, 0, 0);
@Override
public Iterable<BaseAddon> loadAddons(Path addonsFolder, BootstrapAddonClassLoader parent) {
return Collections.emptySet();
}
@Override
public String getID() {
return "API";
}
@Override
public Version getVersion() {
return VERSION;


@ -9,11 +9,11 @@ class BaseBiomeColumn implements Column<Biome> {
private final Biome base;
private final int min;
private final int max;
private final int x;
private final int z;
private final long seed;
protected BaseBiomeColumn(BiomeExtrusionProvider biomeProvider, Biome base, int min, int max, int x, int z, long seed) {
this.biomeProvider = biomeProvider;
this.base = base;
@ -23,27 +23,27 @@ class BaseBiomeColumn implements Column<Biome> {
this.z = z;
this.seed = seed;
}
@Override
public int getMinY() {
return min;
}
@Override
public int getMaxY() {
return max;
}
@Override
public int getX() {
return x;
}
@Override
public int getZ() {
return z;
}
@Override
public Biome get(int y) {
return biomeProvider.extrude(base, x, y, z, seed);


@ -2,6 +2,8 @@ package com.dfsek.terra.addons.biome.extrusion;
import com.dfsek.tectonic.api.config.template.object.ObjectTemplate;
import java.util.function.Supplier;
import com.dfsek.terra.addons.biome.extrusion.api.Extrusion;
import com.dfsek.terra.addons.biome.extrusion.api.ReplaceableBiome;
import com.dfsek.terra.addons.biome.extrusion.config.BiomeExtrusionTemplate;
@ -21,47 +23,45 @@ import com.dfsek.terra.api.util.reflection.TypeKey;
import com.dfsek.terra.api.world.biome.Biome;
import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
import java.util.function.Supplier;
public class BiomeExtrusionAddon implements AddonInitializer {
public static final TypeKey<Supplier<ObjectTemplate<Extrusion>>> EXTRUSION_REGISTRY_KEY = new TypeKey<>() {
};
public static final TypeKey<Supplier<ObjectTemplate<BiomeProvider>>> PROVIDER_REGISTRY_KEY = new TypeKey<>() {
};
@Inject
private Platform platform;
@Inject
private BaseAddon addon;
@Override
public void initialize() {
platform.getEventManager()
.getHandler(FunctionalEventHandler.class)
.register(addon, ConfigPackPreLoadEvent.class)
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<BiomeProvider>>> providerRegistry =
event.getPack()
.getOrCreateRegistry(PROVIDER_REGISTRY_KEY);
providerRegistry.register(addon.key("EXTRUSION"), BiomeExtrusionTemplate::new);
})
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<Extrusion>>> extrusionRegistry = event.getPack().getOrCreateRegistry(
EXTRUSION_REGISTRY_KEY);
extrusionRegistry.register(addon.key("SET"), SetExtrusionTemplate::new);
extrusionRegistry.register(addon.key("REPLACE"), ReplaceExtrusionTemplate::new);
})
.failThrough();
.getHandler(FunctionalEventHandler.class)
.register(addon, ConfigPackPreLoadEvent.class)
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<BiomeProvider>>> providerRegistry =
event.getPack()
.getOrCreateRegistry(PROVIDER_REGISTRY_KEY);
providerRegistry.register(addon.key("EXTRUSION"), BiomeExtrusionTemplate::new);
})
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<Extrusion>>> extrusionRegistry = event.getPack().getOrCreateRegistry(
EXTRUSION_REGISTRY_KEY);
extrusionRegistry.register(addon.key("SET"), SetExtrusionTemplate::new);
extrusionRegistry.register(addon.key("REPLACE"), ReplaceExtrusionTemplate::new);
})
.failThrough();
platform.getEventManager()
.getHandler(FunctionalEventHandler.class)
.register(addon, ConfigPackPostLoadEvent.class)
.then(event -> {
Registry<Biome> biomeRegistry = event.getPack().getRegistry(Biome.class);
event.getPack().applyLoader(ReplaceableBiome.class, new ReplaceableBiomeLoader(biomeRegistry));
});
.getHandler(FunctionalEventHandler.class)
.register(addon, ConfigPackPostLoadEvent.class)
.then(event -> {
Registry<Biome> biomeRegistry = event.getPack().getRegistry(Biome.class);
event.getPack().applyLoader(ReplaceableBiome.class, new ReplaceableBiomeLoader(biomeRegistry));
});
}
}


@ -16,7 +16,7 @@ public class BiomeExtrusionProvider implements BiomeProvider {
private final Set<Biome> biomes;
private final List<Extrusion> extrusions;
private final int resolution;
public BiomeExtrusionProvider(BiomeProvider delegate, List<Extrusion> extrusions, int resolution) {
this.delegate = delegate;
this.biomes = delegate.stream().collect(Collectors.toSet());
@ -24,43 +24,43 @@ public class BiomeExtrusionProvider implements BiomeProvider {
this.extrusions = extrusions;
this.resolution = resolution;
}
@Override
public Biome getBiome(int x, int y, int z, long seed) {
Biome delegated = delegate.getBiome(x, y, z, seed);
return extrude(delegated, x, y, z, seed);
}
public Biome extrude(Biome original, int x, int y, int z, long seed) {
for(Extrusion extrusion : extrusions) {
original = extrusion.extrude(original, x, y, z, seed);
}
return original;
}
@Override
public Column<Biome> getColumn(int x, int z, long seed, int min, int max) {
return delegate.getBaseBiome(x, z, seed)
.map(base -> (Column<Biome>) new BaseBiomeColumn(this, base, min, max, x, z, seed))
.orElseGet(() -> BiomeProvider.super.getColumn(x, z, seed, min, max));
.map(base -> (Column<Biome>) new BaseBiomeColumn(this, base, min, max, x, z, seed))
.orElseGet(() -> BiomeProvider.super.getColumn(x, z, seed, min, max));
}
@Override
public Optional<Biome> getBaseBiome(int x, int z, long seed) {
return delegate.getBaseBiome(x, z, seed);
}
@Override
public Iterable<Biome> getBiomes() {
return biomes;
}
@Override
public int resolution() {
return resolution;
}
public BiomeProvider getDelegate() {
return delegate;
}


@ -7,6 +7,6 @@ import com.dfsek.terra.api.world.biome.Biome;
public interface Extrusion {
Biome extrude(Biome original, int x, int y, int z, long seed);
Collection<Biome> getBiomes();
}


@ -6,16 +6,16 @@ import com.dfsek.terra.api.world.biome.Biome;
final class PresentBiome implements ReplaceableBiome {
private final Biome biome;
PresentBiome(Biome biome) {
this.biome = biome;
}
@Override
public Biome get(Biome existing) {
return biome;
}
@Override
public boolean isSelf() {
return false;


@ -13,19 +13,19 @@ public sealed interface ReplaceableBiome permits PresentBiome, SelfBiome {
static ReplaceableBiome of(Biome biome) {
return new PresentBiome(biome);
}
static ReplaceableBiome self() {
return SelfBiome.INSTANCE;
}
Biome get(Biome existing);
default Biome get() {
if(isSelf()) {
throw new IllegalStateException("Cannot get() self biome!");
}
return get(null);
}
boolean isSelf();
}


@ -8,12 +8,12 @@ import com.dfsek.terra.api.world.biome.Biome;
final class SelfBiome implements ReplaceableBiome {
public static final SelfBiome INSTANCE = new SelfBiome();
@Override
public Biome get(Biome existing) {
return Objects.requireNonNull(existing);
}
@Override
public boolean isSelf() {
return true;


@ -15,14 +15,14 @@ import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
public class BiomeExtrusionTemplate implements ObjectTemplate<BiomeProvider> {
@Value("provider")
private @Meta BiomeProvider provider;
@Value("resolution")
@Default
private @Meta int resolution = 4;
@Value("extrusions")
private @Meta List<@Meta Extrusion> extrusions;
@Override
public BiomeProvider get() {
return new BiomeExtrusionProvider(provider, extrusions, resolution);


@ -15,18 +15,18 @@ import com.dfsek.terra.api.world.biome.Biome;
public class ReplaceableBiomeLoader implements TypeLoader<ReplaceableBiome> {
private final Registry<Biome> biomeRegistry;
public ReplaceableBiomeLoader(Registry<Biome> biomeRegistry) {
this.biomeRegistry = biomeRegistry;
}
@Override
public ReplaceableBiome load(@NotNull AnnotatedType t, @NotNull Object c, @NotNull ConfigLoader loader, DepthTracker depthTracker)
throws LoadException {
if(c.equals("SELF")) return ReplaceableBiome.self();
return biomeRegistry
.getByID((String) c)
.map(ReplaceableBiome::of)
.orElseThrow(() -> new LoadException("No such biome: " + c, depthTracker));
.getByID((String) c)
.map(ReplaceableBiome::of)
.orElseThrow(() -> new LoadException("No such biome: " + c, depthTracker));
}
}


@ -12,10 +12,10 @@ import com.dfsek.terra.api.util.collection.ProbabilityCollection;
public class ReplaceExtrusionTemplate extends SamplerExtrusionTemplate {
@Value("to")
private @Meta ProbabilityCollection<@Meta ReplaceableBiome> biomes;
@Value("from")
private @Meta String fromTag;
@Override
public Extrusion get() {
return new ReplaceExtrusion(sampler, range, biomes, fromTag);


@ -12,7 +12,7 @@ import com.dfsek.terra.api.util.Range;
public abstract class SamplerExtrusionTemplate implements ObjectTemplate<Extrusion> {
@Value("sampler")
protected @Meta NoiseSampler sampler;
@Value("range")
protected @Meta Range range;
}


@ -12,7 +12,7 @@ import com.dfsek.terra.api.util.collection.ProbabilityCollection;
public class SetExtrusionTemplate extends SamplerExtrusionTemplate {
@Value("to")
private @Meta ProbabilityCollection<@Meta ReplaceableBiome> biomes;
@Override
public Extrusion get() {
return new SetExtrusion(sampler, range, biomes);


@ -18,20 +18,20 @@ import com.dfsek.terra.api.world.biome.Biome;
*/
public class ReplaceExtrusion implements Extrusion {
private final NoiseSampler sampler;
private final Range range;
private final ProbabilityCollection<ReplaceableBiome> biomes;
private final Predicate<Biome> hasTag;
public ReplaceExtrusion(NoiseSampler sampler, Range range, ProbabilityCollection<ReplaceableBiome> biomes, String tag) {
this.sampler = sampler;
this.range = range;
this.biomes = biomes;
this.hasTag = BiomeQueries.has(tag);
}
@Override
public Biome extrude(Biome original, int x, int y, int z, long seed) {
if(hasTag.test(original)) {
@ -39,14 +39,14 @@ public class ReplaceExtrusion implements Extrusion {
}
return original;
}
@Override
public Collection<Biome> getBiomes() {
return biomes
.getContents()
.stream()
.filter(Predicate.not(ReplaceableBiome::isSelf))
.map(ReplaceableBiome::get)
.collect(Collectors.toSet());
.getContents()
.stream()
.filter(Predicate.not(ReplaceableBiome::isSelf))
.map(ReplaceableBiome::get)
.collect(Collectors.toSet());
}
}


@ -17,29 +17,29 @@ import com.dfsek.terra.api.world.biome.Biome;
*/
public class SetExtrusion implements Extrusion {
private final NoiseSampler sampler;
private final Range range;
private final ProbabilityCollection<ReplaceableBiome> biomes;
public SetExtrusion(NoiseSampler sampler, Range range, ProbabilityCollection<ReplaceableBiome> biomes) {
this.sampler = sampler;
this.range = range;
this.biomes = biomes;
}
@Override
public Biome extrude(Biome original, int x, int y, int z, long seed) {
return range.ifInRange(y, () -> biomes.get(sampler, x, y, z, seed).get(original), original);
}
@Override
public Collection<Biome> getBiomes() {
return biomes
.getContents()
.stream()
.filter(Predicate.not(ReplaceableBiome::isSelf))
.map(ReplaceableBiome::get)
.collect(Collectors.toSet());
.getContents()
.stream()
.filter(Predicate.not(ReplaceableBiome::isSelf))
.map(ReplaceableBiome::get)
.collect(Collectors.toSet());
}
}


@ -17,33 +17,33 @@ import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
public class ImageBiomeProvider implements BiomeProvider {
private final int resolution;
private final ColorConverter<Biome> colorConverter;
private final ColorSampler colorSampler;
public ImageBiomeProvider(ColorConverter<Biome> colorConverter, ColorSampler colorSampler, int resolution) {
this.resolution = resolution;
this.colorConverter = colorConverter;
this.colorSampler = colorSampler;
}
@Override
public Biome getBiome(int x, int y, int z, long seed) {
return getBiome(x, z);
}
public Biome getBiome(int x, int z) {
x /= resolution;
z /= resolution;
return colorConverter.apply(colorSampler.apply(x, z));
}
@Override
public Optional<Biome> getBaseBiome(int x, int z, long seed) {
return Optional.of(getBiome(x, z));
}
@Override
public Iterable<Biome> getBiomes() {
return colorConverter.getEntries();


@ -33,46 +33,46 @@ import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
public class ImageBiomeProviderAddon implements AddonInitializer {
public static final TypeKey<Supplier<ObjectTemplate<BiomeProvider>>> PROVIDER_REGISTRY_KEY = new TypeKey<>() {
};
public static final TypeKey<Supplier<ObjectTemplate<ColorConverter<Biome>>>> BIOME_COLOR_CONVERTER_REGISTRY_KEY = new TypeKey<>() {
};
public static final TypeKey<Supplier<ObjectTemplate<ColorMapping<Biome>>>> BIOME_COLOR_MAPPING_REGISTRY_KEY = new TypeKey<>() {
};
@Inject
private Platform platform;
@Inject
private BaseAddon addon;
@Override
public void initialize() {
platform.getEventManager()
.getHandler(FunctionalEventHandler.class)
.register(addon, ConfigPackPreLoadEvent.class)
.priority(501)
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<BiomeProvider>>> providerRegistry = event.getPack().getOrCreateRegistry(
PROVIDER_REGISTRY_KEY);
providerRegistry.register(addon.key("IMAGE"), ImageProviderTemplate::new);
})
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<ColorConverter<Biome>>>> biomeColorConverterRegistry =
event.getPack().getOrCreateRegistry(
BIOME_COLOR_CONVERTER_REGISTRY_KEY);
biomeColorConverterRegistry.register(addon.key("EXACT"), ExactBiomeColorConverterTemplate::new);
biomeColorConverterRegistry.register(addon.key("CLOSEST"), ClosestBiomeColorConverterTemplate::new);
})
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<ColorMapping<Biome>>>> biomeColorMappingRegistry =
event.getPack().getOrCreateRegistry(
BIOME_COLOR_MAPPING_REGISTRY_KEY);
biomeColorMappingRegistry.register(addon.key("USE_BIOME_COLORS"),
() -> () -> new BiomeDefinedColorMapping<>(event.getPack().getRegistry(Biome.class),
b -> b));
biomeColorMappingRegistry.register(addon.key("MAP"), DefinedBiomeColorMappingTemplate::new);
})
.failThrough();
.getHandler(FunctionalEventHandler.class)
.register(addon, ConfigPackPreLoadEvent.class)
.priority(501)
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<BiomeProvider>>> providerRegistry = event.getPack().getOrCreateRegistry(
PROVIDER_REGISTRY_KEY);
providerRegistry.register(addon.key("IMAGE"), ImageProviderTemplate::new);
})
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<ColorConverter<Biome>>>> biomeColorConverterRegistry =
event.getPack().getOrCreateRegistry(
BIOME_COLOR_CONVERTER_REGISTRY_KEY);
biomeColorConverterRegistry.register(addon.key("EXACT"), ExactBiomeColorConverterTemplate::new);
biomeColorConverterRegistry.register(addon.key("CLOSEST"), ClosestBiomeColorConverterTemplate::new);
})
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<ColorMapping<Biome>>>> biomeColorMappingRegistry =
event.getPack().getOrCreateRegistry(
BIOME_COLOR_MAPPING_REGISTRY_KEY);
biomeColorMappingRegistry.register(addon.key("USE_BIOME_COLORS"),
() -> () -> new BiomeDefinedColorMapping<>(event.getPack().getRegistry(Biome.class),
b -> b));
biomeColorMappingRegistry.register(addon.key("MAP"), DefinedBiomeColorMappingTemplate::new);
})
.failThrough();
}
}


@ -21,18 +21,18 @@ import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
@SuppressWarnings("FieldMayBeFinal")
public class ImageProviderTemplate implements ObjectTemplate<BiomeProvider> {
@Value("resolution")
@Default
@Description("Sets the resolution at which to sample the image.")
private int resolution = 1;
@Value("color-sampler")
private ColorSampler colorSampler;
@Value("color-conversion")
private ColorConverter<Biome> colorConverter;
@Override
public BiomeProvider get() {
return new ImageBiomeProvider(colorConverter, colorSampler, resolution);


@ -8,10 +8,10 @@ import com.dfsek.terra.api.world.biome.Biome;
public class ClosestBiomeColorConverterTemplate extends ClosestColorConverterTemplate<Biome> {
@Value("match")
private ColorMapping<Biome> match;
@Override
protected ColorMapping<Biome> getMapping() {
return match;


@ -9,27 +9,27 @@ import com.dfsek.terra.api.world.biome.Biome;
public class ExactBiomeColorConverterTemplate extends ExactColorConverterTemplate<Biome> {
@Value("match")
private ColorMapping<Biome> match;
@Value("else")
private Biome fallback;
@Value("ignore-alpha")
@Default
private boolean ignoreAlpha = true;
@Override
protected ColorMapping<Biome> getMapping() {
return match;
}
@Override
protected Biome getFallback() {
return fallback;
}
@Override
protected boolean ignoreAlpha() {
return ignoreAlpha;


@ -12,10 +12,10 @@ import com.dfsek.terra.api.world.biome.Biome;
public class DefinedBiomeColorMappingTemplate implements ObjectTemplate<ColorMapping<Biome>> {
@Value("map")
Map<ColorString, Biome> map;
@Override
public ColorMapping<Biome> get() {
var map = MapUtil.mapKeys(this.map, ColorString::getColor);


@ -23,53 +23,53 @@ public class ImageBiomeProvider implements BiomeProvider {
private final BufferedImage image;
private final int resolution;
private final Align align;
public ImageBiomeProvider(Set<Biome> registry, BufferedImage image, int resolution, Align align) {
this.image = image;
this.resolution = resolution;
this.align = align;
registry.forEach(biome -> colorBiomeMap.put(new Color(biome.getColor()), biome));
}
private static int distance(Color a, Color b) {
return Math.abs(a.getRed() - b.getRed()) + Math.abs(a.getGreen() - b.getGreen()) + Math.abs(a.getBlue() - b.getBlue());
}
@Override
public Biome getBiome(int x, int y, int z, long seed) {
return getBiome(x, z);
}
public Biome getBiome(int x, int z) {
x /= resolution;
z /= resolution;
Color color = align.getColor(image, x, z);
return colorBiomeMap.get(colorBiomeMap.keySet()
.stream()
.reduce(colorBiomeMap.keySet().stream().findAny().orElseThrow(IllegalStateException::new),
(running, element) -> {
int d1 = distance(color, running);
int d2 = distance(color, element);
return d1 < d2 ? running : element;
}));
.stream()
.reduce(colorBiomeMap.keySet().stream().findAny().orElseThrow(IllegalStateException::new),
(running, element) -> {
int d1 = distance(color, running);
int d2 = distance(color, element);
return d1 < d2 ? running : element;
}));
}
@Override
public Optional<Biome> getBaseBiome(int x, int z, long seed) {
return Optional.of(getBiome(x, z));
}
@Override
public Iterable<Biome> getBiomes() {
return colorBiomeMap.values();
}
public enum Align {
CENTER {
@Override
public Color getColor(BufferedImage image, int x, int z) {
return new Color(image.getRGB(Math.floorMod(x - image.getWidth() / 2, image.getWidth()),
Math.floorMod(z - image.getHeight() / 2, image.getHeight())));
Math.floorMod(z - image.getHeight() / 2, image.getHeight())));
}
},
NONE {
@ -78,7 +78,7 @@ public class ImageBiomeProvider implements BiomeProvider {
return new Color(image.getRGB(Math.floorMod(x, image.getWidth()), Math.floorMod(z, image.getHeight())));
}
};
public abstract Color getColor(BufferedImage image, int x, int z);
}
}


@ -26,31 +26,31 @@ import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
public class ImageBiomeProviderAddon implements AddonInitializer {
public static final TypeKey<Supplier<ObjectTemplate<BiomeProvider>>> PROVIDER_REGISTRY_KEY = new TypeKey<>() {
};
private static final Logger logger = LoggerFactory.getLogger(ImageBiomeProviderAddon.class);
@Inject
private Platform platform;
@Inject
private BaseAddon addon;
@Override
public void initialize() {
platform.getEventManager()
.getHandler(FunctionalEventHandler.class)
.register(addon, ConfigPackPreLoadEvent.class)
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<BiomeProvider>>> providerRegistry = event.getPack().getOrCreateRegistry(
PROVIDER_REGISTRY_KEY);
providerRegistry.register(addon.key("IMAGE"),
() -> new ImageProviderTemplate(event.getPack().getRegistry(Biome.class)));
})
.failThrough();
.getHandler(FunctionalEventHandler.class)
.register(addon, ConfigPackPreLoadEvent.class)
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<BiomeProvider>>> providerRegistry = event.getPack().getOrCreateRegistry(
PROVIDER_REGISTRY_KEY);
providerRegistry.register(addon.key("IMAGE"),
() -> new ImageProviderTemplate(event.getPack().getRegistry(Biome.class)));
})
.failThrough();
if(platform.getTerraConfig().isDebugLog())
logger.warn(
"The biome-provider-image addon is deprecated and scheduled for removal in Terra 7.0. It is recommended to use the " +
"biome-provider-image-v2 addon for future pack development instead.");
"The biome-provider-image addon is deprecated and scheduled for removal in Terra 7.0. It is recommended to use the " +
"biome-provider-image-v2 addon for future pack development instead.");
}
}


@ -33,11 +33,11 @@ public class ImageProviderTemplate implements ObjectTemplate<BiomeProvider> {
@Value("image.align")
@Description("Sets the alignment style to use for the image.")
private ImageBiomeProvider.Align align;
public ImageProviderTemplate(Registry<Biome> set) {
this.biomes = set;
}
@Override
public BiomeProvider get() {
return new ImageBiomeProvider(new HashSet<>(biomes.entries()), image, resolution, align);


@ -38,52 +38,52 @@ import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
public class BiomePipelineAddon implements AddonInitializer {
public static final TypeKey<Supplier<ObjectTemplate<Source>>> SOURCE_REGISTRY_KEY = new TypeKey<>() {
};
public static final TypeKey<Supplier<ObjectTemplate<Stage>>> STAGE_REGISTRY_KEY = new TypeKey<>() {
};
public static final TypeKey<Supplier<ObjectTemplate<BiomeProvider>>> PROVIDER_REGISTRY_KEY = new TypeKey<>() {
};
@Inject
private Platform platform;
@Inject
private BaseAddon addon;
@Override
public void initialize() {
platform.getEventManager()
.getHandler(FunctionalEventHandler.class)
.register(addon, ConfigPackPreLoadEvent.class)
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<BiomeProvider>>> providerRegistry = event.getPack().getOrCreateRegistry(
PROVIDER_REGISTRY_KEY);
providerRegistry.register(addon.key("PIPELINE"), BiomePipelineTemplate::new);
})
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<Source>>> sourceRegistry = event.getPack().getOrCreateRegistry(
SOURCE_REGISTRY_KEY);
sourceRegistry.register(addon.key("SAMPLER"), SamplerSourceTemplate::new);
})
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<Stage>>> stageRegistry = event.getPack().getOrCreateRegistry(
STAGE_REGISTRY_KEY);
stageRegistry.register(addon.key("FRACTAL_EXPAND"), ExpanderStageTemplate::new);
stageRegistry.register(addon.key("SMOOTH"), SmoothStageTemplate::new);
stageRegistry.register(addon.key("REPLACE"), ReplaceStageTemplate::new);
stageRegistry.register(addon.key("REPLACE_LIST"), ReplaceListStageTemplate::new);
stageRegistry.register(addon.key("BORDER"), BorderStageTemplate::new);
stageRegistry.register(addon.key("BORDER_LIST"), BorderListStageTemplate::new);
})
.failThrough();
.getHandler(FunctionalEventHandler.class)
.register(addon, ConfigPackPreLoadEvent.class)
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<BiomeProvider>>> providerRegistry = event.getPack().getOrCreateRegistry(
PROVIDER_REGISTRY_KEY);
providerRegistry.register(addon.key("PIPELINE"), BiomePipelineTemplate::new);
})
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<Source>>> sourceRegistry = event.getPack().getOrCreateRegistry(
SOURCE_REGISTRY_KEY);
sourceRegistry.register(addon.key("SAMPLER"), SamplerSourceTemplate::new);
})
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<Stage>>> stageRegistry = event.getPack().getOrCreateRegistry(
STAGE_REGISTRY_KEY);
stageRegistry.register(addon.key("FRACTAL_EXPAND"), ExpanderStageTemplate::new);
stageRegistry.register(addon.key("SMOOTH"), SmoothStageTemplate::new);
stageRegistry.register(addon.key("REPLACE"), ReplaceStageTemplate::new);
stageRegistry.register(addon.key("REPLACE_LIST"), ReplaceListStageTemplate::new);
stageRegistry.register(addon.key("BORDER"), BorderStageTemplate::new);
stageRegistry.register(addon.key("BORDER_LIST"), BorderListStageTemplate::new);
})
.failThrough();
platform.getEventManager()
.getHandler(FunctionalEventHandler.class)
.register(addon, ConfigPackPostLoadEvent.class)
.then(event -> {
Registry<Biome> biomeRegistry = event.getPack().getRegistry(Biome.class);
event.getPack().applyLoader(PipelineBiome.class, new PipelineBiomeLoader(biomeRegistry));
});
.getHandler(FunctionalEventHandler.class)
.register(addon, ConfigPackPostLoadEvent.class)
.then(event -> {
Registry<Biome> biomeRegistry = event.getPack().getRegistry(Biome.class);
event.getPack().applyLoader(PipelineBiome.class, new PipelineBiomeLoader(biomeRegistry));
});
}
}


@ -12,11 +12,11 @@ import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
public class BiomePipelineColumn implements Column<Biome> {
private final int min;
private final int max;
private final int x;
private final int z;
private final Biome biome;
protected BiomePipelineColumn(BiomeProvider biomeProvider, int min, int max, int x, int z, long seed) {
this.min = min;
this.max = max;
@ -24,44 +24,44 @@ public class BiomePipelineColumn implements Column<Biome> {
this.z = z;
this.biome = biomeProvider.getBiome(x, 0, z, seed);
}
@Override
public int getMinY() {
return min;
}
@Override
public int getMaxY() {
return max;
}
@Override
public int getX() {
return x;
}
@Override
public int getZ() {
return z;
}
@Override
public Biome get(int y) {
return biome;
}
@Override
public void forRanges(int resolution, IntIntObjConsumer<Biome> consumer) {
consumer.accept(min, max, biome);
}
@Override
public void forEach(Consumer<Biome> consumer) {
for(int y = min; y < max; y++) {
consumer.accept(biome);
}
}
@Override
public void forEach(IntObjConsumer<Biome> consumer) {
for(int y = min; y < max; y++) {


@ -22,23 +22,23 @@ import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
public class PipelineBiomeProvider implements BiomeProvider {
private final LoadingCache<SeededVector, BiomeChunk> biomeChunkCache;
private final int chunkSize;
private final int resolution;
private final NoiseSampler mutator;
private final double noiseAmp;
private final Set<Biome> biomes;
public PipelineBiomeProvider(Pipeline pipeline, int resolution, NoiseSampler mutator, double noiseAmp) {
this.resolution = resolution;
this.mutator = mutator;
this.noiseAmp = noiseAmp;
this.chunkSize = pipeline.getChunkSize();
this.biomeChunkCache = Caffeine.newBuilder()
.maximumSize(64)
.build(pipeline::generateChunk);
.maximumSize(64)
.build(pipeline::generateChunk);
Set<PipelineBiome> biomeSet = new HashSet<>();
pipeline.getSource().getBiomes().forEach(biomeSet::add);
Iterable<PipelineBiome> result = biomeSet;
@ -49,16 +49,16 @@ public class PipelineBiomeProvider implements BiomeProvider {
Iterable<PipelineBiome> finalResult = result;
result.forEach(pipelineBiome -> {
if(pipelineBiome.isPlaceholder()) {
StringBuilder biomeList = new StringBuilder("\n");
StreamSupport.stream(finalResult.spliterator(), false)
.sorted(Comparator.comparing(StringIdentifiable::getID))
.forEach(delegate -> biomeList
.append(" - ")
.append(delegate.getID())
.append(':')
.append(delegate.getClass().getCanonicalName())
.append('\n'));
.sorted(Comparator.comparing(StringIdentifiable::getID))
.forEach(delegate -> biomeList
.append(" - ")
.append(delegate.getID())
.append(':')
.append(delegate.getClass().getCanonicalName())
.append('\n'));
throw new IllegalArgumentException("Biome Pipeline leaks placeholder biome \"" + pipelineBiome.getID() +
"\". Ensure there is a stage to guarantee replacement of the placeholder biome. " +
"Biomes: " +
@ -67,47 +67,47 @@ public class PipelineBiomeProvider implements BiomeProvider {
this.biomes.add(pipelineBiome.getBiome());
});
}
@Override
public Biome getBiome(int x, int y, int z, long seed) {
return getBiome(x, z, seed);
}
public Biome getBiome(int x, int z, long seed) {
x += mutator.noise(seed + 1, x, z) * noiseAmp;
z += mutator.noise(seed + 2, x, z) * noiseAmp;
x /= resolution;
z /= resolution;
int chunkX = Math.floorDiv(x, chunkSize);
int chunkZ = Math.floorDiv(z, chunkSize);
int chunkWorldX = chunkX * chunkSize;
int chunkWorldZ = chunkZ * chunkSize;
int xInChunk = x - chunkWorldX;
int zInChunk = z - chunkWorldZ;
return biomeChunkCache.get(new SeededVector(seed, chunkWorldX, chunkWorldZ)).get(xInChunk, zInChunk).getBiome();
}
@Override
public Iterable<Biome> getBiomes() {
return biomes;
}
@Override
public Optional<Biome> getBaseBiome(int x, int z, long seed) {
return Optional.of(getBiome(x, z, seed));
}
@Override
public Column<Biome> getColumn(int x, int z, long seed, int min, int max) {
return new BiomePipelineColumn(this, min, max, x, z, seed);
}
@Override
public int resolution() {
return resolution;


@ -5,6 +5,6 @@ import com.dfsek.terra.addons.biome.pipeline.v2.api.biome.PipelineBiome;
public interface BiomeChunk {
PipelineBiome get(int xInChunk, int zInChunk);
}


@ -9,14 +9,14 @@ import com.dfsek.terra.addons.biome.pipeline.v2.pipeline.BiomeChunkImpl.ViewPoin
* filling in null biomes as a result of this resizing.
*/
public interface Expander extends Stage {
PipelineBiome fillBiome(ViewPoint viewPoint);
@Override
default int maxRelativeReadDistance() {
return 0;
}
@Override
default PipelineBiome apply(ViewPoint viewPoint) {
PipelineBiome currentBiome = viewPoint.getBiome();


@ -5,10 +5,10 @@ import java.util.List;
public interface Pipeline {
BiomeChunk generateChunk(SeededVector worldCoordinates);
int getChunkSize();
Source getSource();
List<Stage> getStages();
}


@ -1,7 +1,7 @@
package com.dfsek.terra.addons.biome.pipeline.v2.api;
public record SeededVector(long seed, int x, int z) {
@Override
public boolean equals(Object obj) {
if(obj instanceof SeededVector that) {
@ -9,7 +9,7 @@ public record SeededVector(long seed, int x, int z) {
}
return false;
}
@Override
public int hashCode() {
int code = x;


@ -6,6 +6,6 @@ import com.dfsek.terra.addons.biome.pipeline.v2.api.biome.PipelineBiome;
public interface Source {
PipelineBiome get(long seed, int x, int z);
Iterable<PipelineBiome> getBiomes();
}


@ -6,9 +6,9 @@ import com.dfsek.terra.addons.biome.pipeline.v2.pipeline.BiomeChunkImpl.ViewPoin
public interface Stage {
PipelineBiome apply(ViewPoint viewPoint);
int maxRelativeReadDistance();
default Iterable<PipelineBiome> getBiomes(Iterable<PipelineBiome> biomes) {
return biomes;
}


@ -7,32 +7,32 @@ import com.dfsek.terra.api.world.biome.Biome;
public final class DelegatedPipelineBiome implements PipelineBiome {
private final Biome biome;
public DelegatedPipelineBiome(Biome biome) {
this.biome = biome;
}
@Override
public Biome getBiome() {
return biome;
}
@Override
public int hashCode() {
return biome.hashCode();
}
@Override
public boolean equals(Object obj) {
if(!(obj instanceof DelegatedPipelineBiome that)) return false;
return that.biome.equals(this.biome);
}
@Override
public Set<String> getTags() {
return biome.getTags();
}
@Override
public String getID() {
return biome.getID();


@ -10,26 +10,26 @@ public interface PipelineBiome extends StringIdentifiable {
static PipelineBiome placeholder(String id) {
return new PlaceholderPipelineBiome(id);
}
static PipelineBiome from(Biome biome) {
return new DelegatedPipelineBiome(biome);
}
static PipelineBiome self() {
return SelfPipelineBiome.INSTANCE;
}
Biome getBiome();
Set<String> getTags();
default boolean isPlaceholder() {
return false;
}
default boolean isSelf() {
return false;
}
}


@ -9,43 +9,43 @@ import com.dfsek.terra.api.world.biome.Biome;
final class PlaceholderPipelineBiome implements PipelineBiome {
private final Set<String> tags;
private final String id;
public PlaceholderPipelineBiome(String id) {
this.id = id;
tags = new HashSet<>();
tags.add(id);
tags.add("ALL");
}
@Override
public Biome getBiome() {
throw new UnsupportedOperationException("Cannot get raw biome from placeholder pipeline biome");
}
@Override
public Set<String> getTags() {
return tags;
}
@Override
public String getID() {
return id;
}
@Override
public boolean isPlaceholder() {
return true;
}
@Override
public int hashCode() {
return id.hashCode();
}
@Override
public boolean equals(Object obj) {
if(!(obj instanceof PlaceholderPipelineBiome that)) return false;
return this.id.equals(that.id);
}
}


@ -8,31 +8,31 @@ import com.dfsek.terra.api.world.biome.Biome;
final class SelfPipelineBiome implements PipelineBiome {
public static final SelfPipelineBiome INSTANCE = new SelfPipelineBiome();
private SelfPipelineBiome() {
}
@Override
public Biome getBiome() {
throw new UnsupportedOperationException("Cannot get biome from self delegate");
}
@Override
public boolean isSelf() {
return true;
}
@Override
public boolean isPlaceholder() {
return true;
}
@Override
public Set<String> getTags() {
return Collections.emptySet();
}
@Override
public String getID() {
return "SELF";


@ -29,7 +29,7 @@ public class BiomePipelineTemplate implements ObjectTemplate<BiomeProvider> {
@Default
@Description("""
The resolution at which to sample biomes.
Larger values are quadratically faster, but produce lower quality results.
For example, a value of 3 would sample every 3 blocks.""")
protected @Meta int resolution = 1;
@ -47,7 +47,7 @@ public class BiomePipelineTemplate implements ObjectTemplate<BiomeProvider> {
@Value("pipeline.stages")
@Description("A list of pipeline stages to apply to the result of #source")
private @Meta List<@Meta Stage> stages;
@Override
public BiomeProvider get() {
return new PipelineBiomeProvider(new PipelineImpl(source, stages, resolution, 128), resolution, blendSampler, blendAmplitude);


@ -15,18 +15,18 @@ import com.dfsek.terra.api.world.biome.Biome;
public class PipelineBiomeLoader implements TypeLoader<PipelineBiome> {
private final Registry<Biome> biomeRegistry;
public PipelineBiomeLoader(Registry<Biome> biomeRegistry) {
this.biomeRegistry = biomeRegistry;
}
@Override
public PipelineBiome load(@NotNull AnnotatedType t, @NotNull Object c, @NotNull ConfigLoader loader, DepthTracker depthTracker)
throws LoadException {
if(c.equals("SELF")) return PipelineBiome.self();
return biomeRegistry
.getByID((String) c)
.map(PipelineBiome::from)
.orElseGet(() -> PipelineBiome.placeholder((String) c));
.getByID((String) c)
.map(PipelineBiome::from)
.orElseGet(() -> PipelineBiome.placeholder((String) c));
}
}


@ -22,11 +22,11 @@ public class SamplerSourceTemplate extends SourceTemplate {
@Value("sampler")
@Description("The sampler used to distribute biomes.")
private @Meta NoiseSampler noise;
@Value("biomes")
@Description("The biomes to be distributed.")
private @Meta ProbabilityCollection<@Meta PipelineBiome> biomes;
@Override
public Source get() {
return new SamplerSource(biomes, noise);


@ -23,17 +23,17 @@ import com.dfsek.terra.api.util.collection.ProbabilityCollection;
public class BorderListStageTemplate extends StageTemplate {
@Value("from")
private @Meta String from;
@Value("default-replace")
private @Meta String defaultReplace;
@Value("default-to")
private @Meta ProbabilityCollection<@Meta PipelineBiome> defaultTo;
@Value("replace")
private @Meta Map<@Meta PipelineBiome, @Meta ProbabilityCollection<@Meta PipelineBiome>> replace;
@Override
public Stage get() {
return new BorderListStage(replace, from, defaultReplace, noise, defaultTo);


@ -21,13 +21,13 @@ import com.dfsek.terra.api.util.collection.ProbabilityCollection;
public class BorderStageTemplate extends StageTemplate {
@Value("from")
private @Meta String from;
@Value("replace")
private @Meta String replace;
@Value("to")
private @Meta ProbabilityCollection<@Meta PipelineBiome> to;
@Override
public Stage get() {
return new BorderStage(from, replace, noise, to);


@ -23,13 +23,13 @@ import com.dfsek.terra.api.util.collection.ProbabilityCollection;
public class ReplaceListStageTemplate extends StageTemplate {
@Value("default-from")
private @Meta String defaultFrom;
@Value("default-to")
private @Meta ProbabilityCollection<@Meta PipelineBiome> defaultTo;
@Value("to")
private @Meta Map<@Meta PipelineBiome, @Meta ProbabilityCollection<@Meta PipelineBiome>> replace;
@Override
public Stage get() {
return new ReplaceListStage(replace, defaultFrom, defaultTo, noise);


@ -21,10 +21,10 @@ import com.dfsek.terra.api.util.collection.ProbabilityCollection;
public class ReplaceStageTemplate extends StageTemplate {
@Value("from")
private @Meta String from;
@Value("to")
private @Meta ProbabilityCollection<@Meta PipelineBiome> to;
@Override
public Stage get() {
return new ReplaceStage(from, to, noise);


@ -10,44 +10,44 @@ import com.dfsek.terra.addons.biome.pipeline.v2.api.biome.PipelineBiome;
public class BiomeChunkImpl implements BiomeChunk {
private final SeededVector worldOrigin;
private final int chunkOriginArrayIndex;
private final int worldCoordinateScale;
private PipelineBiome[][] biomes;
public BiomeChunkImpl(SeededVector worldOrigin, PipelineImpl pipeline) {
this.worldOrigin = worldOrigin;
this.chunkOriginArrayIndex = pipeline.getChunkOriginArrayIndex();
this.worldCoordinateScale = pipeline.getResolution();
int size = pipeline.getArraySize();
int expanderCount = pipeline.getExpanderCount();
int expansionsApplied = 0;
// Allocate working arrays
this.biomes = new PipelineBiome[size][size];
PipelineBiome[][] lookupArray = new PipelineBiome[size][size];
// A second lookup array is required such that stage application doesn't affect lookups, otherwise application may cascade
// Construct working grid
int gridOrigin = 0;
int gridInterval = calculateGridInterval(expanderCount, expansionsApplied);
int gridSize = (size / gridInterval);
gridSize += expanderCount > 0 ? 1 : 0; // Add an extra border if expansion occurs
// Fill working grid with initial cells
for(int gridX = 0; gridX < gridSize; gridX++) {
for(int gridZ = 0; gridZ < gridSize; gridZ++) {
int xIndex = gridOrigin + gridX * gridInterval;
int zIndex = gridOrigin + gridZ * gridInterval;
biomes[xIndex][zIndex] = pipeline.getSource().get(worldOrigin.seed(), xIndexToWorldCoordinate(xIndex),
zIndexToWorldCoordinate(zIndex));
zIndexToWorldCoordinate(zIndex));
}
}
for(Stage stage : pipeline.getStages()) {
if(stage instanceof Expander) {
// Shrink working grid size, the expander will fill in null cells (as a result of shrinking the grid) during mutation
@ -55,20 +55,20 @@ public class BiomeChunkImpl implements BiomeChunk {
gridInterval = calculateGridInterval(expanderCount, expansionsApplied);
gridSize = expandSize(gridSize);
}
int stageReadDistance = stage.maxRelativeReadDistance();
if(stageReadDistance > 0) {
// Discard edges such that adjacent lookups are only ran on valid cells
gridSize = contractBordersFromSize(gridSize, stageReadDistance);
gridOrigin += stageReadDistance * gridInterval;
}
// Cycle arrays, the previously populated array is swapped to be used for lookups, and the result of the stage application
// overwrites the previous lookup array. This saves having to allocate a new array copy each time
PipelineBiome[][] tempArray = biomes;
biomes = lookupArray;
lookupArray = tempArray;
// Apply stage to working grid
for(int gridZ = 0; gridZ < gridSize; gridZ = gridZ + 1) {
for(int gridX = 0; gridX < gridSize; gridX = gridX + 1) {
@ -79,7 +79,7 @@ public class BiomeChunkImpl implements BiomeChunk {
}
}
}
protected static int initialSizeToArraySize(int expanderCount, int initialSize) {
int size = initialSize;
for(int i = 0; i < expanderCount; i++) {
@ -87,18 +87,18 @@ public class BiomeChunkImpl implements BiomeChunk {
}
return size;
}
protected static int calculateChunkOriginArrayIndex(int totalExpanderCount, List<Stage> stages) {
int finalGridOrigin = calculateFinalGridOrigin(totalExpanderCount, stages);
int initialGridInterval = calculateGridInterval(totalExpanderCount, 0);
// Round the final grid origin up to the nearest multiple of initialGridInterval, such that each
// chunk samples points on the same overall grid.
// Without this, shared chunk borders (required because of adjacent cell reads) will not be identical
// because points would be sampled on grids at different offsets, resulting in artifacts at borders.
return (int) Math.ceil((double) finalGridOrigin / initialGridInterval) * initialGridInterval;
}
private static int calculateFinalGridOrigin(int totalExpanderCount, List<Stage> stages) {
int gridOrigin = 0;
int expansionsApplied = 0;
@ -112,42 +112,42 @@ public class BiomeChunkImpl implements BiomeChunk {
}
return gridOrigin;
}
protected static int calculateChunkSize(int arraySize, int chunkOriginArrayIndex, int totalExpanderCount) {
return contractBordersFromSize(arraySize, chunkOriginArrayIndex) - (totalExpanderCount > 0 ? 1 : 0);
}
private static int expandSize(int size) {
return size * 2 - 1;
}
private static int contractBordersFromSize(int size, int border) {
return size - border * 2;
}
private static int calculateGridInterval(int totalExpansions, int expansionsApplied) {
return 1 << (totalExpansions - expansionsApplied);
}
@Override
public PipelineBiome get(int xInChunk, int zInChunk) {
int xIndex = xInChunk + chunkOriginArrayIndex;
int zIndex = zInChunk + chunkOriginArrayIndex;
return biomes[xIndex][zIndex];
}
private int xIndexToWorldCoordinate(int xIndex) {
return (worldOrigin.x() + xIndex - chunkOriginArrayIndex) * worldCoordinateScale;
}
private int zIndexToWorldCoordinate(int zIndex) {
return (worldOrigin.z() + zIndex - chunkOriginArrayIndex) * worldCoordinateScale;
}
private SeededVector getOrigin() {
return worldOrigin;
}
/**
* Represents a point on the operating grid within the biomes array
*/
@ -160,7 +160,7 @@ public class BiomeChunkImpl implements BiomeChunk {
private final int xIndex;
private final int zIndex;
private final PipelineBiome[][] lookupArray;
private ViewPoint(BiomeChunkImpl chunk, int gridInterval, int gridX, int gridZ, int xIndex, int zIndex,
PipelineBiome[][] lookupArray) {
this.chunk = chunk;
@ -172,45 +172,45 @@ public class BiomeChunkImpl implements BiomeChunk {
this.lookupArray = lookupArray;
this.biome = lookupArray[xIndex][zIndex];
}
public PipelineBiome getRelativeBiome(int x, int z) {
int lookupXIndex = this.xIndex + x * gridInterval;
int lookupZIndex = this.zIndex + z * gridInterval;
return lookupArray[lookupXIndex][lookupZIndex];
}
public PipelineBiome getBiome() {
return biome;
}
/**
* @return X position of the point relative to the operating grid
*/
public int gridX() {
return gridX;
}
/**
* @return Z position of the point relative to the operating grid
*/
public int gridZ() {
return gridZ;
}
/**
* @return X position of the point in the world
*/
public int worldX() {
return chunk.xIndexToWorldCoordinate(xIndex);
}
/**
* @return Z position of the point in the world
*/
public int worldZ() {
return chunk.zIndexToWorldCoordinate(zIndex);
}
public long worldSeed() {
return chunk.getOrigin().seed();
}


@ -14,9 +14,9 @@ import com.dfsek.terra.addons.biome.pipeline.v2.api.Stage;
public class PipelineImpl implements Pipeline {
private static final Logger logger = LoggerFactory.getLogger(PipelineImpl.class);
private final Source source;
private final List<Stage> stages;
private final int chunkSize;
@ -24,13 +24,13 @@ public class PipelineImpl implements Pipeline {
private final int arraySize;
private final int chunkOriginArrayIndex;
private final int resolution;
public PipelineImpl(Source source, List<Stage> stages, int resolution, int idealChunkArraySize) {
this.source = source;
this.stages = stages;
this.resolution = resolution;
this.expanderCount = (int) stages.stream().filter(s -> s instanceof Expander).count();
// Optimize for the ideal array size
int arraySize;
int chunkOriginArrayIndex;
@ -43,49 +43,49 @@ public class PipelineImpl implements Pipeline {
if(chunkSize > 1 && arraySize >= idealChunkArraySize) break;
initialSize++;
}
this.arraySize = arraySize;
this.chunkOriginArrayIndex = chunkOriginArrayIndex;
this.chunkSize = chunkSize;
logger.debug("Initialized a new biome pipeline:");
logger.debug("Array size: {} (Target: {})", arraySize, idealChunkArraySize);
logger.debug("Internal array origin: {}", chunkOriginArrayIndex);
logger.debug("Chunk size: {}", chunkSize);
}
@Override
public BiomeChunk generateChunk(SeededVector worldCoordinates) {
return new BiomeChunkImpl(worldCoordinates, this);
}
@Override
public int getChunkSize() {
return chunkSize;
}
@Override
public Source getSource() {
return source;
}
@Override
public List<Stage> getStages() {
return stages;
}
protected int getExpanderCount() {
return expanderCount;
}
protected int getArraySize() {
return arraySize;
}
protected int getChunkOriginArrayIndex() {
return chunkOriginArrayIndex;
}
protected int getResolution() {
return resolution;
}


@ -9,17 +9,17 @@ import com.dfsek.terra.api.util.collection.ProbabilityCollection;
public class SamplerSource implements Source {
private final ProbabilityCollection<PipelineBiome> biomes;
private final NoiseSampler sampler;
public SamplerSource(ProbabilityCollection<PipelineBiome> biomes, NoiseSampler sampler) {
this.biomes = biomes;
this.sampler = sampler;
}
@Override
public PipelineBiome get(long seed, int x, int z) {
return biomes.get(sampler, x, z, seed);
}
@Override
public Iterable<PipelineBiome> getBiomes() {
return biomes.getContents();


@ -8,18 +8,18 @@ import com.dfsek.terra.addons.biome.pipeline.v2.api.biome.PipelineBiome;
public class SingleSource implements Source {
private final PipelineBiome biome;
public SingleSource(PipelineBiome biome) {
this.biome = biome;
}
@Override
public PipelineBiome get(long seed, int x, int z) {
return biome;
}
@Override
public Set<PipelineBiome> getBiomes() {
return Collections.singleton(biome);


@ -7,27 +7,27 @@ import com.dfsek.terra.api.noise.NoiseSampler;
public class FractalExpander implements Expander {
private final NoiseSampler sampler;
public FractalExpander(NoiseSampler sampler) {
this.sampler = sampler;
}
@Override
public PipelineBiome fillBiome(BiomeChunkImpl.ViewPoint viewPoint) {
int xMod2 = viewPoint.gridX() % 2;
int zMod2 = viewPoint.gridZ() % 2;
double roll = sampler.noise(viewPoint.worldSeed(), viewPoint.worldX(), viewPoint.worldZ());
if(xMod2 == 1 && zMod2 == 0) { // Pick one of 2 neighbors on X axis randomly
return roll > 0 ? viewPoint.getRelativeBiome(-1, 0) : viewPoint.getRelativeBiome(1, 0);
} else if(xMod2 == 0 && zMod2 == 1) { // Pick one of 2 neighbors on Z axis randomly
return roll > 0 ? viewPoint.getRelativeBiome(0, -1) : viewPoint.getRelativeBiome(0, 1);
} else { // Pick one of 4 corners randomly
return roll > 0 ?
roll > 0.25 ? viewPoint.getRelativeBiome(-1, 1) : viewPoint.getRelativeBiome(1, 1) :


@ -28,9 +28,9 @@ public class BorderListStage implements Stage {
private final ProbabilityCollection<PipelineBiome> replaceDefault;
private final String defaultReplace;
private final Map<PipelineBiome, ProbabilityCollection<PipelineBiome>> replace;
private final Vector2Int[] borderPoints;
public BorderListStage(Map<PipelineBiome, ProbabilityCollection<PipelineBiome>> replace, String border, String defaultReplace,
NoiseSampler noiseSampler, ProbabilityCollection<PipelineBiome> replaceDefault) {
this.border = border;
@ -38,7 +38,7 @@ public class BorderListStage implements Stage {
this.replaceDefault = replaceDefault;
this.defaultReplace = defaultReplace;
this.replace = replace;
List<Vector2Int> points = new ArrayList<>();
for(int x = -1; x <= 1; x++) {
for(int z = -1; z <= 1; z++) {
@ -47,9 +47,9 @@ public class BorderListStage implements Stage {
}
}
this.borderPoints = points.toArray(new Vector2Int[0]);
}
@Override
public Iterable<PipelineBiome> getBiomes(Iterable<PipelineBiome> biomes) {
Set<PipelineBiome> biomeSet = new HashSet<>();
@ -58,7 +58,7 @@ public class BorderListStage implements Stage {
replace.forEach((biome, collection) -> biomeSet.addAll(collection.getContents()));
return biomeSet;
}
@Override
public PipelineBiome apply(BiomeChunkImpl.ViewPoint viewPoint) {
PipelineBiome center = viewPoint.getBiome();
@ -68,18 +68,18 @@ public class BorderListStage implements Stage {
if(current != null && current.getTags().contains(border)) {
if(replace.containsKey(center)) {
PipelineBiome replacement = replace.get(center).get(noiseSampler, viewPoint.worldX(), viewPoint.worldZ(),
viewPoint.worldSeed());
viewPoint.worldSeed());
return replacement.isSelf() ? center : replacement;
}
PipelineBiome replacement = replaceDefault.get(noiseSampler, viewPoint.worldX(), viewPoint.worldZ(),
viewPoint.worldSeed());
viewPoint.worldSeed());
return replacement.isSelf() ? center : replacement;
}
}
}
return center;
}
@Override
public int maxRelativeReadDistance() {
return 1;


@ -27,7 +27,7 @@ public class BorderStage implements Stage {
private final ProbabilityCollection<PipelineBiome> replace;
private final String replaceTag;
private final Vector2Int[] borderPoints;
public BorderStage(String border, String replaceTag, NoiseSampler noiseSampler, ProbabilityCollection<PipelineBiome> replace) {
this.border = border;
this.noiseSampler = noiseSampler;
@ -42,7 +42,7 @@ public class BorderStage implements Stage {
}
this.borderPoints = points.toArray(new Vector2Int[0]);
}
@Override
public PipelineBiome apply(BiomeChunkImpl.ViewPoint viewPoint) {
PipelineBiome center = viewPoint.getBiome();
@ -57,23 +57,23 @@ public class BorderStage implements Stage {
}
return center;
}
@Override
public Iterable<PipelineBiome> getBiomes(Iterable<PipelineBiome> biomes) {
Set<PipelineBiome> biomeSet = new HashSet<>();
biomes.forEach(biomeSet::add);
biomeSet.addAll(
replace
.getContents()
.stream()
.filter(
Predicate.not(PipelineBiome::isSelf)
)
.toList()
);
replace
.getContents()
.stream()
.filter(
Predicate.not(PipelineBiome::isSelf)
)
.toList()
);
return biomeSet;
}
@Override
public int maxRelativeReadDistance() {
return 1;


@ -24,7 +24,7 @@ public class ReplaceListStage implements Stage {
private final NoiseSampler sampler;
private final ProbabilityCollection<PipelineBiome> replaceDefault;
private final String defaultTag;
public ReplaceListStage(Map<PipelineBiome, ProbabilityCollection<PipelineBiome>> replace, String defaultTag,
ProbabilityCollection<PipelineBiome> replaceDefault, NoiseSampler sampler) {
this.replace = replace;
@ -32,7 +32,7 @@ public class ReplaceListStage implements Stage {
this.defaultTag = defaultTag;
this.replaceDefault = replaceDefault;
}
@Override
public PipelineBiome apply(BiomeChunkImpl.ViewPoint viewPoint) {
PipelineBiome center = viewPoint.getBiome();
@ -46,18 +46,18 @@ public class ReplaceListStage implements Stage {
}
return center;
}
@Override
public int maxRelativeReadDistance() {
return 0;
}
@Override
public Iterable<PipelineBiome> getBiomes(Iterable<PipelineBiome> biomes) {
Set<PipelineBiome> biomeSet = new HashSet<>();
Set<PipelineBiome> reject = new HashSet<>();
biomes.forEach(biome -> {
if(!biome.getTags().contains(defaultTag) && !replace.containsKey(biome)) {
biomeSet.add(biome);


@ -22,13 +22,13 @@ public class ReplaceStage implements Stage {
private final String replaceableTag;
private final ProbabilityCollection<PipelineBiome> replace;
private final NoiseSampler sampler;
public ReplaceStage(String replaceable, ProbabilityCollection<PipelineBiome> replace, NoiseSampler sampler) {
this.replaceableTag = replaceable;
this.replace = replace;
this.sampler = sampler;
}
@Override
public PipelineBiome apply(BiomeChunkImpl.ViewPoint viewPoint) {
if(viewPoint.getBiome().getTags().contains(replaceableTag)) {
@ -37,12 +37,12 @@ public class ReplaceStage implements Stage {
}
return viewPoint.getBiome();
}
@Override
public int maxRelativeReadDistance() {
return 0;
}
@Override
public Iterable<PipelineBiome> getBiomes(Iterable<PipelineBiome> biomes) {
Set<PipelineBiome> biomeSet = new HashSet<>();


@ -16,25 +16,25 @@ import com.dfsek.terra.api.noise.NoiseSampler;
public class SmoothStage implements Stage {
private final NoiseSampler sampler;
public SmoothStage(NoiseSampler sampler) {
this.sampler = sampler;
}
@Override
public PipelineBiome apply(BiomeChunkImpl.ViewPoint viewPoint) {
PipelineBiome top = viewPoint.getRelativeBiome(1, 0);
PipelineBiome bottom = viewPoint.getRelativeBiome(-1, 0);
PipelineBiome left = viewPoint.getRelativeBiome(0, 1);
PipelineBiome right = viewPoint.getRelativeBiome(0, -1);
double roll = sampler.noise(viewPoint.worldSeed(), viewPoint.worldX(), viewPoint.worldZ());
boolean vert = Objects.equals(top, bottom);
boolean horiz = Objects.equals(left, right);
if(vert && horiz) {
return roll > 0 ?
roll > 0.25 ? left : right :
@ -48,7 +48,7 @@ public class SmoothStage implements Stage {
}
return viewPoint.getBiome();
}
@Override
public int maxRelativeReadDistance() {
return 1;


@ -20,7 +20,7 @@ public class BiomeHolderImpl implements BiomeHolder {
private final int width;
private final int offset;
private BiomeDelegate[][] biomes;
public BiomeHolderImpl(int width, Vector2.Mutable origin) {
width += 4;
this.width = width;
@ -28,38 +28,38 @@ public class BiomeHolderImpl implements BiomeHolder {
this.origin = origin;
this.offset = 2;
}
private BiomeHolderImpl(BiomeDelegate[][] biomes, Vector2.Mutable origin, int width, int offset) {
this.biomes = biomes;
this.origin = origin;
this.width = width;
this.offset = 2 * offset;
}
@Override
public BiomeHolder expand(BiomeExpander expander, long seed) {
BiomeDelegate[][] old = biomes;
int newWidth = width * 2 - 1;
biomes = new BiomeDelegate[newWidth][newWidth];
for(int x = 0; x < width; x++) {
for(int z = 0; z < width; z++) {
biomes[x * 2][z * 2] = old[x][z];
if(z != width - 1)
biomes[x * 2][z * 2 + 1] = expander.getBetween(x + origin.getX(), z + 1 + origin.getZ(), seed, old[x][z],
old[x][z + 1]);
old[x][z + 1]);
if(x != width - 1)
biomes[x * 2 + 1][z * 2] = expander.getBetween(x + 1 + origin.getX(), z + origin.getZ(), seed, old[x][z],
old[x + 1][z]);
old[x + 1][z]);
if(x != width - 1 && z != width - 1)
biomes[x * 2 + 1][z * 2 + 1] = expander.getBetween(x + 1 + origin.getX(), z + 1 + origin.getZ(), seed, old[x][z],
old[x + 1][z + 1], old[x][z + 1], old[x + 1][z]);
old[x + 1][z + 1], old[x][z + 1], old[x + 1][z]);
}
}
return new BiomeHolderImpl(biomes, origin.setX(origin.getX() * 2 - 1).setZ(origin.getZ() * 2 - 1), newWidth, offset);
}
@Override
public void mutate(BiomeMutator mutator, long seed) {
for(int x = 0; x < width; x++) {
@ -69,7 +69,7 @@ public class BiomeHolderImpl implements BiomeHolder {
}
}
}
@Override
public void fill(BiomeSource source, long seed) {
for(int x = 0; x < width; x++) {
@ -78,14 +78,14 @@ public class BiomeHolderImpl implements BiomeHolder {
}
}
}
@Override
public BiomeDelegate getBiome(int x, int z) {
x += offset;
z += offset;
return getBiomeRaw(x, z);
}
@Override
public BiomeDelegate getBiomeRaw(int x, int z) {
if(x >= width || z >= width || x < 0 || z < 0) return null;

View File

@ -22,14 +22,14 @@ public class BiomePipeline {
private final List<Stage> stages;
private final int size;
private final int init;
private BiomePipeline(BiomeSource source, List<Stage> stages, int size, int init) {
this.source = source;
this.stages = stages;
this.size = size;
this.init = init;
}
/**
* Get biomes in a chunk
*
@ -44,37 +44,37 @@ public class BiomePipeline {
for(Stage stage : stages) holder = stage.apply(holder, seed);
return holder;
}
public BiomeSource getSource() {
return source;
}
public List<Stage> getStages() {
return Collections.unmodifiableList(stages);
}
public int getSize() {
return size;
}
public static final class BiomePipelineBuilder {
private final int init;
private final List<Stage> stages = new ArrayList<>();
private int expand;
public BiomePipelineBuilder(int init) {
this.init = init;
expand = init;
}
public BiomePipeline build(BiomeSource source) {
for(Stage stage : stages) {
if(stage.isExpansion()) expand = expand * 2 - 1;
}
return new BiomePipeline(source, stages, expand, init);
}
public BiomePipelineBuilder addStage(Stage stage) {
stages.add(stage);
return this;

View File

@ -40,7 +40,7 @@ import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
public class BiomePipelineAddon implements AddonInitializer {
public static final TypeKey<Supplier<ObjectTemplate<BiomeSource>>> SOURCE_REGISTRY_KEY = new TypeKey<>() {
};
public static final TypeKey<Supplier<ObjectTemplate<Stage>>> STAGE_REGISTRY_KEY = new TypeKey<>() {
@ -50,47 +50,47 @@ public class BiomePipelineAddon implements AddonInitializer {
private static final Logger logger = LoggerFactory.getLogger(BiomePipelineAddon.class);
@Inject
private Platform platform;
@Inject
private BaseAddon addon;
@Override
public void initialize() {
platform.getEventManager()
.getHandler(FunctionalEventHandler.class)
.register(addon, ConfigPackPreLoadEvent.class)
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<BiomeProvider>>> providerRegistry = event.getPack().getOrCreateRegistry(
PROVIDER_REGISTRY_KEY);
providerRegistry.register(addon.key("PIPELINE"), BiomePipelineTemplate::new);
})
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<BiomeSource>>> sourceRegistry = event.getPack().getOrCreateRegistry(
SOURCE_REGISTRY_KEY);
sourceRegistry.register(addon.key("SAMPLER"), SamplerSourceTemplate::new);
})
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<Stage>>> stageRegistry = event.getPack().getOrCreateRegistry(
STAGE_REGISTRY_KEY);
stageRegistry.register(addon.key("FRACTAL_EXPAND"), ExpanderStageTemplate::new);
stageRegistry.register(addon.key("SMOOTH"), SmoothMutatorTemplate::new);
stageRegistry.register(addon.key("REPLACE"), ReplaceMutatorTemplate::new);
stageRegistry.register(addon.key("REPLACE_LIST"), ReplaceListMutatorTemplate::new);
stageRegistry.register(addon.key("BORDER"), BorderMutatorTemplate::new);
stageRegistry.register(addon.key("BORDER_LIST"), BorderListMutatorTemplate::new);
})
.failThrough();
platform.getEventManager()
.getHandler(FunctionalEventHandler.class)
.register(addon, ConfigPackPostLoadEvent.class)
.then(event -> {
Registry<Biome> biomeRegistry = event.getPack().getRegistry(Biome.class);
event.getPack().applyLoader(BiomeDelegate.class, new BiomeDelegateLoader(biomeRegistry));
});
if(platform.getTerraConfig().isDebugLog())
logger.warn(
"The biome-provider-pipeline addon is deprecated and scheduled for removal in Terra 7.0. It is recommended to use the" +
" biome-provider-pipeline-v2 addon for future pack development instead.");
"The biome-provider-pipeline addon is deprecated and scheduled for removal in Terra 7.0. It is recommended to use the" +
" biome-provider-pipeline-v2 addon for future pack development instead.");
}
}

View File

@ -12,11 +12,11 @@ import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
class BiomePipelineColumn implements Column<Biome> {
private final int min;
private final int max;
private final int x;
private final int z;
private final Biome biome;
protected BiomePipelineColumn(BiomeProvider biomeProvider, int min, int max, int x, int z, long seed) {
this.min = min;
this.max = max;
@ -24,44 +24,44 @@ class BiomePipelineColumn implements Column<Biome> {
this.z = z;
this.biome = biomeProvider.getBiome(x, 0, z, seed);
}
@Override
public int getMinY() {
return min;
}
@Override
public int getMaxY() {
return max;
}
@Override
public int getX() {
return x;
}
@Override
public int getZ() {
return z;
}
@Override
public Biome get(int y) {
return biome;
}
@Override
public void forRanges(int resolution, IntIntObjConsumer<Biome> consumer) {
consumer.accept(min, max, biome);
}
@Override
public void forEach(Consumer<Biome> consumer) {
for(int y = min; y < max; y++) {
consumer.accept(biome);
}
}
@Override
public void forEach(IntObjConsumer<Biome> consumer) {
for(int y = min; y < max; y++) {

View File

@ -32,18 +32,18 @@ public class BiomePipelineProvider implements BiomeProvider {
private final int resolution;
private final NoiseSampler mutator;
private final double noiseAmp;
private final Set<Biome> biomes;
public BiomePipelineProvider(BiomePipeline pipeline, int resolution, NoiseSampler mutator, double noiseAmp) {
this.resolution = resolution;
this.mutator = mutator;
this.noiseAmp = noiseAmp;
holderCache = Caffeine.newBuilder()
.maximumSize(1024)
.build(key -> pipeline.getBiomes(key.x, key.z, key.seed));
this.pipeline = pipeline;
Set<BiomeDelegate> biomeSet = new HashSet<>();
pipeline.getSource().getBiomes().forEach(biomeSet::add);
Iterable<BiomeDelegate> result = biomeSet;
@ -54,16 +54,16 @@ public class BiomePipelineProvider implements BiomeProvider {
Iterable<BiomeDelegate> finalResult = result;
result.forEach(biomeDelegate -> {
if(biomeDelegate.isEphemeral()) {
StringBuilder biomeList = new StringBuilder("\n");
StreamSupport.stream(finalResult.spliterator(), false)
.sorted(Comparator.comparing(StringIdentifiable::getID))
.forEach(delegate -> biomeList
.append(" - ")
.append(delegate.getID())
.append(':')
.append(delegate.getClass().getCanonicalName())
.append('\n'));
throw new IllegalArgumentException("Biome Pipeline leaks ephemeral biome \"" + biomeDelegate.getID() +
"\". Ensure there is a stage to guarantee replacement of the ephemeral biome. Biomes: " +
biomeList);
@ -71,46 +71,46 @@ public class BiomePipelineProvider implements BiomeProvider {
this.biomes.add(biomeDelegate.getBiome());
});
}
@Override
public Biome getBiome(int x, int y, int z, long seed) {
return getBiome(x, z, seed);
}
public Biome getBiome(int x, int z, long seed) {
x += mutator.noise(seed + 1, x, z) * noiseAmp;
z += mutator.noise(seed + 2, x, z) * noiseAmp;
x /= resolution;
z /= resolution;
int fdX = Math.floorDiv(x, pipeline.getSize());
int fdZ = Math.floorDiv(z, pipeline.getSize());
return holderCache.get(new SeededVector(fdX, fdZ, seed)).getBiome(x - fdX * pipeline.getSize(),
z - fdZ * pipeline.getSize()).getBiome();
}
@Override
public Optional<Biome> getBaseBiome(int x, int z, long seed) {
return Optional.of(getBiome(x, z, seed));
}
@Override
public Iterable<Biome> getBiomes() {
return biomes;
}
@Override
public Column<Biome> getColumn(int x, int z, long seed, int min, int max) {
return new BiomePipelineColumn(this, min, max, x, z, seed);
}
@Override
public int resolution() {
return resolution;
}
private record SeededVector(int x, int z, long seed) {
@Override
public boolean equals(Object obj) {
@ -119,7 +119,7 @@ public class BiomePipelineProvider implements BiomeProvider {
}
return false;
}
@Override
public int hashCode() {
int code = x;

View File

@ -15,12 +15,12 @@ import com.dfsek.terra.addons.biome.pipeline.source.BiomeSource;
public interface BiomeHolder {
BiomeHolder expand(BiomeExpander expander, long seed);
void mutate(BiomeMutator mutator, long seed);
void fill(BiomeSource source, long seed);
BiomeDelegate getBiome(int x, int z);
BiomeDelegate getBiomeRaw(int x, int z);
}

View File

@ -10,26 +10,26 @@ public interface BiomeDelegate extends StringIdentifiable {
static BiomeDelegate ephemeral(String id) {
return new EphemeralBiomeDelegate(id);
}
static BiomeDelegate from(Biome biome) {
return new DelegatedBiome(biome);
}
static BiomeDelegate self() {
return SelfDelegate.INSTANCE;
}
Biome getBiome();
Set<String> getTags();
default boolean isEphemeral() {
return false;
}
default boolean isSelf() {
return false;
}
}

View File

@ -7,32 +7,32 @@ import com.dfsek.terra.api.world.biome.Biome;
final class DelegatedBiome implements BiomeDelegate {
private final Biome biome;
public DelegatedBiome(Biome biome) {
this.biome = biome;
}
@Override
public Biome getBiome() {
return biome;
}
@Override
public int hashCode() {
return biome.hashCode();
}
@Override
public boolean equals(Object obj) {
if(!(obj instanceof DelegatedBiome that)) return false;
return that.biome.equals(this.biome);
}
@Override
public Set<String> getTags() {
return biome.getTags();
}
@Override
public String getID() {
return biome.getID();

View File

@ -1,51 +1,51 @@
package com.dfsek.terra.addons.biome.pipeline.api.delegate;
import java.util.HashSet;
import java.util.Set;
import com.dfsek.terra.api.world.biome.Biome;
final class EphemeralBiomeDelegate implements BiomeDelegate {
private final Set<String> tags;
private final String id;
public EphemeralBiomeDelegate(String id) {
this.id = id;
tags = new HashSet<>();
tags.add(id);
tags.add("ALL");
}
@Override
public Biome getBiome() {
throw new UnsupportedOperationException("Cannot get biome from ephemeral delegate");
}
@Override
public Set<String> getTags() {
return tags;
}
@Override
public String getID() {
return id;
}
@Override
public boolean isEphemeral() {
return true;
}
@Override
public int hashCode() {
return id.hashCode();
}
@Override
public boolean equals(Object obj) {
if(!(obj instanceof EphemeralBiomeDelegate that)) return false;
return this.id.equals(that.id);
}
}

View File

@ -8,31 +8,31 @@ import com.dfsek.terra.api.world.biome.Biome;
final class SelfDelegate implements BiomeDelegate {
public static final SelfDelegate INSTANCE = new SelfDelegate();
private SelfDelegate() {
}
@Override
public Biome getBiome() {
throw new UnsupportedOperationException("Cannot get biome from self delegate");
}
@Override
public boolean isSelf() {
return true;
}
@Override
public boolean isEphemeral() {
return true;
}
@Override
public Set<String> getTags() {
return Collections.emptySet();
}
@Override
public String getID() {
return "SELF";

View File

@ -13,8 +13,8 @@ import com.dfsek.terra.addons.biome.pipeline.api.delegate.BiomeDelegate;
public interface Stage {
BiomeHolder apply(BiomeHolder in, long seed);
boolean isExpansion();
Iterable<BiomeDelegate> getBiomes(Iterable<BiomeDelegate> biomes);
}

View File

@ -13,23 +13,23 @@ import com.dfsek.terra.addons.biome.pipeline.api.delegate.BiomeDelegate;
public interface BiomeMutator {
BiomeDelegate mutate(ViewPoint viewPoint, double x, double z, long seed);
default Iterable<BiomeDelegate> getBiomes(Iterable<BiomeDelegate> biomes) {
return biomes;
}
class ViewPoint {
private final BiomeHolder biomes;
private final int offX;
private final int offZ;
public ViewPoint(BiomeHolder biomes, int offX, int offZ) {
this.biomes = biomes;
this.offX = offX;
this.offZ = offZ;
}
public BiomeDelegate getBiome(int x, int z) {
return biomes.getBiomeRaw(x + offX, z + offZ);
}

View File

@ -15,18 +15,18 @@ import com.dfsek.terra.api.world.biome.Biome;
public class BiomeDelegateLoader implements TypeLoader<BiomeDelegate> {
private final Registry<Biome> biomeRegistry;
public BiomeDelegateLoader(Registry<Biome> biomeRegistry) {
this.biomeRegistry = biomeRegistry;
}
@Override
public BiomeDelegate load(@NotNull AnnotatedType t, @NotNull Object c, @NotNull ConfigLoader loader, DepthTracker depthTracker)
throws LoadException {
if(c.equals("SELF")) return BiomeDelegate.self();
return biomeRegistry
.getByID((String) c)
.map(BiomeDelegate::from)
.orElseGet(() -> BiomeDelegate.ephemeral((String) c));
}
}

View File

@ -35,15 +35,15 @@ public class BiomePipelineTemplate extends BiomeProviderTemplate {
and subtract 1. (The size is also printed to the server console if you
have debug mode enabled)""")
private @Meta int initialSize = 2;
@Value("pipeline.source")
@Description("The Biome Source to use for initial population of biomes.")
private @Meta BiomeSource source;
@Value("pipeline.stages")
@Description("A list of pipeline stages to apply to the result of #source")
private @Meta List<@Meta Stage> stages;
@Override
public BiomeProvider get() {
BiomePipeline.BiomePipelineBuilder biomePipelineBuilder = new BiomePipeline.BiomePipelineBuilder(initialSize);

View File

@ -22,11 +22,11 @@ public class SamplerSourceTemplate extends SourceTemplate {
@Value("sampler")
@Description("The sampler used to distribute biomes.")
private @Meta NoiseSampler noise;
@Value("biomes")
@Description("The biomes to be distributed.")
private @Meta ProbabilityCollection<@Meta BiomeDelegate> biomes;
@Override
public BiomeSource get() {
return new SamplerSource(biomes, noise);

View File

@ -24,17 +24,17 @@ import com.dfsek.terra.api.util.collection.ProbabilityCollection;
public class BorderListMutatorTemplate extends StageTemplate {
@Value("from")
private @Meta String from;
@Value("default-replace")
private @Meta String defaultReplace;
@Value("default-to")
private @Meta ProbabilityCollection<@Meta BiomeDelegate> defaultTo;
@Value("replace")
private @Meta Map<@Meta BiomeDelegate, @Meta ProbabilityCollection<@Meta BiomeDelegate>> replace;
@Override
public Stage get() {
return new MutatorStage(new BorderListMutator(replace, from, defaultReplace, noise, defaultTo));

View File

@ -22,13 +22,13 @@ import com.dfsek.terra.api.util.collection.ProbabilityCollection;
public class BorderMutatorTemplate extends StageTemplate {
@Value("from")
private @Meta String from;
@Value("replace")
private @Meta String replace;
@Value("to")
private @Meta ProbabilityCollection<@Meta BiomeDelegate> to;
@Override
public Stage get() {
return new MutatorStage(new BorderMutator(from, replace, noise, to));

View File

@ -24,13 +24,13 @@ import com.dfsek.terra.api.util.collection.ProbabilityCollection;
public class ReplaceListMutatorTemplate extends StageTemplate {
@Value("default-from")
private @Meta String defaultFrom;
@Value("default-to")
private @Meta ProbabilityCollection<@Meta BiomeDelegate> defaultTo;
@Value("to")
private @Meta Map<@Meta BiomeDelegate, @Meta ProbabilityCollection<@Meta BiomeDelegate>> replace;
@Override
public Stage get() {
return new MutatorStage(new ReplaceListMutator(replace, defaultFrom, defaultTo, noise));

View File

@ -22,10 +22,10 @@ import com.dfsek.terra.api.util.collection.ProbabilityCollection;
public class ReplaceMutatorTemplate extends StageTemplate {
@Value("from")
private @Meta String from;
@Value("to")
private @Meta ProbabilityCollection<@Meta BiomeDelegate> to;
@Override
public Stage get() {
return new MutatorStage(new ReplaceMutator(from, to, noise));

View File

@ -15,11 +15,11 @@ import com.dfsek.terra.api.util.MathUtil;
public class FractalExpander implements BiomeExpander {
private final NoiseSampler sampler;
public FractalExpander(NoiseSampler sampler) {
this.sampler = sampler;
}
@Override
public BiomeDelegate getBetween(double x, double z, long seed, BiomeDelegate... others) {
return others[MathUtil.normalizeIndex(sampler.noise(seed, x, z), others.length)];

View File

@ -24,7 +24,7 @@ public class BorderListMutator implements BiomeMutator {
private final ProbabilityCollection<BiomeDelegate> replaceDefault;
private final String defaultReplace;
private final Map<BiomeDelegate, ProbabilityCollection<BiomeDelegate>> replace;
public BorderListMutator(Map<BiomeDelegate, ProbabilityCollection<BiomeDelegate>> replace, String border, String defaultReplace,
NoiseSampler noiseSampler, ProbabilityCollection<BiomeDelegate> replaceDefault) {
this.border = border;
@ -33,7 +33,7 @@ public class BorderListMutator implements BiomeMutator {
this.defaultReplace = defaultReplace;
this.replace = replace;
}
@Override
public BiomeDelegate mutate(ViewPoint viewPoint, double x, double z, long seed) {
BiomeDelegate origin = viewPoint.getBiome(0, 0);
@ -55,7 +55,7 @@ public class BorderListMutator implements BiomeMutator {
}
return origin;
}
@Override
public Iterable<BiomeDelegate> getBiomes(Iterable<BiomeDelegate> biomes) {
Set<BiomeDelegate> biomeSet = new HashSet<>();

View File

@ -22,14 +22,14 @@ public class BorderMutator implements BiomeMutator {
private final NoiseSampler noiseSampler;
private final ProbabilityCollection<BiomeDelegate> replace;
private final String replaceTag;
public BorderMutator(String border, String replaceTag, NoiseSampler noiseSampler, ProbabilityCollection<BiomeDelegate> replace) {
this.border = border;
this.noiseSampler = noiseSampler;
this.replace = replace;
this.replaceTag = replaceTag;
}
@Override
public BiomeDelegate mutate(ViewPoint viewPoint, double x, double z, long seed) {
BiomeDelegate origin = viewPoint.getBiome(0, 0);
@ -47,20 +47,20 @@ public class BorderMutator implements BiomeMutator {
}
return origin;
}
@Override
public Iterable<BiomeDelegate> getBiomes(Iterable<BiomeDelegate> biomes) {
Set<BiomeDelegate> biomeSet = new HashSet<>();
biomes.forEach(biomeSet::add);
biomeSet.addAll(
replace
.getContents()
.stream()
.filter(
Predicate.not(BiomeDelegate::isSelf)
)
.toList()
);
return biomeSet;
}
}

View File

@ -23,7 +23,7 @@ public class ReplaceListMutator implements BiomeMutator {
private final NoiseSampler sampler;
private final ProbabilityCollection<BiomeDelegate> replaceDefault;
private final String defaultTag;
public ReplaceListMutator(Map<BiomeDelegate, ProbabilityCollection<BiomeDelegate>> replace, String defaultTag,
ProbabilityCollection<BiomeDelegate> replaceDefault, NoiseSampler sampler) {
this.replace = replace;
@ -31,7 +31,7 @@ public class ReplaceListMutator implements BiomeMutator {
this.defaultTag = defaultTag;
this.replaceDefault = replaceDefault;
}
@Override
public BiomeDelegate mutate(ViewPoint viewPoint, double x, double z, long seed) {
BiomeDelegate center = viewPoint.getBiome(0, 0);
@ -45,13 +45,13 @@ public class ReplaceListMutator implements BiomeMutator {
}
return center;
}
@Override
public Iterable<BiomeDelegate> getBiomes(Iterable<BiomeDelegate> biomes) {
Set<BiomeDelegate> biomeSet = new HashSet<>();
Set<BiomeDelegate> reject = new HashSet<>();
biomes.forEach(biome -> {
if(!biome.getTags().contains(defaultTag) && !replace.containsKey(biome)) {
biomeSet.add(biome);

View File

@ -21,13 +21,13 @@ public class ReplaceMutator implements BiomeMutator {
private final String replaceableTag;
private final ProbabilityCollection<BiomeDelegate> replace;
private final NoiseSampler sampler;
public ReplaceMutator(String replaceable, ProbabilityCollection<BiomeDelegate> replace, NoiseSampler sampler) {
this.replaceableTag = replaceable;
this.replace = replace;
this.sampler = sampler;
}
@Override
public BiomeDelegate mutate(ViewPoint viewPoint, double x, double z, long seed) {
if(viewPoint.getBiome(0, 0).getTags().contains(replaceableTag)) {
@ -36,7 +36,7 @@ public class ReplaceMutator implements BiomeMutator {
}
return viewPoint.getBiome(0, 0);
}
@Override
public Iterable<BiomeDelegate> getBiomes(Iterable<BiomeDelegate> biomes) {
Set<BiomeDelegate> biomeSet = new HashSet<>();

View File

@ -16,31 +16,31 @@ import com.dfsek.terra.api.util.MathUtil;
public class SmoothMutator implements BiomeMutator {
private final NoiseSampler sampler;
public SmoothMutator(NoiseSampler sampler) {
this.sampler = sampler;
}
@Override
public BiomeDelegate mutate(ViewPoint viewPoint, double x, double z, long seed) {
BiomeDelegate top = viewPoint.getBiome(1, 0);
BiomeDelegate bottom = viewPoint.getBiome(-1, 0);
BiomeDelegate left = viewPoint.getBiome(0, 1);
BiomeDelegate right = viewPoint.getBiome(0, -1);
boolean vert = Objects.equals(top, bottom) && top != null;
boolean horiz = Objects.equals(left, right) && left != null;
if(vert && horiz) {
return MathUtil.normalizeIndex(sampler.noise(seed, x, z), 2) == 0 ? left : top;
}
if(vert) return top;
if(horiz) return left;
return viewPoint.getBiome(0, 0);
}
}

View File

@ -12,6 +12,6 @@ import com.dfsek.terra.addons.biome.pipeline.api.delegate.BiomeDelegate;
public interface BiomeSource {
BiomeDelegate getBiome(double x, double z, long seed);
Iterable<BiomeDelegate> getBiomes();
}

View File

@ -15,17 +15,17 @@ import com.dfsek.terra.api.util.collection.ProbabilityCollection;
public class SamplerSource implements BiomeSource {
private final ProbabilityCollection<BiomeDelegate> biomes;
private final NoiseSampler sampler;
public SamplerSource(ProbabilityCollection<BiomeDelegate> biomes, NoiseSampler sampler) {
this.biomes = biomes;
this.sampler = sampler;
}
@Override
public BiomeDelegate getBiome(double x, double z, long seed) {
return biomes.get(sampler, x, z, seed);
}
@Override
public Iterable<BiomeDelegate> getBiomes() {
return biomes.getContents();

View File

@ -15,21 +15,21 @@ import com.dfsek.terra.addons.biome.pipeline.api.stage.type.BiomeExpander;
public class ExpanderStage implements Stage {
private final BiomeExpander expander;
public ExpanderStage(BiomeExpander expander) {
this.expander = expander;
}
@Override
public BiomeHolder apply(BiomeHolder in, long seed) {
return in.expand(expander, seed);
}
@Override
public boolean isExpansion() {
return true;
}
@Override
public Iterable<BiomeDelegate> getBiomes(Iterable<BiomeDelegate> biomes) {
return biomes;

View File

@ -15,27 +15,27 @@ import com.dfsek.terra.addons.biome.pipeline.api.stage.type.BiomeMutator;
public class MutatorStage implements Stage {
private final BiomeMutator mutator;
public MutatorStage(BiomeMutator mutator) {
this.mutator = mutator;
}
@Override
public BiomeHolder apply(BiomeHolder in, long seed) {
in.mutate(mutator, seed);
return in;
}
@Override
public boolean isExpansion() {
return false;
}
@Override
public Iterable<BiomeDelegate> getBiomes(Iterable<BiomeDelegate> biomes) {
return mutator.getBiomes(biomes);
}
public enum Type {
REPLACE,
REPLACE_LIST,

View File

@ -16,21 +16,21 @@ import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
public class SingleBiomeProvider implements BiomeProvider {
private final Biome biome;
public SingleBiomeProvider(Biome biome) {
this.biome = biome;
}
@Override
public Biome getBiome(int x, int y, int z, long seed) {
return biome;
}
@Override
public Optional<Biome> getBaseBiome(int x, int z, long seed) {
return Optional.of(biome);
}
@Override
public Iterable<Biome> getBiomes() {
return Collections.singleton(biome);

View File

@ -25,23 +25,23 @@ import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
public class SingleBiomeProviderAddon implements AddonInitializer {
public static final TypeKey<Supplier<ObjectTemplate<BiomeProvider>>> PROVIDER_REGISTRY_KEY = new TypeKey<>() {
};
@Inject
private Platform platform;
@Inject
private BaseAddon addon;
@Override
public void initialize() {
platform.getEventManager()
.getHandler(FunctionalEventHandler.class)
.register(addon, ConfigPackPreLoadEvent.class)
.then(event -> {
CheckedRegistry<Supplier<ObjectTemplate<BiomeProvider>>> providerRegistry = event.getPack().getOrCreateRegistry(
PROVIDER_REGISTRY_KEY);
providerRegistry.register(addon.key("SINGLE"), SingleBiomeProviderTemplate::new);
})
.failThrough();
}
}

View File

@ -18,7 +18,7 @@ import com.dfsek.terra.api.world.biome.generation.BiomeProvider;
public class SingleBiomeProviderTemplate implements ObjectTemplate<BiomeProvider> {
@Value("biome")
private @Meta Biome biome;
@Override
public BiomeProvider get() {
return new SingleBiomeProvider(biome);

View File

@ -21,26 +21,26 @@ public class BiomeQueryAPIAddon implements AddonInitializer {
private Platform platform;
@Inject
private BaseAddon addon;
@Override
public void initialize() {
platform.getEventManager()
.getHandler(FunctionalEventHandler.class)
.register(addon, ConfigPackPostLoadEvent.class)
.then(event -> {
Collection<Biome> biomes = event
.getPack()
.getRegistry(Biome.class)
.entries();
BiomeTagFlattener flattener = new BiomeTagFlattener(biomes
.stream()
.flatMap(biome -> biome.getTags().stream())
.toList());
biomes.forEach(biome -> biome.getContext().put(BIOME_TAG_KEY, new BiomeTagHolder(biome, flattener)));
})
.global();
}
}

View File

@ -8,9 +8,9 @@ import com.dfsek.terra.api.world.biome.Biome;
public final class BiomeQueries {
private BiomeQueries() {
}
public static Predicate<Biome> has(String tag) {
return new SingleTagQuery(tag);
}

View File

@ -5,15 +5,15 @@ import java.util.List;
public class BiomeTagFlattener {
private final List<String> tags;
public BiomeTagFlattener(List<String> tags) {
this.tags = tags;
}
public int index(String tag) {
return tags.indexOf(tag);
}
public int size() {
return tags.size();
}

View File

@ -7,7 +7,7 @@ import com.dfsek.terra.api.world.biome.Biome;
public class BiomeTagHolder implements Properties {
private final boolean[] tags;
private final BiomeTagFlattener flattener;
public BiomeTagHolder(Biome biome, BiomeTagFlattener flattener) {
this.tags = new boolean[flattener.size()];
this.flattener = flattener;
@ -15,11 +15,11 @@ public class BiomeTagHolder implements Properties {
tags[flattener.index(tag)] = true;
}
}
boolean get(int index) {
return tags[index];
}
public BiomeTagFlattener getFlattener() {
return flattener;
}

View File

@ -9,23 +9,23 @@ import com.dfsek.terra.api.world.biome.Biome;
public class SingleTagQuery implements Predicate<Biome> {
private final String tag;
private int tagIndex = -1;
public SingleTagQuery(String tag) {
this.tag = tag;
}
@Override
public boolean test(Biome biome) {
if(tagIndex < 0) {
tagIndex = biome
.getContext()
.get(BiomeQueryAPIAddon.BIOME_TAG_KEY)
.getFlattener()
.index(tag);
}
return biome
.getContext()
.get(BiomeQueryAPIAddon.BIOME_TAG_KEY)
.get(tagIndex);
}
}

View File

@ -31,54 +31,54 @@ import com.dfsek.terra.api.world.chunk.generation.util.provider.ChunkGeneratorPr
public class NoiseChunkGenerator3DAddon implements AddonInitializer {
@Inject
private Platform platform;
@Inject
private BaseAddon addon;
@Override
public void initialize() {
PropertyKey<BiomePaletteInfo> paletteInfoPropertyKey = Context.create(BiomePaletteInfo.class);
PropertyKey<BiomeNoiseProperties> noisePropertiesPropertyKey = Context.create(BiomeNoiseProperties.class);
platform.getEventManager()
.getHandler(FunctionalEventHandler.class)
.register(addon, ConfigPackPreLoadEvent.class)
.priority(1000)
.then(event -> {
event.getPack().applyLoader(SlantHolder.CalculationMethod.class,
(type, o, loader, depthTracker) -> SlantHolder.CalculationMethod.valueOf((String) o));
NoiseChunkGeneratorPackConfigTemplate config = event.loadTemplate(new NoiseChunkGeneratorPackConfigTemplate());
event.getPack().getContext().put(config);
event.getPack()
.getOrCreateRegistry(ChunkGeneratorProvider.class)
.register(addon.key("NOISE_3D"),
pack -> new NoiseChunkGenerator3D(pack, platform, config.getElevationBlend(),
config.getHorizontalRes(),
config.getVerticalRes(), noisePropertiesPropertyKey,
paletteInfoPropertyKey));
event.getPack()
.applyLoader(SlantHolder.Layer.class, SlantLayerTemplate::new);
})
.failThrough();
platform.getEventManager()
.getHandler(FunctionalEventHandler.class)
.register(addon, ConfigurationLoadEvent.class)
.then(event -> {
if(event.is(Biome.class)) {
NoiseChunkGeneratorPackConfigTemplate config = event.getPack().getContext().get(
NoiseChunkGeneratorPackConfigTemplate.class);
event.getLoadedObject(Biome.class).getContext().put(paletteInfoPropertyKey,
event.load(new BiomePaletteTemplate(platform,
config.getSlantCalculationMethod()))
.get());
event.getLoadedObject(Biome.class).getContext().put(noisePropertiesPropertyKey,
event.load(new BiomeNoiseConfigTemplate()).get());
}
})
.failThrough();
}
}

View File

@ -13,31 +13,31 @@ public class NoiseChunkGeneratorPackConfigTemplate implements ConfigTemplate, Pr
@Value("blend.terrain.elevation")
@Default
private @Meta int elevationBlend = 4;
@Value("carving.resolution.horizontal")
@Default
private @Meta int horizontalRes = 4;
@Value("carving.resolution.vertical")
@Default
private @Meta int verticalRes = 2;
@Value("slant.calculation-method")
@Default
private SlantHolder.@Meta CalculationMethod slantCalculationMethod = SlantHolder.CalculationMethod.Derivative;
public int getElevationBlend() {
return elevationBlend;
}
public int getHorizontalRes() {
return horizontalRes;
}
public int getVerticalRes() {
return verticalRes;
}
public SlantHolder.CalculationMethod getSlantCalculationMethod() {
return slantCalculationMethod;
}

View File

@ -11,34 +11,34 @@ import com.dfsek.terra.api.noise.NoiseSampler;
public class BiomeNoiseConfigTemplate implements ObjectTemplate<BiomeNoiseProperties> {
@Value("terrain.sampler")
private @Meta NoiseSampler baseSampler;
@Value("terrain.sampler-2d")
@Default
private @Meta NoiseSampler elevationSampler = NoiseSampler.zero();
@Value("carving.sampler")
@Default
private @Meta NoiseSampler carvingSampler = NoiseSampler.zero();
@Value("terrain.blend.distance")
@Default
private @Meta int blendDistance = 3;
@Value("terrain.blend.weight")
@Default
private @Meta double blendWeight = 1;
@Value("terrain.blend.step")
@Default
private @Meta int blendStep = 4;
@Value("terrain.blend.weight-2d")
@Default
private @Meta double elevationWeight = 1;
@Override
public BiomeNoiseProperties get() {
return new BiomeNoiseProperties(baseSampler, elevationSampler, carvingSampler, blendDistance, blendStep, blendWeight,
elevationWeight, new ThreadLocalNoiseHolder());
}
}

View File

@ -5,14 +5,14 @@ import com.dfsek.terra.api.noise.NoiseSampler;
public class ThreadLocalNoiseHolder {
private final ThreadLocal<Holder> holder = ThreadLocal.withInitial(Holder::new);
public double getNoise(NoiseSampler sampler, int x, int y, int z, long seed) {
Holder holder = this.holder.get();
if(holder.init && holder.y == y && holder.z == z && holder.x == x && holder.seed == seed) {
return holder.noise;
}
double noise = sampler.noise(seed, x, y, z);
holder.noise = noise;
holder.x = x;
@ -22,7 +22,7 @@ public class ThreadLocalNoiseHolder {
holder.init = true;
return noise;
}
private static final class Holder {
int x, y, z;
boolean init = false;

View File

@ -12,6 +12,10 @@ import com.dfsek.tectonic.api.config.template.annotations.Description;
import com.dfsek.tectonic.api.config.template.annotations.Value;
import com.dfsek.tectonic.api.config.template.object.ObjectTemplate;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import com.dfsek.terra.addons.chunkgenerator.palette.BiomePaletteInfo;
import com.dfsek.terra.addons.chunkgenerator.palette.PaletteHolder;
import com.dfsek.terra.addons.chunkgenerator.palette.slant.SlantHolder;
@ -20,10 +24,6 @@ import com.dfsek.terra.api.block.state.BlockState;
import com.dfsek.terra.api.config.meta.Meta;
import com.dfsek.terra.api.world.chunk.generation.util.Palette;
public class BiomePaletteTemplate implements ObjectTemplate<BiomePaletteInfo> {
private final Platform platform;
@ -55,15 +55,15 @@ public class BiomePaletteTemplate implements ObjectTemplate<BiomePaletteInfo> {
@Value("carving.update-palette")
@Default
private @Meta boolean updatePalette = false;
public BiomePaletteTemplate(Platform platform, SlantHolder.CalculationMethod slantCalculationMethod) {
this.platform = platform;
this.slantCalculationMethod = slantCalculationMethod;
}
@Override
public BiomePaletteInfo get() {
return new BiomePaletteInfo(PaletteHolder.of(palettes), SlantHolder.of(slantLayers, slantDepth, slantCalculationMethod),
oceanPalette, seaLevel, updatePalette);
}
}

View File

@ -13,13 +13,13 @@ import com.dfsek.terra.api.world.chunk.generation.util.Palette;
public class SlantLayerTemplate implements ObjectTemplate<SlantHolder.Layer> {
@Value("threshold")
private @Meta double threshold;
@Value("palette")
private @Meta List<@Meta Map<@Meta Palette, @Meta Integer>> palettes;
@Override
public SlantHolder.Layer get() {
return new SlantHolder.Layer(PaletteHolder.of(palettes), threshold);

Some files were not shown because too many files have changed in this diff