mirror of https://github.com/VolmitSoftware/Iris.git (synced 2025-07-01 23:47:21 +00:00)
Revert "Revert "Graphs & streams ""
This reverts commit 4d64ad8bb8c9aacc1945a033af11bae2d2e8b12a.
parent 4d64ad8bb8
commit a271ed000e
@@ -42,6 +42,7 @@ import java.util.Locale;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.atomic.AtomicReferenceArray;
import java.util.function.Consumer;
import java.util.function.Predicate;
import java.util.stream.Stream;
@@ -221,6 +222,21 @@ public class ResourceLoader<T extends IrisRegistrant> implements MeteredCache {
         return m;
     }
 
+    public KList<T> loadAll(KList<String> s, Consumer<T> postLoad) {
+        KList<T> m = new KList<>();
+
+        for (String i : s) {
+            T t = load(i);
+
+            if (t != null) {
+                m.add(t);
+                postLoad.accept(t);
+            }
+        }
+
+        return m;
+    }
+
     public KList<T> loadAll(String[] s) {
         KList<T> m = new KList<>();
 
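The new loadAll(KList<String>, Consumer<T>) overload resolves each key through load(i), skips keys that fail to resolve, and invokes the callback once per successfully loaded registrant. A minimal sketch of a call site, assuming an IrisData instance and an IrisRegion as used in the IrisComplex hunk below; the variable names are illustrative:

```java
// Hypothetical call site; `data` and `region` stand in for fields IrisComplex already holds.
KList<IrisBiome> landBiomes = data.getBiomeLoader().loadAll(
        region.getLandBiomes(),                               // keys to resolve through the biome loader
        (biome) -> biome.setInferredType(InferredType.LAND)); // post-load hook applied to each loaded biome
```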
@@ -123,48 +123,34 @@ public class IrisComplex implements DataProvider {
                 ProceduralStream.of((x, z) -> focusRegion,
                         Interpolated.of(a -> 0D, a -> focusRegion))
                 : regionStyleStream
-                .selectRarity(engine.getDimension().getRegions(), (i) -> data.getRegionLoader().load(i))
-                .convertCached((s) -> data.getRegionLoader().load(s)).cache2D("regionStream", engine, cacheSize);
+                .selectRarity(data.getRegionLoader().loadAll(engine.getDimension().getRegions()))
+                .cache2D("regionStream", engine, cacheSize);
         regionIDStream = regionIdentityStream.convertCached((i) -> new UUID(Double.doubleToLongBits(i), String.valueOf(i * 38445).hashCode() * 3245556666L));
         caveBiomeStream = regionStream.convert((r)
                 -> engine.getDimension().getCaveBiomeStyle().create(rng.nextParallelRNG(InferredType.CAVE.ordinal()), getData()).stream()
                 .zoom(r.getCaveBiomeZoom())
-                .selectRarity(r.getCaveBiomes(), (i) -> data.getBiomeLoader().load(i))
-                .onNull("")
-                .convertCached((s) -> {
-                    if (s.isEmpty()) {
-                        return emptyBiome;
-                    }
-
-                    return data.getBiomeLoader().load(s)
-                            .setInferredType(InferredType.CAVE);
-                })
+                .selectRarity(data.getBiomeLoader().loadAll(r.getCaveBiomes()))
+                .onNull(emptyBiome)
         ).convertAware2D(ProceduralStream::get).cache2D("caveBiomeStream", engine, cacheSize);
         inferredStreams.put(InferredType.CAVE, caveBiomeStream);
         landBiomeStream = regionStream.convert((r)
                 -> engine.getDimension().getLandBiomeStyle().create(rng.nextParallelRNG(InferredType.LAND.ordinal()), getData()).stream()
                 .zoom(r.getLandBiomeZoom())
-                .selectRarity(r.getLandBiomes(), (i) -> data.getBiomeLoader().load(i))
-                .convertCached((s) -> data.getBiomeLoader().load(s)
-                        .setInferredType(InferredType.LAND))
+                .selectRarity(data.getBiomeLoader().loadAll(r.getLandBiomes(), (t) -> t.setInferredType(InferredType.LAND)))
         ).convertAware2D(ProceduralStream::get)
                 .cache2D("landBiomeStream", engine, cacheSize);
         inferredStreams.put(InferredType.LAND, landBiomeStream);
         seaBiomeStream = regionStream.convert((r)
                 -> engine.getDimension().getSeaBiomeStyle().create(rng.nextParallelRNG(InferredType.SEA.ordinal()), getData()).stream()
                 .zoom(r.getSeaBiomeZoom())
-                .selectRarity(r.getSeaBiomes(), (i) -> data.getBiomeLoader().load(i))
-                .convertCached((s) -> data.getBiomeLoader().load(s)
-                        .setInferredType(InferredType.SEA))
+                .selectRarity(data.getBiomeLoader().loadAll(r.getSeaBiomes(), (t) -> t.setInferredType(InferredType.SEA)))
         ).convertAware2D(ProceduralStream::get)
                 .cache2D("seaBiomeStream", engine, cacheSize);
         inferredStreams.put(InferredType.SEA, seaBiomeStream);
         shoreBiomeStream = regionStream.convert((r)
                 -> engine.getDimension().getShoreBiomeStyle().create(rng.nextParallelRNG(InferredType.SHORE.ordinal()), getData()).stream()
                 .zoom(r.getShoreBiomeZoom())
-                .selectRarity(r.getShoreBiomes(), (i) -> data.getBiomeLoader().load(i))
-                .convertCached((s) -> data.getBiomeLoader().load(s)
-                        .setInferredType(InferredType.SHORE))
+                .selectRarity(data.getBiomeLoader().loadAll(r.getShoreBiomes(), (t) -> t.setInferredType(InferredType.SHORE)))
         ).convertAware2D(ProceduralStream::get).cache2D("shoreBiomeStream", engine, cacheSize);
         inferredStreams.put(InferredType.SHORE, shoreBiomeStream);
         bridgeStream = focus != null ? ProceduralStream.of((x, z) -> focus.getInferredType(),
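Taken together, the cave branch is now a single declarative chain: pre-load the region's cave biomes, let rarity selection pick among the loaded objects, and substitute emptyBiome for null picks instead of routing them through an empty-string sentinel. A sketch of that pipeline in isolation, assuming the same engine, data, rng and emptyBiome members IrisComplex already holds; the helper method itself is illustrative and not part of the commit:

```java
// Illustrative helper only: the reworked cave-biome chain from the hunk above.
private ProceduralStream<IrisBiome> caveBiomesFor(IrisRegion r) {
    return engine.getDimension().getCaveBiomeStyle()
            .create(rng.nextParallelRNG(InferredType.CAVE.ordinal()), getData())
            .stream()                                                       // noise stream for the style
            .zoom(r.getCaveBiomeZoom())                                     // scale to the region's cave zoom
            .selectRarity(data.getBiomeLoader().loadAll(r.getCaveBiomes())) // rarity pick over pre-loaded biomes
            .onNull(emptyBiome);                                            // fallback when nothing is picked
}
```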
@@ -25,6 +25,7 @@ import com.volmit.iris.core.events.IrisEngineHotloadEvent;
import com.volmit.iris.core.gui.PregeneratorJob;
import com.volmit.iris.core.project.IrisProject;
import com.volmit.iris.core.service.PreservationSVC;
import com.volmit.iris.engine.biome.AreaSystem;
import com.volmit.iris.engine.data.cache.AtomicCache;
import com.volmit.iris.engine.framework.Engine;
import com.volmit.iris.engine.framework.EngineEffects;
@@ -104,10 +105,12 @@ public class IrisEngine implements Engine {
     private double maxBiomeLayerDensity;
     private double maxBiomeDecoratorDensity;
     private IrisComplex complex;
+    private AreaSystem biomeSystem;
 
     public IrisEngine(EngineTarget target, boolean studio) {
         this.studio = studio;
         this.target = target;
+        biomeSystem = getDimension().createBiomeSystem();
         getEngineData();
         verifySeed();
         this.seedManager = new SeedManager(target.getWorld().getRawWorldSeed());
@@ -18,7 +18,71 @@

package com.volmit.iris.engine.object;

import com.volmit.iris.util.collection.KList;
import com.volmit.iris.util.reflect.V;
import com.volmit.iris.util.stream.ProceduralStream;
import com.volmit.iris.util.stream.arithmetic.FittedStream;
import com.volmit.iris.util.stream.interpolation.Interpolated;

import java.util.HashSet;
import java.util.List;
import java.util.Set;

public interface IRare {
    static <T extends IRare> ProceduralStream<T> stream(ProceduralStream<Double> noise, List<T> possibilities)
    {
        return ProceduralStream.of((x, z) -> pick(possibilities, noise.get(x, z)),
                (x, y, z) -> pick(possibilities, noise.get(x, y, z)),
                new Interpolated<T>() {
                    @Override
                    public double toDouble(T t) {
                        return 0;
                    }

                    @Override
                    public T fromDouble(double d) {
                        return null;
                    }
                });
    }

    static <T extends IRare> T pick(List<T> possibilities, double noiseValue)
    {
        if (possibilities.isEmpty())
        {
            return null;
        }

        if (possibilities.size() == 1)
        {
            return possibilities.get(0);
        }

        double completeWeight = 0.0;
        double highestWeight = 0.0;

        for (T item : possibilities)
        {
            double weight = Math.max(item.getRarity(), 1);
            highestWeight = Math.max(highestWeight, weight);
            completeWeight += weight;
        }

        double r = noiseValue * completeWeight;
        double countWeight = 0.0;

        for (T item : possibilities) {
            double weight = Math.max(highestWeight - Math.max(item.getRarity(), 1), 1);
            countWeight += weight;
            if (countWeight >= r)
            {
                return item;
            }
        }

        return null;
    }

    static int get(Object v) {
        return v instanceof IRare ? Math.max(1, ((IRare) v).getRarity()) : 1;
    }
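pick scales the incoming noise value by the summed rarities and then walks a cumulative weight line, so a fixed list and a fixed noise value always resolve to the same entry; values near the top of the range can fall past the accumulated weights and return null. A self-contained sketch, assuming getRarity() is the interface's only abstract method; the WeightedThing class and the chosen weights are illustrative:

```java
import com.volmit.iris.engine.object.IRare;

import java.util.List;

public class PickDemo {
    // Hypothetical IRare implementor for the demo; not part of the codebase.
    static final class WeightedThing implements IRare {
        final String name;
        final int rarity;

        WeightedThing(String name, int rarity) {
            this.name = name;
            this.rarity = rarity;
        }

        @Override
        public int getRarity() {
            return rarity;
        }
    }

    public static void main(String[] args) {
        List<WeightedThing> options = List.of(
                new WeightedThing("common", 1),
                new WeightedThing("rare", 10));

        // Noise in [0, 1] is scaled by the total rarity (11 here) and matched against
        // per-item weights; the same input deterministically yields the same pick.
        WeightedThing low = IRare.pick(options, 0.05);
        WeightedThing high = IRare.pick(options, 0.95);

        System.out.println(low == null ? "none" : low.name);
        System.out.println(high == null ? "none" : high.name);
    }
}
```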
@@ -21,7 +21,10 @@ package com.volmit.iris.engine.object;
import com.volmit.iris.Iris;
import com.volmit.iris.core.loader.IrisData;
import com.volmit.iris.core.loader.IrisRegistrant;
import com.volmit.iris.engine.biome.AreaSystem;
import com.volmit.iris.engine.biome.ProceduralBiomeSystem;
import com.volmit.iris.engine.data.cache.AtomicCache;
import com.volmit.iris.engine.framework.Engine;
import com.volmit.iris.engine.object.annotations.ArrayType;
import com.volmit.iris.engine.object.annotations.Desc;
import com.volmit.iris.engine.object.annotations.MaxNumber;
@@ -393,4 +396,8 @@ public class IrisDimension extends IrisRegistrant {
     public void scanForErrors(JSONObject p, VolmitSender sender) {
 
     }
+
+    public AreaSystem createBiomeSystem(Engine engine) {
+        return new ProceduralBiomeSystem(engine);
+    }
 }
@@ -28,11 +28,14 @@ import com.volmit.iris.engine.object.annotations.Snippet;
import com.volmit.iris.util.math.RNG;
import com.volmit.iris.util.noise.CNG;
import com.volmit.iris.util.noise.ExpressionNoise;
import com.volmit.iris.util.stream.ProceduralStream;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;

import java.util.List;

@Snippet("style")
@Accessors(chain = true)
@NoArgsConstructor
@@ -29,6 +29,7 @@ import com.volmit.iris.util.function.Function3;
import com.volmit.iris.util.function.Function4;
import com.volmit.iris.util.hunk.Hunk;
import com.volmit.iris.util.math.RNG;
import com.volmit.iris.util.reflect.V;
import com.volmit.iris.util.stream.arithmetic.AddingStream;
import com.volmit.iris.util.stream.arithmetic.ClampedStream;
import com.volmit.iris.util.stream.arithmetic.CoordinateBitShiftLeftStream;
@@ -63,8 +64,10 @@ import com.volmit.iris.util.stream.utility.ProfiledStream;
import com.volmit.iris.util.stream.utility.SemaphoreStream;
import com.volmit.iris.util.stream.utility.SynchronizedStream;

import java.util.ArrayList;
import java.util.List;
import java.util.function.Function;
import java.util.stream.Collectors;

@SuppressWarnings("ALL")
public interface ProceduralStream<T> extends ProceduralLayer, Interpolated<T> {
@@ -363,32 +366,16 @@ public interface ProceduralStream<T> extends ProceduralLayer, Interpolated<T> {
         return new SelectionStream<V>(this, rarityTypes);
     }
 
-    default <V> ProceduralStream<V> selectRarity(List<V> types) {
-        KList<V> rarityTypes = new KList<>();
-        int totalRarity = 0;
-        for (V i : types) {
-            totalRarity += IRare.get(i);
-        }
-
-        for (V i : types) {
-            rarityTypes.addMultiple(i, totalRarity / IRare.get(i));
-        }
-
-        return new SelectionStream<V>(this, rarityTypes);
+    default <V extends IRare> ProceduralStream<V> selectRarity(List<V> types) {
+        return IRare.stream(this.forceDouble(), types);
     }
 
-    default <V> ProceduralStream<V> selectRarity(List<V> types, Function<V, IRare> loader) {
-        KList<V> rarityTypes = new KList<>();
-        int totalRarity = 0;
-        for (V i : types) {
-            totalRarity += IRare.get(loader.apply(i));
+    default <V> ProceduralStream<IRare> selectRarity(List<V> types, Function<V, IRare> loader) {
+        List<IRare> r = new ArrayList<>();
+        for (V f : types) {
+            r.add(loader.apply(f));
         }
 
-        for (V i : types) {
-            rarityTypes.addMultiple(i, totalRarity / IRare.get(loader.apply(i)));
-        }
-
-        return new SelectionStream<V>(this, rarityTypes);
+        return selectRarity(r);
     }
 
     default <V> int countPossibilities(List<V> types, Function<V, IRare> loader) {