Forcefully shove stuff into other stuff

This commit is contained in:
Daniel Mills
2020-11-10 00:49:28 -05:00
parent 50ffcceaf4
commit 4fc8a5ad0c
364 changed files with 2975 additions and 5303 deletions

View File

@@ -0,0 +1,83 @@
package com.volmit.iris.scaffold;
import com.volmit.iris.Iris;
import com.volmit.iris.object.IrisDimension;
import com.volmit.iris.scaffold.engine.EngineCompositeGenerator;
import org.bukkit.World;
import org.bukkit.WorldCreator;
import org.bukkit.generator.ChunkGenerator;
/**
 * Fluent builder producing a Bukkit WorldCreator wired to an Iris
 * EngineCompositeGenerator.
 */
public class IrisWorldCreator
{
    private String name;
    private boolean studio = false;
    // Dimension load key; null means "no dimension selected" (NORMAL environment).
    private String dimensionName = null;
    private long seed = 1337;

    public IrisWorldCreator()
    {
    }

    /** Selects the dimension by its load key. */
    public IrisWorldCreator dimension(String loadKey)
    {
        // Fix: was "this.dimensionName = dimensionName" — a self-assignment
        // that silently discarded the supplied load key.
        this.dimensionName = loadKey;
        return this;
    }

    /** Selects the dimension from an already-loaded dimension object. */
    public IrisWorldCreator dimension(IrisDimension dim)
    {
        this.dimensionName = dim.getLoadKey();
        return this;
    }

    /** Sets the world name. */
    public IrisWorldCreator name(String name)
    {
        this.name = name;
        return this;
    }

    /** Sets the world seed (default 1337). */
    public IrisWorldCreator seed(long seed)
    {
        this.seed = seed;
        return this;
    }

    /** Marks the world as a studio (non-production) world. */
    public IrisWorldCreator studioMode()
    {
        this.studio = true;
        return this;
    }

    /** Marks the world as a production world (the default). */
    public IrisWorldCreator productionMode()
    {
        this.studio = false;
        return this;
    }

    /** Builds the WorldCreator with an Iris generator attached. */
    public WorldCreator create()
    {
        ChunkGenerator g = new EngineCompositeGenerator(dimensionName, !studio);

        return new WorldCreator(name)
                .environment(findEnvironment())
                .generateStructures(true)
                .generator(g).seed(seed);
    }

    /** Resolves the Bukkit environment from the dimension, defaulting to NORMAL. */
    private World.Environment findEnvironment() {
        if(dimensionName == null)
        {
            return World.Environment.NORMAL;
        }

        IrisDimension dim = Iris.globaldata.getDimensionLoader().load(dimensionName);

        if(dim != null)
        {
            return dim.getEnvironment();
        }

        return World.Environment.NORMAL;
    }
}

View File

@@ -0,0 +1,40 @@
package com.volmit.iris.scaffold;
import com.volmit.iris.scaffold.engine.IrisAccess;
import com.volmit.iris.util.KMap;
import org.bukkit.World;
/**
 * Static registry mapping worlds (by UUID) to their Iris generator access.
 */
public class IrisWorlds
{
    private static final KMap<String, IrisAccess> provisioned = new KMap<>();

    /** Registers the generator access for a world, keyed by its UUID string. */
    public static void register(World w, IrisAccess p)
    {
        provisioned.put(w.getUID().toString(), p);
    }

    /**
     * @return true if the world was registered here, or its chunk generator
     *         itself is an IrisAccess
     */
    public static boolean isIrisWorld(World world)
    {
        String id = world.getUID().toString();
        return provisioned.containsKey(id) || world.getGenerator() instanceof IrisAccess;
    }

    /**
     * Resolves the IrisAccess for a world: the registered entry wins, then the
     * world's own generator; null when the world is not an Iris world.
     */
    public static IrisAccess access(World world)
    {
        if(!isIrisWorld(world))
        {
            return null;
        }

        String id = world.getUID().toString();

        if(provisioned.containsKey(id))
        {
            return provisioned.get(id);
        }

        return (IrisAccess) world.getGenerator();
    }
}

View File

@@ -0,0 +1,23 @@
package com.volmit.iris.scaffold.cache;
/**
 * A coordinate-keyed cache. Static helpers pack a pair of 32-bit chunk
 * coordinates into a single long key and unpack them again.
 *
 * @param <V> the cached value type
 */
public interface Cache<V>
{
    /** @return the integer id of this cache instance */
    int getId();

    /** @return the cached value at (x, z) */
    V get(int x, int z);

    /** Packs x into the high 32 bits and z into the low 32 bits of a long. */
    static long key(int x, int z)
    {
        long high = ((long) x) << 32;
        long low = ((long) z) & 0xFFFFFFFFL;
        return high | low;
    }

    /** Recovers the x coordinate from a packed key (high 32 bits). */
    static int keyX(long key)
    {
        return (int) (key >> 32);
    }

    /** Recovers the z coordinate from a packed key (low 32 bits). */
    static int keyZ(long key)
    {
        return (int) key;
    }
}

View File

@@ -0,0 +1,11 @@
package com.volmit.iris.scaffold.cache;
/**
 * A registry of Cache instances addressed by an integer id.
 */
public interface Multicache
{
    /** @return the cache with the given id */
    @SuppressWarnings("hiding")
    public <V> Cache<V> getCache(int id);

    /** Creates a new cache (implementations assign it an id). */
    @SuppressWarnings("hiding")
    public <V> Cache<V> createCache();
}

View File

@@ -0,0 +1,70 @@
package com.volmit.iris.scaffold.data;
import com.volmit.iris.util.KList;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
/**
 * An append-only palette assigning stable integer indices to values, with
 * binary (de)serialization through an IOAdapter. Index assignment and writing
 * synchronize on the backing list.
 *
 * @param <T> the palette entry type
 */
public class DataPalette<T> {
    private final KList<T> palette;

    /** Creates an empty palette with a small initial capacity. */
    public DataPalette()
    {
        this(new KList<>(16));
    }

    /** Wraps an existing list as the palette's backing store. */
    public DataPalette(KList<T> palette)
    {
        this.palette = palette;
    }

    /** @return the live backing list */
    public KList<T> getPalette()
    {
        return palette;
    }

    /** Returns the index of t, appending it first if not yet present. */
    public int getIndex(T t)
    {
        synchronized (palette)
        {
            int at = palette.indexOf(t);

            if(at != -1)
            {
                return at;
            }

            palette.add(t);
            return palette.size() - 1;
        }
    }

    /**
     * Writes the palette size (offset by Short.MIN_VALUE so the full 16-bit
     * range is usable) followed by every entry via the adapter.
     */
    public void write(IOAdapter<T> adapter, DataOutputStream dos) throws IOException
    {
        synchronized (palette)
        {
            dos.writeShort(palette.size() + Short.MIN_VALUE);

            for(T entry : palette)
            {
                adapter.write(entry, dos);
            }
        }
    }

    /**
     * Reads a palette previously produced by write(): the offset size, then
     * that many adapter-decoded entries.
     */
    public static <T> DataPalette<T> getPalette(IOAdapter<T> adapter, DataInputStream din) throws IOException
    {
        KList<T> entries = new KList<>();
        int count = din.readShort() - Short.MIN_VALUE;

        for(int i = 0; i < count; i++)
        {
            entries.add(adapter.read(din));
        }

        return new DataPalette<>(entries);
    }
}

View File

@@ -0,0 +1,8 @@
package com.volmit.iris.scaffold.data;
import com.volmit.iris.manager.IrisDataManager;
/**
 * Anything that can supply an IrisDataManager (pack/registry data access).
 */
public interface DataProvider
{
    /** @return the data manager backing this provider */
    public IrisDataManager getData();
}

View File

@@ -0,0 +1,10 @@
package com.volmit.iris.scaffold.data;
import java.io.*;
/**
 * Symmetric binary serializer: write(t) must produce exactly the bytes that
 * read() consumes for one value.
 *
 * @param <T> the value type being serialized
 */
public interface IOAdapter<T>
{
    /** Writes one value to the stream. */
    public void write(T t, DataOutputStream dos) throws IOException;

    /** Reads one value from the stream. */
    public T read(DataInputStream din) throws IOException;
}

View File

@@ -0,0 +1,14 @@
package com.volmit.iris.scaffold.engine;
import com.volmit.iris.util.RNG;
import org.bukkit.Chunk;
import org.bukkit.block.data.BlockData;
/**
 * Records block positions that need a post-generation update pass and applies
 * those updates per chunk.
 */
public interface BlockUpdater {
    /** Records (x, y, z) as needing an update when the data qualifies. */
    public void catchBlockUpdates(int x, int y, int z, BlockData data);

    /** Applies all recorded updates inside chunk c. */
    public void updateChunk(Chunk c);

    /** Applies one update at chunk-relative (x, y, z) using rf for randomness. */
    public void update(int x, int y, int z, Chunk c, RNG rf);
}

View File

@@ -0,0 +1,308 @@
package com.volmit.iris.scaffold.engine;
import com.volmit.iris.Iris;
import com.volmit.iris.manager.IrisDataManager;
import com.volmit.iris.object.*;
import com.volmit.iris.scaffold.cache.Cache;
import com.volmit.iris.scaffold.data.DataProvider;
import com.volmit.iris.scaffold.hunk.Hunk;
import com.volmit.iris.scaffold.parallax.ParallaxAccess;
import com.volmit.iris.util.B;
import com.volmit.iris.util.KList;
import com.volmit.iris.util.RNG;
import org.bukkit.Chunk;
import org.bukkit.World;
import org.bukkit.block.Biome;
import org.bukkit.block.Block;
import org.bukkit.block.data.BlockData;
import org.bukkit.inventory.Inventory;
import org.bukkit.inventory.InventoryHolder;
import org.bukkit.inventory.ItemStack;
import java.util.Arrays;
/**
 * Core generation engine for one vertical slice of a world (stacked by an
 * EngineCompound). Most behavior is supplied as default methods delegating to
 * the engine's target (world/dimension/data), framework (streams/pipeline)
 * and parallax store.
 */
public interface Engine extends DataProvider, Fallible, GeneratorAccess, LootProvider, BlockUpdater {
    /** Releases this engine's resources; isClosed() reports true afterwards. */
    public void close();

    public boolean isClosed();

    public EngineWorldManager getWorldManager();

    /** Generation parallelism (thread-count hint). */
    public void setParallelism(int parallelism);

    public int getParallelism();

    /** @return the world/dimension/data target this engine generates for */
    public EngineTarget getTarget();

    public EngineFramework getFramework();

    /** Lowest world Y this engine is responsible for. */
    public void setMinHeight(int min);

    /** Index of this engine within its compound. */
    public int getIndex();

    public int getMinHeight();

    /** Coordinate modifiers applied by the engine before lookups. */
    public double modifyX(double x);

    public double modifyZ(double z);

    /** Generates blocks and biomes for the 16x16 column at block coords (x, z). */
    public void generate(int x, int z, Hunk<BlockData> blocks, Hunk<Biome> biomes);

    /** Queues an async save of all parallax data. */
    default void save()
    {
        getParallax().saveAll();
    }

    /** Saves all parallax data synchronously. */
    default void saveNow()
    {
        getParallax().saveAllNOW();
    }

    default String getName()
    {
        return getDimension().getName();
    }

    /** Vertical extent (in blocks) of this engine's slice. */
    public default int getHeight()
    {
        return getTarget().getHeight();
    }

    public default IrisDataManager getData()
    {
        return getTarget().getData();
    }

    public default World getWorld()
    {
        return getTarget().getWorld();
    }

    public default IrisDimension getDimension()
    {
        return getTarget().getDimension();
    }

    public default ParallaxAccess getParallax()
    {
        return getTarget().getParallaxWorld();
    }

    @Override
    public default IrisRegion getRegion(int x, int z) {
        return getFramework().getComplex().getRegionStream().get(x, z);
    }

    @Override
    public default ParallaxAccess getParallaxAccess()
    {
        return getParallax();
    }

    @Override
    public default IrisBiome getCaveBiome(int x, int z)
    {
        return getFramework().getComplex().getCaveBiomeStream().get(x, z);
    }

    @Override
    public default IrisBiome getSurfaceBiome(int x, int z)
    {
        return getFramework().getComplex().getTrueBiomeStream().get(x, z);
    }

    @Override
    public default int getHeight(int x, int z)
    {
        return getFramework().getEngineParallax().getHighest(x, z, true);
    }

    /**
     * Marks (x, y, z) for a later update pass when the block data is
     * updatable, and flags the owning parallax chunk as having updates.
     */
    @Override
    public default void catchBlockUpdates(int x, int y, int z, BlockData data) {
        if(data == null)
        {
            return;
        }

        if(B.isUpdatable(data))
        {
            getParallax().updateBlock(x, y, z);
            getParallax().getMetaRW(x >> 4, z >> 4).setUpdates(true);
        }
    }

    /** Runs update() over every flagged position recorded for chunk c. */
    @Override
    public default void updateChunk(Chunk c)
    {
        if(getParallax().getMetaR(c.getX(), c.getZ()).isUpdates())
        {
            Hunk<Boolean> b = getParallax().getUpdatesR(c.getX(), c.getZ());

            b.iterateSync((x, y, z, v) -> {
                if(v != null && v)
                {
                    // RNG seeded from the chunk key so updates are deterministic per chunk.
                    update(x, y, z, c, new RNG(Cache.key(c.getX(), c.getZ())));
                }
            });
        }
    }

    /**
     * Updates one block: storage blocks (chests) get loot rolled into their
     * inventory; light-emitting blocks are refreshed via the Bukkit link.
     */
    @Override
    public default void update(int x, int y, int z, Chunk c, RNG rf)
    {
        Block block = c.getBlock(x, y, z);
        BlockData data = block.getBlockData();

        if(B.isStorage(data))
        {
            RNG rx = rf.nextParallelRNG(x).nextParallelRNG(z).nextParallelRNG(y);
            InventorySlotType slot = null;

            if(B.isStorageChest(data))
            {
                slot = InventorySlotType.STORAGE;
            }

            if(slot != null)
            {
                KList<IrisLootTable> tables = getLootTables(rx.nextParallelRNG(4568111), block);
                InventorySlotType slott = slot;

                try
                {
                    InventoryHolder m = (InventoryHolder) block.getState();
                    addItems(false, m.getInventory(), rx, tables, slott, x, y, z, 15);
                }
                catch(Throwable ignored)
                {
                    // Best-effort: the block state may not actually hold an inventory.
                }
            }
        }
        else if(B.isLit(data))
        {
            Iris.linkBK.updateBlock(block);
        }
    }

    /**
     * Shuffles an inventory: first splits stacks of more than one item into
     * random-sized stacks while empty slots remain, then randomly permutes
     * the slots.
     */
    @Override
    public default void scramble(Inventory inventory, RNG rng)
    {
        org.bukkit.inventory.ItemStack[] items = inventory.getContents();
        org.bukkit.inventory.ItemStack[] nitems = new org.bukkit.inventory.ItemStack[inventory.getSize()];
        System.arraycopy(items, 0, nitems, 0, items.length);
        boolean packedFull = false;

        // Move a random portion of each multi-item stack into the first empty
        // slot; stop splitting once no empty slot can be found.
        splitting: for(int i = 0; i < nitems.length; i++)
        {
            ItemStack is = nitems[i];

            if(is != null && is.getAmount() > 1 && !packedFull)
            {
                for(int j = 0; j < nitems.length; j++)
                {
                    if(nitems[j] == null)
                    {
                        int take = rng.nextInt(is.getAmount());
                        take = take == 0 ? 1 : take;
                        is.setAmount(is.getAmount() - take);
                        nitems[j] = is.clone();
                        nitems[j].setAmount(take);
                        continue splitting;
                    }
                }

                packedFull = true;
            }
        }

        // Shuffle by sorting with a random comparator. That comparator
        // violates the sort contract, so parallelSort can throw
        // IllegalArgumentException; retry up to 4 times and accept whatever
        // partial order resulted.
        for(int i = 0; i < 4; i++)
        {
            try
            {
                Arrays.parallelSort(nitems, (a, b) -> rng.nextInt());
                break;
            }
            catch(Throwable e)
            {
                // ignored: retry the shuffle
            }
        }

        inventory.setContents(nitems);
    }

    /**
     * Adds r's loot tables to list. NOTE(review): both CLEAR and REPLACE
     * empty the list first and then still add r's tables, making the two
     * modes behave identically here — confirm intended.
     */
    @Override
    public default void injectTables(KList<IrisLootTable> list, IrisLootReference r)
    {
        if(r.getMode().equals(LootMode.CLEAR) || r.getMode().equals(LootMode.REPLACE))
        {
            list.clear();
        }

        list.addAll(r.getLootTables(getFramework().getComplex()));
    }

    /**
     * Collects loot tables for a block from the dimension, region, surface
     * biome and (when the block is below the surface height) cave biome,
     * then duplicates or trims random entries so the count matches the
     * combined loot multiplier.
     */
    @Override
    public default KList<IrisLootTable> getLootTables(RNG rng, Block b)
    {
        int rx = b.getX();
        int rz = b.getZ();
        double he = getFramework().getComplex().getHeightStream().get(rx, rz);
        IrisRegion region = getFramework().getComplex().getRegionStream().get(rx, rz);
        IrisBiome biomeSurface = getFramework().getComplex().getTrueBiomeStream().get(rx, rz);
        IrisBiome biomeUnder = b.getY() < he ? getFramework().getComplex().getCaveBiomeStream().get(rx, rz) : biomeSurface;
        KList<IrisLootTable> tables = new KList<>();
        double multiplier = 1D * getDimension().getLoot().getMultiplier() * region.getLoot().getMultiplier() * biomeSurface.getLoot().getMultiplier() * biomeUnder.getLoot().getMultiplier();
        injectTables(tables, getDimension().getLoot());
        injectTables(tables, region.getLoot());
        injectTables(tables, biomeSurface.getLoot());
        injectTables(tables, biomeUnder.getLoot());

        if(tables.isNotEmpty())
        {
            int target = (int) Math.round(tables.size() * multiplier);

            // Grow by duplicating random entries / shrink by removing random
            // entries until the list reaches the multiplier-scaled target.
            while(tables.size() < target && tables.isNotEmpty())
            {
                tables.add(tables.get(rng.i(tables.size() - 1)));
            }

            while(tables.size() > target && tables.isNotEmpty())
            {
                tables.remove(rng.i(tables.size() - 1));
            }
        }

        return tables;
    }

    /**
     * Rolls loot from every table into the inventory, then scrambles it.
     * The incrementing b counter perturbs each table's item budget;
     * NOTE(review): the rng passed to getLoot is re-derived identically each
     * iteration — confirm intended.
     */
    @Override
    public default void addItems(boolean debug, Inventory inv, RNG rng, KList<IrisLootTable> tables, InventorySlotType slot, int x, int y, int z, int mgf)
    {
        KList<ItemStack> items = new KList<>();
        int b = 4;

        for(IrisLootTable i : tables)
        {
            b++;
            items.addAll(i.getLoot(debug, items.isEmpty(), rng.nextParallelRNG(345911), slot, x, y, z, b + b, mgf + b));
        }

        for(ItemStack i : items)
        {
            inv.addItem(i);
        }

        scramble(inv, rng);
    }

    /** Top world Y of this engine's slice (height plus minimum height). */
    public default int getMaxHeight()
    {
        return getHeight() + getMinHeight();
    }
}

View File

@@ -0,0 +1,8 @@
package com.volmit.iris.scaffold.engine;
import com.volmit.iris.scaffold.hunk.Hunk;
/**
 * A pipeline stage that writes ("actuates") generated output for a chunk
 * column into the given hunk.
 *
 * @param <O> the output cell type (e.g. BlockData or Biome)
 */
public interface EngineActuator<O> extends EngineComponent
{
    /** Fills output for the column whose world-space origin is (x, z). */
    public void actuate(int x, int z, Hunk<O> output);
}

View File

@@ -0,0 +1,18 @@
package com.volmit.iris.scaffold.engine;
import com.volmit.iris.scaffold.hunk.Hunk;
/**
 * Convenience base for actuators bound to an engine: actuate() delegates to
 * the subclass hook onActuate().
 *
 * @param <T> the output cell type
 */
public abstract class EngineAssignedActuator<T> extends EngineAssignedComponent implements EngineActuator<T>
{
    public EngineAssignedActuator(Engine engine, String name)
    {
        super(engine, name);
    }

    /** Subclass hook performing the actual actuation for column (x, z). */
    public abstract void onActuate(int x, int z, Hunk<T> output);

    @Override
    public void actuate(int x, int z, Hunk<T> output) {
        onActuate(x, z, output);
    }
}

View File

@@ -0,0 +1,18 @@
package com.volmit.iris.scaffold.engine;
import com.volmit.iris.scaffold.hunk.Hunk;
/**
 * Convenience base for bi-modifiers bound to an engine: modify() delegates to
 * the subclass hook onModify().
 *
 * @param <A> the first hunk cell type
 * @param <B> the second hunk cell type
 */
public abstract class EngineAssignedBiModifier<A, B> extends EngineAssignedComponent implements EngineBiModifier<A, B>
{
    public EngineAssignedBiModifier(Engine engine, String name)
    {
        super(engine, name);
    }

    /** Subclass hook performing the actual modification for column (x, z). */
    public abstract void onModify(int x, int z, Hunk<A> a, Hunk<B> b);

    @Override
    public void modify(int x, int z, Hunk<A> a, Hunk<B> b) {
        onModify(x, z, a, b);
    }
}

View File

@@ -0,0 +1,18 @@
package com.volmit.iris.scaffold.engine;
import com.volmit.iris.util.RollingSequence;
import lombok.Data;
/**
 * Base EngineComponent implementation holding the owning engine, a rolling
 * metrics window and a display name. Lombok's @Data generates the getters
 * required by the EngineComponent interface.
 */
@Data
public class EngineAssignedComponent implements EngineComponent {
    private final Engine engine;
    // 16-sample rolling window for per-component timing metrics.
    private final RollingSequence metrics;
    private final String name;

    public EngineAssignedComponent(Engine engine, String name)
    {
        this.engine = engine;
        this.metrics = new RollingSequence(16);
        this.name = name;
    }
}

View File

@@ -0,0 +1,18 @@
package com.volmit.iris.scaffold.engine;
import com.volmit.iris.scaffold.hunk.Hunk;
/**
 * Convenience base for modifiers bound to an engine: modify() delegates to
 * the subclass hook onModify().
 *
 * @param <T> the hunk cell type being modified
 */
public abstract class EngineAssignedModifier<T> extends EngineAssignedComponent implements EngineModifier<T>
{
    public EngineAssignedModifier(Engine engine, String name)
    {
        super(engine, name);
    }

    /** Subclass hook performing the actual modification for column (x, z). */
    public abstract void onModify(int x, int z, Hunk<T> output);

    @Override
    public void modify(int x, int z, Hunk<T> output) {
        onModify(x, z, output);
    }
}

View File

@@ -0,0 +1,7 @@
package com.volmit.iris.scaffold.engine;
/**
 * Base class for engine-bound structure managers; registered under the
 * component name "Structure".
 */
public abstract class EngineAssignedStructureManager extends EngineAssignedComponent implements EngineStructureManager {
    public EngineAssignedStructureManager(Engine engine) {
        super(engine, "Structure");
    }
}

View File

@@ -0,0 +1,73 @@
package com.volmit.iris.scaffold.engine;
import com.volmit.iris.Iris;
import org.bukkit.Bukkit;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.block.BlockBreakEvent;
import org.bukkit.event.block.BlockPlaceEvent;
import org.bukkit.event.entity.EntitySpawnEvent;
import org.bukkit.event.world.WorldSaveEvent;
import org.bukkit.event.world.WorldUnloadEvent;
/**
 * Base world manager bound to an engine. Registers itself as a Bukkit
 * listener and schedules a sync repeating tick task on construction; every
 * event handler filters to this engine's own world before delegating to the
 * abstract hooks (declared on EngineWorldManager).
 */
public abstract class EngineAssignedWorldManager extends EngineAssignedComponent implements EngineWorldManager, Listener {
    // Bukkit scheduler id of the repeating tick task; cancelled in close().
    private final int taskId;

    public EngineAssignedWorldManager(Engine engine) {
        super(engine, "World");
        Iris.instance.registerListener(this);
        // Repeating sync task with delay 0 and period 0 (presumably runs
        // every server tick — confirm Bukkit's handling of a 0 period).
        taskId = Bukkit.getScheduler().scheduleSyncRepeatingTask(Iris.instance, this::onTick, 0, 0);
    }

    /** Saves engine state when this engine's world is saved. */
    @EventHandler
    public void on(WorldSaveEvent e)
    {
        if(e.getWorld().equals(getTarget().getWorld()))
        {
            onSave();
        }
    }

    /** Closes the engine when its world is unloaded. */
    @EventHandler
    public void on(WorldUnloadEvent e)
    {
        if(e.getWorld().equals(getTarget().getWorld()))
        {
            getEngine().close();
        }
    }

    @EventHandler
    public void on(EntitySpawnEvent e)
    {
        if(e.getEntity().getWorld().equals(getTarget().getWorld()))
        {
            onEntitySpawn(e);
        }
    }

    @EventHandler
    public void on(BlockBreakEvent e)
    {
        if(e.getPlayer().getWorld().equals(getTarget().getWorld()))
        {
            onBlockBreak(e);
        }
    }

    @EventHandler
    public void on(BlockPlaceEvent e)
    {
        if(e.getPlayer().getWorld().equals(getTarget().getWorld()))
        {
            onBlockPlace(e);
        }
    }

    /** Unregisters the listener and cancels the tick task, then closes normally. */
    @Override
    public void close() {
        super.close();
        Iris.instance.unregisterListener(this);
        Bukkit.getScheduler().cancelTask(taskId);
    }
}

View File

@@ -0,0 +1,7 @@
package com.volmit.iris.scaffold.engine;
import com.volmit.iris.scaffold.hunk.Hunk;
/**
 * A pipeline stage that modifies two hunks together (e.g. blocks and biomes)
 * for one chunk column.
 *
 * @param <A> the first hunk cell type
 * @param <B> the second hunk cell type
 */
public interface EngineBiModifier<A, B> extends EngineComponent {
    /** Modifies both hunks for the column at (x, z) in place. */
    public void modify(int x, int z, Hunk<A> a, Hunk<B> b);
}

View File

@@ -0,0 +1,83 @@
package com.volmit.iris.scaffold.engine;
import com.volmit.iris.Iris;
import com.volmit.iris.manager.IrisDataManager;
import com.volmit.iris.object.IrisDimension;
import com.volmit.iris.util.RollingSequence;
import com.volmit.iris.generator.IrisComplex;
import com.volmit.iris.scaffold.parallax.ParallaxAccess;
import org.bukkit.event.Listener;
/**
 * Base contract for anything attached to an Engine (actuators, modifiers,
 * managers): exposes the owning engine, metrics, a name, and convenience
 * delegates into the engine's target and framework.
 */
public interface EngineComponent {
    public Engine getEngine();

    /** Rolling performance metrics for this component. */
    public RollingSequence getMetrics();

    public String getName();

    /** Best-effort cleanup: unregisters this component if it is a listener. */
    default void close()
    {
        try
        {
            if(this instanceof Listener)
            {
                Iris.instance.unregisterListener((Listener) this);
            }
        }
        catch(Throwable ignored)
        {
            // ignored: unregistering may fail during shutdown
        }
    }

    /** Engine coordinate-space modifiers. */
    default double modX(double x)
    {
        return getEngine().modifyX(x);
    }

    default double modZ(double z)
    {
        return getEngine().modifyZ(z);
    }

    public default IrisDataManager getData()
    {
        return getEngine().getData();
    }

    public default ParallaxAccess getParallax()
    {
        return getEngine().getParallax();
    }

    public default EngineTarget getTarget()
    {
        return getEngine().getTarget();
    }

    public default IrisDimension getDimension()
    {
        return getEngine().getDimension();
    }

    /** The seed of the world this component's engine generates for. */
    public default long getSeed()
    {
        return getTarget().getWorld().getSeed();
    }

    public default EngineFramework getFramework()
    {
        return getEngine().getFramework();
    }

    public default int getParallelism()
    {
        return getEngine().getParallelism();
    }

    public default IrisComplex getComplex()
    {
        return getFramework().getComplex();
    }
}

View File

@@ -0,0 +1,291 @@
package com.volmit.iris.scaffold.engine;
import com.volmit.iris.Iris;
import com.volmit.iris.generator.IrisEngineCompound;
import com.volmit.iris.generator.legacy.scaffold.TerrainChunk;
import com.volmit.iris.manager.IrisDataManager;
import com.volmit.iris.object.IrisBiome;
import com.volmit.iris.object.IrisDimension;
import com.volmit.iris.scaffold.hunk.Hunk;
import com.volmit.iris.util.Form;
import com.volmit.iris.util.KList;
import com.volmit.iris.util.M;
import org.bukkit.Chunk;
import org.bukkit.Location;
import org.bukkit.World;
import org.bukkit.block.Biome;
import org.bukkit.block.data.BlockData;
import org.bukkit.generator.BlockPopulator;
import org.bukkit.generator.ChunkGenerator;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.File;
import java.util.List;
import java.util.Random;
import java.util.concurrent.atomic.AtomicBoolean;
/**
 * Bukkit ChunkGenerator bridging the server to an Iris EngineCompound. The
 * compound is created lazily on the first chunk request (so the world folder
 * exists), and the dimension is either given explicitly or auto-detected
 * from the world's iris folder.
 */
public class EngineCompositeGenerator extends ChunkGenerator implements IrisAccess {
    private EngineCompound compound;
    // Guards lazy initialization; reset by hotload() to force re-init.
    private final AtomicBoolean initialized;
    // Dimension load key, or null to auto-detect from the world folder.
    private final String dimensionHint;
    // Production worlds load pack data from the world's own iris/pack folder.
    private final boolean production;
    private final KList<BlockPopulator> populators;
    // Chunk counter. Atomic because isParallelCapable() returns true, so
    // generateChunkData (and this increment) can run on several threads at
    // once — a plain int++ would lose counts.
    private final java.util.concurrent.atomic.AtomicInteger generated = new java.util.concurrent.atomic.AtomicInteger(0);

    public EngineCompositeGenerator() {
        this(null, true);
    }

    public EngineCompositeGenerator(String hint, boolean production) {
        super();
        this.production = production;
        this.dimensionHint = hint;
        initialized = new AtomicBoolean(false);
        // Delegating placeholder populator: forwards to the compound's
        // populators once the compound exists (it is created lazily).
        populators = new KList<BlockPopulator>().qadd(new BlockPopulator() {
            @Override
            public void populate(@NotNull World world, @NotNull Random random, @NotNull Chunk chunk) {
                if(compound != null)
                {
                    for(BlockPopulator i : compound.getPopulators())
                    {
                        i.populate(world, random, chunk);
                    }
                }
            }
        });
    }

    /** Dumps cached pack data and forces re-initialization on the next chunk. */
    public void hotload()
    {
        Iris.globaldata.dump();
        initialized.lazySet(false);
    }

    /**
     * Resolves the dimension for a world, either from the explicit hint or by
     * scanning the world's iris folder (v1 dimensions/*.json, or the v2
     * engine-metadata.json).
     *
     * @throws RuntimeException when no dimension can be resolved
     */
    private synchronized IrisDimension getDimension(World world) {
        String hint = dimensionHint;
        IrisDimension dim = null;

        if (hint == null) {
            File iris = new File(world.getWorldFolder(), "iris");
            // listFiles() returns null when the folder is missing or
            // unreadable; skip the scan in that case so the explicit
            // "cannot find" error below is thrown instead of an NPE.
            File[] contents = iris.listFiles();

            if (contents != null) {
                searching:
                for (File i : contents) {
                    // Look for v1 location
                    if (i.isDirectory() && i.getName().equals("dimensions")) {
                        for (File j : i.listFiles()) {
                            if (j.isFile() && j.getName().endsWith(".json")) {
                                hint = j.getName().replaceAll("\\Q.json\\E", "");
                                break searching;
                            }
                        }
                    }

                    // Look for v2 location
                    else if (i.isFile() && i.getName().equals("engine-metadata.json")) {
                        EngineData metadata = EngineData.load(i);
                        hint = metadata.getDimension();
                        break;
                    }
                }
            }
        }

        if (hint == null) {
            throw new RuntimeException("Cannot find iris dimension data for world: " + world.getName() + "! FAILED");
        }

        dim = Iris.globaldata.preferFolder(hint).getDimensionLoader().load(hint);

        if (dim == null) {
            throw new RuntimeException("Cannot find dimension: " + hint);
        }

        if (production) {
            // Production worlds re-load the dimension from the world-local pack.
            dim = new IrisDataManager(getDataFolder(world), true).preferFolder(dim.getLoadKey()).getDimensionLoader().load(dim.getLoadKey());

            if (dim == null) {
                throw new RuntimeException("Cannot find dimension: " + hint);
            }
        }

        return dim;
    }

    /** Lazily builds the engine compound for this world (idempotent). */
    private synchronized void initialize(World world) {
        if (initialized.get()) {
            return;
        }

        IrisDataManager data = production ? new IrisDataManager(getDataFolder(world)) : Iris.globaldata.copy();
        IrisDimension dim = getDimension(world);
        data.preferFolder(dim.getLoadKey());
        compound = new IrisEngineCompound(world, dim, data, Iris.getThreadCount());
        initialized.set(true);
        // Replace the delegating placeholder with the compound's populators.
        populators.clear();
        populators.addAll(compound.getPopulators());
    }

    private File getDataFolder(World world) {
        return new File(world.getWorldFolder(), "iris/pack");
    }

    @NotNull
    @Override
    public ChunkData generateChunkData(@NotNull World world, @NotNull Random ignored, int x, int z, @NotNull BiomeGrid biome) {
        TerrainChunk tc = TerrainChunk.create(world, biome);
        generateChunkRawData(world, ignored, x, z, tc);
        return tc.getRaw();
    }

    /** Generates one chunk into the given terrain chunk, initializing first. */
    public void generateChunkRawData(World world, Random ignored, int x, int z, TerrainChunk tc)
    {
        initialize(world);
        Hunk<BlockData> blocks = Hunk.view((ChunkData) tc);
        Hunk<Biome> biomes = Hunk.view((BiomeGrid) tc);
        long m = M.ms();
        compound.generate(x * 16, z * 16, blocks, biomes);
        // Debug timing output for each generated chunk.
        System.out.println("Generated " + x + "," + z + " in " + Form.duration(M.ms() - m, 0));
        generated.incrementAndGet();
    }

    @Override
    public boolean canSpawn(@NotNull World world, int x, int z) {
        return super.canSpawn(world, x, z);
    }

    @NotNull
    @Override
    public List<BlockPopulator> getDefaultPopulators(@NotNull World world) {
        return populators;
    }

    @Nullable
    @Override
    public Location getFixedSpawnLocation(@NotNull World world, @NotNull Random random) {
        return super.getFixedSpawnLocation(world, random);
    }

    @Override
    public boolean isParallelCapable() {
        return true;
    }

    // Vanilla features are all disabled; Iris generates everything itself.
    @Override
    public boolean shouldGenerateCaves() {
        return false;
    }

    @Override
    public boolean shouldGenerateDecorations() {
        return false;
    }

    @Override
    public boolean shouldGenerateMobs() {
        return false;
    }

    @Override
    public boolean shouldGenerateStructures() {
        return false;
    }

    public static EngineCompositeGenerator newStudioWorld(String dimension) {
        return new EngineCompositeGenerator(dimension, false);
    }

    public static EngineCompositeGenerator newProductionWorld(String dimension) {
        return new EngineCompositeGenerator(dimension, true);
    }

    public static EngineCompositeGenerator newProductionWorld() {
        return new EngineCompositeGenerator(null, true);
    }

    public EngineCompound getComposite() {
        return compound;
    }

    @Override
    public IrisBiome getBiome(int x, int z) {
        return getBiome(x, 0, z);
    }

    @Override
    public IrisBiome getCaveBiome(int x, int z) {
        return getCaveBiome(x, 0, z);
    }

    @Override
    public int getGenerated() {
        return generated.get();
    }

    @Override
    public IrisBiome getBiome(int x, int y, int z) {
        // TODO: REMOVE GET ABS BIOME OR THIS ONE
        return getEngineAccess(y).getBiome(x, y - getComposite().getEngineForHeight(y).getMinHeight(), z);
    }

    @Override
    public IrisBiome getCaveBiome(int x, int y, int z) {
        return getEngineAccess(y).getCaveBiome(x, z);
    }

    @Override
    public GeneratorAccess getEngineAccess(int y) {
        return getComposite().getEngineForHeight(y);
    }

    @Override
    public IrisDataManager getData() {
        return getComposite().getData();
    }

    @Override
    public int getHeight(int x, int y, int z) {
        return getEngineAccess(y).getHeight(x, z);
    }

    @Override
    public IrisBiome getAbsoluteBiome(int x, int y, int z) {
        // TODO: REMOVE GET BIOME OR THIS ONE
        return getEngineAccess(y).getBiome(x, y - getComposite().getEngineForHeight(y).getMinHeight(), z);
    }

    @Override
    public int getThreadCount() {
        // TODO: NOT CORRECT
        return Iris.getThreadCount();
    }

    @Override
    public void changeThreadCount(int m) {
        // TODO: DO IT
    }

    @Override
    public void regenerate(int x, int z) {
        // TODO: DO IT
    }

    @Override
    public void close() {
        getComposite().close();
    }

    @Override
    public boolean isClosed() {
        return getComposite().getEngine(0).isClosed();
    }

    @Override
    public EngineTarget getTarget() {
        return getComposite().getEngine(0).getTarget();
    }

    @Override
    public EngineCompound getCompound() {
        return getComposite();
    }
}

View File

@@ -0,0 +1,90 @@
package com.volmit.iris.scaffold.engine;
import com.volmit.iris.manager.IrisDataManager;
import com.volmit.iris.object.IrisDimension;
import com.volmit.iris.util.KList;
import org.bukkit.World;
import org.bukkit.block.Biome;
import org.bukkit.block.data.BlockData;
import org.bukkit.event.Listener;
import com.volmit.iris.scaffold.hunk.Hunk;
import com.volmit.iris.scaffold.parallel.MultiBurst;
import org.bukkit.generator.BlockPopulator;
/**
 * A vertical stack of one or more Engines composing a full world, plus the
 * shared metadata, populators and burst executor.
 */
public interface EngineCompound extends Listener
{
    public IrisDimension getRootDimension();

    /** Generates blocks/biomes for the column at block coords (x, z). */
    public void generate(int x, int z, Hunk<BlockData> blocks, Hunk<Biome> biomes);

    public World getWorld();

    /** Number of stacked engines. */
    public int getSize();

    public Engine getEngine(int index);

    public MultiBurst getBurster();

    public EngineData getEngineMetadata();

    public void saveEngineMetadata();

    public KList<BlockPopulator> getPopulators();

    /**
     * Maps a world Y to the engine whose slice contains it by accumulating
     * engine heights bottom-up; falls back to the top engine for Y values
     * beyond the total height.
     */
    default Engine getEngineForHeight(int height)
    {
        if(getSize() == 1)
        {
            return getEngine(0);
        }

        int buf = 0;

        for(int i = 0; i < getSize(); i++)
        {
            Engine e = getEngine(i);
            buf += e.getHeight();

            if(buf >= height)
            {
                return e;
            }
        }

        return getEngine(getSize() - 1);
    }

    /** Saves metadata, then asynchronously saves every engine. */
    public default void save()
    {
        saveEngineMetadata();

        for(int i = 0; i < getSize(); i++)
        {
            getEngine(i).save();
        }
    }

    /** Saves metadata, then synchronously saves every engine. */
    public default void saveNOW()
    {
        saveEngineMetadata();

        for(int i = 0; i < getSize(); i++)
        {
            getEngine(i).saveNow();
        }
    }

    public IrisDataManager getData(int height);

    /** Data manager of the bottom engine. */
    public default IrisDataManager getData()
    {
        return getData(0);
    }

    /** Closes every engine in the stack. */
    public default void close()
    {
        for(int i = 0; i < getSize(); i++)
        {
            getEngine(i).close();
        }
    }
}

View File

@@ -0,0 +1,40 @@
package com.volmit.iris.scaffold.engine;
import com.google.gson.Gson;
import com.volmit.iris.util.IO;
import lombok.Data;
import java.io.File;
import java.io.IOException;
/**
 * Small JSON metadata blob stored alongside a generated world (see
 * engine-metadata.json handling in EngineCompositeGenerator): records which
 * dimension the world uses and the last plugin version. Lombok's @Data
 * generates the getters/setters.
 */
@Data
public class EngineData {
    private String dimension;
    private String lastVersion;

    /** Serializes this object as JSON to f, creating parent directories. */
    public void save(File f)
    {
        try {
            f.getParentFile().mkdirs();
            IO.writeAll(f, new Gson().toJson(this));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Loads metadata from f, returning a fresh empty EngineData when the
     * file is missing or unparsable (errors swallowed as best-effort).
     */
    public static EngineData load(File f)
    {
        try
        {
            // NOTE(review): mkdirs() on a read path looks unnecessary — confirm.
            f.getParentFile().mkdirs();
            return new Gson().fromJson(IO.readAll(f), EngineData.class);
        }
        catch(Throwable e)
        {
            // ignored: fall through to the default instance below
        }

        return new EngineData();
    }
}

View File

@@ -0,0 +1,20 @@
package com.volmit.iris.scaffold.engine;
import com.volmit.iris.object.IrisBiome;
import com.volmit.iris.util.B;
import com.volmit.iris.scaffold.hunk.Hunk;
import org.bukkit.block.data.BlockData;
/**
 * Places decorations on a single column of a chunk.
 */
public interface EngineDecorator extends EngineComponent {
    /**
     * Decorates column (x, z). realX/realZ are world coordinates; the *1 and
     * *_1 variants are presumably neighboring-column world coordinates for
     * edge-aware decoration — confirm at call sites. height is the surface
     * height, max the ceiling.
     */
    public void decorate(int x, int z, int realX, int realX1, int realX_1, int realZ, int realZ1, int realZ_1, Hunk<BlockData> data, IrisBiome biome, int height, int max);

    /** Convenience overload collapsing all neighbor coordinates onto the column's own. */
    default void decorate(int x, int z, int realX, int realZ, Hunk<BlockData> data, IrisBiome biome, int height, int max)
    {
        decorate(x, z, realX, realX, realX, realZ, realZ, realZ, data, biome, height, max);
    }

    /** @return true if the decorant material may be placed on top of atop */
    default boolean canGoOn(BlockData decorant, BlockData atop)
    {
        return B.canPlaceOnto(decorant.getMaterial(), atop.getMaterial());
    }
}

View File

@@ -0,0 +1,50 @@
package com.volmit.iris.scaffold.engine;
import com.volmit.iris.util.M;
import com.volmit.iris.scaffold.parallel.MultiBurst;
import org.bukkit.block.Biome;
import org.bukkit.block.data.BlockData;
import com.volmit.iris.manager.IrisDataManager;
import com.volmit.iris.generator.IrisComplex;
import com.volmit.iris.scaffold.data.DataProvider;
/**
 * Wires an Engine to its generation pipeline: the complex (noise/streams),
 * the parallax manager, and the actuator/modifier stages run per chunk.
 */
public interface EngineFramework extends DataProvider
{
    public Engine getEngine();

    public IrisComplex getComplex();

    public EngineParallaxManager getEngineParallax();

    default IrisDataManager getData() {
        return getComplex().getData();
    }

    /**
     * Occasionally schedules async cleanup of parallax regions and the object
     * loader cache (M.r(0.1) presumably gates this to ~10% of calls — confirm).
     */
    default void recycle()
    {
        if(M.r(0.1))
        {
            MultiBurst.burst.lazy(() -> {
                getEngine().getParallax().cleanup();
                getData().getObjectLoader().clean();
            });
        }
    }

    // Pipeline stages.
    public EngineActuator<BlockData> getTerrainActuator();

    public EngineActuator<BlockData> getDecorantActuator();

    public EngineActuator<Biome> getBiomeActuator();

    public EngineModifier<BlockData> getCaveModifier();

    public EngineModifier<BlockData> getRavineModifier();

    public EngineModifier<BlockData> getDepositModifier();

    public EngineModifier<BlockData> getPostModifier();

    void close();
}

View File

@@ -0,0 +1,7 @@
package com.volmit.iris.scaffold.engine;
import com.volmit.iris.scaffold.hunk.Hunk;
/**
 * A pipeline stage that mutates already-generated chunk data in place
 * (caves, ravines, deposits, post-processing).
 *
 * @param <T> the hunk cell type being modified
 */
public interface EngineModifier<T> extends EngineComponent {
    /** Modifies the hunk for the column at block coords (x, z) in place. */
    public void modify(int x, int z, Hunk<T> t);
}

View File

@@ -0,0 +1,443 @@
package com.volmit.iris.scaffold.engine;
import java.util.concurrent.atomic.AtomicInteger;
import com.volmit.iris.object.*;
import com.volmit.iris.util.*;
import com.volmit.iris.generator.actuator.IrisTerrainActuator;
import com.volmit.iris.generator.modifier.IrisCaveModifier;
import com.volmit.iris.scaffold.parallax.ParallaxChunkMeta;
import org.bukkit.block.data.BlockData;
import org.bukkit.util.BlockVector;
import com.volmit.iris.Iris;
import com.volmit.iris.manager.IrisDataManager;
import com.volmit.iris.generator.IrisComplex;
import com.volmit.iris.scaffold.cache.Cache;
import com.volmit.iris.scaffold.data.DataProvider;
import com.volmit.iris.scaffold.hunk.Hunk;
import com.volmit.iris.scaffold.parallax.ParallaxAccess;
import com.volmit.iris.scaffold.parallel.BurstExecutor;
import com.volmit.iris.scaffold.parallel.MultiBurst;
public interface EngineParallaxManager extends DataProvider, IObjectPlacer
{
public static final BlockData AIR = B.get("AIR");
public Engine getEngine();
public int getParallaxSize();
public EngineStructureManager getStructureManager();
default EngineFramework getFramework()
{
return getEngine().getFramework();
}
default ParallaxAccess getParallaxAccess()
{
return getEngine().getParallax();
}
default IrisDataManager getData()
{
return getEngine().getData();
}
default IrisComplex getComplex()
{
return getEngine().getFramework().getComplex();
}
default KList<IrisRegion> getAllRegions()
{
KList<IrisRegion> r = new KList<>();
for(String i : getEngine().getDimension().getRegions())
{
r.add(getEngine().getData().getRegionLoader().load(i));
}
return r;
}
default KList<IrisBiome> getAllBiomes()
{
KList<IrisBiome> r = new KList<>();
for(IrisRegion i : getAllRegions())
{
r.addAll(i.getAllBiomes(this));
}
return r;
}
default void insertParallax(int x, int z, Hunk<BlockData> data)
{
ParallaxChunkMeta meta = getParallaxAccess().getMetaR(x>>4, z>>4);
if(!meta.isObjects()) {
return;
}
for(int i = x; i < x+ data.getWidth(); i++)
{
for(int j= z; j < z + data.getDepth(); j++)
{
for(int k = 0; k < data.getHeight(); k++)
{
BlockData d = getParallaxAccess().getBlock(i, k, j);
if(d != null)
{
data.set(i - x, k, j - z, d);
}
}
}
}
}
default void generateParallaxArea(int x, int z)
{
int s = (int) Math.ceil(getParallaxSize() / 2D);
int j;
BurstExecutor e = MultiBurst.burst.burst(getParallaxSize() * getParallaxSize());
for(int i = -s; i <= s; i++)
{
int ii = i;
for(j = -s; j <= s; j++)
{
int jj = j;
e.queue(() -> generateParallaxLayer((ii*16)+x, (jj*16)+z));
}
}
e.complete();
getParallaxAccess().setChunkGenerated(x>>4, z>>4);
}
default void generateParallaxLayer(int x, int z)
{
if(getParallaxAccess().isParallaxGenerated(x >> 4, z >> 4))
{
return;
}
getParallaxAccess().setParallaxGenerated(x>>4, z>>4);
RNG rng = new RNG(Cache.key(x, z)).nextParallelRNG(getEngine().getTarget().getWorld().getSeed());
IrisRegion region = getComplex().getRegionStream().get(x+8, z+8);
IrisBiome biome = getComplex().getTrueBiomeStream().get(x+8, z+8);
generateParallaxSurface(rng, x, z, biome);
generateParallaxMutations(rng, x, z);
generateStructures(rng, x>>4, z>>4, region, biome);
}
/**
 * Places region-level, then biome-level structures for chunk (x, z). Each
 * placement derives its own parallel RNG from a running counter so distinct
 * structure placements never share a random sequence.
 *
 * @param x chunk X coordinate
 * @param z chunk Z coordinate
 */
default void generateStructures(RNG rng, int x, int z, IrisRegion region, IrisBiome biome)
{
// Arbitrary seed offset; incremented per placement to decorrelate streams.
int g = 30265;
for(IrisStructurePlacement k : region.getStructures())
{
if(k == null)
{
continue;
}
getStructureManager().placeStructure(k, rng.nextParallelRNG(2228 * 2 * g++), x, z);
}
for(IrisStructurePlacement k : biome.getStructures())
{
if(k == null)
{
continue;
}
getStructureManager().placeStructure(k, rng.nextParallelRNG(-22228 * 4 * g++), x, z);
}
}
/**
 * Rolls each of the biome's surface-object placements against its configured
 * chance and places the winners into the 16x16 layer at (x, z).
 */
default void generateParallaxSurface(RNG rng, int x, int z, IrisBiome biome) {
    for(IrisObjectPlacement placement : biome.getSurfaceObjects()) {
        if(rng.chance(placement.getChance())) {
            place(rng, x, z, placement);
        }
    }
}
/**
 * Samples pairs of biomes around chunk (x, z) and, when a configured mutation
 * matches the pair (side A / side B), places that mutation's objects once.
 * Each RNG call consumes state, so the exact call order here is significant.
 */
default void generateParallaxMutations(RNG rng, int x, int z) {
if(getEngine().getDimension().getMutations().isEmpty())
{
return;
}
searching: for(IrisBiomeMutation k : getEngine().getDimension().getMutations())
{
for(int l = 0; l < k.getChecks(); l++)
{
// Two random samples within the mutation radius of this chunk.
IrisBiome sa = getComplex().getTrueBiomeStream().get(((x * 16) + rng.nextInt(16)) + rng.i(-k.getRadius(), k.getRadius()), ((z * 16) + rng.nextInt(16)) + rng.i(-k.getRadius(), k.getRadius()));
IrisBiome sb = getComplex().getTrueBiomeStream().get(((x * 16) + rng.nextInt(16)) + rng.i(-k.getRadius(), k.getRadius()), ((z * 16) + rng.nextInt(16)) + rng.i(-k.getRadius(), k.getRadius()));
// Mutations only fire on a border between two different biomes.
if(sa.getLoadKey().equals(sb.getLoadKey()))
{
continue;
}
if(k.getRealSideA(this).contains(sa.getLoadKey()) && k.getRealSideB(this).contains(sb.getLoadKey()))
{
for(IrisObjectPlacement m : k.getObjects())
{
place(rng.nextParallelRNG((34 * ((x * 30) + (z * 30)) * x * z) + x - z + 1569962), x, z, m);
}
// At most one successful check per mutation; move to the next one.
continue searching;
}
}
}
}
/**
 * Places objects in the 16x16 area at (x, z) without forcing a Y level
 * (-1 appears to be the "resolve height automatically" sentinel — see the
 * four-argument overload).
 */
default void place(RNG rng, int x, int z, IrisObjectPlacement objectPlacement) {
    place(rng, x, -1, z, objectPlacement);
}
/**
 * Places up to density copies of a randomly-chosen schematic from the
 * placement into the 16x16 area at (x, z), recording every placed block in
 * the parallax store and updating the owning chunk's object metadata.
 * The order of RNG draws (schematic, xx, zz, id) is part of the world seed
 * contract — do not reorder.
 *
 * @param forceY fixed Y level for the object, or -1 (appears to mean
 *               "let the placement resolve the height" — see placeLayer)
 */
default void place(RNG rng, int x, int forceY, int z, IrisObjectPlacement objectPlacement)
{
for(int i = 0; i < objectPlacement.getDensity(); i++)
{
IrisObject v = objectPlacement.getSchematic(getComplex(), rng);
int xx = rng.i(x, x+16);
int zz = rng.i(z, z+16);
// Random instance id; tagged onto every block so the object can be
// identified later via getObjectPlacement.
int id = rng.i(0, Integer.MAX_VALUE);
v.place(xx, forceY, zz, this, objectPlacement, rng, (b) -> {
getParallaxAccess().setObject(b.getX(), b.getY(), b.getZ(), v.getLoadKey() + "@" + id);
ParallaxChunkMeta meta = getParallaxAccess().getMetaRW(b.getX() >> 4, b.getZ() >> 4);
meta.setObjects(true);
meta.setMaxObject(Math.max(b.getY(), meta.getMaxObject()));
// min is clamped at >= 0 before comparing.
meta.setMinObject(Math.min(b.getY(), Math.max(meta.getMinObject(), 0)));
}, null, getData());
}
}
/**
 * Widens the object Y-bounds recorded in the chunk metadata at (x, z) to
 * include [minY, maxY] and marks the chunk as containing objects.
 */
default void updateParallaxChunkObjectData(int minY, int maxY, int x, int z, IrisObject v) {
    ParallaxChunkMeta meta = getParallaxAccess().getMetaRW(x >> 4, z >> 4);
    meta.setObjects(true);
    meta.setMaxObject(Math.max(meta.getMaxObject(), maxY));
    // Existing min is clamped at >= 0 before taking the smaller value.
    meta.setMinObject(Math.min(Math.max(meta.getMinObject(), 0), minY));
}
/**
 * Computes the parallax radius (in chunks) this dimension needs by measuring
 * the largest X/Z footprint among all referenced objects, deposit generators
 * and text placements. The result is square, odd-sized, and at least 3.
 */
default int computeParallaxSize()
{
    Iris.verbose("Calculating the Parallax Size in Parallel");
    AtomicInteger xg = new AtomicInteger(0);
    AtomicInteger zg = new AtomicInteger(0);
    KSet<String> objects = new KSet<>();
    KList<IrisRegion> r = getAllRegions();
    KList<IrisBiome> b = getAllBiomes();

    // Collect every object key referenced by any biome placement.
    for(IrisBiome i : b)
    {
        for(IrisObjectPlacement j : i.getObjects())
        {
            objects.addAll(j.getPlace());
        }
    }

    Iris.verbose("Checking sizes for " + Form.f(objects.size()) + " referenced objects.");

    // Sample object bounding boxes in parallel. accumulateAndGet is a
    // lock-free atomic read-modify-write, replacing the old redundant
    // synchronized blocks around get()/getAndSet().
    BurstExecutor e = MultiBurst.burst.burst(objects.size());
    for(String i : objects)
    {
        e.queue(() -> {
            try
            {
                BlockVector bv = IrisObject.sampleSize(getData().getObjectLoader().findFile(i));
                xg.accumulateAndGet(bv.getBlockX(), Math::max);
                zg.accumulateAndGet(bv.getBlockZ(), Math::max);
            }
            catch(Throwable ignored)
            {
                // Missing/corrupt object files simply don't contribute a size.
            }
        });
    }
    e.complete();

    int x = xg.get();
    int z = zg.get();

    // Deposits and text placements can exceed object sizes; fold those in too.
    for(IrisDepositGenerator i : getEngine().getDimension().getDeposits())
    {
        int max = i.getMaxDimension();
        x = Math.max(max, x);
        z = Math.max(max, z);
    }

    for(IrisTextPlacement i : getEngine().getDimension().getText())
    {
        int max = i.maxDimension();
        x = Math.max(max, x);
        z = Math.max(max, z);
    }

    for(IrisRegion v : r)
    {
        for(IrisDepositGenerator i : v.getDeposits())
        {
            int max = i.getMaxDimension();
            x = Math.max(max, x);
            z = Math.max(max, z);
        }

        for(IrisTextPlacement i : v.getText())
        {
            int max = i.maxDimension();
            x = Math.max(max, x);
            z = Math.max(max, z);
        }
    }

    for(IrisBiome v : b)
    {
        for(IrisDepositGenerator i : v.getDeposits())
        {
            int max = i.getMaxDimension();
            x = Math.max(max, x);
            z = Math.max(max, z);
        }

        for(IrisTextPlacement i : v.getText())
        {
            int max = i.maxDimension();
            x = Math.max(max, x);
            z = Math.max(max, z);
        }
    }

    // Convert blocks -> chunks with one chunk of padding, force odd sizes so
    // there is a true center chunk, and keep the area square.
    x = (Math.max(x, 16) + 16) >> 4;
    z = (Math.max(z, 16) + 16) >> 4;
    x = x % 2 == 0 ? x + 1 : x;
    z = z % 2 == 0 ? z + 1 : z;
    x = Math.max(x, z);
    z = x;
    Iris.verbose(getEngine().getDimension().getName() + " Parallax Size: " + x + ", " + z);
    return x;
}
/** Highest surface at (x, z), respecting the dimension's fluid (sea) level. */
@Override
default int getHighest(int x, int z) {
    return getHighest(x, z, false);
}
/**
 * Highest surface at (x, z). When ignoreFluid is false the result is never
 * below the dimension's fluid height.
 */
@Override
default int getHighest(int x, int z, boolean ignoreFluid) {
    int terrain = trueHeight(x, z);
    return ignoreFluid ? terrain : Math.max(terrain, getEngine().getDimension().getFluidHeight());
}
/**
 * Computes the true terrain height at (x, z): the noise height, lowered
 * through any carved-out or cave air so the result is a real solid surface.
 */
default int trueHeight(int x, int z)
{
// Apply the engine's coordinate warp before sampling the height stream.
int rx = (int) Math.round(getEngine().modifyX(x));
int rz = (int) Math.round(getEngine().modifyZ(z));
int height = (int) Math.round(getComplex().getHeightStream().get(rx, rz));
int m = height;
if(getEngine().getDimension().isCarving())
{
// Descend while the column is carved out at m.
if(getEngine().getDimension().isCarved(rx, m, rz, ((IrisTerrainActuator)getFramework().getTerrainActuator()).getRng(), height))
{
m--;
while(getEngine().getDimension().isCarved(rx, m, rz, ((IrisTerrainActuator)getFramework().getTerrainActuator()).getRng(), height))
{
m--;
}
}
}
if(getEngine().getDimension().isCaves())
{
// If a cave straddles m, drop to its floor; repeat in case the new
// height lands inside another, lower cave.
KList<CaveResult> caves = ((IrisCaveModifier)getFramework().getCaveModifier()).genCaves(rx, rz, 0, 0, null);
boolean again = true;
while(again)
{
again = false;
for(CaveResult i : caves)
{
if(i.getCeiling() > m && i.getFloor() < m)
{
m = i.getFloor();
again = true;
}
}
}
}
return m;
}
/** Writes a block into the parallax layer at world position (x, y, z). */
@Override
default void set(int x, int y, int z, BlockData d) {
    getParallaxAccess().setBlock(x, y, z, d);
}
/**
 * Reads the parallax block at (x, y, z); empty cells are reported as AIR
 * rather than null.
 */
@Override
default BlockData get(int x, int y, int z) {
    BlockData block = getParallaxAccess().getBlock(x, y, z);
    return block != null ? block : AIR;
}
/** Whether the dimension disables leaf decay. */
@Override
default boolean isPreventingDecay() {
    return getEngine().getDimension().isPreventLeafDecay();
}
/** Whether the parallax block at (x, y, z) is a solid material. */
@Override
default boolean isSolid(int x, int y, int z) {
    return B.isSolid(get(x, y, z));
}
/** Whether the terrain surface (ignoring fluid) sits at or below fluid level. */
@Override
default boolean isUnderwater(int x, int z) {
    return getHighest(x, z, true) <= getFluidHeight();
}
/** The dimension's configured fluid (sea) level. */
@Override
default int getFluidHeight() {
    return getEngine().getDimension().getFluidHeight();
}
/** Whether smart-bore debugging is enabled for this dimension. */
@Override
default boolean isDebugSmartBore() {
    return getEngine().getDimension().isDebugSmartBore();
}
// No-op by default; presumably overridden by implementations that hold
// resources worth releasing — confirm against implementors.
default void close()
{
}
}

View File

@@ -0,0 +1,114 @@
package com.volmit.iris.scaffold.engine;
import com.volmit.iris.object.IrisObject;
import com.volmit.iris.object.IrisRareObject;
import com.volmit.iris.object.IrisStructurePlacement;
import com.volmit.iris.object.TileResult;
import com.volmit.iris.util.ChunkPosition;
import com.volmit.iris.util.KSet;
import com.volmit.iris.util.RNG;
import com.volmit.iris.scaffold.parallax.ParallaxChunkMeta;
/**
 * Places grid-aligned structures into the parallax layer. Structures snap to
 * a grid of size s (the placement's grid size, shrunk by one when edges are
 * merged); this manager finds every grid cell touching a chunk and stamps a
 * tile into each, optionally stacking multiple vertical layers.
 */
public interface EngineStructureManager extends EngineComponent
{
/**
 * Places all grid cells of the given structure that originate inside chunk
 * (cx, cz). Cells whose grid origin falls in an earlier chunk are skipped so
 * each cell is placed exactly once. RNG derivation order is seed-critical.
 */
default void placeStructure(IrisStructurePlacement structure, RNG rngno, int cx, int cz)
{
RNG rng = new RNG(getEngine().getWorld().getSeed()).nextParallelRNG(-88738456 + rngno.nextInt());
RNG rnp = rng.nextParallelRNG(cx - (cz * cz << 3) + rngno.nextInt());
int s = structure.gridSize(getEngine()) - (structure.getStructure(getEngine()).isMergeEdges() ? 1 : 0);
int sh = structure.gridHeight(getEngine()) - (structure.getStructure(getEngine()).isMergeEdges() ? 1 : 0);
// Grid origins already handled for this chunk (several blocks can map to
// the same snapped origin).
KSet<ChunkPosition> m = new KSet<>();
for(int i = cx << 4; i <= (cx << 4) + 15; i += 1)
{
// Skip cells whose snapped X origin belongs to a chunk west of us.
if(Math.floorDiv(i, s) * s >> 4 < cx)
{
continue;
}
for(int j = cz << 4; j <= (cz << 4) + 15; j += 1)
{
// Skip cells whose snapped Z origin belongs to a chunk north of us.
if(Math.floorDiv(j, s) * s >> 4 < cz)
{
continue;
}
ChunkPosition p = new ChunkPosition(Math.floorDiv(i, s) * s, Math.floorDiv(j, s) * s);
if(m.contains(p))
{
continue;
}
m.add(p);
if(structure.getStructure(getEngine()).getMaxLayers() <= 1)
{
placeLayer(structure, rng, rnp, i, 0, j, s, sh);
continue;
}
// Multi-layer structures stack vertically in steps of the grid height.
for(int k = 0; k < s * structure.getStructure(getEngine()).getMaxLayers(); k += Math.max(sh, 1))
{
placeLayer(structure, rng, rnp, i, k, j, s, sh);
}
}
}
}
/**
 * Stamps one tile at grid cell (i, k, j) if the chance generator selects this
 * cell. Rare object overrides are rolled first; otherwise a uniform pick from
 * the tile's object list is placed.
 */
default void placeLayer(IrisStructurePlacement structure, RNG rng, RNG rnp, int i, int k, int j, int s, int sh)
{
if(!hasStructure(structure, rng, i, k, j))
{
return;
}
int h = (structure.getHeight() == -1 ? 0 : structure.getHeight()) + (Math.floorDiv(k, sh) * sh);
TileResult t = structure.getStructure(getEngine()).getTile(rng, Math.floorDiv(i, s) * s, h, Math.floorDiv(j, s) * s);
if(t != null)
{
IrisObject o = null;
// Rare-object roll: first rarity hit wins.
for(IrisRareObject l : t.getTile().getRareObjects())
{
if(rnp.i(1, l.getRarity()) == 1)
{
o = structure.load(getEngine(), l.getObject());
break;
}
}
// NOTE(review): assumes the tile's object list is non-empty when no rare
// object is chosen — nextInt(0) would throw.
o = o != null ? o : structure.load(getEngine(), t.getTile().getObjects().get(rnp.nextInt(t.getTile().getObjects().size())));
int id = rng.i(0, Integer.MAX_VALUE);
IrisObject oo = o;
o.place(
Math.floorDiv(i, s) * s,
structure.getHeight() == -1 ? -1 : h,
Math.floorDiv(j, s) * s,
getEngine().getFramework().getEngineParallax(),
t.getPlacement(),
rng,
(b) -> {
// Tag every placed block with object@id and widen chunk metadata bounds.
getEngine().getParallax().setObject(b.getX(), b.getY(), b.getZ(), oo.getLoadKey() + "@" + id);
ParallaxChunkMeta meta = getEngine().getParallax().getMetaRW(b.getX() >> 4, b.getZ() >> 4);
meta.setObjects(true);
meta.setMaxObject(Math.max(b.getY(), meta.getMaxObject()));
meta.setMinObject(Math.min(b.getY(), Math.max(meta.getMinObject(), 0)));
},
null,
getData()
);
}
}
/**
 * Deterministically decides whether a structure cell exists at (x, y, z),
 * combining the placement's cellular chance generator with an optional
 * ratio-based distance cut.
 */
default boolean hasStructure(IrisStructurePlacement structure, RNG random, double x, double y, double z)
{
if(structure.getChanceGenerator(new RNG(getEngine().getWorld().getSeed())).getIndex(x / structure.getZoom(), y / structure.getZoom(), z / structure.getZoom(), structure.getRarity()) == structure.getRarity() / 2)
{
return structure.getRatio() > 0 ? structure.getChanceGenerator(new RNG(getEngine().getWorld().getSeed())).getDistance(x / structure.getZoom(), z / structure.getZoom()) > structure.getRatio() : structure.getChanceGenerator(new RNG(getEngine().getWorld().getSeed())).getDistance(x / structure.getZoom(), z / structure.getZoom()) < Math.abs(structure.getRatio());
}
return false;
}
}

View File

@@ -0,0 +1,38 @@
package com.volmit.iris.scaffold.engine;
import com.volmit.iris.scaffold.parallax.ParallaxWorld;
import com.volmit.iris.manager.IrisDataManager;
import com.volmit.iris.object.IrisDimension;
import com.volmit.iris.scaffold.parallel.MultiBurst;
import lombok.Data;
import org.bukkit.World;
import java.io.File;

/**
 * Immutable bundle describing what an engine generates against: the target
 * bukkit world, the dimension configuration, its data manager, the parallax
 * store for out-of-chunk writes, and a burst executor sized to the requested
 * thread count.
 */
@Data
public class EngineTarget
{
    private final MultiBurst burster;
    private final IrisDimension dimension;
    private final World world;
    private final int height;
    private final IrisDataManager data;
    private final ParallaxWorld parallaxWorld;
    private final boolean inverted;

    public EngineTarget(World world, IrisDimension dimension, IrisDataManager data, int height, boolean inverted, int threads)
    {
        this.world = world;
        this.dimension = dimension;
        this.data = data;
        this.height = height;
        this.inverted = inverted;
        // Parallax data lives under <world>/iris/<dimensionKey>/parallax.
        this.parallaxWorld = new ParallaxWorld(256, new File(world.getWorldFolder(), "iris/" + dimension.getLoadKey() + "/parallax"));
        this.burster = new MultiBurst(threads);
    }

    /** Convenience constructor for a non-inverted target. */
    public EngineTarget(World world, IrisDimension dimension, IrisDataManager data, int height, int threads)
    {
        this(world, dimension, data, height, false, threads);
    }
}

View File

@@ -0,0 +1,23 @@
package com.volmit.iris.scaffold.engine;
import org.bukkit.Chunk;
import org.bukkit.event.block.BlockBreakEvent;
import org.bukkit.event.block.BlockPlaceEvent;
import org.bukkit.event.entity.EntitySpawnEvent;

/**
 * Runtime hooks for a generated world: lifecycle (tick/save/close), entity
 * spawning, and block break/place event handling. Interface members are
 * implicitly public, so the redundant modifiers are omitted.
 */
public interface EngineWorldManager
{
    /** Releases any resources held for this world. */
    void close();

    void onEntitySpawn(EntitySpawnEvent e);

    void onTick();

    void onSave();

    /** Populates a freshly generated chunk with its initial entities. */
    void spawnInitialEntities(Chunk chunk);

    void onBlockBreak(BlockBreakEvent e);

    void onBlockPlace(BlockPlaceEvent e);
}

View File

@@ -0,0 +1,26 @@
package com.volmit.iris.scaffold.engine;

/**
 * Mixin for components that can record a fatal failure.
 */
public interface Fallible
{
	/**
	 * Records a failure described by {@code error}. A bare RuntimeException is
	 * constructed solely to capture the current stack trace; since Java fills
	 * the trace at construction time, the old throw-and-immediately-catch
	 * dance was equivalent but needlessly indirect.
	 */
	public default void fail(String error)
	{
		fail(error, new RuntimeException());
	}

	/** Records a failure with a generic message. */
	public default void fail(Throwable e)
	{
		fail("Failed to generate", e);
	}

	/**
	 * Records a failure with a message and its cause.
	 *
	 * @param error human-readable description
	 * @param e the underlying cause
	 */
	public void fail(String error, Throwable e);

	/** @return true once any failure has been recorded */
	public boolean hasFailed();
}

View File

@@ -0,0 +1,66 @@
package com.volmit.iris.scaffold.engine;
import com.volmit.iris.manager.IrisDataManager;
import com.volmit.iris.object.IrisBiome;
import com.volmit.iris.object.IrisObjectPlacement;
import com.volmit.iris.object.IrisRegion;
import com.volmit.iris.scaffold.parallax.ParallaxAccess;
/**
 * Read-side access to a generator: region/biome lookup, height queries, and
 * resolution of parallax-placed objects back to their placement rules.
 */
public interface GeneratorAccess
{
public IrisRegion getRegion(int x, int z);
public ParallaxAccess getParallaxAccess();
public IrisDataManager getData();
public IrisBiome getCaveBiome(int x, int z);
public IrisBiome getSurfaceBiome(int x, int z);
public int getHeight(int x, int z);
/**
 * 3D biome lookup: positions at least two blocks below the surface resolve
 * to the cave biome, everything else to the surface biome.
 */
public default IrisBiome getBiome(int x, int y, int z)
{
if(y <= getHeight(x, z) - 2)
{
return getCaveBiome(x, z);
}
return getSurfaceBiome(x, z);
}
/**
 * Resolves the object placed at (x, y, z), if any. The parallax store tags
 * blocks as "objectKey@id"; this parses the tag and searches region then
 * biome placements for the rule that references the object. Returns null
 * when nothing is placed there; the returned PlacedObject may carry a null
 * placement when no matching rule is found.
 */
public default PlacedObject getObjectPlacement(int x, int y, int z)
{
String objectAt = getParallaxAccess().getObject(x, y, z);
if(objectAt == null || objectAt.isEmpty())
{
return null;
}
// Tag format is "loadKey@instanceId".
String[] v = objectAt.split("\\Q@\\E");
String object = v[0];
int id = Integer.parseInt(v[1]);
IrisRegion region = getRegion(x, z);
for(IrisObjectPlacement i : region.getObjects())
{
if(i.getPlace().contains(object))
{
return new PlacedObject(i, getData().getObjectLoader().load(object), id);
}
}
IrisBiome biome = getBiome(x, y, z);
for(IrisObjectPlacement i : biome.getObjects())
{
if(i.getPlace().contains(object))
{
return new PlacedObject(i, getData().getObjectLoader().load(object), id);
}
}
// No rule matched; still return the object itself with a null placement.
return new PlacedObject(null, getData().getObjectLoader().load(object), id);
}
}

View File

@@ -0,0 +1,5 @@
package com.volmit.iris.scaffold.engine;

/**
 * Implemented by components that can reload their backing configuration at
 * runtime without being recreated.
 */
public interface Hotloadable {
    /** Re-reads this component's backing data in place. */
    void hotload();
}

View File

@@ -0,0 +1,43 @@
package com.volmit.iris.scaffold.engine;
import com.volmit.iris.manager.IrisDataManager;
import com.volmit.iris.object.IrisBiome;

/**
 * Top-level handle to a running Iris generator compound: biome/height lookup,
 * thread management and lifecycle control. Interface members are implicitly
 * public, so the redundant modifiers are omitted.
 */
public interface IrisAccess extends Hotloadable {
    /** @return the number of chunks generated so far */
    int getGenerated();

    IrisBiome getBiome(int x, int y, int z);

    IrisBiome getCaveBiome(int x, int y, int z);

    IrisBiome getBiome(int x, int z);

    IrisBiome getCaveBiome(int x, int z);

    /** Resolves the generator responsible for the given Y level. */
    GeneratorAccess getEngineAccess(int y);

    IrisDataManager getData();

    int getHeight(int x, int y, int z);

    IrisBiome getAbsoluteBiome(int x, int y, int z);

    int getThreadCount();

    void changeThreadCount(int m);

    void regenerate(int x, int z);

    void close();

    boolean isClosed();

    EngineTarget getTarget();

    EngineCompound getCompound();

    boolean isFailing();

    boolean isStudio();
}

View File

@@ -0,0 +1,19 @@
package com.volmit.iris.scaffold.engine;
import com.volmit.iris.object.InventorySlotType;
import com.volmit.iris.object.IrisLootReference;
import com.volmit.iris.object.IrisLootTable;
import com.volmit.iris.util.KList;
import com.volmit.iris.util.RNG;
import org.bukkit.block.Block;
import org.bukkit.inventory.Inventory;

/**
 * Supplies loot tables and fills inventories for generated containers.
 * Interface members are implicitly public, so the modifiers are omitted.
 */
public interface LootProvider {
    /** Shuffles the contents of an inventory using the given RNG. */
    void scramble(Inventory inventory, RNG rng);

    /** Adds every table referenced by {@code r} into {@code list}. */
    void injectTables(KList<IrisLootTable> list, IrisLootReference r);

    /** Resolves the loot tables applicable to the given block. */
    KList<IrisLootTable> getLootTables(RNG rng, Block b);

    void addItems(boolean debug, Inventory inv, RNG rng, KList<IrisLootTable> tables, InventorySlotType slot, int x, int y, int z, int mgf);
}

View File

@@ -0,0 +1,18 @@
package com.volmit.iris.scaffold.engine;
import com.volmit.iris.object.IrisObject;
import com.volmit.iris.object.IrisObjectPlacement;
import lombok.AllArgsConstructor;
import lombok.Data;
import javax.annotation.Nullable;
/**
 * Result of resolving "what object occupies this position": the placement
 * rule that produced it (null when no rule matched), the object itself (null
 * when its file could not be loaded), and the per-instance id parsed from the
 * parallax "key@id" tag. Note: @AllArgsConstructor binds the constructor to
 * the declared field order.
 */
@Data
@AllArgsConstructor
public class PlacedObject {
@Nullable
private IrisObjectPlacement placement;
@Nullable
private IrisObject object;
private int id;
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,11 @@
package com.volmit.iris.scaffold.hunk;

/** The six axis-aligned faces of a hunk (a 3D block region). */
public enum HunkFace
{
    TOP,
    BOTTOM,
    EAST,
    WEST,
    NORTH,
    SOUTH
}

View File

@@ -0,0 +1,68 @@
package com.volmit.iris.scaffold.hunk.io;

import com.volmit.iris.scaffold.hunk.Hunk;
import com.volmit.iris.util.Function3;

import java.io.*;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Sparse hunk serializer: stores dimensions, a non-null entry count, then one
 * (x, y, z, value) record per populated cell. Coordinates and counts are
 * biased by MIN_VALUE so the full non-negative range round-trips through the
 * signed short/int stream primitives.
 */
public abstract class BasicHunkIOAdapter<T> implements HunkIOAdapter<T> {
    @Override
    public void write(Hunk<T> t, OutputStream out) throws IOException {
        DataOutputStream dos = new DataOutputStream(out);
        dos.writeShort(t.getWidth() + Short.MIN_VALUE);
        dos.writeShort(t.getHeight() + Short.MIN_VALUE);
        dos.writeShort(t.getDepth() + Short.MIN_VALUE);
        dos.writeInt(t.getNonNullEntries() + Integer.MIN_VALUE);
        AtomicBoolean failure = new AtomicBoolean(false);
        t.iterate(0, (x, y, z, w) -> {
            if(w != null)
            {
                try
                {
                    dos.writeShort(x + Short.MIN_VALUE);
                    dos.writeShort(y + Short.MIN_VALUE);
                    dos.writeShort(z + Short.MIN_VALUE);
                    write(w, dos);
                }
                catch(Throwable e)
                {
                    e.printStackTrace();
                    failure.set(true);
                }
            }
        });
        dos.close();

        // Previously this flag was computed and then ignored, silently
        // persisting a stream whose entry count no longer matches its body.
        // Surface the problem so callers don't keep a corrupt file.
        if(failure.get())
        {
            throw new IOException("Failed to serialize one or more hunk entries (see stack trace above)");
        }
    }

    /**
     * Reads a hunk previously produced by {@link #write}. Fails fast with an
     * IOException if any stored value decodes to null.
     */
    @Override
    public Hunk<T> read(Function3<Integer, Integer, Integer, Hunk<T>> factory, InputStream in) throws IOException {
        DataInputStream din = new DataInputStream(in);
        int w = din.readShort() - Short.MIN_VALUE;
        int h = din.readShort() - Short.MIN_VALUE;
        int d = din.readShort() - Short.MIN_VALUE;
        int e = din.readInt() - Integer.MIN_VALUE;
        Hunk<T> t = factory.apply(w, h, d);

        for(int i = 0; i < e; i++)
        {
            int x = din.readShort() - Short.MIN_VALUE;
            int y = din.readShort() - Short.MIN_VALUE;
            int z = din.readShort() - Short.MIN_VALUE;
            T v = read(din);

            if(v == null)
            {
                throw new IOException("NULL VALUE AT " + x + " " + y + " " + z);
            }

            t.setRaw(x, y, z, v);
        }

        in.close();
        return t;
    }
}

View File

@@ -0,0 +1,21 @@
package com.volmit.iris.scaffold.hunk.io;
import com.volmit.iris.util.B;
import org.bukkit.block.data.BlockData;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

/**
 * Palette-based hunk codec for bukkit BlockData, serialized via its canonical
 * block-state string form and resolved back through the block cache.
 */
public class BlockDataHunkIOAdapter extends PaletteHunkIOAdapter<BlockData> {
    @Override
    public void write(BlockData data, DataOutputStream out) throws IOException {
        out.writeUTF(data.getAsString(true));
    }

    @Override
    public BlockData read(DataInputStream in) throws IOException {
        return B.get(in.readUTF());
    }
}

View File

@@ -0,0 +1,18 @@
package com.volmit.iris.scaffold.hunk.io;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

/** Palette-based hunk codec for boxed booleans. */
public class BooleanHunkIOAdapter extends PaletteHunkIOAdapter<Boolean> {
    @Override
    public void write(Boolean value, DataOutputStream out) throws IOException {
        out.writeBoolean(value);
    }

    @Override
    public Boolean read(DataInputStream in) throws IOException {
        return in.readBoolean();
    }
}

View File

@@ -0,0 +1,44 @@
package com.volmit.iris.scaffold.hunk.io;

import com.volmit.iris.IrisSettings;
import com.volmit.iris.scaffold.data.IOAdapter;
import com.volmit.iris.scaffold.hunk.Hunk;
import com.volmit.iris.util.ByteArrayTag;
import com.volmit.iris.util.CustomOutputStream;
import com.volmit.iris.util.Function3;

import java.io.*;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

/**
 * Hunk (de)serialization on top of a per-element {@link IOAdapter}, with
 * GZIP-compressed file helpers and NBT ByteArrayTag helpers.
 */
public interface HunkIOAdapter<T> extends IOAdapter<T>
{
	/** Serializes the hunk onto the given stream. */
	public void write(Hunk<T> t, OutputStream out) throws IOException;

	/** Deserializes a hunk, building storage via the supplied (w, h, d) factory. */
	public Hunk<T> read(Function3<Integer, Integer, Integer, Hunk<T>> factory, InputStream in) throws IOException;

	/**
	 * Writes the hunk to a GZIP-compressed file, creating parent directories.
	 * try-with-resources fixes the previous leak of the file stream when the
	 * inner write threw before closing.
	 */
	default void write(Hunk<T> t, File f) throws IOException
	{
		f.getParentFile().mkdirs();

		try(FileOutputStream fos = new FileOutputStream(f);
			GZIPOutputStream gzo = new CustomOutputStream(fos, IrisSettings.get().parallaxCompressionLevel))
		{
			write(t, gzo);
		}
	}

	/**
	 * Reads a hunk from a GZIP-compressed file. try-with-resources guarantees
	 * the stream is closed even when decoding fails partway.
	 */
	default Hunk<T> read(Function3<Integer, Integer, Integer, Hunk<T>> factory, File f) throws IOException
	{
		try(GZIPInputStream in = new GZIPInputStream(new FileInputStream(f)))
		{
			return read(factory, in);
		}
	}

	/** Reads a hunk from the raw bytes of an NBT ByteArrayTag. */
	default Hunk<T> read(Function3<Integer, Integer, Integer, Hunk<T>> factory, ByteArrayTag f) throws IOException
	{
		return read(factory, new ByteArrayInputStream(f.getValue()));
	}

	/** Serializes the hunk into an NBT ByteArrayTag with the given name. */
	default ByteArrayTag writeByteArrayTag(Hunk<T> tHunk, String name) throws IOException
	{
		ByteArrayOutputStream boas = new ByteArrayOutputStream();
		write(tHunk, boas);
		return new ByteArrayTag(name, boas.toByteArray());
	}
}

View File

@@ -0,0 +1,92 @@
package com.volmit.iris.scaffold.hunk.io;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Map;

import com.volmit.iris.util.CompoundTag;
import com.volmit.iris.util.KMap;
import com.volmit.iris.util.NBTInputStream;
import com.volmit.iris.util.NBTOutputStream;
import com.volmit.iris.util.Tag;

/**
 * One on-disk region of hunk data at region coordinates (x, z), persisted as
 * an NBT compound in "x.z.dat" inside the given folder.
 */
public class HunkRegion
{
	private final File folder;
	private CompoundTag compound;
	private final int x;
	private final int z;

	public HunkRegion(File folder, int x, int z, CompoundTag compound)
	{
		this.compound = fix(compound);
		this.folder = folder;
		this.x = x;
		this.z = z;
		folder.mkdirs();
	}

	/**
	 * Loads the region from disk if its file exists; otherwise starts empty.
	 * try-with-resources fixes the previous stream leak when readTag threw.
	 */
	public HunkRegion(File folder, int x, int z)
	{
		this(folder, x, z, new CompoundTag(x + "." + z, new KMap<>()));
		File f = getFile();

		if(f.exists())
		{
			try(NBTInputStream in = new NBTInputStream(new FileInputStream(f)))
			{
				compound = fix((CompoundTag) in.readTag());
			}
			catch(Throwable ignored)
			{
				// Corrupt or unreadable region data: keep the empty compound
				// created above (best-effort load, same as before).
			}
		}
	}

	public CompoundTag getCompound() {
		return compound;
	}

	/** Ensures the compound's backing map is a KMap (mutable, concurrent-safe). */
	private CompoundTag fix(CompoundTag readTag)
	{
		Map<String, Tag> v = readTag.getValue();

		if(!(v instanceof KMap))
		{
			return new CompoundTag(readTag.getName(), new KMap<String, Tag>(v));
		}

		return readTag;
	}

	public File getFile()
	{
		return new File(folder, x + "." + z + ".dat");
	}

	/**
	 * Persists the compound to disk. try-with-resources fixes the previous
	 * FileOutputStream leak when writeTag threw before close().
	 */
	public void save() throws IOException
	{
		synchronized(compound)
		{
			try(NBTOutputStream out = new NBTOutputStream(new FileOutputStream(getFile())))
			{
				out.writeTag(compound);
			}
		}
	}

	public int getX() {
		return x;
	}

	public int getZ() {
		return z;
	}
}

View File

@@ -0,0 +1,239 @@
package com.volmit.iris.scaffold.hunk.io;
import java.io.IOException;
import com.volmit.iris.util.*;
import org.bukkit.block.data.BlockData;
import com.volmit.iris.Iris;
import com.volmit.iris.scaffold.hunk.Hunk;
/**
 * A named slice of a region's NBT compound, holding one 16 x height x 16 hunk
 * per chunk (max 32x32 chunks). Chunks are lazily loaded, tracked with a
 * last-use timestamp for idle eviction, and written back only when obtained
 * via {@link #getRW(int, int)}.
 */
public class HunkRegionSlice<T>
{
public static final Function2<Integer, CompoundTag, HunkRegionSlice<BlockData>> BLOCKDATA = (h, c) -> new HunkRegionSlice<>(h, Hunk::newAtomicHunk, new BlockDataHunkIOAdapter(), c, "blockdata");
public static final Function3<Integer, CompoundTag, String, HunkRegionSlice<String>> STRING = (h, c, t) -> new HunkRegionSlice<>(h, Hunk::newAtomicHunk, new StringHunkIOAdapter(), c, t);
public static final Function3<Integer, CompoundTag, String, HunkRegionSlice<Boolean>> BOOLEAN = (h, c, t) -> new HunkRegionSlice<>(h, Hunk::newAtomicHunk, new BooleanHunkIOAdapter(), c, t);
private final Function3<Integer, Integer, Integer, Hunk<T>> factory;
private final HunkIOAdapter<T> adapter;
private final CompoundTag compound;
private final String key;
// Chunk cache, last-use timestamps, and the set of dirty chunks to persist.
private final KMap<ChunkPosition, Hunk<T>> loadedChunks;
private final KMap<ChunkPosition, Long> lastUse;
private final KList<ChunkPosition> save;
private final int height;
public HunkRegionSlice(int height, Function3<Integer, Integer, Integer, Hunk<T>> factory, HunkIOAdapter<T> adapter, CompoundTag compound, String key)
{
this.height = height;
this.loadedChunks = new KMap<>();
this.factory = factory;
this.adapter = adapter;
this.compound = compound;
this.save = new KList<>();
this.key = key;
this.lastUse = new KMap<>();
}
/**
 * Unloads (saving if dirty) every chunk idle for longer than t milliseconds,
 * warning when the bookkeeping maps have drifted out of sync.
 */
public void cleanup(long t)
{
if(loadedChunks.size() != lastUse.size())
{
Iris.warn("Incorrect chunk use counts in " + key);
for(ChunkPosition i : lastUse.k())
{
if(!loadedChunks.containsKey(i))
{
Iris.warn("  Missing LoadChunkKey " + i);
}
}
}
for(ChunkPosition i : lastUse.k())
{
if(M.ms() - lastUse.get(i) > t)
{
unload(i.getX(), i.getZ());
}
}
}
/** Removes every entry belonging to this slice from the region compound. */
public void clear()
{
synchronized(save)
{
for(String i : new KList<>(compound.getValue().keySet()))
{
if(i.startsWith(key + "."))
{
compound.getValue().remove(i);
}
}
}
}
/** Persists every dirty chunk into the compound, then clears the dirty set. */
public void save()
{
for(ChunkPosition i : save)
{
save(i.getX(), i.getZ());
}
save.clear();
}
public boolean contains(int x, int z)
{
return compound.getValue().containsKey(key(x, z));
}
public void delete(int x, int z)
{
compound.getValue().remove(key(x, z));
}
/** Decodes the chunk (x, z) from the compound, or null if absent/invalid. */
public Hunk<T> read(int x, int z) throws IOException
{
Tag t = compound.getValue().get(key(x, z));
if(!(t instanceof ByteArrayTag))
{
Iris.verbose("NOT BYTE ARRAY!");
return null;
}
return adapter.read(factory, (ByteArrayTag) t);
}
/** Encodes the hunk into the compound under this slice's key for (x, z). */
public void write(Hunk<T> hunk, int x, int z) throws IOException
{
compound.getValue().put(key(x, z), hunk.writeByteArrayTag(adapter, key(x, z)));
}
public synchronized void unloadAll()
{
for(ChunkPosition i : loadedChunks.k())
{
unload(i.getX(), i.getZ());
}
save.clear();
loadedChunks.clear();
lastUse.clear();
}
public synchronized void save(Hunk<T> region, int x, int z)
{
try
{
write(region, x, z);
}
catch(IOException e)
{
e.printStackTrace();
}
}
public boolean isLoaded(int x, int z)
{
return loadedChunks.containsKey(new ChunkPosition(x, z));
}
public synchronized void save(int x, int z)
{
if(isLoaded(x, z))
{
save(get(x, z), x, z);
}
}
/** Evicts chunk (x, z) from memory, persisting it first if it is dirty. */
public synchronized void unload(int x, int z)
{
ChunkPosition key = new ChunkPosition(x, z);
if(isLoaded(x, z))
{
if(save.contains(key))
{
save(x, z);
save.remove(key);
}
lastUse.remove(key);
loadedChunks.remove(key);
}
}
/**
 * Loads chunk (x, z) into the cache: from the compound when present,
 * otherwise a fresh empty 16 x height x 16 hunk.
 */
public synchronized Hunk<T> load(int x, int z)
{
if(isLoaded(x, z))
{
return loadedChunks.get(new ChunkPosition(x, z));
}
Hunk<T> v = null;
if(contains(x, z))
{
try
{
v = read(x, z);
}
catch(IOException e)
{
e.printStackTrace();
}
}
if(v == null)
{
v = factory.apply(16, height, 16);
}
loadedChunks.put(new ChunkPosition(x, z), v);
return v;
}
/** Cached access; loads on miss and refreshes the last-use timestamp. */
public Hunk<T> get(int x, int z)
{
ChunkPosition key = new ChunkPosition(x, z);
Hunk<T> c = loadedChunks.get(key);
if(c == null)
{
c = load(x, z);
}
lastUse.put(new ChunkPosition(x, z), M.ms());
return c;
}
/** Read-only view of chunk (x, z); does not mark it dirty. */
public Hunk<T> getR(int x, int z)
{
return get(x, z).readOnly();
}
/** Writable access; marks chunk (x, z) dirty so it is saved on unload. */
public Hunk<T> getRW(int x, int z)
{
save.addIfMissing(new ChunkPosition(x, z));
return get(x, z);
}
// Compound key for a chunk; coordinates are region-relative (0..31).
private String key(int x, int z)
{
if(x < 0 || x >= 32 || z < 0 || z >= 32)
{
throw new IndexOutOfBoundsException("The chunk " + x + " " + z + " is out of bounds max is 31x31");
}
return key + "." + x + "." + z;
}
public int getLoadCount()
{
return loadedChunks.size();
}
}

View File

@@ -0,0 +1,82 @@
package com.volmit.iris.scaffold.hunk.io;

import com.volmit.iris.scaffold.data.DataPalette;
import com.volmit.iris.scaffold.hunk.Hunk;
import com.volmit.iris.util.Function3;

import java.io.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Palette-compressed sparse hunk serializer: distinct values are written once
 * into a palette, then each non-null cell stores (x, y, z, paletteIndex).
 * Coordinates, counts and indices are biased by MIN_VALUE so the full
 * non-negative range round-trips through signed stream primitives.
 */
public abstract class PaletteHunkIOAdapter<T> implements HunkIOAdapter<T> {
    @Override
    public void write(Hunk<T> t, OutputStream out) throws IOException {
        DataOutputStream dos = new DataOutputStream(out);
        dos.writeShort(t.getWidth() + Short.MIN_VALUE);
        dos.writeShort(t.getHeight() + Short.MIN_VALUE);
        dos.writeShort(t.getDepth() + Short.MIN_VALUE);
        AtomicInteger nonNull = new AtomicInteger(0);
        DataPalette<T> palette = new DataPalette<T>();

        // First pass: build the palette and count non-null entries.
        t.iterate(0, (x, y, z, w) -> {
            if(w != null)
            {
                palette.getIndex(w);
                nonNull.getAndAdd(1);
            }
        });

        palette.write(this, dos);
        dos.writeInt(nonNull.get() + Integer.MIN_VALUE);
        AtomicBoolean failure = new AtomicBoolean(false);

        // Second pass: emit (x, y, z, paletteIndex) per populated cell.
        t.iterate(0, (x, y, z, w) -> {
            if(w != null)
            {
                try
                {
                    dos.writeShort(x + Short.MIN_VALUE);
                    dos.writeShort(y + Short.MIN_VALUE);
                    dos.writeShort(z + Short.MIN_VALUE);
                    dos.writeShort(palette.getIndex(w) + Short.MIN_VALUE);
                }
                catch(Throwable e)
                {
                    e.printStackTrace();
                    failure.set(true);
                }
            }
        });

        dos.close();

        // Previously this flag was computed and then ignored, silently
        // persisting a stream whose entry count no longer matches its body.
        if(failure.get())
        {
            throw new IOException("Failed to serialize one or more hunk entries (see stack trace above)");
        }
    }

    /**
     * Reads a palette-compressed hunk produced by {@link #write}. Fails fast
     * with an IOException if any palette index resolves to null.
     */
    @Override
    public Hunk<T> read(Function3<Integer, Integer, Integer, Hunk<T>> factory, InputStream in) throws IOException {
        DataInputStream din = new DataInputStream(in);
        int w = din.readShort() - Short.MIN_VALUE;
        int h = din.readShort() - Short.MIN_VALUE;
        int d = din.readShort() - Short.MIN_VALUE;
        DataPalette<T> palette = DataPalette.getPalette(this, din);
        int e = din.readInt() - Integer.MIN_VALUE;
        Hunk<T> t = factory.apply(w, h, d);

        for(int i = 0; i < e; i++)
        {
            int x = din.readShort() - Short.MIN_VALUE;
            int y = din.readShort() - Short.MIN_VALUE;
            int z = din.readShort() - Short.MIN_VALUE;
            T v = palette.getPalette().get(din.readShort() - Short.MIN_VALUE);

            if(v == null)
            {
                throw new IOException("NULL VALUE AT " + x + " " + y + " " + z);
            }

            t.setRaw(x, y, z, v);
        }

        in.close();
        return t;
    }
}

View File

@@ -0,0 +1,18 @@
package com.volmit.iris.scaffold.hunk.io;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

/** Palette-based hunk codec for strings, using modified-UTF encoding. */
public class StringHunkIOAdapter extends PaletteHunkIOAdapter<String> {
    @Override
    public void write(String value, DataOutputStream out) throws IOException {
        out.writeUTF(value);
    }

    @Override
    public String read(DataInputStream in) throws IOException {
        return in.readUTF();
    }
}

View File

@@ -0,0 +1,44 @@
package com.volmit.iris.scaffold.hunk.storage;

import com.volmit.iris.scaffold.hunk.Hunk;
// Fixed: use the JDK's Arrays instead of org.bouncycastle.util.Arrays — the
// stdlib provides the identical fill operation, and a crypto library is an
// accidental dependency for array utilities.
import java.util.Arrays;

import lombok.Data;
import lombok.EqualsAndHashCode;

/**
 * Dense, non-thread-safe hunk backed by a flat Object array in Z-major
 * (z, y, x) order. Memory is w*h*d references regardless of population.
 */
@Data
@EqualsAndHashCode(callSuper = false)
public class ArrayHunk<T> extends StorageHunk<T> implements Hunk<T>
{
	private final T[] data;

	@SuppressWarnings("unchecked")
	public ArrayHunk(int w, int h, int d)
	{
		super(w, h, d);
		data = (T[]) new Object[w * h * d];
	}

	@Override
	public void setRaw(int x, int y, int z, T t)
	{
		data[index(x, y, z)] = t;
	}

	@Override
	public T getRaw(int x, int y, int z)
	{
		return data[index(x, y, z)];
	}

	// Flat index for (x, y, z): z-major, then y, then x.
	private int index(int x, int y, int z)
	{
		return (z * getWidth() * getHeight()) + (y * getWidth()) + x;
	}

	@Override
	public void fill(T t)
	{
		Arrays.fill(data, t);
	}
}

View File

@@ -0,0 +1,43 @@
package com.volmit.iris.scaffold.hunk.storage;
import com.google.common.util.concurrent.AtomicDoubleArray;
import com.volmit.iris.scaffold.hunk.Hunk;
import lombok.Data;
import lombok.EqualsAndHashCode;

/**
 * Dense, thread-safe hunk of doubles backed by Guava's AtomicDoubleArray.
 * Cells default to 0.0 and individual reads/writes are atomic.
 */
@Data
@EqualsAndHashCode(callSuper = false)
public class AtomicDoubleHunk extends StorageHunk<Double> implements Hunk<Double>
{
    private final AtomicDoubleArray data;

    public AtomicDoubleHunk(int w, int h, int d)
    {
        super(w, h, d);
        data = new AtomicDoubleArray(w * h * d);
    }

    @Override
    public boolean isAtomic()
    {
        return true;
    }

    @Override
    public void setRaw(int x, int y, int z, Double t)
    {
        data.set(index(x, y, z), t);
    }

    @Override
    public Double getRaw(int x, int y, int z)
    {
        return data.get(index(x, y, z));
    }

    // Flat index in z-major order: equivalent to (z*w*h) + (y*w) + x.
    private int index(int x, int y, int z)
    {
        return x + getWidth() * (y + getHeight() * z);
    }
}

View File

@@ -0,0 +1,43 @@
package com.volmit.iris.scaffold.hunk.storage;
import java.util.concurrent.atomic.AtomicReferenceArray;
import com.volmit.iris.scaffold.hunk.Hunk;
import lombok.Data;
import lombok.EqualsAndHashCode;

/**
 * Dense, thread-safe generic hunk backed by an AtomicReferenceArray.
 * Cells default to null and individual reads/writes are atomic.
 */
@Data
@EqualsAndHashCode(callSuper = false)
public class AtomicHunk<T> extends StorageHunk<T> implements Hunk<T>
{
    private final AtomicReferenceArray<T> data;

    public AtomicHunk(int w, int h, int d)
    {
        super(w, h, d);
        data = new AtomicReferenceArray<T>(w * h * d);
    }

    @Override
    public boolean isAtomic()
    {
        return true;
    }

    @Override
    public void setRaw(int x, int y, int z, T t)
    {
        data.set(index(x, y, z), t);
    }

    @Override
    public T getRaw(int x, int y, int z)
    {
        return data.get(index(x, y, z));
    }

    // Flat index in z-major order: equivalent to (z*w*h) + (y*w) + x.
    private int index(int x, int y, int z)
    {
        return x + getWidth() * (y + getHeight() * z);
    }
}

View File

@@ -0,0 +1,43 @@
package com.volmit.iris.scaffold.hunk.storage;
import java.util.concurrent.atomic.AtomicIntegerArray;
import com.volmit.iris.scaffold.hunk.Hunk;
import lombok.Data;
import lombok.EqualsAndHashCode;

/**
 * Dense, thread-safe hunk of ints backed by an AtomicIntegerArray.
 * Cells default to 0 and individual reads/writes are atomic.
 */
@Data
@EqualsAndHashCode(callSuper = false)
public class AtomicIntegerHunk extends StorageHunk<Integer> implements Hunk<Integer>
{
    private final AtomicIntegerArray data;

    public AtomicIntegerHunk(int w, int h, int d)
    {
        super(w, h, d);
        data = new AtomicIntegerArray(w * h * d);
    }

    @Override
    public boolean isAtomic()
    {
        return true;
    }

    @Override
    public void setRaw(int x, int y, int z, Integer t)
    {
        data.set(index(x, y, z), t);
    }

    @Override
    public Integer getRaw(int x, int y, int z)
    {
        return data.get(index(x, y, z));
    }

    // Flat index in z-major order: equivalent to (z*w*h) + (y*w) + x.
    private int index(int x, int y, int z)
    {
        return x + getWidth() * (y + getHeight() * z);
    }
}

View File

@@ -0,0 +1,43 @@
package com.volmit.iris.scaffold.hunk.storage;
import java.util.concurrent.atomic.AtomicLongArray;
import com.volmit.iris.scaffold.hunk.Hunk;
import lombok.Data;
import lombok.EqualsAndHashCode;
@Data
@EqualsAndHashCode(callSuper = false)
public class AtomicLongHunk extends StorageHunk<Long> implements Hunk<Long> {
    // One atomic long cell per position, flattened with x varying fastest.
    private final AtomicLongArray data;

    /**
     * Creates an atomic long hunk of the given dimensions.
     *
     * @param w width (x extent)
     * @param h height (y extent)
     * @param d depth (z extent)
     */
    public AtomicLongHunk(int w, int h, int d) {
        super(w, h, d);
        data = new AtomicLongArray(w * h * d);
    }

    /** @return always true; all access goes through an AtomicLongArray */
    @Override
    public boolean isAtomic() {
        return true;
    }

    /** Stores t at (x, y, z) without bounds checking. */
    @Override
    public void setRaw(int x, int y, int z, Long t) {
        data.set(index(x, y, z), t);
    }

    /** Reads the value at (x, y, z) without bounds checking. */
    @Override
    public Long getRaw(int x, int y, int z) {
        return data.get(index(x, y, z));
    }

    // Flattens 3D coordinates into the linear index (equivalent to z*W*H + y*W + x).
    private int index(int x, int y, int z) {
        return x + getWidth() * (y + getHeight() * z);
    }
}

View File

@@ -0,0 +1,47 @@
package com.volmit.iris.scaffold.hunk.storage;
import com.volmit.iris.util.Consumer4;
import com.volmit.iris.scaffold.hunk.Hunk;
import com.volmit.iris.util.BlockPosition;
import com.volmit.iris.util.KMap;
import lombok.Data;
import lombok.EqualsAndHashCode;
import java.util.Map;
@Data
@EqualsAndHashCode(callSuper = false)
public class MappedHunk<T> extends StorageHunk<T> implements Hunk<T> {
    // Sparse storage: only positions that were explicitly written have an entry.
    private final KMap<BlockPosition, T> data;

    public MappedHunk(int w, int h, int d) {
        super(w, h, d);
        data = new KMap<>();
    }

    /** Stores t at (x, y, z); positions never written read back as null. */
    @Override
    public void setRaw(int x, int y, int z, T t) {
        data.put(new BlockPosition(x, y, z), t);
    }

    /**
     * Visits every explicitly-written entry (NOT every coordinate in the hunk —
     * this is a sparse map) and hands it to the consumer.
     */
    @Override
    public Hunk<T> iterateSync(Consumer4<Integer, Integer, Integer, T> c) {
        for (Map.Entry<BlockPosition, T> entry : data.entrySet()) {
            BlockPosition p = entry.getKey();
            c.accept(p.getX(), p.getY(), p.getZ(), entry.getValue());
        }

        return this;
    }

    /** Reads the value at (x, y, z), or null if nothing was stored there. */
    @Override
    public T getRaw(int x, int y, int z) {
        return data.get(new BlockPosition(x, y, z));
    }
}

View File

@@ -0,0 +1,30 @@
package com.volmit.iris.scaffold.hunk.storage;
import com.volmit.iris.scaffold.hunk.Hunk;
import lombok.Data;
/**
 * Base class for hunks that own their storage: holds the immutable
 * width/height/depth dimensions and validates them on construction.
 *
 * @param <T> element type stored in the hunk
 */
@Data
public abstract class StorageHunk<T> implements Hunk<T>
{
private final int width;
private final int height;
private final int depth;

/**
 * @param width x extent, must be &gt; 0
 * @param height y extent, must be &gt; 0
 * @param depth z extent, must be &gt; 0
 * @throws IllegalArgumentException if any dimension is zero or negative
 */
public StorageHunk(int width, int height, int depth)
{
	if(width <= 0 || height <= 0 || depth <= 0)
	{
		// IllegalArgumentException is the idiomatic type for bad arguments and is
		// itself a RuntimeException, so existing catch blocks keep working.
		throw new IllegalArgumentException("Unsupported size " + width + " " + height + " " + depth);
	}

	this.width = width;
	this.height = height;
	this.depth = depth;
}

/** Stores t at (x, y, z); implementations perform no bounds checking. */
@Override
public abstract void setRaw(int x, int y, int z, T t);

/** Reads the value at (x, y, z); implementations perform no bounds checking. */
@Override
public abstract T getRaw(int x, int y, int z);
}

View File

@@ -0,0 +1,53 @@
package com.volmit.iris.scaffold.hunk.storage;
import com.volmit.iris.scaffold.hunk.Hunk;
import org.bouncycastle.util.Arrays;
import lombok.Data;
import lombok.EqualsAndHashCode;
/**
 * A hunk backed by a plain object array where every access is serialized by
 * synchronizing on the array itself. Slower than the atomic hunks but supports
 * bulk fill under one lock.
 *
 * @param <T> element type stored in the hunk
 */
@Data
@EqualsAndHashCode(callSuper = false)
public class SynchronizedArrayHunk<T> extends StorageHunk<T> implements Hunk<T>
{
// Guarded by synchronized(data) in every accessor.
private final T[] data;

@SuppressWarnings("unchecked")
public SynchronizedArrayHunk(int w, int h, int d)
{
	super(w, h, d);
	data = (T[]) new Object[w * h * d];
}

/** Stores t at (x, y, z) under the array lock; no bounds checking. */
@Override
public void setRaw(int x, int y, int z, T t)
{
	synchronized(data)
	{
		data[index(x, y, z)] = t;
	}
}

/** Reads the value at (x, y, z) under the array lock; no bounds checking. */
@Override
public T getRaw(int x, int y, int z)
{
	synchronized(data)
	{
		return data[index(x, y, z)];
	}
}

// Flattens 3D coordinates into the linear array index (x fastest, then y, then z).
private int index(int x, int y, int z)
{
	return (z * getWidth() * getHeight()) + (y * getWidth()) + x;
}

/** Overwrites every cell with t under a single acquisition of the array lock. */
@Override
public void fill(T t)
{
	synchronized(data)
	{
		// Use the JDK's Arrays.fill (fully qualified to avoid clashing with the
		// file's bouncycastle Arrays import) — no third-party dependency needed
		// for a plain array fill.
		java.util.Arrays.fill(data, t);
	}
}
}

View File

@@ -0,0 +1,45 @@
package com.volmit.iris.scaffold.hunk.view;
import com.volmit.iris.scaffold.hunk.Hunk;
import org.bukkit.block.Biome;
import org.bukkit.generator.ChunkGenerator.BiomeGrid;
/**
 * Adapts a Bukkit {@link BiomeGrid} to the {@link Hunk} API as a fixed
 * 16x256x16 view. All coordinates are chunk-local.
 */
public class BiomeGridHunkView implements Hunk<Biome> {
    private final BiomeGrid chunk;

    public BiomeGridHunkView(BiomeGrid chunk) {
        this.chunk = chunk;
    }

    /** Writes a biome straight through to the underlying grid. */
    @Override
    public void setRaw(int x, int y, int z, Biome t) {
        chunk.setBiome(x, y, z, t);
    }

    /** Reads a biome straight from the underlying grid. */
    @Override
    public Biome getRaw(int x, int y, int z) {
        return chunk.getBiome(x, y, z);
    }

    /** @return chunk width in blocks (always 16) */
    @Override
    public int getWidth() {
        return 16;
    }

    /** @return chunk depth in blocks (always 16) */
    @Override
    public int getDepth() {
        return 16;
    }

    /** @return vertical extent of the biome grid (always 256) */
    @Override
    public int getHeight() {
        return 256;
    }
}

View File

@@ -0,0 +1,52 @@
package com.volmit.iris.scaffold.hunk.view;
import com.volmit.iris.scaffold.hunk.Hunk;
import org.bukkit.Chunk;
import org.bukkit.block.Biome;
import com.volmit.iris.Iris;
/**
 * A {@link Hunk} view over the biomes of a loaded {@link Chunk}. Reads and
 * writes are delegated to {@code Iris.edit} using world coordinates.
 */
public class ChunkBiomeHunkView implements Hunk<Biome> {
    private final Chunk chunk;

    public ChunkBiomeHunkView(Chunk chunk) {
        this.chunk = chunk;
    }

    @Override
    public int getWidth() {
        return 16;
    }

    @Override
    public int getDepth() {
        return 16;
    }

    @Override
    public int getHeight() {
        return chunk.getWorld().getMaxHeight();
    }

    /** Writes a biome at world coordinates; null biomes are silently ignored. */
    @Override
    public void setRaw(int x, int y, int z, Biome t) {
        if (t == null) {
            return;
        }

        int wx = (chunk.getX() << 4) + x;
        int wz = (chunk.getZ() << 4) + z;
        Iris.edit.setBiome(chunk.getWorld(), wx, y, wz, t);
    }

    /** Reads the biome at world coordinates derived from this chunk. */
    @Override
    public Biome getRaw(int x, int y, int z) {
        int wx = (chunk.getX() << 4) + x;
        int wz = (chunk.getZ() << 4) + z;
        return Iris.edit.getBiome(chunk.getWorld(), wx, y, wz);
    }
}

View File

@@ -0,0 +1,62 @@
package com.volmit.iris.scaffold.hunk.view;
import com.volmit.iris.scaffold.hunk.Hunk;
import org.bukkit.block.data.BlockData;
import org.bukkit.generator.ChunkGenerator.ChunkData;
/**
 * A {@link Hunk} view backed directly by a Bukkit {@link ChunkData} under
 * generation. All coordinates are chunk-local.
 */
public class ChunkDataHunkView implements Hunk<BlockData> {
    private final ChunkData chunk;

    public ChunkDataHunkView(ChunkData chunk) {
        this.chunk = chunk;
    }

    @Override
    public int getWidth() {
        return 16;
    }

    @Override
    public int getDepth() {
        return 16;
    }

    @Override
    public int getHeight() {
        return chunk.getMaxHeight();
    }

    /**
     * Bulk-fills a region via {@code ChunkData.setRegion}. Null data is
     * silently ignored; bounds are validated before delegating.
     */
    @Override
    public void set(int x1, int y1, int z1, int x2, int y2, int z2, BlockData t) {
        if (t == null) {
            return;
        }

        enforceBounds(x1, y1, z1, x2 - x1, y2 - y1, z2 - z1);
        chunk.setRegion(x1, y1, z1, x2, y2, z2, t);
    }

    /** Writes a single block; null data is silently ignored. */
    @Override
    public void setRaw(int x, int y, int z, BlockData t) {
        if (t == null) {
            return;
        }

        chunk.setBlock(x, y, z, t);
    }

    /** Reads a single block's data. */
    @Override
    public BlockData getRaw(int x, int y, int z) {
        return chunk.getBlockData(x, y, z);
    }
}

View File

@@ -0,0 +1,52 @@
package com.volmit.iris.scaffold.hunk.view;
import com.volmit.iris.scaffold.hunk.Hunk;
import org.bukkit.Chunk;
import org.bukkit.block.data.BlockData;
import com.volmit.iris.Iris;
/**
 * A {@link Hunk} view over the blocks of a loaded {@link Chunk}. Reads and
 * writes are delegated to {@code Iris.edit} using world coordinates.
 */
public class ChunkHunkView implements Hunk<BlockData> {
    private final Chunk chunk;

    public ChunkHunkView(Chunk chunk) {
        this.chunk = chunk;
    }

    @Override
    public int getWidth() {
        return 16;
    }

    @Override
    public int getDepth() {
        return 16;
    }

    @Override
    public int getHeight() {
        return chunk.getWorld().getMaxHeight();
    }

    /** Writes block data at world coordinates; null data is silently ignored. */
    @Override
    public void setRaw(int x, int y, int z, BlockData t) {
        if (t == null) {
            return;
        }

        int wx = (chunk.getX() << 4) + x;
        int wz = (chunk.getZ() << 4) + z;
        Iris.edit.set(chunk.getWorld(), wx, y, wz, t);
    }

    /** Reads block data at world coordinates derived from this chunk. */
    @Override
    public BlockData getRaw(int x, int y, int z) {
        int wx = (chunk.getX() << 4) + x;
        int wz = (chunk.getZ() << 4) + z;
        return Iris.edit.get(chunk.getWorld(), wx, y, wz);
    }
}

View File

@@ -0,0 +1,55 @@
package com.volmit.iris.scaffold.hunk.view;
import com.volmit.iris.scaffold.hunk.Hunk;
/**
 * A view that shifts every coordinate by a fixed (ox, oy, oz) offset before
 * delegating to the source hunk. Reported dimensions are unchanged.
 *
 * @param <T> element type of the underlying hunk
 */
public class DriftHunkView<T> implements Hunk<T> {
    private final int ox;
    private final int oy;
    private final int oz;
    private final Hunk<T> src;

    public DriftHunkView(Hunk<T> src, int ox, int oy, int oz) {
        this.src = src;
        this.ox = ox;
        this.oy = oy;
        this.oz = oz;
    }

    /** Writes to the source at the offset-translated coordinates. */
    @Override
    public void setRaw(int x, int y, int z, T t) {
        src.setRaw(ox + x, oy + y, oz + z, t);
    }

    /** Reads from the source at the offset-translated coordinates. */
    @Override
    public T getRaw(int x, int y, int z) {
        return src.getRaw(ox + x, oy + y, oz + z);
    }

    @Override
    public int getWidth() {
        return src.getWidth();
    }

    @Override
    public int getHeight() {
        return src.getHeight();
    }

    @Override
    public int getDepth() {
        return src.getDepth();
    }

    @Override
    public Hunk<T> getSource() {
        return src;
    }
}

View File

@@ -0,0 +1,71 @@
package com.volmit.iris.scaffold.hunk.view;
import com.volmit.iris.scaffold.hunk.Hunk;
/**
 * A windowed view into another hunk: reports its own dimensions (w, h, d) and
 * translates all access by an (ox, oy, oz) offset into the source.
 *
 * @param <T> element type of the underlying hunk
 */
public class HunkView<T> implements Hunk<T> {
    private final int ox;
    private final int oy;
    private final int oz;
    private final int w;
    private final int h;
    private final int d;
    private final Hunk<T> src;

    /** Full-size view of the source with no offset. */
    public HunkView(Hunk<T> src) {
        this(src, src.getWidth(), src.getHeight(), src.getDepth());
    }

    /** View with custom dimensions and no offset. */
    public HunkView(Hunk<T> src, int w, int h, int d) {
        this(src, w, h, d, 0, 0, 0);
    }

    /** View with custom dimensions and a fixed offset into the source. */
    public HunkView(Hunk<T> src, int w, int h, int d, int ox, int oy, int oz) {
        this.src = src;
        this.w = w;
        this.h = h;
        this.d = d;
        this.ox = ox;
        this.oy = oy;
        this.oz = oz;
    }

    /** Writes to the source at the offset-translated coordinates. */
    @Override
    public void setRaw(int x, int y, int z, T t) {
        src.setRaw(ox + x, oy + y, oz + z, t);
    }

    /** Reads from the source at the offset-translated coordinates. */
    @Override
    public T getRaw(int x, int y, int z) {
        return src.getRaw(ox + x, oy + y, oz + z);
    }

    @Override
    public int getWidth() {
        return w;
    }

    @Override
    public int getDepth() {
        return d;
    }

    @Override
    public int getHeight() {
        return h;
    }

    @Override
    public Hunk<T> getSource() {
        return src;
    }
}

View File

@@ -0,0 +1,49 @@
package com.volmit.iris.scaffold.hunk.view;
import com.volmit.iris.scaffold.hunk.Hunk;
/**
 * A view that flips the source hunk upside down by mirroring the Y axis.
 *
 * @param <T> element type of the underlying hunk
 */
public class InvertedHunkView<T> implements Hunk<T>
{
private final Hunk<T> src;

public InvertedHunkView(Hunk<T> src)
{
	this.src = src;
}

/** Writes to the source at the mirrored Y coordinate. */
@Override
public void setRaw(int x, int y, int z, T t)
{
	src.setRaw(x, (getHeight() - 1) - y, z, t);
}

/** Reads from the source at the mirrored Y coordinate. */
@Override
public T getRaw(int x, int y, int z)
{
	// BUGFIX: reads must mirror the Y axis exactly as writes do. Previously
	// getRaw passed y through unchanged, so a value written through this view
	// could not be read back at the same coordinates.
	return src.getRaw(x, (getHeight() - 1) - y, z);
}

@Override
public int getWidth()
{
	return src.getWidth();
}

@Override
public int getDepth()
{
	return src.getDepth();
}

@Override
public int getHeight()
{
	return src.getHeight();
}

@Override
public Hunk<T> getSource()
{
	return src;
}
}

View File

@@ -0,0 +1,52 @@
package com.volmit.iris.scaffold.hunk.view;
import com.volmit.iris.util.Consumer4;
import com.volmit.iris.scaffold.hunk.Hunk;
/**
 * Wraps a hunk and notifies a listener on every write before the write is
 * forwarded to the source. Reads pass through untouched.
 *
 * @param <T> element type of the underlying hunk
 */
public class ListeningHunk<T> implements Hunk<T> {
    private final Hunk<T> src;
    private final Consumer4<Integer, Integer, Integer, T> listener;

    public ListeningHunk(Hunk<T> src, Consumer4<Integer, Integer, Integer, T> listener) {
        this.src = src;
        this.listener = listener;
    }

    /** Fires the listener first, then performs the actual write. */
    @Override
    public void setRaw(int x, int y, int z, T t) {
        listener.accept(x, y, z, t);
        src.setRaw(x, y, z, t);
    }

    /** Reads straight from the source; the listener is not notified. */
    @Override
    public T getRaw(int x, int y, int z) {
        return src.getRaw(x, y, z);
    }

    @Override
    public int getWidth() {
        return src.getWidth();
    }

    @Override
    public int getHeight() {
        return src.getHeight();
    }

    @Override
    public int getDepth() {
        return src.getDepth();
    }

    @Override
    public Hunk<T> getSource() {
        return src;
    }
}

View File

@@ -0,0 +1,60 @@
package com.volmit.iris.scaffold.hunk.view;
import com.volmit.iris.scaffold.hunk.Hunk;
/**
 * An immutable facade over another hunk: every mutating operation throws
 * {@link IllegalStateException}; reads delegate to the source.
 *
 * @param <T> element type of the underlying hunk
 */
public class ReadOnlyHunk<T> implements Hunk<T> {
    private final Hunk<T> src;

    public ReadOnlyHunk(Hunk<T> src) {
        this.src = src;
    }

    /** Always throws: writes are forbidden on a read-only hunk. */
    @Override
    public void setRaw(int x, int y, int z, T t) {
        throw new IllegalStateException("This hunk is read only!");
    }

    /** Always throws: region writes are forbidden on a read-only hunk. */
    @Override
    public void set(int x1, int y1, int z1, int x2, int y2, int z2, T t) {
        throw new IllegalStateException("This hunk is read only!");
    }

    /** Always throws: fills are forbidden on a read-only hunk. */
    @Override
    public void fill(T t) {
        throw new IllegalStateException("This hunk is read only!");
    }

    /** Reads straight from the source. */
    @Override
    public T getRaw(int x, int y, int z) {
        return src.getRaw(x, y, z);
    }

    @Override
    public int getWidth() {
        return src.getWidth();
    }

    @Override
    public int getHeight() {
        return src.getHeight();
    }

    @Override
    public int getDepth() {
        return src.getDepth();
    }

    @Override
    public Hunk<T> getSource() {
        return src;
    }
}

View File

@@ -0,0 +1,63 @@
package com.volmit.iris.scaffold.hunk.view;
import com.volmit.iris.scaffold.hunk.Hunk;
/**
 * A view that rotates coordinates around the X axis by a fixed angle before
 * delegating to the source hunk. Rotated coordinates that fall outside the
 * source are silently dropped via setIfExists/getIfExists.
 *
 * @param <T> element type of the underlying hunk
 */
public class RotatedXHunkView<T> implements Hunk<T>
{
private final Hunk<T> src;
private final double sin;
private final double cos;

public RotatedXHunkView(Hunk<T> src, double deg)
{
	this.src = src;
	this.sin = Math.sin(Math.toRadians(deg));
	this.cos = Math.cos(Math.toRadians(deg));
}

/** Writes through the X-axis rotation; out-of-bounds targets are dropped. */
@Override
public void setRaw(int x, int y, int z, T t)
{
	// Pivot at the (integer-division) center of the Y/Z plane.
	int yc = (int) Math.round(cos * (getHeight() / 2) - sin * (getDepth() / 2));
	int zc = (int) Math.round(sin * (getHeight() / 2) + cos * (getDepth() / 2));
	// BUGFIX: was "sin * y-yc", which parses as (sin*y) - yc due to operator
	// precedence; the rotation term must be sin * (y - yc), matching the
	// cos * (y - yc) term above and the Y/Z rotation views.
	src.setIfExists(x,
		(int) Math.round(cos * (y - yc) - sin * (z - zc)) - yc,
		(int) Math.round(sin * (y - yc) + cos * (z - zc)) - zc,
		t);
}

/** Reads through the X-axis rotation; out-of-bounds sources yield the default. */
@Override
public T getRaw(int x, int y, int z)
{
	int yc = (int) Math.round(cos * (getHeight() / 2) - sin * (getDepth() / 2));
	int zc = (int) Math.round(sin * (getHeight() / 2) + cos * (getDepth() / 2));
	// BUGFIX: same precedence fix as setRaw so reads mirror writes.
	return src.getIfExists(x,
		(int) Math.round(cos * (y - yc) - sin * (z - zc)) - yc,
		(int) Math.round(sin * (y - yc) + cos * (z - zc)) - zc
	);
}

@Override
public int getWidth()
{
	return src.getWidth();
}

@Override
public int getDepth()
{
	return src.getDepth();
}

@Override
public int getHeight()
{
	return src.getHeight();
}

@Override
public Hunk<T> getSource()
{
	return src;
}
}

View File

@@ -0,0 +1,64 @@
package com.volmit.iris.scaffold.hunk.view;
import com.volmit.iris.scaffold.hunk.Hunk;
// A view that rotates coordinates around the Y axis by a fixed angle before
// delegating to the source hunk. Out-of-range rotated coordinates are silently
// dropped via setIfExists/getIfExists.
public class RotatedYHunkView<T> implements Hunk<T>
{
private final Hunk<T> src;
// Precomputed sine/cosine of the rotation angle (degrees converted once).
private final double sin;
private final double cos;
public RotatedYHunkView(Hunk<T> src, double deg)
{
this.src = src;
this.sin = Math.sin(Math.toRadians(deg));
this.cos = Math.cos(Math.toRadians(deg));
}
// Writes through the Y-axis rotation. NOTE(review): getWidth() / 2 is integer
// division, so odd-sized hunks pivot half a block off geometric center.
@Override
public void setRaw(int x, int y, int z, T t)
{
int xc = (int) Math.round(cos * (getWidth() / 2) + sin * (getDepth() / 2));
int zc = (int) Math.round(-sin * (getWidth() / 2) + cos * (getDepth() / 2));
src.setIfExists((int)
Math.round(cos * (x - xc) + sin * (z - zc)) - xc,
y,
(int) Math.round(-sin * (x - xc) + cos * (z - zc)) - zc, t);
}
// Reads through the Y-axis rotation using the same transform as setRaw, so
// reads and writes are mutually consistent.
@Override
public T getRaw(int x, int y, int z)
{
int xc = (int) Math.round(cos * (getWidth() / 2) + sin * (getDepth() / 2));
int zc = (int) Math.round(-sin * (getWidth() / 2) + cos * (getDepth() / 2));
return src.getIfExists(
(int) Math.round(cos * (x - xc) + sin * (z - zc)) - xc,
y,
(int) Math.round(-sin * (x - xc) + cos * (z - zc)) - zc
);
}
@Override
public int getWidth()
{
return src.getWidth();
}
@Override
public int getDepth()
{
return src.getDepth();
}
@Override
public int getHeight()
{
return src.getHeight();
}
@Override
public Hunk<T> getSource()
{
return src;
}
}

View File

@@ -0,0 +1,59 @@
package com.volmit.iris.scaffold.hunk.view;
import com.volmit.iris.scaffold.hunk.Hunk;
// A view that rotates coordinates around the Z axis by a fixed angle before
// delegating to the source hunk. Out-of-range rotated coordinates are silently
// dropped via setIfExists/getIfExists.
public class RotatedZHunkView<T> implements Hunk<T>
{
private final Hunk<T> src;
// Precomputed sine/cosine of the rotation angle (degrees converted once).
private final double sin;
private final double cos;
public RotatedZHunkView(Hunk<T> src, double deg)
{
this.src = src;
this.sin = Math.sin(Math.toRadians(deg));
this.cos = Math.cos(Math.toRadians(deg));
}
// Writes through the Z-axis rotation. NOTE(review): getWidth() / 2 is integer
// division, so odd-sized hunks pivot half a block off geometric center.
@Override
public void setRaw(int x, int y, int z, T t)
{
int xc = (int) Math.round(cos * (getWidth() / 2) - sin * (getHeight() / 2));
int yc = (int) Math.round(sin * (getWidth() / 2) + cos * (getHeight() / 2));
src.setIfExists((int) Math.round(cos * (x - xc) - sin * (y - yc)) - xc, (int) Math.round(sin * (x - xc) + cos * (y - yc)) - yc, z, t);
}
// Reads through the Z-axis rotation using the same transform as setRaw, so
// reads and writes are mutually consistent.
@Override
public T getRaw(int x, int y, int z)
{
int xc = (int) Math.round(cos * (getWidth() / 2) - sin * (getHeight() / 2));
int yc = (int) Math.round(sin * (getWidth() / 2) + cos * (getHeight() / 2));
return src.getIfExists((int) Math.round(cos * (x - xc) - sin * (y - yc)) - xc,
(int) Math.round(sin * (x - xc) + cos * (y - yc)) - yc
, z);
}
@Override
public int getWidth()
{
return src.getWidth();
}
@Override
public int getDepth()
{
return src.getDepth();
}
@Override
public int getHeight()
{
return src.getHeight();
}
@Override
public Hunk<T> getSource()
{
return src;
}
}

View File

@@ -0,0 +1,51 @@
package com.volmit.iris.scaffold.hunk.view;
import com.volmit.iris.scaffold.hunk.Hunk;
/**
 * A view that serializes access to the source hunk by synchronizing on it.
 *
 * @param <T> element type of the underlying hunk
 */
public class SynchronizedHunkView<T> implements Hunk<T> {
private final Hunk<T> src;

public SynchronizedHunkView(Hunk<T> src)
{
	this.src = src;
}

/** Writes to the source while holding its monitor. */
@Override
public void setRaw(int x, int y, int z, T t)
{
	synchronized (src)
	{
		src.setRaw(x, y, z, t);
	}
}

/** Reads from the source while holding its monitor. */
@Override
public T getRaw(int x, int y, int z)
{
	// BUGFIX: reads must take the same lock as writes; an unsynchronized read
	// has no happens-before relationship with setRaw and may observe stale or
	// partially published values.
	synchronized (src)
	{
		return src.getRaw(x, y, z);
	}
}

@Override
public int getWidth()
{
	return src.getWidth();
}

@Override
public int getHeight()
{
	return src.getHeight();
}

@Override
public int getDepth()
{
	return src.getDepth();
}

@Override
public Hunk<T> getSource()
{
	return src;
}
}

View File

@@ -0,0 +1,18 @@
package com.volmit.iris.scaffold.lighting;
import com.bergerkiller.bukkit.common.collections.BlockFaceSet;
/**
* Maps {@link BlockFaceSet} values to a 16x16x16 area of blocks
*/
public class BlockFaceSetSection {
    // One mask byte per block of the 16x16x16 section, indexed (y << 8) | (z << 4) | x.
    private final byte[] _maskData = new byte[4096];

    /** Stores the face mask for the block at section-local (x, y, z). */
    public void set(int x, int y, int z, BlockFaceSet faces) {
        _maskData[index(x, y, z)] = (byte) faces.mask();
    }

    /** Retrieves the face set for the block at section-local (x, y, z). */
    public BlockFaceSet get(int x, int y, int z) {
        return BlockFaceSet.byMask((int) _maskData[index(x, y, z)]);
    }

    // Flattens section-local coordinates into the 4096-entry mask array.
    private int index(int x, int y, int z) {
        return (y << 8) | (z << 4) | x;
    }
}

View File

@@ -0,0 +1,153 @@
package com.volmit.iris.scaffold.lighting;
import java.util.Arrays;
import java.util.BitSet;
import java.util.stream.IntStream;
import org.bukkit.World;
import com.bergerkiller.bukkit.common.utils.WorldUtil;
/**
* Loads region information, storing whether or not
* the 32x32 (1024) chunks are available.
*/
public class FlatRegionInfo {
// Shared singleton for the common ry == 0 case, avoids an allocation per region
private static final int[] DEFAULT_RY_0 = new int[] {0}; // Optimization
public final World world;
// Region coordinates (one region = 32x32 chunks)
public final int rx, rz;
// Sorted, distinct vertical region coordinates occupied by this region
public final int[] ry;
// Chunk coordinates of this region's minimum corner (rx << 5, rz << 5)
public final int cx, cz;
// One bit per chunk of the 32x32 grid; bit index is (localZ << 5) | localX
private final BitSet _chunks;
private boolean _loadedFromDisk;
public FlatRegionInfo(World world, int rx, int ry, int rz) {
this(world, rx, (ry==0) ? DEFAULT_RY_0 : new int[] {ry}, rz);
}
public FlatRegionInfo(World world, int rx, int[] ry, int rz) {
this.world = world;
this.rx = rx;
this.rz = rz;
this.ry = ry;
this.cx = (rx << 5);
this.cz = (rz << 5);
this._chunks = new BitSet(1024);
this._loadedFromDisk = false;
}
// Copy constructor used by addRegionYCoordinate. NOTE: the chunk BitSet is
// shared by reference (not copied), so chunk additions on any copy are
// visible to all copies.
private FlatRegionInfo(FlatRegionInfo copy, int[] new_ry) {
this.world = copy.world;
this.rx = copy.rx;
this.ry = new_ry;
this.rz = copy.rz;
this.cx = copy.cx;
this.cz = copy.cz;
this._chunks = copy._chunks;
this._loadedFromDisk = copy._loadedFromDisk;
}
// Marks a chunk (world chunk coordinates) as present; chunks outside this
// region's 32x32 area are silently ignored.
public void addChunk(int cx, int cz) {
cx -= this.cx;
cz -= this.cz;
if (cx < 0 || cx >= 32 || cz < 0 || cz >= 32) {
return;
}
this._chunks.set((cz << 5) | cx);
}
/**
 * Gets the number of chunks known to exist in this region.
 * NOTE(review): this returns the current bitset cardinality, which is 0 when
 * load() has not run yet — the previous javadoc claimed a default of 1024;
 * confirm which behavior is intended.
 *
 * @return chunk count
 */
public int getChunkCount() {
return this._chunks.cardinality();
}
/**
 * Gets the region Y-coordinates as a sorted, immutable distinct stream
 *
 * @return ry int stream
 */
public IntStream getRYStream() {
return IntStream.of(this.ry);
}
/**
 * Loads the region information, now telling what chunks are contained.
 * Idempotent: only reads from disk the first time it is called.
 */
public void load() {
if (!this._loadedFromDisk) {
this._loadedFromDisk = true;
// Union the saved-chunk bits of every vertical region file at this (rx, rz)
for (int ry : this.ry) {
this._chunks.or(WorldUtil.getWorldSavedRegionChunks3(this.world, this.rx, ry, this.rz));
}
}
}
/**
 * Ignores loading region chunk information from chunks that aren't loaded
 */
public void ignoreLoad() {
this._loadedFromDisk = true;
}
/**
 * Gets whether the chunk coordinates specified are within the range
 * of coordinates of this region
 *
 * @param cx - chunk coordinates (world coordinates)
 * @param cz - chunk coordinates (world coordinates)
 * @return True if in range
 */
public boolean isInRange(int cx, int cz) {
cx -= this.cx;
cz -= this.cz;
return cx >= 0 && cz >= 0 && cx < 32 && cz < 32;
}
/**
 * Gets whether a chunk is contained and exists inside this region
 *
 * @param cx - chunk coordinates (world coordinates)
 * @param cz - chunk coordinates (world coordinates)
 * @return True if the chunk is contained
 */
public boolean containsChunk(int cx, int cz) {
cx -= this.cx;
cz -= this.cz;
if (cx < 0 || cx >= 32 || cz < 0 || cz >= 32) {
return false;
}
// Load region file information the first time this is accessed
this.load();
// Check in bitset
return this._chunks.get((cz << 5) | cx);
}
/**
 * Adds another Region Y-coordinate to the list.
 * The set of chunks and other properties are copied.
 * NOTE(review): "copied" is shallow — the returned object shares this
 * object's chunk bitset (see the private copy constructor).
 *
 * @param ry
 * @return new flat region info object with updated ry
 */
public FlatRegionInfo addRegionYCoordinate(int ry) {
int index = Arrays.binarySearch(this.ry, ry);
if (index >= 0) {
return this; // Already contained
}
// Insert at this index (undo insertion point - 1)
index = -index - 1;
// Splice ry into the sorted array at the computed insertion point
int[] new_y_coordinates = new int[this.ry.length + 1];
System.arraycopy(this.ry, 0, new_y_coordinates, 0, index);
new_y_coordinates[index] = ry;
System.arraycopy(this.ry, index, new_y_coordinates, index+1, this.ry.length - index);
return new FlatRegionInfo(this, new_y_coordinates);
}
}

View File

@@ -0,0 +1,188 @@
package com.volmit.iris.scaffold.lighting;
import java.util.Collection;
import java.util.Set;
import java.util.stream.IntStream;
import org.bukkit.Chunk;
import org.bukkit.World;
import com.bergerkiller.bukkit.common.bases.IntVector3;
import com.bergerkiller.bukkit.common.utils.MathUtil;
import com.bergerkiller.bukkit.common.utils.WorldUtil;
import com.bergerkiller.bukkit.common.wrappers.LongHashMap;
/**
* A map of region information
*/
public class FlatRegionInfoMap {
private final World _world;
// Keyed by (rx, rz) region coordinates via LongHashMap's two-int key
private final LongHashMap<FlatRegionInfo> _regions;
private FlatRegionInfoMap(World world, LongHashMap<FlatRegionInfo> regions) {
this._world = world;
this._regions = regions;
}
public World getWorld() {
return this._world;
}
public int getRegionCount() {
return this._regions.size();
}
public Collection<FlatRegionInfo> getRegions() {
return this._regions.getValues();
}
public FlatRegionInfo getRegion(int rx, int rz) {
return this._regions.get(rx, rz);
}
// Looks up the region containing the given chunk (region = chunk >> 5)
public FlatRegionInfo getRegionAtChunk(int cx, int cz) {
return this._regions.get(cx >> 5, cz >> 5);
}
/**
 * Gets whether a chunk exists
 *
 * @param cx
 * @param cz
 * @return True if the chunk exists
 */
public boolean containsChunk(int cx, int cz) {
FlatRegionInfo region = getRegionAtChunk(cx, cz);
return region != null && region.containsChunk(cx, cz);
}
/**
 * Gets whether a chunk and its surrounding chunks all exist.
 * NOTE(review): despite the original javadoc saying "8 neighbours", the loop
 * below checks the full 5x5 chunk area (dx/dz from -2 to 2) — confirm which
 * radius is intended.
 *
 * @param cx
 * @param cz
 * @return True if the chunk and all its neighbours exist
 */
public boolean containsChunkAndNeighbours(int cx, int cz) {
FlatRegionInfo region = getRegionAtChunk(cx, cz);
if (region == null) {
return false;
}
for (int dx = -2; dx <= 2; dx++) {
for (int dz = -2; dz <= 2; dz++) {
int mx = cx + dx;
int mz = cz + dz;
// Fast path: stay within the already-resolved region when possible,
// fall back to a fresh region lookup when the neighbour crosses a border
if (region.isInRange(mx, mz)) {
if (!region.containsChunk(mx, mz)) {
return false;
}
} else {
if (!this.containsChunk(mx, mz)) {
return false;
}
}
}
}
return true;
}
/**
 * Computes all the region Y-coordinates used by a region and its neighbouring 8 regions.
 * The returned array is sorted in increasing order and is distinct (no duplicate values).
 *
 * @param region
 * @return region and neighbouring regions' Y-coordinates
 */
public int[] getRegionYCoordinatesSelfAndNeighbours(FlatRegionInfo region) {
IntStream region_y_coord_stream = region.getRYStream();
for (int drx = -1; drx <= 1; drx++) {
for (int drz = -1; drz <= 1; drz++) {
if (drx == 0 && drz == 0) {
continue;
}
FlatRegionInfo neigh_region = this.getRegion(region.rx + drx, region.rz + drz);
if (neigh_region != null) {
region_y_coord_stream = IntStream.concat(region_y_coord_stream, neigh_region.getRYStream());
}
}
}
//TODO: There's technically a way to significantly speed up sorting two concatenated sorted streams
// Sadly, the java 8 SDK doesn't appear to do any optimizations here :(
return region_y_coord_stream.sorted().distinct().toArray();
}
/**
 * Creates a region information mapping of all existing chunks of a world
 * that are currently loaded. No further loading is required.
 *
 * @param world
 * @return region info map
 */
public static FlatRegionInfoMap createLoaded(World world) {
LongHashMap<FlatRegionInfo> regions = new LongHashMap<FlatRegionInfo>();
for (Chunk chunk : world.getLoadedChunks()) {
int rx = WorldUtil.chunkToRegionIndex(chunk.getX());
int rz = WorldUtil.chunkToRegionIndex(chunk.getZ());
FlatRegionInfo prev_info = regions.get(rx, rz);
FlatRegionInfo new_info = prev_info;
if (new_info == null) {
new_info = new FlatRegionInfo(world, rx, 0, rz);
// Disk is never consulted here: only loaded chunks are represented
new_info.ignoreLoad();
}
// Refresh y-coordinates
// NOTE(review): addRegionYCoordinate returns a copy that shares the chunk
// bitset with the original, so the addChunk below also updates prev_info
for (Integer y_coord : WorldUtil.getLoadedSectionCoordinates(chunk)) {
new_info = new_info.addRegionYCoordinate(WorldUtil.chunkToRegionIndex(y_coord.intValue()));
}
// Add chunk to region bitset
new_info.addChunk(chunk.getX(), chunk.getZ());
// Store if new or changed
if (new_info != prev_info) {
regions.put(rx, rz, new_info);
}
}
return new FlatRegionInfoMap(world, regions);
}
/**
 * Creates a region information mapping of all existing chunks of a world
 *
 * @param world
 * @return region info map
 */
public static FlatRegionInfoMap create(World world) {
LongHashMap<FlatRegionInfo> regions = new LongHashMap<FlatRegionInfo>();
// Obtain the region coordinates in 3d space (vertical too!)
Set<IntVector3> regionCoordinates = WorldUtil.getWorldRegions3(world);
// For each region, create a RegionInfo entry
for (IntVector3 region : regionCoordinates) {
long key = MathUtil.longHashToLong(region.x, region.z);
FlatRegionInfo prev = regions.get(key);
if (prev != null) {
// Same (rx, rz) at another vertical coordinate: merge the y into the entry
regions.put(key, prev.addRegionYCoordinate(region.y));
} else {
regions.put(key, new FlatRegionInfo(world, region.x, region.y, region.z));
}
}
// For all loaded chunks, add those chunks to their region up-front
// They may not yet have been saved to the region file
for (Chunk chunk : world.getLoadedChunks()) {
int rx = WorldUtil.chunkToRegionIndex(chunk.getX());
int rz = WorldUtil.chunkToRegionIndex(chunk.getZ());
FlatRegionInfo info = regions.get(rx, rz);
if (info != null) {
info.addChunk(chunk.getX(), chunk.getZ());
}
}
return new FlatRegionInfoMap(world, regions);
}
}

View File

@@ -0,0 +1,101 @@
package com.volmit.iris.scaffold.lighting;
import java.util.HashMap;
import com.volmit.iris.Iris;
import org.bukkit.World;
import com.bergerkiller.bukkit.common.Task;
import com.bergerkiller.bukkit.common.utils.WorldUtil;
import com.bergerkiller.bukkit.common.wrappers.LongHashSet;
/**
* Handles the automatic cleanup of chunk lighting when chunks are generated
*/
public class LightingAutoClean {
// Per-world sets of queued chunk coordinates awaiting a lighting fix.
// Guarded by the class monitor (all mutation happens in synchronized statics).
private static HashMap<World, LongHashSet> queues = new HashMap<World, LongHashSet>();
// Lazily created repeating task that flushes the queues; null until first schedule()
private static Task autoCleanTask = null;
/**
 * Checks all neighbouring chunks to see if they are fully surrounded by chunks (now), and
 * schedules lighting repairs. This function only does anything when automatic cleaning is activated.
 *
 * @param world the chunk is in
 * @param chunkX coordinate
 * @param chunkZ coordinate
 */
public static void handleChunkGenerated(World world, int chunkX, int chunkZ) {
for (int dx = -1; dx <= 1; dx++) {
for (int dz = -1; dz <= 1; dz++) {
if (dx == 0 && dz == 0) {
continue;
}
if (!WorldUtil.isChunkAvailable(world, chunkX + dx, chunkZ + dz)) {
continue;
}
// Check that all chunks surrounding this chunk are all available
boolean allNeighboursLoaded = true;
for (int dx2 = -1; dx2 <= 1 && allNeighboursLoaded; dx2++) {
for (int dz2 = -1; dz2 <= 1 && allNeighboursLoaded; dz2++) {
if (dx2 == 0 && dz2 == 0) {
continue; // ignore self
}
if (dx2 == -dx && dz2 == -dz) {
continue; // ignore the original generated chunk
}
allNeighboursLoaded &= WorldUtil.isChunkAvailable(world, chunkX + dx + dx2, chunkZ + dz + dz2);
}
}
// If all neighbours are available, schedule it for fixing
if (allNeighboursLoaded) {
schedule(world, chunkX + dx, chunkZ + dz);
}
}
}
}
// Drains every per-world queue into the LightingService. Synchronized so it
// cannot interleave with schedule() adding to the same queue.
private static synchronized void processAutoClean() {
while (queues.size() > 0) {
World world = queues.keySet().iterator().next();
LongHashSet chunks = queues.remove(world);
LightingService.schedule(world, chunks);
}
}
// Convenience overload using the default 80-tick delay.
public static void schedule(World world, int chunkX, int chunkZ) {
schedule(world, chunkX, chunkZ, 80);
}
// Queues a 3x3 chunk area around (chunkX, chunkZ) for lighting repair and
// (re)starts the flush task after tickDelay ticks.
public static synchronized void schedule(World world, int chunkX, int chunkZ, int tickDelay) {
LongHashSet queue = queues.get(world);
if (queue == null) {
queue = new LongHashSet(9);
queues.put(world, queue);
}
// Queue this chunk, and all its neighbours
for (int dx = -1; dx <= 1; dx++) {
for (int dz = -1; dz <= 1; dz++) {
queue.add(chunkX + dx, chunkZ + dz);
}
}
// Initialize clean task if it hasn't been yet
if (autoCleanTask == null) {
autoCleanTask = new Task(Iris.instance) {
@Override
public void run() {
processAutoClean();
}
};
}
// Postpone the tick task while there are less than 100 chunks in the queue
// (large queues are flushed at the already-scheduled time instead of being
// pushed back further by every new chunk)
if (queue.size() < 100) {
autoCleanTask.stop().start(tickDelay);
}
}
}

View File

@@ -0,0 +1,198 @@
package com.volmit.iris.scaffold.lighting;
import com.bergerkiller.bukkit.common.collections.BlockFaceSet;
/**
* Represents a category of light being processed. All conditional logic
* for this is handled by this class.
*/
public enum LightingCategory {
SKY() {
@Override
public String getName() {
return "Sky";
}
// Seeds sky light per column: full light (15) above the heightmap, then
// attenuated downward by each block's opacity until it reaches 0.
@Override
public void initialize(LightingChunk chunk) {
if (!chunk.hasSkyLight) {
return;
}
// Find out the highest possible Y-position
int x, y, z, light, height, opacity;
BlockFaceSet opaqueFaces;
LightingCube cube = null;
// Apply initial sky lighting from top to bottom
for (z = chunk.start.z; z <= chunk.end.z; z++) {
for (x = chunk.start.x; x <= chunk.end.x; x++) {
light = 15;
height = chunk.getHeight(x, z) + 1;
for (y = chunk.maxY; y >= chunk.minY; y--) {
if ((cube = chunk.nextCube(cube, y)) == null) {
// Skip the remaining 15: they are all inaccessible as well
y -= 15;
// If not full skylight, reset light level, assuming it dimmed out
if (light != 15) {
light = 0;
}
continue;
}
// Set quickly when light level is at 0, or we are above height level
if (y > height || light <= 0) {
cube.skyLight.set(x, y & 0xf, z, light);
continue;
}
// If opaque at the top, set light to 0 instantly
opaqueFaces = cube.getOpaqueFaces(x, y & 0xf, z);
if (opaqueFaces.up()) {
light = 0;
} else {
// Apply the opacity to the light level
// (non-full light always loses at least 1 per block descended)
opacity = cube.opacity.get(x, y & 0xf, z);
if (light < 15 && opacity == 0) {
opacity = 1;
}
if ((light -= opacity) <= 0) {
light = 0;
}
}
// Apply sky light to block
cube.skyLight.set(x, y & 0xf, z, light);
// If opaque at the bottom, reset light to 0 for next block
// The block itself is lit
if (opaqueFaces.down()) {
light = 0;
}
}
}
}
}
// Sky light spreading starts at the heightmap: everything above is already 15.
@Override
public int getStartY(LightingChunk chunk, int x, int z) {
return chunk.getHeight(x, z);
}
@Override
public void setDirty(LightingChunk chunk, boolean dirty) {
chunk.isSkyLightDirty = dirty;
}
@Override
public int get(LightingCube section, int x, int y, int z) {
return section.skyLight.get(x, y, z);
}
@Override
public void set(LightingCube section, int x, int y, int z, int level) {
section.skyLight.set(x, y, z, level);
}
},
BLOCK() {
@Override
public String getName() {
return "Block";
}
@Override
public void initialize(LightingChunk chunk) {
// Some blocks that emit light, also have opaque faces
// They still emit light through the opaque faces to other blocks
// To fix this, run an initial processing step that spreads all
// emitted light to the neighbouring blocks' block light, ignoring own opaque faces
int x, y, z;
for (LightingCube cube : chunk.getSections()) {
for (y = 0; y < 16; y++) {
for (z = chunk.start.z; z <= chunk.end.z; z++) {
for (x = chunk.start.x; x <= chunk.end.x; x++) {
cube.spreadBlockLight(x, y, z);
}
}
}
}
}
// Block light has no heightmap shortcut: start spreading from the top.
@Override
public int getStartY(LightingChunk chunk, int x, int z) {
return chunk.maxY;
}
@Override
public void setDirty(LightingChunk chunk, boolean dirty) {
chunk.isBlockLightDirty = dirty;
}
@Override
public int get(LightingCube section, int x, int y, int z) {
return section.blockLight.get(x, y, z);
}
@Override
public void set(LightingCube section, int x, int y, int z, int level) {
section.blockLight.set(x, y, z, level);
}
};
/**
 * Gets the name of this type of light, used when logging
 *
 * @return category name
 */
public abstract String getName();
/**
 * Initializes the lighting in the chunk for this category
 *
 * @param chunk
 */
public abstract void initialize(LightingChunk chunk);
/**
 * Gets the y-coordinate to start processing from when spreading light around
 *
 * @param chunk
 * @param x
 * @param z
 * @return start y-coordinate
 */
public abstract int getStartY(LightingChunk chunk, int x, int z);
/**
 * Sets whether this category of light is dirty, indicating this category of light is all good,
 * or that more work is needed spreading light around.
 *
 * @param chunk
 * @param dirty
 */
public abstract void setDirty(LightingChunk chunk, boolean dirty);
/**
 * Gets the light level in a section at the coordinates specified.
 * No bounds checking is performed.
 *
 * @param section
 * @param x
 * @param y
 * @param z
 * @return light level
 */
public abstract int get(LightingCube section, int x, int y, int z);
/**
 * Sets the light level in a section at the coordinates specified.
 * No bounds checking is performed.
 *
 * @param section
 * @param x
 * @param y
 * @param z
 * @param level
 */
public abstract void set(LightingCube section, int x, int y, int z, int level);
}

View File

@@ -0,0 +1,457 @@
package com.volmit.iris.scaffold.lighting;
import com.bergerkiller.bukkit.common.bases.IntVector2;
import com.bergerkiller.bukkit.common.chunk.ForcedChunk;
import com.bergerkiller.bukkit.common.collections.BlockFaceSet;
import com.bergerkiller.bukkit.common.utils.ChunkUtil;
import com.bergerkiller.bukkit.common.utils.WorldUtil;
import com.bergerkiller.bukkit.common.wrappers.ChunkSection;
import com.bergerkiller.bukkit.common.wrappers.HeightMap;
import com.bergerkiller.bukkit.common.wrappers.IntHashMap;
import com.bergerkiller.generated.net.minecraft.server.ChunkHandle;
import com.volmit.iris.Iris;
import org.bukkit.Chunk;
import org.bukkit.World;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
/**
* Represents a single chunk full with lighting-relevant information.
* Initialization and use of this chunk in the process is as follows:<br>
* - New lighting chunks are created for all chunks to be processed<br>
* - notifyAccessible is called for all chunks, passing in all chunks<br>
* - fill/fillSection is called for all chunks, after which initLight is called<br>
* - spread is called on all chunks until all spreading is finished<br>
* - data from all LightingChunks/Sections is gathered and saved to chunks or region files<br>
* - possible chunk resends are performed
*/
public class LightingChunk {
    public static final int OB = ~0xf; // Outside blocks
    public static final int OC = ~0xff; // Outside chunk

    // Vertical 16x16x16 cubes of this chunk, keyed by cube y-coordinate (cy); filled in fill()
    public IntHashMap<LightingCube> sections;
    // The four horizontally adjacent lighting chunks, registered via notifyAccessible()
    public final LightingChunkNeighboring neighbors = new LightingChunkNeighboring();
    // Per-column top light-blocking height, indexed by getHeightKey(x, z)
    public final int[] heightmap = new int[256];
    public final World world;
    public final int chunkX, chunkZ;
    public boolean hasSkyLight = true;
    public boolean isSkyLightDirty = true;
    public boolean isBlockLightDirty = true;
    public boolean isFilled = false;
    public boolean isApplied = false;
    // Inclusive x/z block range to process; widened to the chunk edge (0/15) only
    // on sides where a neighboring chunk is available (see notifyAccessible)
    public IntVector2 start = new IntVector2(1, 1);
    public IntVector2 end = new IntVector2(14, 14);
    public int minY = 0;
    public int maxY = 0;
    public final ForcedChunk forcedChunk = ForcedChunk.none();
    public volatile boolean loadingStarted = false;

    /**
     * Creates a new, unfilled lighting chunk for the given chunk coordinates.
     *
     * @param world the world the chunk is in
     * @param x chunk x-coordinate
     * @param z chunk z-coordinate
     */
    public LightingChunk(World world, int x, int z) {
        this.world = world;
        this.chunkX = x;
        this.chunkZ = z;
    }

    /**
     * Gets all the sections inside this chunk.
     * Elements are never null.
     *
     * @return sections
     */
    public Collection<LightingCube> getSections() {
        return this.sections.values();
    }

    /**
     * Efficiently iterates the vertical cubes of a chunk, only
     * querying the lookup table every 16 blocks
     *
     * @param previous The previous cube we iterated
     * @param y Block y-coordinate
     * @return the cube at the Block y-coordinate, or null if this cube does not exist
     */
    public LightingCube nextCube(LightingCube previous, int y) {
        int cy = y >> 4;
        if (previous != null && previous.cy == cy) {
            return previous;
        } else {
            return this.sections.get(cy);
        }
    }

    /**
     * Notifies that a new chunk is accessible.
     *
     * @param chunk that is accessible
     */
    public void notifyAccessible(LightingChunk chunk) {
        final int dx = chunk.chunkX - this.chunkX;
        final int dz = chunk.chunkZ - this.chunkZ;
        // Only check neighbours, ignoring the corners and self
        if (Math.abs(dx) > 1 || Math.abs(dz) > 1 || (dx != 0) == (dz != 0)) {
            return;
        }
        // Results in -16, 16 or 0 for the x/z coordinates
        neighbors.set(dx, dz, chunk);
        // Update start/end coordinates
        if (dx == 1) {
            end = new IntVector2(15, end.z);
        } else if (dx == -1) {
            start = new IntVector2(0, start.z);
        } else if (dz == 1) {
            end = new IntVector2(end.x, 15);
        } else if (dz == -1) {
            start = new IntVector2(start.x, 0);
        }
    }

    /**
     * Initializes the neighboring cubes of all the cubes of this
     * lighting chunk. This initializes the neighbors both within
     * the same chunk (vertical) and for neighboring chunks (horizontal).
     */
    public void detectCubeNeighbors() {
        for (LightingCube cube : this.sections.values()) {
            // Neighbors above and below
            cube.neighbors.set(0, 1, 0, this.sections.get(cube.cy + 1));
            cube.neighbors.set(0, -1, 0, this.sections.get(cube.cy - 1));
            // Neighbors in neighboring chunks
            cube.neighbors.set(-1, 0, 0, this.neighbors.getCube(-1, 0, cube.cy));
            cube.neighbors.set( 1, 0, 0, this.neighbors.getCube( 1, 0, cube.cy));
            cube.neighbors.set( 0, 0, -1, this.neighbors.getCube( 0, -1, cube.cy));
            cube.neighbors.set( 0, 0, 1, this.neighbors.getCube( 0, 1, cube.cy));
        }
    }

    /**
     * Fills this lighting chunk with block data read from the loaded chunk,
     * creating the {@link LightingCube} sections and the sky light heightmap.
     *
     * @param chunk loaded chunk to read block data from
     * @param region_y_coordinates region y-coordinates whose chunk sections should be loaded
     */
    public void fill(Chunk chunk, int[] region_y_coordinates) {
        // Fill using chunk sections
        hasSkyLight = WorldUtil.getDimensionType(chunk.getWorld()).hasSkyLight();
        List<LightingCube> lightingChunkSectionList;
        {
            // First create a list of ChunkSection objects storing the data
            // We must do this sequentially, because asynchronous access is not permitted
            List<ChunkSection> chunkSectionList = IntStream.of(region_y_coordinates)
                    .map(WorldUtil::regionToChunkIndex)
                    .flatMap(base_cy -> IntStream.range(base_cy, base_cy + WorldUtil.CHUNKS_PER_REGION_AXIS))
                    .mapToObj(cy -> WorldUtil.getSection(chunk, cy))
                    .filter(section -> section != null)
                    .collect(Collectors.toList());
            // Then process all the gathered chunk sections into a LightingChunkSection in parallel
            lightingChunkSectionList = chunkSectionList.stream()
                    .parallel()
                    .map(section -> new LightingCube(this, section, hasSkyLight))
                    .collect(Collectors.toList());
        }
        // Add to mapping
        this.sections = new IntHashMap<LightingCube>();
        for (LightingCube lightingChunkSection : lightingChunkSectionList) {
            this.sections.put(lightingChunkSection.cy, lightingChunkSection);
        }
        // Compute min/max y using sections that are available
        // Make use of the fact that they are pre-sorted by y-coordinate
        this.minY = 0;
        this.maxY = 0;
        if (!lightingChunkSectionList.isEmpty()) {
            this.minY = lightingChunkSectionList.get(0).cy << 4;
            this.maxY = (lightingChunkSectionList.get(lightingChunkSectionList.size()-1).cy << 4) + 15;
        }
        // Initialize and then load sky light heightmap information
        if (this.hasSkyLight) {
            HeightMap heightmap = ChunkUtil.getLightHeightMap(chunk, true);
            for (int x = 0; x < 16; ++x) {
                for (int z = 0; z < 16; ++z) {
                    this.heightmap[this.getHeightKey(x, z)] = Math.max(this.minY, heightmap.getHeight(x, z));
                }
            }
        } else {
            // No sky light: treat every column as fully covered
            Arrays.fill(this.heightmap, this.maxY);
        }
        this.isFilled = true;
    }

    // Packs a column x/z pair (both 0-15) into a heightmap array index
    private int getHeightKey(int x, int z) {
        return x | (z << 4);
    }

    /**
     * Gets the height level (the top block that does not block light)
     *
     * @param x - coordinate
     * @param z - coordinate
     * @return height
     */
    public int getHeight(int x, int z) {
        return this.heightmap[getHeightKey(x, z)];
    }

    /**
     * Reads the light levels of the six blocks adjacent to (x, y, z) and returns
     * the highest level that can actually propagate into this block, honoring the
     * opaque faces of both this block and the neighbor. Uses the faster same-cube
     * lookups when the coordinates are safely away from the cube borders.
     */
    private final int getMaxLightLevel(LightingCube section, LightingCategory category, int lightLevel, int x, int y, int z) {
        BlockFaceSet selfOpaqueFaces = section.getOpaqueFaces(x, y, z);
        if (x >= 1 && z >= 1 && x <= 14 && z <= 14) {
            // All within this chunk - simplified calculation
            if (!selfOpaqueFaces.west()) {
                lightLevel = section.getLightIfHigher(category, lightLevel,
                        BlockFaceSet.MASK_EAST, x - 1, y, z);
            }
            if (!selfOpaqueFaces.east()) {
                lightLevel = section.getLightIfHigher(category, lightLevel,
                        BlockFaceSet.MASK_WEST, x + 1, y, z);
            }
            if (!selfOpaqueFaces.north()) {
                lightLevel = section.getLightIfHigher(category, lightLevel,
                        BlockFaceSet.MASK_SOUTH, x, y, z - 1);
            }
            if (!selfOpaqueFaces.south()) {
                lightLevel = section.getLightIfHigher(category, lightLevel,
                        BlockFaceSet.MASK_NORTH, x, y, z + 1);
            }
            // If dy is also within this section, we can simplify it
            if (y >= 1 && y <= 14) {
                if (!selfOpaqueFaces.down()) {
                    lightLevel = section.getLightIfHigher(category, lightLevel,
                            BlockFaceSet.MASK_UP, x, y - 1, z);
                }
                if (!selfOpaqueFaces.up()) {
                    lightLevel = section.getLightIfHigher(category, lightLevel,
                            BlockFaceSet.MASK_DOWN, x, y + 1, z);
                }
                return lightLevel;
            }
        } else {
            // Crossing chunk boundaries - requires neighbor checks
            if (!selfOpaqueFaces.west()) {
                lightLevel = section.getLightIfHigherNeighbor(category, lightLevel,
                        BlockFaceSet.MASK_EAST, x - 1, y, z);
            }
            if (!selfOpaqueFaces.east()) {
                lightLevel = section.getLightIfHigherNeighbor(category, lightLevel,
                        BlockFaceSet.MASK_WEST, x + 1, y, z);
            }
            if (!selfOpaqueFaces.north()) {
                lightLevel = section.getLightIfHigherNeighbor(category, lightLevel,
                        BlockFaceSet.MASK_SOUTH, x, y, z - 1);
            }
            if (!selfOpaqueFaces.south()) {
                lightLevel = section.getLightIfHigherNeighbor(category, lightLevel,
                        BlockFaceSet.MASK_NORTH, x, y, z + 1);
            }
        }
        // Above and below, may need to check cube boundaries
        // Below
        if (!selfOpaqueFaces.down()) {
            lightLevel = section.getLightIfHigherNeighbor(category, lightLevel,
                    BlockFaceSet.MASK_UP, x, y - 1, z);
        }
        // Above
        if (!selfOpaqueFaces.up()) {
            lightLevel = section.getLightIfHigherNeighbor(category, lightLevel,
                    BlockFaceSet.MASK_DOWN, x, y + 1, z);
        }
        return lightLevel;
    }

    /**
     * Gets whether this lighting chunk has faults that need to be fixed
     *
     * @return True if there are faults, False if not
     */
    public boolean hasFaults() {
        return isSkyLightDirty || isBlockLightDirty;
    }

    /**
     * Forces a block-light spreading pass, regardless of the dirty flags.
     */
    public void forceSpreadBlocks()
    {
        spread(LightingCategory.BLOCK);
    }

    /**
     * Spreads the light from sources to 'zero' light level blocks
     *
     * @return Number of processing loops executed. 0 indicates no faults were found.
     */
    public int spread() {
        if (hasFaults()) {
            int count = 0;
            if (isSkyLightDirty) {
                count += spread(LightingCategory.SKY);
            }
            if (isBlockLightDirty) {
                count += spread(LightingCategory.BLOCK);
            }
            return count;
        } else {
            return 0;
        }
    }

    // Repeatedly sweeps the chunk, raising light levels until a sweep makes no
    // changes (fixed point) or the 100-iteration safety limit is reached.
    private int spread(LightingCategory category) {
        if ((category == LightingCategory.SKY) && !hasSkyLight) {
            this.isSkyLightDirty = false;
            return 0;
        }
        int x, y, z, light, factor, startY, newlight;
        int loops = 0;
        int lasterrx = 0, lasterry = 0, lasterrz = 0;
        boolean haserror;
        // Track whether we changed a block on a chunk border, so the matching
        // neighbor chunk can be flagged dirty afterwards
        boolean err_neigh_nx = false;
        boolean err_neigh_px = false;
        boolean err_neigh_nz = false;
        boolean err_neigh_pz = false;
        LightingCube cube = null;
        // Keep spreading the light in this chunk until it is done
        boolean mode = false;
        IntVector2 loop_start, loop_end;
        int loop_increment;
        while (true) {
            haserror = false;
            // Alternate iterating positive and negative
            // This allows proper optimized spreading in all directions
            mode = !mode;
            if (mode) {
                loop_start = start;
                loop_end = end.add(1, 1);
                loop_increment = 1;
            } else {
                loop_start = end;
                loop_end = start.subtract(1, 1);
                loop_increment = -1;
            }
            // Go through all blocks, using the heightmap for sky light to skip a few
            for (x = loop_start.x; x != loop_end.x; x += loop_increment) {
                for (z = loop_start.z; z != loop_end.z; z += loop_increment) {
                    startY = category.getStartY(this, x, z);
                    for (y = startY; y >= this.minY; y--) {
                        if ((cube = nextCube(cube, y)) == null) {
                            // Skip this section entirely by setting y to the bottom of the section
                            y &= ~0xf;
                            continue;
                        }
                        // Take block opacity into account, skip if fully solid
                        factor = Math.max(1, cube.opacity.get(x, y & 0xf, z));
                        if (factor == 15) {
                            continue;
                        }
                        // Read the old light level and try to find a light level around it that exceeds
                        light = category.get(cube, x, y & 0xf, z);
                        newlight = light + factor;
                        if (newlight < 15) {
                            newlight = getMaxLightLevel(cube, category, newlight, x, y & 0xf, z);
                        }
                        newlight -= factor;
                        // pick the highest value
                        if (newlight > light) {
                            category.set(cube, x, y & 0xf, z, newlight);
                            lasterrx = x;
                            lasterry = y;
                            lasterrz = z;
                            err_neigh_nx |= (x == 0);
                            err_neigh_nz |= (z == 0);
                            err_neigh_px |= (x == 15);
                            err_neigh_pz |= (z == 15);
                            haserror = true;
                        }
                    }
                }
            }
            if (!haserror) {
                break;
            } else if (++loops > 100) {
                // Give up: convert the last changed position to world coordinates and log it
                lasterrx += this.chunkX << 4;
                lasterrz += this.chunkZ << 4;
                StringBuilder msg = new StringBuilder();
                msg.append("Failed to fix all " + category.getName() + " lighting at [");
                msg.append(lasterrx).append('/').append(lasterry);
                msg.append('/').append(lasterrz).append(']');
                Iris.warn(msg.toString());
                break;
            }
        }
        // Set self as no longer dirty, all light is good
        category.setDirty(this, false);
        // When we change blocks at our chunk borders, neighbours have to do another spread cycle
        if (err_neigh_nx) setNeighbourDirty(-1, 0, category);
        if (err_neigh_px) setNeighbourDirty(1, 0, category);
        if (err_neigh_nz) setNeighbourDirty(0, -1, category);
        if (err_neigh_pz) setNeighbourDirty(0, 1, category);
        return loops;
    }

    // Marks the neighboring chunk in direction (dx, dz) dirty for this category, if present
    private void setNeighbourDirty(int dx, int dz, LightingCategory category) {
        LightingChunk n = neighbors.get(dx, dz);
        if (n != null) {
            category.setDirty(n, true);
        }
    }

    /**
     * Applies the lighting information to a chunk. The returned completable future is called
     * on the main thread when saving finishes.
     *
     * @param chunk to save to
     * @return completable future completed when the chunk is saved,
     *         with value True passed when saving occurred, False otherwise
     */
    @SuppressWarnings("unchecked")
    public CompletableFuture<Boolean> saveToChunk(Chunk chunk) {
        // Create futures for saving to all the chunk sections in parallel
        List<LightingCube> sectionsToSave = this.sections.values();
        final CompletableFuture<Boolean>[] futures = new CompletableFuture[sectionsToSave.size()];
        {
            int futureIndex = 0;
            for (LightingCube sectionToSave : sectionsToSave) {
                ChunkSection sectionToWriteTo = WorldUtil.getSection(chunk, sectionToSave.cy);
                if (sectionToWriteTo == null) {
                    // Section disappeared from the chunk; nothing to write
                    futures[futureIndex++] = CompletableFuture.completedFuture(Boolean.FALSE);
                } else {
                    futures[futureIndex++] = sectionToSave.saveToChunk(sectionToWriteTo);
                }
            }
        }
        // When all of them complete, combine them into a single future
        // If any changes were made to the chunk, return True as completed value
        return CompletableFuture.allOf(futures).thenApply((o) -> {
            isApplied = true;
            try {
                for (CompletableFuture<Boolean> future : futures) {
                    if (future.get().booleanValue()) {
                        // At least one section changed: mark the chunk dirty so it is saved to disk
                        ChunkHandle.fromBukkit(chunk).markDirty();
                        return Boolean.TRUE;
                    }
                }
            } catch (Throwable t) {
                t.printStackTrace();
            }
            // None of the futures completed true
            return Boolean.FALSE;
        });
    }
}

View File

@@ -0,0 +1,73 @@
package com.volmit.iris.scaffold.lighting;
/**
* Keeps track of the 4 x/z neighbors of chunks
*/
public class LightingChunkNeighboring {
    public final LightingChunk[] values = new LightingChunk[4];

    /**
     * Generates a key ranging 0 - 3 for fixed x/z combinations<br>
     * - Bit 1 is set to contain which of the two is not 1<br>
     * - Bit 2 is set to contain whether x/z is 1 or -1<br><br>
     * <p/>
     * This system requires that the x/z pairs are one the following:<br>
     * (0, 1) | (0, -1) | (1, 0) | (-1, 0)
     *
     * @param x value
     * @param z value
     * @return key
     */
    private static final int getIndexByChunk(int x, int z) {
        return (x & 1) | ((x + z + 1) & 0x2);
    }

    /**
     * Gets whether all 4 chunk neighbors are accessible
     *
     * @return True if all neighbors are accessible
     */
    public boolean hasAll() {
        for (LightingChunk neighbor : values) {
            if (neighbor == null) {
                return false;
            }
        }
        return true;
    }

    /**
     * Gets the neighbor representing the given relative chunk
     *
     * @param deltaChunkX relative chunk x (-1, 0 or 1)
     * @param deltaChunkZ relative chunk z (-1, 0 or 1)
     * @return neighbor
     */
    public LightingChunk get(int deltaChunkX, int deltaChunkZ) {
        return values[getIndexByChunk(deltaChunkX, deltaChunkZ)];
    }

    /**
     * Gets a relative neighboring chunk, and then a vertical cube in that chunk, if possible.
     *
     * @param deltaChunkX relative chunk x (-1, 0 or 1)
     * @param deltaChunkZ relative chunk z (-1, 0 or 1)
     * @param cy Cube absolute y-coordinate
     * @return cube, null if the chunk or cube is not available
     */
    public LightingCube getCube(int deltaChunkX, int deltaChunkZ, int cy) {
        LightingChunk chunk = get(deltaChunkX, deltaChunkZ);
        if (chunk == null) {
            return null;
        }
        return chunk.sections.get(cy);
    }

    /**
     * Sets the neighbor representing the given relative chunk
     *
     * @param deltaChunkX relative chunk x (-1, 0 or 1)
     * @param deltaChunkZ relative chunk z (-1, 0 or 1)
     * @param neighbor to set to
     */
    public void set(int deltaChunkX, int deltaChunkZ, LightingChunk neighbor) {
        values[getIndexByChunk(deltaChunkX, deltaChunkZ)] = neighbor;
    }
}

View File

@@ -0,0 +1,317 @@
package com.volmit.iris.scaffold.lighting;
import java.util.concurrent.CompletableFuture;
import com.bergerkiller.bukkit.common.collections.BlockFaceSet;
import com.bergerkiller.bukkit.common.utils.WorldUtil;
import com.bergerkiller.bukkit.common.wrappers.BlockData;
import com.bergerkiller.bukkit.common.wrappers.ChunkSection;
import com.bergerkiller.generated.net.minecraft.server.NibbleArrayHandle;
/**
* A single 16x16x16 cube of stored block information
*/
public class LightingCube {
    public static final int OOC = ~0xf; // Outside Of Cube

    // The lighting chunk this cube belongs to
    public final LightingChunk owner;
    // The six face-adjacent cubes (up/down within the chunk, four horizontal via neighbor chunks)
    public final LightingCubeNeighboring neighbors = new LightingCubeNeighboring();
    // Cube y-coordinate (section index), as reported by the chunk section
    public final int cy;
    public final NibbleArrayHandle skyLight;
    public final NibbleArrayHandle blockLight;
    // Per-block emitted light, captured once from block data; never modified afterwards
    public final NibbleArrayHandle emittedLight;
    public final NibbleArrayHandle opacity;
    private final BlockFaceSetSection opaqueFaces;

    /**
     * Creates a lighting cube from a chunk section, capturing per-block opacity,
     * opaque faces and emitted light. When not all four chunk neighbors are known,
     * the existing light data is read in so the untouched border stays valid.
     *
     * @param owner the lighting chunk this cube belongs to
     * @param chunkSection section to read block data from
     * @param hasSkyLight whether this world/dimension stores sky light
     */
    public LightingCube(LightingChunk owner, ChunkSection chunkSection, boolean hasSkyLight) {
        this.owner = owner;
        this.cy = chunkSection.getY();
        if (owner.neighbors.hasAll()) {
            // Block light data (is re-initialized in the fill operation below, no need to read)
            this.blockLight = NibbleArrayHandle.createNew();
            // Sky light data (is re-initialized using heightmap operation later, no need to read)
            if (hasSkyLight) {
                this.skyLight = NibbleArrayHandle.createNew();
            } else {
                this.skyLight = null;
            }
        } else {
            // We need to load the original light data, because we have a border that we do not update
            // Block light data
            byte[] blockLightData = WorldUtil.getSectionBlockLight(owner.world,
                    owner.chunkX, this.cy, owner.chunkZ);
            if (blockLightData != null) {
                this.blockLight = NibbleArrayHandle.createNew(blockLightData);
            } else {
                this.blockLight = NibbleArrayHandle.createNew();
            }
            // Sky light data
            if (hasSkyLight) {
                byte[] skyLightData = WorldUtil.getSectionSkyLight(owner.world,
                        owner.chunkX, this.cy, owner.chunkZ);
                if (skyLightData != null) {
                    this.skyLight = NibbleArrayHandle.createNew(skyLightData);
                } else {
                    this.skyLight = NibbleArrayHandle.createNew();
                }
            } else {
                this.skyLight = null;
            }
        }
        // World coordinates
        int worldX = owner.chunkX << 4;
        int worldY = chunkSection.getYPosition();
        int worldZ = owner.chunkZ << 4;
        // Fill opacity and initial block lighting values
        this.opacity = NibbleArrayHandle.createNew();
        this.emittedLight = NibbleArrayHandle.createNew();
        this.opaqueFaces = new BlockFaceSetSection();
        // NOTE(review): the locals 'opacity' and 'opaqueFaces' below shadow the
        // fields of the same names; the fields are referenced as 'this.opacity' /
        // 'this.opaqueFaces' inside the loop, so behavior is correct but fragile.
        int x, y, z, opacity, blockEmission;
        BlockFaceSet opaqueFaces;
        BlockData info;
        for (z = owner.start.z; z <= owner.end.z; z++) {
            for (x = owner.start.x; x <= owner.end.x; x++) {
                for (y = 0; y < 16; y++) {
                    info = chunkSection.getBlockData(x, y, z);
                    blockEmission = info.getEmission();
                    opacity = info.getOpacity(owner.world, worldX+x, worldY+y, worldZ+z);
                    if (opacity >= 0xf) {
                        // Fully solid: clamp opacity and mark all faces opaque
                        opacity = 0xf;
                        opaqueFaces = BlockFaceSet.ALL;
                    } else {
                        if (opacity < 0) {
                            opacity = 0;
                        }
                        opaqueFaces = info.getOpaqueFaces(owner.world, worldX+x, worldY+y, worldZ+z);
                    }
                    this.opacity.set(x, y, z, opacity);
                    this.emittedLight.set(x, y, z, blockEmission);
                    // Seed block light with the block's own emission
                    this.blockLight.set(x, y, z, blockEmission);
                    this.opaqueFaces.set(x, y, z, opaqueFaces);
                }
            }
        }
    }

    /**
     * Gets the opaque faces of a block
     *
     * @param x - coordinate
     * @param y - coordinate
     * @param z - coordinate
     * @return opaque face set
     */
    public BlockFaceSet getOpaqueFaces(int x, int y, int z) {
        return this.opaqueFaces.get(x, y, z);
    }

    /**
     * Read light level of a neighboring block.
     * If possibly more, also check opaque faces, and then return the
     * higher light value if all these tests pass.
     * The x/y/z coordinates are allowed to check neighboring cubes.
     *
     * @param category category of light to check
     * @param old_light previous light value
     * @param faceMask The BlockFaceSet mask indicating the light-traveling direction
     * @param x The X-coordinate of the block (-1 to 16)
     * @param y The Y-coordinate of the block (-1 to 16)
     * @param z The Z-coordinate of the block (-1 to 16)
     * @return higher light level if propagated, otherwise the old light value
     */
    public int getLightIfHigherNeighbor(LightingCategory category, int old_light, int faceMask, int x, int y, int z) {
        if ((x & OOC | y & OOC | z & OOC) == 0) {
            // All coordinates inside this cube
            return this.getLightIfHigher(category, old_light, faceMask, x, y, z);
        } else {
            // Outside: delegate to the face-adjacent cube, wrapping coordinates into 0-15
            LightingCube neigh = this.neighbors.get(x>>4, y>>4, z>>4);
            if (neigh != null) {
                return neigh.getLightIfHigher(category, old_light, faceMask, x & 0xf, y & 0xf, z & 0xf);
            } else {
                return old_light;
            }
        }
    }

    /**
     * Read light level of a neighboring block.
     * If possibly more, also check opaque faces, and then return the
     * higher light value if all these tests pass.
     * Requires the x/y/z coordinates to lay within this cube.
     *
     * @param category Category of light to check
     * @param old_light Previous light value
     * @param faceMask The BlockFaceSet mask indicating the light-traveling direction
     * @param x The X-coordinate of the block (0 to 15)
     * @param y The Y-coordinate of the block (0 to 15)
     * @param z The Z-coordinate of the block (0 to 15)
     * @return higher light level if propagated, otherwise the old light value
     */
    public int getLightIfHigher(LightingCategory category, int old_light, int faceMask, int x, int y, int z) {
        int new_light_level = category.get(this, x, y, z);
        return (new_light_level > old_light && !this.getOpaqueFaces(x, y, z).get(faceMask))
                ? new_light_level : old_light;
    }

    /**
     * Called during initialization of block light to spread the light emitted by a block
     * to all neighboring blocks.
     *
     * @param x The X-coordinate of the block (0 to 15)
     * @param y The Y-coordinate of the block (0 to 15)
     * @param z The Z-coordinate of the block (0 to 15)
     */
    public void spreadBlockLight(int x, int y, int z) {
        int emitted = this.emittedLight.get(x, y, z);
        if (emitted <= 1) {
            return; // Skip if neighbouring blocks won't receive light from it
        }
        // Use the cheaper within-cube spread when safely away from the borders
        if (x >= 1 && z >= 1 && x <= 14 && z <= 14) {
            trySpreadBlockLightWithin(emitted, BlockFaceSet.MASK_EAST, x-1, y, z);
            trySpreadBlockLightWithin(emitted, BlockFaceSet.MASK_WEST, x+1, y, z);
            trySpreadBlockLightWithin(emitted, BlockFaceSet.MASK_SOUTH, x, y, z-1);
            trySpreadBlockLightWithin(emitted, BlockFaceSet.MASK_NORTH, x, y, z+1);
        } else {
            trySpreadBlockLight(emitted, BlockFaceSet.MASK_EAST, x-1, y, z);
            trySpreadBlockLight(emitted, BlockFaceSet.MASK_WEST, x+1, y, z);
            trySpreadBlockLight(emitted, BlockFaceSet.MASK_SOUTH, x, y, z-1);
            trySpreadBlockLight(emitted, BlockFaceSet.MASK_NORTH, x, y, z+1);
        }
        if (y >= 1 && y <= 14) {
            trySpreadBlockLightWithin(emitted, BlockFaceSet.MASK_UP, x, y-1, z);
            trySpreadBlockLightWithin(emitted, BlockFaceSet.MASK_DOWN, x, y+1, z);
        } else {
            trySpreadBlockLight(emitted, BlockFaceSet.MASK_UP, x, y-1, z);
            trySpreadBlockLight(emitted, BlockFaceSet.MASK_DOWN, x, y+1, z);
        }
    }

    /**
     * Tries to spread block light from an emitting block to one of the 6 sites.
     * The block being spread to is allowed to be outside of the bounds of this cube,
     * in which case neighboring cubes are spread to instead.
     *
     * @param emitted The light that is emitted by the block
     * @param faceMask The BlockFaceSet mask indicating the light-traveling direction
     * @param x The X-coordinate of the block to spread to (-1 to 16)
     * @param y The Y-coordinate of the block to spread to (-1 to 16)
     * @param z The Z-coordinate of the block to spread to (-1 to 16)
     */
    public void trySpreadBlockLight(int emitted, int faceMask, int x, int y, int z) {
        if ((x & OOC | y & OOC | z & OOC) == 0) {
            this.trySpreadBlockLightWithin(emitted, faceMask, x, y, z);
        } else {
            LightingCube neigh = this.neighbors.get(x>>4, y>>4, z>>4);
            if (neigh != null) {
                neigh.trySpreadBlockLightWithin(emitted, faceMask, x & 0xf, y & 0xf, z & 0xf);
            }
        }
    }

    /**
     * Tries to spread block light from an emitting block to one of the 6 sides.
     * Assumes that the block being spread to is within this cube.
     *
     * @param emitted The light that is emitted by the block
     * @param faceMask The BlockFaceSet mask indicating the light-traveling direction
     * @param x The X-coordinate of the block to spread to (0 to 15)
     * @param y The Y-coordinate of the block to spread to (0 to 15)
     * @param z The Z-coordinate of the block to spread to (0 to 15)
     */
    public void trySpreadBlockLightWithin(int emitted, int faceMask, int x, int y, int z) {
        if (!this.getOpaqueFaces(x, y, z).get(faceMask)) {
            // Attenuate by at least 1 per block; only ever raise the stored level
            int new_level = emitted - Math.max(1, this.opacity.get(x, y, z));
            if (new_level > this.blockLight.get(x, y, z)) {
                this.blockLight.set(x, y, z, new_level);
            }
        }
    }

    /**
     * Applies the lighting information to a chunk section
     *
     * @param chunkSection to save to
     * @return future completed when saving is finished. Future resolves to False if no changes occurred, True otherwise.
     */
    public CompletableFuture<Boolean> saveToChunk(ChunkSection chunkSection) {
        CompletableFuture<Void> blockLightFuture = null;
        CompletableFuture<Void> skyLightFuture = null;
        try {
            if (this.blockLight != null) {
                // Only write block light when it differs from what is already stored
                byte[] newBlockLight = this.blockLight.getData();
                byte[] oldBlockLight = WorldUtil.getSectionBlockLight(owner.world,
                        owner.chunkX, this.cy, owner.chunkZ);
                boolean blockLightChanged = false;
                if (oldBlockLight == null || newBlockLight.length != oldBlockLight.length) {
                    blockLightChanged = true;
                } else {
                    for (int i = 0; i < oldBlockLight.length; i++) {
                        if (oldBlockLight[i] != newBlockLight[i]) {
                            blockLightChanged = true;
                            break;
                        }
                    }
                }
                //TODO: Maybe do blockLightChanged check inside BKCommonLib?
                if (blockLightChanged) {
                    blockLightFuture = WorldUtil.setSectionBlockLightAsync(owner.world,
                            owner.chunkX, this.cy, owner.chunkZ,
                            newBlockLight);
                }
            }
            if (this.skyLight != null) {
                // Only write sky light when it differs from what is already stored
                byte[] newSkyLight = this.skyLight.getData();
                byte[] oldSkyLight = WorldUtil.getSectionSkyLight(owner.world,
                        owner.chunkX, this.cy, owner.chunkZ);
                boolean skyLightChanged = false;
                if (oldSkyLight == null || newSkyLight.length != oldSkyLight.length) {
                    skyLightChanged = true;
                } else {
                    for (int i = 0; i < oldSkyLight.length; i++) {
                        if (oldSkyLight[i] != newSkyLight[i]) {
                            skyLightChanged = true;
                            break;
                        }
                    }
                }
                //TODO: Maybe do skyLightChanged check inside BKCommonLib?
                if (skyLightChanged) {
                    skyLightFuture = WorldUtil.setSectionSkyLightAsync(owner.world,
                            owner.chunkX, this.cy, owner.chunkZ,
                            newSkyLight);
                }
            }
        } catch (Throwable t) {
            // Surface the failure through the returned future instead of throwing
            CompletableFuture<Boolean> exceptionally = new CompletableFuture<Boolean>();
            exceptionally.completeExceptionally(t);
            return exceptionally;
        }
        // No updates performed
        if (blockLightFuture == null && skyLightFuture == null) {
            return CompletableFuture.completedFuture(Boolean.FALSE);
        }
        // Join both completable futures as one, if needed
        CompletableFuture<Void> combined;
        if (blockLightFuture == null) {
            combined = skyLightFuture;
        } else if (skyLightFuture == null) {
            combined = blockLightFuture;
        } else {
            combined = CompletableFuture.allOf(blockLightFuture, skyLightFuture);
        }
        // When combined resolves, return one that returns True
        return combined.thenApply((c) -> Boolean.TRUE);
    }
}

View File

@@ -0,0 +1,64 @@
package com.volmit.iris.scaffold.lighting;
/**
* Keeps track of the 6 x/y/z neighbors of cubes
*/
public class LightingCubeNeighboring {
    public final LightingCube[] values = new LightingCube[6];

    /**
     * Generates a key ranging 0 - 5 for fixed x/y/z combinations<br>
     * - Bit 1 is set to contain whether x/y/z is 1 or -1
     * - Bit 2 is set to 1 when the axis is x<br>
     * - Bit 3 is set to 1 when the axis is z<br><br>
     * <p/>
     * This system requires that the x/y/z pairs are one the following:<br>
     * (0, 0, 1) | (0, 0, -1) | (0, 1, 0) | (0, -1, 0) | (1, 0, 0) | (-1, 0, 0)
     *
     * @param x value
     * @param y value
     * @param z value
     * @return key
     */
    private static final int getIndexByCube(int x, int y, int z) {
        return (((x + y + z + 1) & 0x2) >> 1) | ((x & 0x1) << 1) | ((z & 0x1) << 2);
    }

    /**
     * Gets whether all 6 cube neighbors are accessible
     *
     * @return True if all neighbors are accessible
     */
    public boolean hasAll() {
        for (LightingCube neighbor : values) {
            if (neighbor == null) {
                return false;
            }
        }
        return true;
    }

    /**
     * Gets the neighbor representing the given relative cube
     *
     * @param deltaCubeX relative cube x (-1, 0 or 1)
     * @param deltaCubeY relative cube y (-1, 0 or 1)
     * @param deltaCubeZ relative cube z (-1, 0 or 1)
     * @return neighbor, null if no neighbor is available here
     */
    public LightingCube get(int deltaCubeX, int deltaCubeY, int deltaCubeZ) {
        return values[getIndexByCube(deltaCubeX, deltaCubeY, deltaCubeZ)];
    }

    /**
     * Sets the neighbor representing the given relative cube
     *
     * @param deltaCubeX relative cube x (-1, 0 or 1)
     * @param deltaCubeY relative cube y (-1, 0 or 1)
     * @param deltaCubeZ relative cube z (-1, 0 or 1)
     * @param neighbor to set to, is allowed to be null to set to 'none'
     */
    public void set(int deltaCubeX, int deltaCubeY, int deltaCubeZ, LightingCube neighbor) {
        values[getIndexByCube(deltaCubeX, deltaCubeY, deltaCubeZ)] = neighbor;
    }
}

View File

@@ -0,0 +1,77 @@
package com.volmit.iris.scaffold.lighting;
import java.util.HashMap;
import java.util.Map;
import org.bukkit.World;
import com.bergerkiller.bukkit.common.chunk.ForcedChunk;
import com.bergerkiller.bukkit.common.utils.WorldUtil;
/**
* Shortly remembers the forced chunks it has kept loaded from a previous operation.
* Reduces chunk unloading-loading grind.
*/
public class LightingForcedChunkCache {
    // Guarded by synchronizing on the map itself
    private static final Map<Key, ForcedChunk> _cache = new HashMap<Key, ForcedChunk>();

    /**
     * Returns a forced chunk for the given coordinates, reusing a previously
     * stored one when available, otherwise forcing the chunk to load.
     */
    public static ForcedChunk get(World world, int x, int z) {
        ForcedChunk cached;
        synchronized (_cache) {
            cached = _cache.get(new Key(world, x, z));
        }
        // Clone outside the lock; fall back to actually force-loading the chunk
        return (cached != null) ? cached.clone() : WorldUtil.forceChunkLoaded(world, x, z);
    }

    /**
     * Remembers a forced chunk for later reuse, releasing any entry it replaces.
     */
    public static void store(ForcedChunk chunk) {
        Key key = new Key(chunk.getWorld(), chunk.getX(), chunk.getZ());
        ForcedChunk prev;
        synchronized (_cache) {
            prev = _cache.put(key, chunk.clone());
        }
        if (prev != null) {
            prev.close();
        }
    }

    /**
     * Releases and forgets all cached forced chunks.
     */
    public static void reset() {
        synchronized (_cache) {
            for (ForcedChunk chunk : _cache.values()) {
                chunk.close();
            }
            _cache.clear();
        }
    }

    // Identity of a chunk: world reference plus chunk coordinates
    private static final class Key {
        public final World world;
        public final int x;
        public final int z;

        public Key(World world, int x, int z) {
            this.world = world;
            this.x = x;
            this.z = z;
        }

        @Override
        public int hashCode() {
            return this.x * 31 + this.z;
        }

        @Override
        public boolean equals(Object o) {
            if (!(o instanceof Key)) {
                return false;
            }
            Key other = (Key) o;
            // World compared by identity on purpose: Bukkit worlds are singletons
            return this.x == other.x && this.z == other.z && this.world == other.world;
        }
    }
}

View File

@@ -0,0 +1,641 @@
package com.volmit.iris.scaffold.lighting;
import com.bergerkiller.bukkit.common.AsyncTask;
import com.bergerkiller.bukkit.common.bases.IntVector2;
import com.bergerkiller.bukkit.common.bases.IntVector3;
import com.bergerkiller.bukkit.common.permissions.NoPermissionException;
import com.bergerkiller.bukkit.common.utils.MathUtil;
import com.bergerkiller.bukkit.common.utils.WorldUtil;
import com.bergerkiller.bukkit.common.wrappers.LongHashSet;
import com.bergerkiller.bukkit.common.wrappers.LongHashSet.LongIterator;
import com.volmit.iris.Iris;
import org.bukkit.*;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import java.util.*;
public class LightingService extends AsyncTask {
// Senders to be notified when the scheduled work completes; guarded by itself
private static final Set<RecipientWhenDone> recipientsForDone = new HashSet<RecipientWhenDone>();
// Queue of scheduled lighting tasks
private static final LinkedList<LightingTask> tasks = new LinkedList<LightingTask>();
// Interval for persisting pending task data — presumably in processed tasks; confirm from usage below
private static final int PENDING_WRITE_INTERVAL = 10;
// Background worker thread; non-null while the service is processing
private static AsyncTask fixThread = null;
// Aggregate chunk count across scheduled tasks — TODO confirm exact semantics from usage below
private static int taskChunkCount = 0;
private static int taskCounter = 0;
private static boolean pendingFileInUse = false;
// Task currently being processed, or null when idle/finished
private static LightingTask currentTask;
// When true, pending tasks are not being processed (see setPaused)
private static boolean paused = false;
// When true, processing is halted because available memory ran too low
private static boolean lowOnMemory = false;
/**
 * Gets whether this service is currently processing something
 *
 * @return True if processing, False if not
 */
public static boolean isProcessing() {
    // The worker thread only exists while the service is active
    return fixThread != null;
}
/**
 * Starts or stops the processing service.
 * Stopping the service does not instantly abort, the current task is continued.
 *
 * @param process true to start the processing service, false to stop it
 */
public static void setProcessing(boolean process) {
    if (process == isProcessing()) {
        return; // Already in the requested state
    }
    if (process) {
        fixThread = new LightingService().start(true);
    } else {
        // Fix thread is running, abort
        AsyncTask.stop(fixThread);
        fixThread = null;
    }
}
/**
 * Gets whether execution is paused, and pending tasks are not being processed
 *
 * @return True if paused
 */
public static boolean isPaused() {
    return paused;
}
/**
 * Sets whether execution is paused.
 *
 * @param pause state to set to
 */
public static void setPaused(boolean pause) {
    // Plain assignment; writing an unchanged value is a harmless no-op
    paused = pause;
}
/**
* Gets the status of the currently processed task
*
* @return current task status
*/
public static String getCurrentStatus() {
final LightingTask current = currentTask;
if (lowOnMemory) {
return ChatColor.RED + "Too low on available memory (paused)";
} else if (current == null) {
return "Finished.";
} else {
return current.getStatus();
}
}
/**
* Gets the time the currently processing task was started. If no task is being processed,
* an empty result is returned. If processing didn't start yet, the value will be 0.
*
* @return time when the current task was started
*/
public static java.util.OptionalLong getCurrentStartTime() {
final LightingTask current = currentTask;
return (current == null) ? java.util.OptionalLong.empty() : OptionalLong.of(current.getTimeStarted());
}
public static void addRecipient(CommandSender sender) {
synchronized (recipientsForDone) {
recipientsForDone.add(new RecipientWhenDone(sender));
}
}
public static void scheduleWorld(final World world) {
ScheduleArguments args = new ScheduleArguments();
args.setWorld(world);
args.setEntireWorld();
schedule(args);
}
/**
* Schedules a square chunk area for lighting fixing
*
* @param world the chunks are in
* @param middleX
* @param middleZ
* @param radius
*/
public static void scheduleArea(World world, int middleX, int middleZ, int radius) {
ScheduleArguments args = new ScheduleArguments();
args.setWorld(world);
args.setChunksAround(middleX, middleZ, radius);
schedule(args);
}
@Deprecated
public static void schedule(World world, Collection<IntVector2> chunks) {
ScheduleArguments args = new ScheduleArguments();
args.setWorld(world);
args.setChunks(chunks);
schedule(args);
}
public static void schedule(World world, LongHashSet chunks) {
ScheduleArguments args = new ScheduleArguments();
args.setWorld(world);
args.setChunks(chunks);
schedule(args);
}
public static void schedule(ScheduleArguments args) {
// World not allowed to be null
if (args.getWorld() == null) {
throw new IllegalArgumentException("Schedule arguments 'world' is null");
}
// If no chunks specified, entire world
if (args.isEntireWorld()) {
LightingTaskWorld task = new LightingTaskWorld(args.getWorld());
task.applyOptions(args);
schedule(task);
return;
}
// If less than 34x34 chunks are requested, schedule as one task
// In that case, be sure to only schedule chunks that actually exist
// This prevents generating new chunks as part of this command
LongHashSet chunks = args.getChunks();
if (chunks.size() <= (34*34)) {
LongHashSet chunks_filtered = new LongHashSet(chunks.size());
Set<IntVector2> region_coords_filtered = new HashSet<IntVector2>();
LongIterator iter = chunks.longIterator();
if (args.getLoadedChunksOnly()) {
// Remove coordinates of chunks that aren't loaded
while (iter.hasNext()) {
long chunk = iter.next();
int cx = MathUtil.longHashMsw(chunk);
int cz = MathUtil.longHashLsw(chunk);
if (WorldUtil.isLoaded(args.getWorld(), cx, cz)) {
chunks_filtered.add(chunk);
region_coords_filtered.add(new IntVector2(
WorldUtil.chunkToRegionIndex(cx),
WorldUtil.chunkToRegionIndex(cz)));
}
}
} else if (true) {
// Remove coordinates of chunks that don't actually exist (avoid generating new chunks)
// isChunkAvailable isn't very fast, but fast enough below this threshold of chunks
// To check for border chunks, we check that all 9 chunks are are available
Map<IntVector2, Boolean> tmp = new HashMap<>();
while (iter.hasNext()) {
long chunk = iter.next();
int cx = MathUtil.longHashMsw(chunk);
int cz = MathUtil.longHashLsw(chunk);
boolean fully_loaded = true;
for (int dx = -2; dx <= 2 && fully_loaded; dx++) {
for (int dz = -2; dz <= 2 && fully_loaded; dz++) {
IntVector2 pos = new IntVector2(cx + dx, cz + dz);
fully_loaded &= tmp.computeIfAbsent(pos, p -> WorldUtil.isChunkAvailable(args.getWorld(), p.x, p.z)).booleanValue();
}
}
if (fully_loaded) {
chunks_filtered.add(chunk);
region_coords_filtered.add(new IntVector2(
WorldUtil.chunkToRegionIndex(cx),
WorldUtil.chunkToRegionIndex(cz)));
}
}
} else {
// Remove coordinates of chunks that don't actually exist (avoid generating new chunks)
// isChunkAvailable isn't very fast, but fast enough below this threshold of chunks
while (iter.hasNext()) {
long chunk = iter.next();
int cx = MathUtil.longHashMsw(chunk);
int cz = MathUtil.longHashLsw(chunk);
if (WorldUtil.isChunkAvailable(args.getWorld(), cx, cz)) {
chunks_filtered.add(chunk);
region_coords_filtered.add(new IntVector2(
WorldUtil.chunkToRegionIndex(cx),
WorldUtil.chunkToRegionIndex(cz)));
}
}
}
// For all filtered chunk coordinates, compute regions
int[] regionYCoordinates;
{
Set<IntVector3> regions = WorldUtil.getWorldRegions3ForXZ(args.getWorld(), region_coords_filtered);
// Simplify to just the unique Y-coordinates
regionYCoordinates = regions.stream().mapToInt(r -> r.y).sorted().distinct().toArray();
}
// Schedule it
if (!chunks_filtered.isEmpty()) {
LightingTaskBatch task = new LightingTaskBatch(args.getWorld(), regionYCoordinates, chunks_filtered);
task.applyOptions(args);
schedule(task);
}
return;
}
// Too many chunks requested. Separate the operations per region file with small overlap.
FlatRegionInfoMap regions;
if (args.getLoadedChunksOnly()) {
regions = FlatRegionInfoMap.createLoaded(args.getWorld());
} else {
regions = FlatRegionInfoMap.create(args.getWorld());
}
LongIterator iter = chunks.longIterator();
LongHashSet scheduledRegions = new LongHashSet();
while (iter.hasNext()) {
long first_chunk = iter.next();
int first_chunk_x = MathUtil.longHashMsw(first_chunk);
int first_chunk_z = MathUtil.longHashLsw(first_chunk);
FlatRegionInfo region = regions.getRegionAtChunk(first_chunk_x, first_chunk_z);
if (region == null || scheduledRegions.contains(region.rx, region.rz)) {
continue; // Does not exist or already scheduled
}
if (!region.containsChunk(first_chunk_x, first_chunk_z)) {
continue; // Chunk does not exist in world (not generated yet) or isn't loaded (loaded chunks only option)
}
// Collect all the region Y coordinates used for this region and the neighbouring regions
// This makes sure we find all chunk slices we might need on an infinite height world
int[] region_y_coordinates = regions.getRegionYCoordinatesSelfAndNeighbours(region);
// Collect all chunks to process for this region.
// This is an union of the 34x34 area of chunks and the region file data set
LongHashSet buffer = new LongHashSet();
int rdx, rdz;
for (rdx = -1; rdx < 33; rdx++) {
for (rdz = -1; rdz < 33; rdz++) {
int cx = region.cx + rdx;
int cz = region.cz + rdz;
long chunk_key = MathUtil.longHashToLong(cx, cz);
if (!chunks.contains(chunk_key)) {
continue;
}
if (true) {
// Check the chunk and the surrounding chunks are all present
if (!regions.containsChunkAndNeighbours(cx, cz)) {
continue;
}
} else {
// Only check chunk
if (!regions.containsChunk(cx, cz)) {
continue;
}
}
buffer.add(chunk_key);
}
}
// Schedule the region
if (!buffer.isEmpty()) {
scheduledRegions.add(region.rx, region.rz);
LightingTaskBatch task = new LightingTaskBatch(args.getWorld(), region_y_coordinates, buffer);
task.applyOptions(args);
schedule(task);
}
}
}
public static void schedule(LightingTask task) {
synchronized (tasks) {
tasks.offer(task);
taskChunkCount += task.getChunkCount();
}
setProcessing(true);
}
/**
* Loads the pending chunk batch operations from a save file.
* If it is there, it will start processing these again.
*/
public static void loadPendingBatches() {
pendingFileInUse = false;
}
/**
* Saves all pending chunk batch operations to a save file.
* If the server, for whatever reason, crashes, it can restore using this file.
*/
public static void savePendingBatches() {
if (pendingFileInUse) {
return;
}
}
/**
* Clears all pending tasks, does continue with the current tasks
*/
public static void clearTasks() {
synchronized (tasks) {
tasks.clear();
}
final LightingTask current = currentTask;
if (current != null) {
current.abort();
}
synchronized (tasks) {
tasks.clear();
}
currentTask = null;
taskChunkCount = 0;
LightingForcedChunkCache.reset();
}
/**
* Orders this service to abort all tasks, finishing the current task in an orderly fashion.
* This method can only be called from the main Thread.
*/
public static void abort() {
// Finish the current lighting task if available
final LightingTask current = currentTask;
final AsyncTask service = fixThread;
if (service != null && current != null) {
setProcessing(false);
current.abort();
}
// Clear lighting tasks
synchronized (tasks) {
if (current != null) {
tasks.addFirst(current);
}
if (!tasks.isEmpty()) {
}
savePendingBatches();
clearTasks();
}
}
/**
* Gets the amount of chunks that are still faulty
*
* @return faulty chunk count
*/
public static int getChunkFaults() {
final LightingTask current = currentTask;
return taskChunkCount + (current == null ? 0 : current.getChunkCount());
}
@Override
public void run() {
// While paused, do nothing
while (paused) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
synchronized (tasks) {
if (tasks.isEmpty()) {
break; // Stop processing.
}
}
if (fixThread.isStopRequested()) {
return;
}
}
synchronized (tasks) {
currentTask = tasks.poll();
}
if (currentTask == null) {
// No more tasks, end this thread
// Messages
final String message = ChatColor.GREEN + "All lighting operations are completed.";
synchronized (recipientsForDone) {
for (RecipientWhenDone recipient : recipientsForDone) {
CommandSender recip = recipient.player_name == null ?
Bukkit.getConsoleSender() : Bukkit.getPlayer(recipient.player_name);
if (recip != null) {
String timeStr = LightingUtil.formatDuration(System.currentTimeMillis() - recipient.timeStarted);
recip.sendMessage(message + ChatColor.WHITE + " (Took " + timeStr + ")");
}
}
recipientsForDone.clear();
}
// Stop task and abort
taskCounter = 0;
setProcessing(false);
LightingForcedChunkCache.reset();
savePendingBatches();
return;
} else {
// Write to file?
if (taskCounter++ >= PENDING_WRITE_INTERVAL) {
taskCounter = 0;
// Start saving on another thread (IO access is slow...)
new AsyncTask() {
public void run() {
savePendingBatches();
}
}.start();
// Save the world of the current task being processed
}
// Subtract task from the task count
taskChunkCount -= currentTask.getChunkCount();
// Process the task
try {
currentTask.process();
} catch (Throwable t) {
t.printStackTrace();
Iris.error("Failed to process task: " + currentTask.getStatus());
}
}
}
private static long calcAvailableMemory(Runtime runtime) {
long max = runtime.maxMemory();
if (max == Long.MAX_VALUE) {
return Long.MAX_VALUE;
} else {
long used = (runtime.totalMemory() - runtime.freeMemory());
return (max - used);
}
}
public static class ScheduleArguments {
private World world;
private String worldName;
private LongHashSet chunks;
private boolean debugMakeCorrupted = false;
private boolean loadedChunksOnly = false;
private int radius = Bukkit.getServer().getViewDistance();
public boolean getDebugMakeCorrupted() {
return this.debugMakeCorrupted;
}
public boolean getLoadedChunksOnly() {
return this.loadedChunksOnly;
}
public int getRadius() {
return this.radius;
}
public boolean isEntireWorld() {
return this.chunks == null;
}
public World getWorld() {
return this.world;
}
public String getWorldName() {
return this.worldName;
}
public LongHashSet getChunks() {
return this.chunks;
}
/**
* Sets the world itself. Automatically updates the world name.
*
* @param world
* @return these arguments
*/
public ScheduleArguments setWorld(World world) {
this.world = world;
this.worldName = world.getName();
return this;
}
/**
* Sets the world name to perform operations on.
* If the world by this name does not exist, the world is null.
*
* @param worldName
* @return these arguments
*/
public ScheduleArguments setWorldName(String worldName) {
this.world = Bukkit.getWorld(worldName);
this.worldName = worldName;
return this;
}
public ScheduleArguments setEntireWorld() {
this.chunks = null;
return this;
}
public ScheduleArguments setDebugMakeCorrupted(boolean debug) {
this.debugMakeCorrupted = debug;
return this;
}
public ScheduleArguments setLoadedChunksOnly(boolean loadedChunksOnly) {
this.loadedChunksOnly = loadedChunksOnly;
return this;
}
public ScheduleArguments setRadius(int radius) {
this.radius = radius;
return this;
}
public ScheduleArguments setChunksAround(Location location, int radius) {
this.setWorld(location.getWorld());
return this.setChunksAround(location.getBlockX()>>4, location.getBlockZ()>>4, radius);
}
public ScheduleArguments setChunksAround(int middleX, int middleZ, int radius) {
this.setRadius(radius);
LongHashSet chunks_hashset = new LongHashSet((2*radius)*(2*radius));
for (int a = -radius; a <= radius; a++) {
for (int b = -radius; b <= radius; b++) {
int cx = middleX + a;
int cz = middleZ + b;
chunks_hashset.add(cx, cz);
}
}
return this.setChunks(chunks_hashset);
}
/**
* Sets the chunks to a cuboid area of chunks.
* Make sure the minimum chunk coordinates are less or equal to
* the maximum chunk coordinates.
*
* @param minChunkX Minimum chunk x-coordinate (inclusive)
* @param minChunkZ Minimum chunk z-coordinate (inclusive)
* @param maxChunkX Maximum chunk x-coordinate (inclusive)
* @param maxChunkZ Maximum chunk z-coordinate (inclusive)
* @return this
*/
public ScheduleArguments setChunkFromTo(int minChunkX, int minChunkZ, int maxChunkX, int maxChunkZ) {
int num_dx = (maxChunkX - minChunkX) + 1;
int num_dz = (maxChunkZ - minChunkZ) + 1;
if (num_dx <= 0 || num_dz <= 0) {
return this.setChunks(new LongHashSet()); // nothing
}
LongHashSet chunks_hashset = new LongHashSet(num_dx * num_dz);
for (int chunkX = minChunkX; chunkX <= maxChunkX; chunkX++) {
for (int chunkZ = minChunkZ; chunkZ <= maxChunkZ; chunkZ++) {
chunks_hashset.add(chunkX, chunkZ);
}
}
return this.setChunks(chunks_hashset);
}
public ScheduleArguments setChunks(Collection<IntVector2> chunks) {
LongHashSet chunks_hashset = new LongHashSet(chunks.size());
for (IntVector2 coord : chunks) {
chunks_hashset.add(coord.x, coord.z);
}
return this.setChunks(chunks_hashset);
}
public ScheduleArguments setChunks(LongHashSet chunks) {
this.chunks = chunks;
return this;
}
private boolean checkRadiusPermission(CommandSender sender, int radius) throws NoPermissionException {
return false;
}
/**
* Parses the arguments specified in a command
*
* @param sender
* @return false if the input is incorrect and operations may not proceed
* @throws NoPermissionException
*/
public boolean handleCommandInput(CommandSender sender, String[] args) throws NoPermissionException {
return true;
}
/**
* Creates a new ScheduleArguments instance ready to be configured
*
* @return args
*/
public static ScheduleArguments create()
{
return new ScheduleArguments();
}
}
private static class RecipientWhenDone {
public final String player_name;
public final long timeStarted;
public RecipientWhenDone(CommandSender sender) {
this.player_name = (sender instanceof Player) ? sender.getName() : null;
this.timeStarted = System.currentTimeMillis();
}
}
}

View File

@@ -0,0 +1,61 @@
package com.volmit.iris.scaffold.lighting;
import org.bukkit.World;
/**
* A single task the Lighting Service can handle
*/
/**
 * A single task the Lighting Service can handle
 */
public interface LightingTask {
    /**
     * Gets the world this task is working on
     *
     * @return task world
     */
    World getWorld();

    /**
     * Gets the amount of chunks this task is going to fix.
     * This can be a wild estimate. While processing this amount should be
     * updated as well.
     *
     * @return estimated total chunk count
     */
    int getChunkCount();

    /**
     * Gets a descriptive status of the current task being processed
     *
     * @return status
     */
    String getStatus();

    /**
     * Gets the timestamp (milliseconds since epoch) when this task was first started.
     * If 0 is returned, then the task wasn't started yet.
     *
     * @return time this task was started
     */
    long getTimeStarted();

    /**
     * Processes this task (called from another thread!)
     */
    void process();

    /**
     * Orders this task to abort. May be called while {@link #process()} is
     * still running on the worker thread.
     */
    void abort();

    /**
     * Whether this task can be saved to PendingLight.dat
     *
     * @return True if it can be saved
     */
    boolean canSave();

    /**
     * Loads additional options
     *
     * @param args schedule arguments to take the options from
     */
    void applyOptions(LightingService.ScheduleArguments args);
}

View File

@@ -0,0 +1,564 @@
package com.volmit.iris.scaffold.lighting;
import com.bergerkiller.bukkit.common.bases.IntVector2;
import com.bergerkiller.bukkit.common.utils.CommonUtil;
import com.bergerkiller.bukkit.common.utils.LogicUtil;
import com.bergerkiller.bukkit.common.utils.MathUtil;
import com.bergerkiller.bukkit.common.utils.WorldUtil;
import com.bergerkiller.bukkit.common.wrappers.LongHashSet;
import java.util.Arrays;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.stream.Stream;
import com.volmit.iris.Iris;
import org.bukkit.Chunk;
import org.bukkit.World;
/**
* Contains all the chunk coordinates that have to be fixed,
* and handles the full process of this fixing.
* It is literally a batch of chunks being processed.
*/
/**
 * Contains all the chunk coordinates that have to be fixed,
 * and handles the full process of this fixing.
 * It is literally a batch of chunks being processed.
 */
public class LightingTaskBatch implements LightingTask {
    private static boolean DEBUG_LOG = false; // logs performance stats
    // World whose chunk lighting is being regenerated
    public final World world;
    // Guards the chunks, chunks_coords and numBeingLoaded fields below
    private final Object chunks_lock = new Object();
    // Region file Y-coordinates to look for chunk slice data in
    private final int[] region_y_coords;
    // Per-chunk working state; null until process() initializes it, and null again when done
    private volatile LightingChunk[] chunks = null;
    // Packed chunk x/z coordinates; freed (set null) once 'chunks' is populated to save memory
    private volatile long[] chunks_coords;
    private boolean done = false;
    private boolean aborted = false;
    // 0 until process() starts (see LightingTask.getTimeStarted contract)
    private volatile long timeStarted = 0;
    // Number of chunk loads currently in flight; guarded by chunks_lock
    private int numBeingLoaded = 0;
    // Current processing phase, used by getStatus()
    private volatile Stage stage = Stage.LOADING;
    private LightingService.ScheduleArguments options = new LightingService.ScheduleArguments();

    /**
     * Creates a batch from already-packed chunk coordinates.
     *
     * @param world world the chunks are in
     * @param regionYCoordinates region Y-coordinates to look for chunk data in
     * @param chunkCoordinates packed x/z chunk coordinates (MathUtil long hash format)
     */
    public LightingTaskBatch(World world, int[] regionYCoordinates, long[] chunkCoordinates) {
        this.world = world;
        this.region_y_coords = regionYCoordinates;
        this.chunks_coords = chunkCoordinates;
    }

    /**
     * Creates a batch from a hash set of packed chunk coordinates. The
     * coordinates are sorted by x, then z, to make chunk loading more efficient.
     *
     * @param world world the chunks are in
     * @param regionYCoordinates region Y-coordinates to look for chunk data in
     * @param chunkCoordinates packed x/z chunk coordinates
     */
    public LightingTaskBatch(World world, int[] regionYCoordinates, LongHashSet chunkCoordinates) {
        this.world = world;
        this.region_y_coords = regionYCoordinates;

        // Turn contents of the long hash set into an easily sortable IntVector2[] array
        IntVector2[] coordinates = new IntVector2[chunkCoordinates.size()];
        {
            LongHashSet.LongIterator iter = chunkCoordinates.longIterator();
            for (int i = 0; iter.hasNext(); i++) {
                long coord = iter.next();
                coordinates[i] = new IntVector2(MathUtil.longHashMsw(coord), MathUtil.longHashLsw(coord));
            }
        }

        // Sort the array along the axis. This makes chunk loading more efficient.
        Arrays.sort(coordinates, (a, b) -> {
            int comp = Integer.compare(a.x, b.x);
            if (comp == 0) {
                comp = Integer.compare(a.z, b.z);
            }
            return comp;
        });

        // Turn back into a long[] array for memory efficiency
        this.chunks_coords = Stream.of(coordinates).mapToLong(c -> MathUtil.longHashToLong(c.x, c.z)).toArray();
    }

    @Override
    public World getWorld() {
        return world;
    }

    /**
     * Gets the X and Z-coordinates of all the chunk columns to process.
     * The coordinates are combined into a single Long, which can be decoded
     * using {@link MathUtil#longHashMsw(long)} for X and {@link MathUtil#longHashLsw(long)} for Z.
     *
     * @return chunk coordinates
     */
    public long[] getChunks() {
        synchronized (this.chunks_lock) {
            LightingChunk[] chunks = this.chunks;
            if (chunks != null) {
                long[] coords = new long[chunks.length];
                for (int i = 0; i < chunks.length; i++) {
                    coords[i] = MathUtil.longHashToLong(chunks[i].chunkX, chunks[i].chunkZ);
                }
                return coords;
            } else if (this.chunks_coords != null) {
                return this.chunks_coords;
            } else {
                return new long[0];
            }
        }
    }

    /**
     * Gets the Y-coordinates of all the regions to look for chunk data. A region stores 32 chunk
     * slices vertically, and goes up/down 512 blocks every coordinate increase/decrease.
     *
     * @return region Y-coordinates
     */
    public int[] getRegionYCoordinates() {
        return this.region_y_coords;
    }

    @Override
    public int getChunkCount() {
        synchronized (this.chunks_lock) {
            if (this.chunks == null) {
                // Before initialization: everything still pending; after completion: nothing
                return this.done ? 0 : this.chunks_coords.length;
            } else {
                // While processing: count only chunks that still have lighting faults
                int faults = 0;
                for (LightingChunk chunk : this.chunks) {
                    if (chunk.hasFaults()) {
                        faults++;
                    }
                }
                return faults;
            }
        }
    }

    @Override
    public long getTimeStarted() {
        return this.timeStarted;
    }

    // Aggregated center point and count of the batch, used for status messages
    private static final class BatchChunkInfo {
        public final int cx;
        public final int cz;
        public final int count;

        public BatchChunkInfo(int cx, int cz, int count) {
            this.cx = cx;
            this.cz = cz;
            this.count = count;
        }
    }

    /**
     * Computes the average (center) chunk coordinate and chunk count of this batch.
     *
     * @return average chunk info, or null when no chunk data is available
     */
    public BatchChunkInfo getAverageChunk() {
        int count = 0;
        long cx = 0;
        long cz = 0;
        synchronized (this.chunks_lock) {
            if (this.chunks != null) {
                count = this.chunks.length;
                for (LightingChunk chunk : this.chunks) {
                    cx += chunk.chunkX;
                    cz += chunk.chunkZ;
                }
            } else if (this.chunks_coords != null) {
                count = this.chunks_coords.length;
                for (long chunk : this.chunks_coords) {
                    cx += MathUtil.longHashMsw(chunk);
                    cz += MathUtil.longHashLsw(chunk);
                }
            } else {
                return null;
            }
        }

        if (count > 0) {
            cx /= count;
            cz /= count;
        }
        return new BatchChunkInfo((int) cx, (int) cz, count);
    }

    @Override
    public String getStatus() {
        BatchChunkInfo chunk = this.getAverageChunk();
        if (chunk != null) {
            // Block coordinates of the batch center (chunk coordinate * 16)
            String postfix = " chunks near " +
                    "x=" + (chunk.cx * 16) + " z=" + (chunk.cz * 16);
            if (this.stage == Stage.LOADING) {
                synchronized (this.chunks_lock) {
                    if (this.chunks != null) {
                        int num_loaded = 0;
                        for (LightingChunk lc : this.chunks) {
                            if (!lc.forcedChunk.isNone() && lc.forcedChunk.getChunkAsync().isDone()) {
                                num_loaded++;
                            }
                        }
                        return "Loaded " + num_loaded + "/" + chunk.count + postfix;
                    }
                }
            } else if (this.stage == Stage.APPLYING) {
                synchronized (this.chunks_lock) {
                    if (this.chunks != null) {
                        int num_saved = 0;
                        for (LightingChunk lc : this.chunks) {
                            if (lc.isApplied) {
                                num_saved++;
                            }
                        }
                        return "Saved " + num_saved + "/" + chunk.count + postfix;
                    }
                }
            }

            return "Cleaning " + chunk.count + postfix;
        } else {
            return done ? "Done" : "No Data";
        }
    }

    // Compact one-line description of the batch for log messages
    private String getShortStatus() {
        BatchChunkInfo chunk = this.getAverageChunk();
        if (chunk != null) {
            return "[x=" + (chunk.cx * 16) + " z=" + (chunk.cz * 16) + " count=" + chunk.count + "]";
        } else {
            return "[Unknown]";
        }
    }

    // Waits for the future to resolve, polling every 200ms so an abort is noticed promptly.
    // Returns true when the future completed normally, false on abort or error.
    private boolean waitForCheckAborted(CompletableFuture<?> future) {
        while (!aborted) {
            try {
                future.get(200, TimeUnit.MILLISECONDS);
                return true;
            } catch (InterruptedException | TimeoutException e1) {
                // Ignore
            } catch (ExecutionException ex) {
                ex.printStackTrace();
                Iris.error("Error while processing");
                return false;
            }
        }
        return false;
    }

    // Starts asynchronous loads for as many not-yet-loading chunks as the concurrency
    // limit (Iris.getThreadCount()) allows. Each load's completion callback re-enters
    // this method, so loading continues until all chunkFutures are resolved.
    private void tryLoadMoreChunks(final CompletableFuture<Void>[] chunkFutures) {
        if (this.aborted) {
            return;
        }

        int i = 0;
        while (true) {
            // While synchronized, pick the next chunk to load
            LightingChunk nextChunk = null;
            CompletableFuture<Void> nextChunkFuture = null;
            synchronized (chunks_lock) {
                for (; i < chunks.length && numBeingLoaded < Iris.getThreadCount(); i++) {
                    LightingChunk lc = chunks[i];
                    if (lc.loadingStarted) {
                        continue; // Already (being) loaded
                    }

                    // Pick it
                    numBeingLoaded++;
                    lc.loadingStarted = true;
                    nextChunk = lc;
                    nextChunkFuture = chunkFutures[i];
                    break;
                }
            }

            // No more chunks to load / capacity reached
            if (nextChunk == null) {
                break;
            }

            // This shouldn't happen, but just in case, a check
            if (nextChunkFuture.isDone()) {
                continue;
            }

            // Outside of the lock, start loading the next chunk
            final CompletableFuture<Void> f_nextChunkFuture = nextChunkFuture;
            nextChunk.forcedChunk.move(LightingForcedChunkCache.get(world, nextChunk.chunkX, nextChunk.chunkZ));
            nextChunk.forcedChunk.getChunkAsync().whenComplete((chunk, t) -> {
                synchronized (chunks_lock) {
                    numBeingLoaded--;
                }
                f_nextChunkFuture.complete(null);
                tryLoadMoreChunks(chunkFutures);
            });
        }
    }

    // Kicks off loading of all chunks in this batch; the returned future resolves
    // once every chunk load has completed (successfully or exceptionally).
    @SuppressWarnings("unchecked")
    private CompletableFuture<Void> loadChunks() {
        // For every LightingChunk, make a completable future
        // Once all these futures are resolved the returned completable future resolves
        CompletableFuture<Void>[] chunkFutures;
        synchronized (this.chunks_lock) {
            chunkFutures = new CompletableFuture[this.chunks.length];
        }
        for (int i = 0; i < chunkFutures.length; i++) {
            chunkFutures[i] = new CompletableFuture<Void>();
        }

        // Start loading up to [asyncLoadConcurrency] number of chunks right now
        // When a callback for a chunk load completes, we start loading additional chunks
        tryLoadMoreChunks(chunkFutures);

        return CompletableFuture.allOf(chunkFutures);
    }

    @Override
    public void process() {
        // Begin
        this.stage = Stage.LOADING;
        this.timeStarted = System.currentTimeMillis();

        // Initialize lighting chunks
        synchronized (this.chunks_lock) {
            LightingChunk[] chunks_new = new LightingChunk[this.chunks_coords.length];
            this.done = false;
            int chunkIdx = 0;
            for (long longCoord : this.chunks_coords) {
                int x = MathUtil.longHashMsw(longCoord);
                int z = MathUtil.longHashLsw(longCoord);
                chunks_new[chunkIdx++] = new LightingChunk(this.world, x, z);
                if (this.aborted) {
                    return;
                }
            }

            // Update fields. We can remove the coordinates to free memory.
            this.chunks = chunks_new;
            this.chunks_coords = null;
        }

        // Check aborted
        if (aborted) {
            return;
        }

        // Load all the chunks. Wait for loading to finish.
        // Regularly check that this task is not aborted
        CompletableFuture<Void> loadChunksFuture = this.loadChunks();
        if (!waitForCheckAborted(loadChunksFuture)) {
            return;
        }

        // Causes all chunks in cache not used for this task to unload
        // All chunks of this task are put into the cache, instead
        LightingForcedChunkCache.reset();
        for (LightingChunk lc : LightingTaskBatch.this.chunks) {
            LightingForcedChunkCache.store(lc.forcedChunk);
        }

        // All chunks that can be loaded, are now loaded.
        // Some chunks may have failed to be loaded, get rid of those now!
        // To avoid massive spam, only show the average x/z coordinates of the chunk affected
        synchronized (this.chunks_lock) {
            long failed_chunk_avg_x = 0;
            long failed_chunk_avg_z = 0;
            int failed_chunk_count = 0;

            LightingChunk[] new_chunks = this.chunks;
            // Iterate backwards so removal doesn't disturb the remaining indices
            for (int i = new_chunks.length - 1; i >= 0; i--) {
                LightingChunk lc = new_chunks[i];
                if (lc.forcedChunk.getChunkAsync().isCompletedExceptionally()) {
                    failed_chunk_avg_x += lc.chunkX;
                    failed_chunk_avg_z += lc.chunkZ;
                    failed_chunk_count++;
                    new_chunks = LogicUtil.removeArrayElement(new_chunks, i);
                }
            }
            this.chunks = new_chunks;

            // Tell all the (remaining) chunks about other neighbouring chunks before initialization
            for (LightingChunk lc : new_chunks) {
                for (LightingChunk neigh : new_chunks) {
                    lc.notifyAccessible(neigh);
                }
            }

            // Log when chunks fail to be loaded
            if (failed_chunk_count > 0) {
                // Convert average chunk coordinates to block coordinates (<< 4)
                failed_chunk_avg_x = ((failed_chunk_avg_x / failed_chunk_count) << 4);
                failed_chunk_avg_z = ((failed_chunk_avg_z / failed_chunk_count) << 4);
                Iris.error("Failed to load " + failed_chunk_count + " chunks near " +
                        "world=" + world.getName() + " x=" + failed_chunk_avg_x + " z=" + failed_chunk_avg_z);
            }
        }

        // Schedule, on the main thread, to fill all the loaded chunks with data
        CompletableFuture<Void> chunkFillFuture = CompletableFuture.runAsync(() -> {
            synchronized (this.chunks_lock) {
                for (LightingChunk lc : chunks) {
                    lc.fill(lc.forcedChunk.getChunk(), region_y_coords);
                }
            }
        }, CommonUtil.getPluginExecutor(Iris.instance));
        if (!waitForCheckAborted(chunkFillFuture)) {
            return;
        }

        // Now that all chunks we can process are filled, let all the 16x16x16 cubes know of their neighbors
        // This neighboring data is only used during the fix() (initialize + spread) phase
        synchronized (this.chunks_lock) {
            for (LightingChunk lc : chunks) {
                lc.detectCubeNeighbors();
            }
        }

        // Fix
        this.stage = Stage.FIXING;
        fix();
        if (this.aborted) {
            return;
        }

        // Apply and wait for it to be finished
        // Wait in 200ms intervals to allow for aborting
        // After 2 minutes of inactivity, stop waiting and consider applying failed
        this.stage = Stage.APPLYING;
        try {
            CompletableFuture<Void> future = apply();
            int max_num_of_waits = (5 * 120);
            while (true) {
                if (--max_num_of_waits == 0) {
                    Iris.error("Failed to apply lighting data for " + getShortStatus() + ": Timeout");
                    break;
                }
                try {
                    future.get(200, TimeUnit.MILLISECONDS);
                    break;
                } catch (TimeoutException e) {
                    if (this.aborted) {
                        return;
                    }
                }
            }
        } catch (InterruptedException e) {
            // Ignore
        } catch (ExecutionException e) {
            e.printStackTrace();
            Iris.error("Failed to apply lighting data for " + getShortStatus());
        }

        this.done = true;
        synchronized (this.chunks_lock) {
            this.chunks = null;
        }
    }

    @Override
    public void abort() {
        this.aborted = true;

        // Close chunks kept loaded
        LightingChunk[] chunks;
        synchronized (this.chunks_lock) {
            chunks = this.chunks;
        }
        if (chunks != null) {
            for (LightingChunk lc : chunks) {
                lc.forcedChunk.close();
            }
        }
    }

    /**
     * Starts applying the new data to the world.
     * This is done in several ticks on the main thread.
     * The completable future is resolved when applying is finished.
     *
     * @return future that resolves when all chunks have been saved
     */
    public CompletableFuture<Void> apply() {
        // Apply data to chunks and unload if needed
        LightingChunk[] chunks = LightingTaskBatch.this.chunks;
        CompletableFuture<?>[] applyFutures = new CompletableFuture[chunks.length];
        for (int i = 0; i < chunks.length; i++) {
            LightingChunk lc = chunks[i];
            Chunk bchunk = lc.forcedChunk.getChunk();

            // Save to chunk
            applyFutures[i] = lc.saveToChunk(bchunk).whenComplete((changed, t) -> {
                if (t != null) {
                    t.printStackTrace();
                } else if (changed.booleanValue()) {
                    // Only resend lighting to players when something actually changed
                    WorldUtil.queueChunkSendLight(world, lc.chunkX, lc.chunkZ);
                }

                // Closes our forced chunk, may cause the chunk to now unload
                lc.forcedChunk.close();
            });
        }
        return CompletableFuture.allOf(applyFutures);
    }

    /**
     * Performs the (slow) fixing procedure (call from another thread)
     */
    public void fix() {
        // Initialize light
        for (LightingCategory category : LightingCategory.values()) {
            for (LightingChunk chunk : chunks) {
                category.initialize(chunk);
                if (this.aborted) {
                    return;
                }
            }
        }

        // Skip spread phase when debug mode is active
        if (this.options.getDebugMakeCorrupted()) {
            return;
        }

        // Before spreading, change the opacity values to have a minimum of 1
        // Spreading can never be done without losing light
        // This isn't done during initialization because it is important
        // for calculating the first opacity>0 block for sky light.
        for (LightingChunk chunk : chunks) {
            for (LightingCube section : chunk.getSections()) {
                //TODO: Maybe build something into BKCommonLib for this
                int x, y, z;
                for (y = 0; y < 16; y++) {
                    for (z = 0; z < 16; z++) {
                        for (x = 0; x < 16; x++) {
                            if (section.opacity.get(x, y, z) == 0) {
                                section.opacity.set(x, y, z, 1);
                            }
                        }
                    }
                }
            }
        }

        // Spread (timed, for debug)
        // Repeat spreading until no chunk reports remaining faults (or the task is aborted)
        boolean hasFaults;
        long startTime = System.currentTimeMillis();
        int totalLoops = 0;
        do {
            hasFaults = false;
            for (LightingChunk chunk : chunks) {
                int count = chunk.spread();
                totalLoops += count;
                hasFaults |= count > 0;
            }
        } while (hasFaults && !this.aborted);
        long duration = System.currentTimeMillis() - startTime;
        if (DEBUG_LOG) {
            System.out.println("Processed " + totalLoops + " in " + duration + " ms");
        }
    }

    @Override
    public void applyOptions(LightingService.ScheduleArguments args) {
        this.options = args;
    }

    @Override
    public boolean canSave() {
        // Loaded-chunks-only and debug-corruption runs must not be restored after a restart
        return !this.options.getLoadedChunksOnly() && !this.options.getDebugMakeCorrupted();
    }

    // Processing phases, in order of execution; reported through getStatus()
    private static enum Stage {
        LOADING, FIXING, APPLYING
    }
}

View File

@@ -0,0 +1,173 @@
package com.volmit.iris.scaffold.lighting;
import com.bergerkiller.bukkit.common.utils.CommonUtil;
import com.bergerkiller.bukkit.common.wrappers.LongHashSet;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import org.bukkit.World;
/**
 * Lighting task that scans an entire world: it discovers all regions, counts
 * their chunks, and schedules one {@link LightingTaskBatch} per 32x32-chunk
 * region (with a one-chunk border for cross-region correctness).
 */
public class LightingTaskWorld implements LightingTask {
    /** Upper-bound estimate of chunks per region: 32x32 plus a 1-chunk border on each side. */
    private static final int ASSUMED_CHUNKS_PER_REGION = 34 * 34;
    private final World world;
    // Written on the main thread inside process(), read from the status getter
    private volatile FlatRegionInfoMap regions = null;
    private volatile int regionCountLoaded;
    private volatile int chunkCount;
    private volatile long timeStarted;
    private volatile boolean aborted;
    private LightingService.ScheduleArguments options = new LightingService.ScheduleArguments();

    public LightingTaskWorld(World world) {
        this.world = world;
        this.regionCountLoaded = 0;
        this.aborted = false;
        this.chunkCount = 0;
        this.timeStarted = 0;
    }

    @Override
    public World getWorld() {
        return this.world;
    }

    @Override
    public int getChunkCount() {
        return chunkCount;
    }

    @Override
    public long getTimeStarted() {
        return this.timeStarted;
    }

    @Override
    public String getStatus() {
        if (regions == null) {
            return "Reading available regions from world " + getWorld().getName();
        } else {
            return "Reading available chunks from world " + getWorld().getName() + " (region " + (regionCountLoaded + 1) + "/" + regions.getRegionCount() + ")";
        }
    }

    @Override
    public void process() {
        // Load regions on the main thread
        // TODO: Can use main thread executor instead
        this.timeStarted = System.currentTimeMillis();
        final CompletableFuture<Void> regionsLoadedFuture = new CompletableFuture<Void>();
        CommonUtil.nextTick(() -> {
            try {
                if (this.options.getLoadedChunksOnly()) {
                    // Only already-loaded chunks: exact chunk counts are known immediately
                    this.regions = FlatRegionInfoMap.createLoaded(this.getWorld());
                    this.regionCountLoaded = this.regions.getRegionCount();
                    this.chunkCount = 0;
                    for (FlatRegionInfo region : this.regions.getRegions()) {
                        this.chunkCount += region.getChunkCount();
                    }
                } else {
                    // Full scan: start from an estimate, corrected as regions load below
                    this.regions = FlatRegionInfoMap.create(this.getWorld());
                    this.regionCountLoaded = 0;
                    this.chunkCount = this.regions.getRegionCount() * ASSUMED_CHUNKS_PER_REGION;
                }
                regionsLoadedFuture.complete(null);
            } catch (Throwable ex) {
                regionsLoadedFuture.completeExceptionally(ex);
            }
        });

        // Wait until region list is loaded synchronously
        try {
            regionsLoadedFuture.get();
        } catch (InterruptedException ex) {
            // Ignore
        } catch (ExecutionException ex) {
            throw new RuntimeException("Failed to load regions", ex.getCause());
        }

        // Check aborted
        if (this.aborted) {
            return;
        }

        // Start loading all chunks contained in the regions
        if (!this.options.getLoadedChunksOnly()) {
            for (FlatRegionInfo region : this.regions.getRegions()) {
                // Abort handling
                if (this.aborted) {
                    return;
                }
                // Load and update stats: replace the per-region estimate with the real count
                region.load();
                this.chunkCount -= ASSUMED_CHUNKS_PER_REGION - region.getChunkCount();
                this.regionCountLoaded++;
            }
        }

        // We now know of all the regions to be processed, convert all of them into tasks
        // Use a slightly larger area to avoid cross-region errors
        for (FlatRegionInfo region : regions.getRegions()) {
            // Abort handling
            if (this.aborted) {
                return;
            }
            // If empty, skip
            if (region.getChunkCount() == 0) {
                continue;
            }

            // Find region Y-coordinates for this 34x34 section of chunks
            int[] region_y_coordinates = regions.getRegionYCoordinatesSelfAndNeighbours(region);

            // Collect the chunk coordinates to process. The neighbour-aware check is
            // used so border chunks are only scheduled when their neighbours exist.
            // (A plain containsChunk variant previously sat in an unreachable
            // `if (true) … else` branch and has been removed.)
            final LongHashSet buffer = new LongHashSet(34 * 34);
            int dx, dz;
            for (dx = -1; dx < 33; dx++) {
                for (dz = -1; dz < 33; dz++) {
                    int cx = region.cx + dx;
                    int cz = region.cz + dz;
                    if (this.regions.containsChunkAndNeighbours(cx, cz)) {
                        buffer.add(cx, cz);
                    }
                }
            }

            // Schedule, and move the scheduled chunks out of this task's own count
            this.chunkCount -= buffer.size();
            LightingTaskBatch batch_task = new LightingTaskBatch(this.getWorld(), region_y_coordinates, buffer);
            batch_task.applyOptions(this.options);
            LightingService.schedule(batch_task);
        }
    }

    @Override
    public void abort() {
        this.aborted = true;
    }

    @Override
    public void applyOptions(LightingService.ScheduleArguments args) {
        this.options = args;
    }

    @Override
    public boolean canSave() {
        // This task only schedules batch tasks; it has no state of its own to save
        return false;
    }
}

View File

@@ -0,0 +1,30 @@
package com.volmit.iris.scaffold.lighting;
import com.bergerkiller.bukkit.common.utils.MathUtil;
/**
 * Just some utilities used by Light Cleaner
 */
public class LightingUtil {
    // Shared duration formatters; made final so the shared instances can never
    // be replaced after class initialization.
    private static final TimeDurationFormat timeFormat_hh_mm = new TimeDurationFormat("HH 'hours' mm 'minutes'");
    private static final TimeDurationFormat timeFormat_mm_ss = new TimeDurationFormat("mm 'minutes' ss 'seconds'");
    private static final long SECOND_MILLIS = 1000L;
    private static final long MINUTE_MILLIS = 60L * SECOND_MILLIS;
    private static final long HOUR_MILLIS = 60L * MINUTE_MILLIS;
    private static final long DAY_MILLIS = 24L * HOUR_MILLIS;

    /**
     * Formats a millisecond duration into a human-readable string, picking a
     * representation appropriate to the magnitude.
     *
     * @param duration duration in milliseconds
     * @return formatted duration string
     */
    public static String formatDuration(long duration) {
        if (duration < MINUTE_MILLIS) {
            // Sub-minute: fractional seconds rounded to 2 decimals
            return MathUtil.round((double) duration / (double) SECOND_MILLIS, 2) + " seconds";
        } else if (duration < HOUR_MILLIS) {
            return timeFormat_mm_ss.format(duration);
        } else if (duration < (2 * DAY_MILLIS)) {
            // NOTE(review): the HH pattern renders hour-of-day (0-23), so durations
            // between 24h and 48h wrap past midnight here — confirm the 2*DAY bound
            // is intentional.
            return timeFormat_hh_mm.format(duration);
        } else {
            long num_days = duration / DAY_MILLIS;
            long num_hours = (duration % DAY_MILLIS) / HOUR_MILLIS;
            return num_days + " days " + num_hours + " hours";
        }
    }
}

View File

@@ -0,0 +1,44 @@
package com.volmit.iris.scaffold.lighting;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;
/**
 * Formatter for a duration String.
 * Can represent a duration in milliseconds as a String.
 * Taken from Traincarts (permission granted by same author)<br>
 * <br>
 * https://github.com/bergerhealer/TrainCarts/blob/master/src/main/java/com/bergerkiller/bukkit/tc/utils/TimeDurationFormat.java
 */
public class TimeDurationFormat {
    private final TimeZone timeZone;
    private final SimpleDateFormat sdf;

    /**
     * Creates a new time duration format. The format accepts the same formatting
     * tokens as the Date formatter does.
     *
     * @param format date-format pattern, e.g. {@code "mm 'minutes' ss 'seconds'"}
     * @throws IllegalArgumentException if the input format is invalid
     */
    public TimeDurationFormat(String format) {
        if (format == null) {
            throw new IllegalArgumentException("Input format should not be null");
        }
        // GMT+0 so an epoch-based Date maps 1:1 onto the duration
        this.timeZone = TimeZone.getTimeZone("GMT+0");
        this.sdf = new SimpleDateFormat(format, Locale.getDefault());
        this.sdf.setTimeZone(this.timeZone);
    }

    /**
     * Formats the duration.
     * Synchronized because {@link SimpleDateFormat} is not thread-safe and
     * instances of this class may be shared across threads (e.g. as static
     * fields).
     *
     * @param durationMillis duration in milliseconds
     * @return formatted string
     */
    public synchronized String format(long durationMillis) {
        // Raw offset is 0 for GMT+0; the subtraction keeps the rendered fields
        // relative to the duration itself should the zone ever change
        return this.sdf.format(new Date(durationMillis - this.timeZone.getRawOffset()));
    }
}

View File

@@ -0,0 +1,95 @@
package com.volmit.iris.scaffold.parallax;
import com.volmit.iris.scaffold.hunk.Hunk;
import org.bukkit.block.data.BlockData;
/**
 * Coordinate-translating facade over per-chunk parallax storage. Block-space
 * coordinates are converted to chunk coordinates (x >> 4, z >> 4) and local
 * offsets (x & 15, z & 15) before delegating to the R (read) / RW (read-write)
 * accessors implemented by the backing store.
 */
public interface ParallaxAccess
{
    default BlockData getBlock(int x, int y, int z)
    {
        int cx = x >> 4;
        int cz = z >> 4;
        return getBlocksR(cx, cz).get(x & 15, y, z & 15);
    }

    default void setBlock(int x, int y, int z, BlockData d)
    {
        int cx = x >> 4;
        int cz = z >> 4;
        getBlocksRW(cx, cz).set(x & 15, y, z & 15, d);
    }

    default String getObject(int x, int y, int z)
    {
        int cx = x >> 4;
        int cz = z >> 4;
        return getObjectsR(cx, cz).get(x & 15, y, z & 15);
    }

    default void setObject(int x, int y, int z, String d)
    {
        int cx = x >> 4;
        int cz = z >> 4;
        getObjectsRW(cx, cz).set(x & 15, y, z & 15, d);
    }

    default Boolean isUpdate(int x, int y, int z)
    {
        int cx = x >> 4;
        int cz = z >> 4;
        return getUpdatesR(cx, cz).get(x & 15, y, z & 15);
    }

    /** Marks the block at the given position as pending a lighting/physics update. */
    default void updateBlock(int x, int y, int z)
    {
        setUpdate(x, y, z, true);
    }

    default void setUpdate(int x, int y, int z, boolean d)
    {
        int cx = x >> 4;
        int cz = z >> 4;
        getUpdatesRW(cx, cz).set(x & 15, y, z & 15, d);
    }

    // Per-chunk metadata flags (x/z here are already chunk coordinates)

    default boolean isParallaxGenerated(int x, int z)
    {
        return getMetaR(x, z).isParallaxGenerated();
    }

    default boolean isChunkGenerated(int x, int z)
    {
        return getMetaR(x, z).isGenerated();
    }

    default void setParallaxGenerated(int x, int z)
    {
        setParallaxGenerated(x, z, true);
    }

    default void setChunkGenerated(int x, int z)
    {
        setChunkGenerated(x, z, true);
    }

    default void setParallaxGenerated(int x, int z, boolean v)
    {
        getMetaRW(x, z).setParallaxGenerated(v);
    }

    default void setChunkGenerated(int x, int z, boolean v)
    {
        getMetaRW(x, z).setGenerated(v);
    }

    // Storage accessors: R = read-only, RW = read-write (marks data for saving)
    Hunk<BlockData> getBlocksR(int x, int z);

    Hunk<BlockData> getBlocksRW(int x, int z);

    Hunk<String> getObjectsR(int x, int z);

    Hunk<String> getObjectsRW(int x, int z);

    Hunk<Boolean> getUpdatesR(int x, int z);

    Hunk<Boolean> getUpdatesRW(int x, int z);

    ParallaxChunkMeta getMetaR(int x, int z);

    ParallaxChunkMeta getMetaRW(int x, int z);

    void cleanup(long regionIdle, long chunkIdle);

    void cleanup();

    void saveAll();

    void saveAllNOW();

    int getRegionCount();

    int getChunkCount();
}

View File

@@ -0,0 +1,55 @@
package com.volmit.iris.scaffold.parallax;
import com.volmit.iris.scaffold.hunk.io.HunkIOAdapter;
import com.volmit.iris.scaffold.hunk.io.PaletteHunkIOAdapter;
import com.volmit.iris.util.CompoundTag;
import lombok.AllArgsConstructor;
import lombok.Data;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.function.Function;
@AllArgsConstructor
@Data
// Per-chunk parallax metadata: generation flags plus the vertical extent of
// placed objects. Lombok generates the getters/setters used by the adapter.
public class ParallaxChunkMeta {
    // Binary (de)serializer for this metadata. NOTE: read() must consume fields
    // in exactly the order write() emits them: updates, generated,
    // parallaxGenerated, objects, then (only when objects is true) min/max.
    public static final Function<CompoundTag, HunkIOAdapter<ParallaxChunkMeta>> adapter = (c) -> new PaletteHunkIOAdapter<ParallaxChunkMeta>() {
        @Override
        public void write(ParallaxChunkMeta parallaxChunkMeta, DataOutputStream dos) throws IOException {
            dos.writeBoolean(parallaxChunkMeta.isUpdates());
            dos.writeBoolean(parallaxChunkMeta.isGenerated());
            dos.writeBoolean(parallaxChunkMeta.isParallaxGenerated());
            dos.writeBoolean(parallaxChunkMeta.isObjects());
            if(parallaxChunkMeta.isObjects())
            {
                // Adding Byte.MIN_VALUE shifts the stored height into signed-byte range;
                // read() reverses this by subtracting it again.
                dos.writeByte(parallaxChunkMeta.getMinObject() + Byte.MIN_VALUE);
                dos.writeByte(parallaxChunkMeta.getMaxObject() + Byte.MIN_VALUE);
            }
        }
        @Override
        public ParallaxChunkMeta read(DataInputStream din) throws IOException {
            boolean bb = din.readBoolean(); // updates
            boolean g = din.readBoolean();  // generated
            boolean p = din.readBoolean();  // parallaxGenerated
            boolean o = din.readBoolean();  // objects present?
            // min/max are only present in the stream when objects were written;
            // -1 is the sentinel for "no objects"
            int min = o ? din.readByte() - Byte.MIN_VALUE : -1;
            int max = o ? din.readByte() - Byte.MIN_VALUE : -1;
            return new ParallaxChunkMeta(bb, g, p, o, min, max);
        }
    };
    // Chunk has pending block updates
    private boolean updates;
    // Chunk terrain has been generated
    private boolean generated;
    // Chunk's parallax layer has been generated
    private boolean parallaxGenerated;
    // Any objects were placed in this chunk
    private boolean objects;
    // Highest / lowest object Y in this chunk; -1 when no objects
    private int maxObject = -1;
    private int minObject = -1;
    public ParallaxChunkMeta()
    {
        // Fresh, untouched chunk metadata
        this(false, false, false, false, -1, -1);
    }
}

View File

@@ -0,0 +1,180 @@
package com.volmit.iris.scaffold.parallax;
import java.io.File;
import java.io.IOException;
import com.volmit.iris.scaffold.hunk.Hunk;
import com.volmit.iris.scaffold.hunk.io.HunkIOAdapter;
import com.volmit.iris.scaffold.hunk.io.HunkRegion;
import com.volmit.iris.scaffold.hunk.io.HunkRegionSlice;
import com.volmit.iris.util.*;
import org.bukkit.block.data.BlockData;
/**
 * A 32x32-chunk region of parallax data — block overrides, object names,
 * update flags, and per-chunk metadata — persisted through the HunkRegion
 * NBT compound.
 */
public class ParallaxRegion extends HunkRegion
{
    // True when the meta hunk has modifications not yet written to the compound
    private boolean dirtyMeta;
    // Lazily-loaded 32x1x32 per-chunk metadata; null until first access / after unload
    private Hunk<ParallaxChunkMeta> meta;
    private HunkIOAdapter<ParallaxChunkMeta> metaAdapter;
    private HunkRegionSlice<BlockData> blockSlice;
    private HunkRegionSlice<String> objectSlice;
    private HunkRegionSlice<Boolean> updateSlice;
    // Timestamp of last access, used for idle-based unloading
    private long lastUse;
    private final int height;

    public ParallaxRegion(int height, File folder, int x, int z, CompoundTag compound)
    {
        super(folder, x, z, compound);
        this.height = height;
        setupSlices();
    }

    public ParallaxRegion(int height, File folder, int x, int z)
    {
        super(folder, x, z);
        this.height = height;
        setupSlices();
    }

    /** Initializes the block/object/update slices and resets metadata state. */
    private void setupSlices()
    {
        blockSlice = HunkRegionSlice.BLOCKDATA.apply(height, getCompound());
        objectSlice = HunkRegionSlice.STRING.apply(height, getCompound(), "objects");
        updateSlice = HunkRegionSlice.BOOLEAN.apply(height, getCompound(), "updates");
        metaAdapter = ParallaxChunkMeta.adapter.apply(getCompound());
        dirtyMeta = false;
        meta = null;
        lastUse = M.ms();
    }

    /** @return true when this region has not been touched for longer than {@code time} ms */
    public boolean hasBeenIdleLongerThan(long time)
    {
        return M.ms() - lastUse > time;
    }

    /** Read-only metadata access; returns a default instance when none is stored. */
    public ParallaxChunkMeta getMetaR(int x, int z)
    {
        lastUse = M.ms();
        return getMetaHunkR().getOr(x, 0, z, new ParallaxChunkMeta());
    }

    /** Read-write metadata access; creates and stores a fresh entry when missing. */
    public ParallaxChunkMeta getMetaRW(int x, int z)
    {
        lastUse = M.ms();
        dirtyMeta = true;
        ParallaxChunkMeta p = getMetaHunkRW().get(x, 0, z);
        if(p == null)
        {
            p = new ParallaxChunkMeta();
            getMetaHunkRW().set(x, 0, z, p);
        }
        return p;
    }

    private Hunk<ParallaxChunkMeta> getMetaHunkR()
    {
        if(meta == null)
        {
            meta = loadMetaHunk();
        }
        return meta;
    }

    private Hunk<ParallaxChunkMeta> getMetaHunkRW()
    {
        // Any RW access marks the metadata dirty so it is persisted on save/unload
        dirtyMeta = true;
        return getMetaHunkR();
    }

    /** Loads the 32x1x32 metadata hunk from the "meta" tag, or creates an empty one. */
    public synchronized Hunk<ParallaxChunkMeta> loadMetaHunk()
    {
        lastUse = M.ms();
        if(meta == null)
        {
            Tag t = getCompound().getValue().get("meta");
            if((t instanceof ByteArrayTag))
            {
                try {
                    meta = metaAdapter.read((x, y, z) -> Hunk.newArrayHunk(32, 1, 32), (ByteArrayTag) t);
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
            if(meta == null)
            {
                // Missing or unreadable tag: start with an empty hunk
                meta = Hunk.newArrayHunk(32, 1, 32);
            }
        }
        return meta;
    }

    /** Persists the metadata if dirty, then drops it from memory. */
    public synchronized void unloadMetaHunk()
    {
        if(dirtyMeta)
        {
            saveMetaHunk();
            dirtyMeta = false;
        }
        meta = null;
    }

    /** Writes the metadata hunk back into the compound when loaded and dirty. */
    public synchronized void saveMetaHunk()
    {
        if(meta != null && dirtyMeta)
        {
            try {
                getCompound().getValue().put("meta", meta.writeByteArrayTag(metaAdapter, "meta"));
                dirtyMeta = false;
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    // Annotated with @Override: this specializes HunkRegion.save() (invoked below
    // via super.save()) and must flush the slices and metadata first.
    @Override
    public void save() throws IOException
    {
        blockSlice.save();
        objectSlice.save();
        updateSlice.save();
        saveMetaHunk();
        super.save();
    }

    /** Unloads all slices and the metadata; dirty metadata is saved first. */
    public void unload()
    {
        blockSlice.unloadAll();
        objectSlice.unloadAll();
        updateSlice.unloadAll();
        unloadMetaHunk();
    }

    public HunkRegionSlice<BlockData> getBlockSlice() {
        lastUse = M.ms();
        return blockSlice;
    }

    public HunkRegionSlice<String> getObjectSlice() {
        lastUse = M.ms();
        return objectSlice;
    }

    public HunkRegionSlice<Boolean> getUpdateSlice() {
        lastUse = M.ms();
        return updateSlice;
    }

    /** Unloads slice chunks that have been idle longer than {@code c} ms. */
    public void cleanup(long c) {
        blockSlice.cleanup(c);
        objectSlice.cleanup(c);
        updateSlice.cleanup(c);
    }

    /** Total number of loaded slice chunks across the three slices. */
    public int getChunkCount() {
        return blockSlice.getLoadCount() + objectSlice.getLoadCount() + updateSlice.getLoadCount();
    }
}

View File

@@ -0,0 +1,240 @@
package com.volmit.iris.scaffold.parallax;
import java.io.File;
import java.io.IOException;
import com.volmit.iris.util.*;
import org.bukkit.block.data.BlockData;
import com.volmit.iris.scaffold.hunk.Hunk;
// Region-cache implementation of ParallaxAccess for one world: loads
// ParallaxRegion instances on demand, tracks which have unsaved writes, and
// periodically unloads idle ones. Region coordinates are block >> 9
// (chunk >> 5); see the accessor overrides below.
public class ParallaxWorld implements ParallaxAccess
{
    // Region key (see key(x, z)) -> loaded region
    private final KMap<Long, ParallaxRegion> loadedRegions;
    // Keys of regions that received RW access and therefore need saving
    private final KList<Long> save;
    private final File folder;
    private final int height;
    // Rate-limits the cleanup() pass triggered from load()
    private final ChronoLatch cleanup;
    public ParallaxWorld(int height, File folder)
    {
        this.height = height;
        this.folder = folder;
        save = new KList<>();
        loadedRegions = new KMap<>();
        cleanup = new ChronoLatch(5000);
        folder.mkdirs();
    }
    public int getRegionCount()
    {
        return loadedRegions.size();
    }
    public int getChunkCount()
    {
        int m = 0;
        synchronized (loadedRegions)
        {
            for(ParallaxRegion i : loadedRegions.values())
            {
                m+= i.getChunkCount();
            }
        }
        return m;
    }
    // Saves (when pending) and unloads every region, then clears all tracking state.
    public synchronized void close()
    {
        for(ParallaxRegion i : loadedRegions.v())
        {
            unload(i.getX(), i.getZ());
        }
        save.clear();
        loadedRegions.clear();
    }
    public synchronized void save(ParallaxRegion region)
    {
        try
        {
            region.save();
        }
        catch(IOException e)
        {
            e.printStackTrace();
        }
    }
    public boolean isLoaded(int x, int z)
    {
        return loadedRegions.containsKey(key(x, z));
    }
    public synchronized void save(int x, int z)
    {
        if(isLoaded(x, z))
        {
            save(getR(x, z));
        }
    }
    // Unloads a region; if it has pending writes it is saved first, then
    // removed from both the save list and the cache.
    public synchronized void unload(int x, int z)
    {
        long key = key(x, z);
        if(isLoaded(x, z))
        {
            if(save.contains(key))
            {
                save(x, z);
                save.remove(key);
            }
            loadedRegions.remove(key).unload();
        }
    }
    // Loads (or returns the cached) region at region coordinates x, z.
    // Occasionally runs a cleanup pass, throttled by the ChronoLatch.
    public synchronized ParallaxRegion load(int x, int z)
    {
        if(isLoaded(x, z))
        {
            return loadedRegions.get(key(x, z));
        }
        ParallaxRegion v = new ParallaxRegion(height, folder, x, z);
        loadedRegions.put(key(x, z), v);
        if(cleanup.flip())
        {
            cleanup();
        }
        return v;
    }
    // Read access: fetch from cache, falling back to load()
    public ParallaxRegion getR(int x, int z)
    {
        long key = key(x, z);
        ParallaxRegion region = loadedRegions.get(key);
        if(region == null)
        {
            region = load(x, z);
        }
        return region;
    }
    // Write access: same as getR but marks the region as needing a save
    public ParallaxRegion getRW(int x, int z)
    {
        save.addIfMissing(key(x, z));
        return getR(x, z);
    }
    // Packs two 32-bit region coordinates into one long key
    private long key(int x, int z)
    {
        return (((long) x) << 32) | (((long) z) & 0xffffffffL);
    }
    // ParallaxAccess passes chunk coordinates; >> 5 converts to region
    // coordinates and & 31 to the chunk offset within the region.
    @Override
    public Hunk<BlockData> getBlocksR(int x, int z)
    {
        return getR(x >> 5, z >> 5).getBlockSlice().getR(x & 31, z & 31);
    }
    @Override
    public synchronized Hunk<BlockData> getBlocksRW(int x, int z)
    {
        return getRW(x >> 5, z >> 5).getBlockSlice().getRW(x & 31, z & 31);
    }
    @Override
    public Hunk<String> getObjectsR(int x, int z)
    {
        return getR(x >> 5, z >> 5).getObjectSlice().getR(x & 31, z & 31);
    }
    @Override
    public synchronized Hunk<String> getObjectsRW(int x, int z)
    {
        return getRW(x >> 5, z >> 5).getObjectSlice().getRW(x & 31, z & 31);
    }
    @Override
    public Hunk<Boolean> getUpdatesR(int x, int z)
    {
        return getR(x >> 5, z >> 5).getUpdateSlice().getR(x & 31, z & 31);
    }
    @Override
    public synchronized Hunk<Boolean> getUpdatesRW(int x, int z)
    {
        return getRW(x >> 5, z >> 5).getUpdateSlice().getRW(x & 31, z & 31);
    }
    @Override
    public ParallaxChunkMeta getMetaR(int x, int z)
    {
        return getR(x >> 5, z >> 5).getMetaR(x & 31, z & 31);
    }
    @Override
    public ParallaxChunkMeta getMetaRW(int x, int z)
    {
        return getRW(x >> 5, z >> 5).getMetaRW(x & 31, z & 31);
    }
    public void cleanup()
    {
        // Defaults: unload regions idle > 10s, clean chunks idle > 5s
        cleanup(10000, 5000);
    }
    // Asynchronously unloads idle regions and cleans up the rest.
    // r = region idle threshold (ms), c = chunk idle threshold (ms).
    @Override
    public void cleanup(long r, long c) {
        J.a(() -> {
            synchronized (loadedRegions)
            {
                for(ParallaxRegion i : loadedRegions.v())
                {
                    if(i.hasBeenIdleLongerThan(r))
                    {
                        unload(i.getX(), i.getZ());
                    }
                    else
                    {
                        i.cleanup(c);
                    }
                }
            }
        });
    }
    @Override
    public void saveAll() {
        // Fire-and-forget; saveAllNOW does the actual work
        J.a(this::saveAllNOW);
    }
    // Synchronously saves every loaded region that has pending writes.
    @Override
    public synchronized void saveAllNOW() {
        synchronized (loadedRegions)
        {
            for(ParallaxRegion i : loadedRegions.v())
            {
                synchronized (save)
                {
                    if(save.contains(key(i.getX(), i.getZ())))
                    {
                        save(i.getX(), i.getZ());
                    }
                }
            }
        }
    }
}

View File

@@ -0,0 +1,60 @@
package com.volmit.iris.scaffold.parallel;
import com.volmit.iris.util.KList;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
/**
 * Collects a batch ("burst") of tasks submitted to an executor and waits for
 * all of them in {@link #complete()}.
 */
public class BurstExecutor
{
    /** Executor that actually runs the queued work. */
    private final ExecutorService executor;

    /** Futures of the current burst; all access is guarded by synchronized(futures). */
    private final List<CompletableFuture<Void>> futures;

    /**
     * @param executor          pool to run tasks on
     * @param burstSizeEstimate expected number of tasks, used to presize the list
     */
    public BurstExecutor(ExecutorService executor, int burstSizeEstimate)
    {
        this.executor = executor;
        futures = new ArrayList<>(burstSizeEstimate);
    }

    /** Submits a single task and tracks its future for {@link #complete()}. */
    public CompletableFuture<Void> queue(Runnable r)
    {
        synchronized (futures)
        {
            CompletableFuture<Void> c = CompletableFuture.runAsync(r, executor);
            futures.add(c);
            return c;
        }
    }

    /** Submits several tasks at once; returns this for chaining. */
    public BurstExecutor queue(Runnable[] r)
    {
        synchronized (futures)
        {
            for(Runnable i : r)
            {
                CompletableFuture<Void> c = CompletableFuture.runAsync(i, executor);
                futures.add(c);
            }
        }

        return this;
    }

    /** Blocks until every queued task has finished, then clears the batch. */
    public void complete()
    {
        synchronized (futures)
        {
            try
            {
                CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).get();
                futures.clear();
            }
            catch(InterruptedException e)
            {
                // Restore the interrupt flag instead of silently swallowing it,
                // so callers/pools can still observe the interruption
                Thread.currentThread().interrupt();
                e.printStackTrace();
            }
            catch(ExecutionException e)
            {
                e.printStackTrace();
            }
        }
    }
}

View File

@@ -0,0 +1,12 @@
package com.volmit.iris.scaffold.parallel;
import com.volmit.iris.scaffold.hunk.Hunk;
/**
 * A Hunk view used during parallel (burst) processing that exposes the offset
 * of this view relative to the hunk it was taken from.
 */
public interface BurstedHunk<T> extends Hunk<T>
{
    public int getOffsetX();
    public int getOffsetY();
    public int getOffsetZ();
}

View File

@@ -0,0 +1,34 @@
package com.volmit.iris.scaffold.parallel;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
/**
 * Convenience wrapper around a work-stealing thread pool for running batches
 * ("bursts") of tasks and waiting on their completion.
 */
public class MultiBurst
{
    /** Shared instance sized to the number of available processors. */
    public static MultiBurst burst = new MultiBurst(Runtime.getRuntime().availableProcessors());

    // Pool backing all bursts; final — created once and never replaced.
    // NOTE(review): there is no shutdown path; fine for the JVM-lifetime shared
    // instance, but confirm for short-lived instances.
    private final ExecutorService service;

    /**
     * @param tc target parallelism of the work-stealing pool
     */
    public MultiBurst(int tc)
    {
        service = Executors.newWorkStealingPool(tc);
    }

    /** Runs all given tasks and blocks until every one has completed. */
    public void burst(Runnable... r)
    {
        burst(r.length).queue(r).complete();
    }

    /**
     * Creates a new burst executor for roughly {@code estimate} tasks.
     */
    public BurstExecutor burst(int estimate)
    {
        return new BurstExecutor(service, estimate);
    }

    /** Creates a burst executor with a default size estimate. */
    public BurstExecutor burst()
    {
        return burst(16);
    }

    /** Fire-and-forget execution on the shared pool. */
    public void lazy(Runnable o) {
        service.execute(o);
    }
}

View File

@@ -0,0 +1,61 @@
package com.volmit.iris.scaffold.stream;
import com.volmit.iris.util.KList;
/**
 * Significance implementation backed by two parallel lists: the factor types
 * and their significance values at the same indices.
 */
public class ArraySignificance<T> implements Significance<T>
{
    private final KList<T> types;
    private final KList<Double> significance;
    private final T significant;

    /** Constructs with the most significant type already known. */
    public ArraySignificance(KList<T> types, KList<Double> significance, T significant)
    {
        this.types = types;
        this.significance = significance;
        this.significant = significant;
    }

    /** Constructs and derives the most significant type from the values. */
    public ArraySignificance(KList<T> types, KList<Double> significance)
    {
        this.types = types;
        this.significance = significance;

        // Scan for the index holding the highest significance value
        double best = 0;
        int bestIndex = 0;

        for(int index = 0; index < significance.size(); index++)
        {
            double value = significance.get(index);

            if(value > best)
            {
                best = value;
                bestIndex = index;
            }
        }

        significant = types.get(bestIndex);
    }

    @Override
    public KList<T> getFactorTypes()
    {
        return types;
    }

    @Override
    public double getSignificance(T t)
    {
        // Parallel-list lookup: the value lives at the same index as the type
        for(int index = 0; index < types.size(); index++)
        {
            if(types.get(index).equals(t))
            {
                return significance.get(index);
            }
        }

        // Unknown type: contributes nothing
        return 0;
    }

    @Override
    public T getMostSignificantType()
    {
        return significant;
    }
}

View File

@@ -0,0 +1,30 @@
package com.volmit.iris.scaffold.stream;
import lombok.AllArgsConstructor;
import lombok.Data;
@Data
@AllArgsConstructor
// Simple immutable holder for the common procedural-layer parameters
// (seed, zoom and per-axis offsets). Lombok generates the getters required
// by the ProceduralLayer interface.
public class BasicLayer implements ProceduralLayer
{
    // Seed for randomness derived from this layer
    private final long seed;
    // Coordinate scale factor
    private final double zoom;
    // World-space offsets applied to sampling coordinates
    private final double offsetX;
    private final double offsetY;
    private final double offsetZ;
    public BasicLayer(long seed, double zoom)
    {
        // No offsets
        this(seed, zoom, 0D, 0D, 0D);
    }
    public BasicLayer(long seed)
    {
        // Default zoom of 1 (no scaling)
        this(seed, 1D);
    }
    public BasicLayer()
    {
        // Default seed
        this(1337);
    }
}

View File

@@ -0,0 +1,40 @@
package com.volmit.iris.scaffold.stream;
// Base class for decorator streams: stores the upstream source stream and
// leaves the sampling/conversion methods abstract for subclasses.
public abstract class BasicStream<T> extends BasicLayer implements ProceduralStream<T>
{
    // Upstream stream this stream decorates; null for source streams
    private final ProceduralStream<T> source;
    public BasicStream(ProceduralStream<T> source)
    {
        super();
        this.source = source;
    }
    public BasicStream()
    {
        // Source stream with no upstream
        this(null);
    }
    @Override
    public ProceduralStream<T> getTypedSource() {
        return source;
    }
    @Override
    public ProceduralStream<?> getSource() {
        return getTypedSource();
    }
    @Override
    public abstract T get(double x, double z);
    @Override
    public abstract T get(double x, double y, double z);
    @Override
    public abstract double toDouble(T t);
    @Override
    public abstract T fromDouble(double d);
}

View File

@@ -0,0 +1,14 @@
package com.volmit.iris.scaffold.stream;
/**
 * Common sampling parameters shared by all procedural layers and streams:
 * a seed, a zoom (coordinate scale) and per-axis offsets.
 */
public interface ProceduralLayer
{
    public long getSeed();
    public double getOffsetX();
    public double getOffsetY();
    public double getOffsetZ();
    public double getZoom();
}

View File

@@ -0,0 +1,515 @@
package com.volmit.iris.scaffold.stream;
import java.util.List;
import java.util.function.Function;
import com.volmit.iris.scaffold.hunk.Hunk;
import com.volmit.iris.scaffold.stream.arithmetic.*;
import com.volmit.iris.scaffold.stream.convert.*;
import com.volmit.iris.scaffold.stream.interpolation.Interpolated;
import com.volmit.iris.scaffold.stream.sources.FunctionStream;
import com.volmit.iris.scaffold.stream.utility.*;
import com.volmit.iris.util.Function2;
import com.volmit.iris.util.Function3;
import com.volmit.iris.util.Function4;
import com.volmit.iris.util.IRare;
import com.volmit.iris.util.KList;
public interface ProceduralStream<T> extends ProceduralLayer, Interpolated<T>
{
public static ProceduralStream<Double> ofDouble(Function2<Double, Double, Double> f)
{
return of(f, Interpolated.DOUBLE);
}
public static ProceduralStream<Double> ofDouble(Function3<Double, Double, Double, Double> f)
{
return of(f, Interpolated.DOUBLE);
}
public static <T> ProceduralStream<T> of(Function2<Double, Double, T> f, Interpolated<T> helper)
{
return of(f, (x, y, z) -> f.apply(x, z), helper);
}
public static <T> ProceduralStream<T> of(Function3<Double, Double, Double, T> f, Interpolated<T> helper)
{
return of((x, z) -> f.apply(x, 0D, z), f, helper);
}
public static <T> ProceduralStream<T> of(Function2<Double, Double, T> f, Function3<Double, Double, Double, T> f2, Interpolated<T> helper)
{
return new FunctionStream<>(f, f2, helper);
}
default ProceduralStream<T> profile()
{
return profile(10);
}
default ProceduralStream<T> profile(int memory)
{
return new ProfiledStream<>(this, memory);
}
default ProceduralStream<T> onNull(T v)
{
return new NullSafeStream<>(this, v);
}
default ProceduralStream<T> add(Function3<Double, Double, Double, Double> a)
{
return new AddingStream<>(this, a);
}
default ProceduralStream<T> add(Function2<Double, Double, Double> a)
{
return new AddingStream<>(this, a);
}
default ProceduralStream<T> add2D(Function2<Double, Double, Double> a)
{
return new AddingStream<>(this, a);
}
default ProceduralStream<T> add(double a)
{
return new AddingStream<>(this, a);
}
default ProceduralStream<T> blockToChunkCoords()
{
return bitShiftCoordsRight(4);
}
default ProceduralStream<T> chunkToRegionCoords()
{
return bitShiftCoordsRight(5);
}
default ProceduralStream<T> blockToRegionCoords()
{
return blockToChunkCoords().chunkToRegionCoords();
}
default ProceduralStream<T> regionToBlockCoords()
{
return regionToChunkCoords().chunkToBlockCoords();
}
default ProceduralStream<T> regionToChunkCoords()
{
return bitShiftCoordsLeft(5);
}
default ProceduralStream<T> chunkToBlockCoords()
{
return bitShiftCoordsLeft(4);
}
default ProceduralStream<T> bitShiftCoordsRight(int a)
{
return new CoordinateBitShiftRightStream<>(this, a);
}
default ProceduralStream<T> synchronize()
{
return new SynchronizedStream<>(this);
}
default ProceduralStream<T> semaphore(int permits)
{
return new SemaphoreStream<>(this, permits);
}
default ProceduralStream<T> bitShiftCoordsLeft(int a)
{
return new CoordinateBitShiftLeftStream<>(this, a);
}
default ProceduralStream<T> max(Function3<Double, Double, Double, Double> a)
{
return new MaxingStream<>(this, a);
}
default ProceduralStream<T> max(Function2<Double, Double, Double> a)
{
return new MaxingStream<>(this, a);
}
default ProceduralStream<T> slope()
{
return slope(1);
}
default ProceduralStream<T> slope(int range)
{
return new SlopeStream<>(this, range);
}
default ProceduralStream<T> max(double a)
{
return new MaxingStream<>(this, a);
}
default ProceduralStream<T> min(Function3<Double, Double, Double, Double> a)
{
return new MinningStream<>(this, a);
}
default ProceduralStream<T> min(Function2<Double, Double, Double> a)
{
return new MinningStream<>(this, a);
}
default ProceduralStream<T> min(double a)
{
return new MinningStream<>(this, a);
}
default ProceduralStream<T> subtract(Function3<Double, Double, Double, Double> a)
{
return new SubtractingStream<>(this, a);
}
default ProceduralStream<T> subtract(Function2<Double, Double, Double> a)
{
return new SubtractingStream<>(this, a);
}
default ProceduralStream<T> subtract(double a)
{
return new SubtractingStream<>(this, a);
}
default ProceduralStream<T> multiply(Function3<Double, Double, Double, Double> a)
{
return new MultiplyingStream<>(this, a);
}
default ProceduralStream<T> multiply(Function2<Double, Double, Double> a)
{
return new MultiplyingStream<>(this, a);
}
default ProceduralStream<T> multiply(double a)
{
return new MultiplyingStream<>(this, a);
}
default ProceduralStream<T> divide(Function3<Double, Double, Double, Double> a)
{
return new DividingStream<>(this, a);
}
default ProceduralStream<T> divide(Function2<Double, Double, Double> a)
{
return new DividingStream<>(this, a);
}
default ProceduralStream<T> divide(double a)
{
return new DividingStream<>(this, a);
}
default ProceduralStream<T> modulo(Function3<Double, Double, Double, Double> a)
{
return new ModuloStream<>(this, a);
}
default ProceduralStream<T> modulo(Function2<Double, Double, Double> a)
{
return new ModuloStream<>(this, a);
}
default ProceduralStream<T> modulo(double a)
{
return new ModuloStream<>(this, a);
}
default ProceduralStream<Integer> round()
{
return new RoundingStream(this);
}
default ProceduralStream<Double> roundDouble()
{
return new RoundingDoubleStream(this);
}
default ProceduralStream<Double> forceDouble()
{
return new ForceDoubleStream(this);
}
default ProceduralStream<Significance<T>> significance(double radius, int checks)
{
return new SignificanceStream<Significance<T>, T>(this, radius, checks);
}
default ProceduralStream<T> to3D()
{
return new To3DStream<T>(this);
}
default ProceduralStream<T> cache2D(int maxSize)
{
return new CachedStream2D<T>(this, maxSize);
}
default <V> ProceduralStream<V> convert(Function<T, V> converter)
{
return new ConversionStream<T,V>(this, converter);
}
default <V> ProceduralStream<V> convertAware2D(Function3<T, Double, Double, V> converter)
{
return new AwareConversionStream2D<T, V>(this, converter);
}
default <V> ProceduralStream<V> convertAware3D(Function4<T, Double, Double, Double, V> converter)
{
return new AwareConversionStream3D<T, V>(this, converter);
}
default <V> ProceduralStream<V> convertCached(Function<T, V> converter)
{
return new CachedConversionStream<T, V>(this, converter);
}
default ProceduralStream<T> offset(double x, double y, double z)
{
return new OffsetStream<T>(this, x, y, z);
}
default ProceduralStream<T> offset(double x, double z)
{
return new OffsetStream<T>(this, x, 0, z);
}
default ProceduralStream<T> zoom(double x, double y, double z)
{
return new ZoomStream<T>(this, x, y, z);
}
default ProceduralStream<T> zoom(double x, double z)
{
return new ZoomStream<T>(this, x, 1, z);
}
default ProceduralStream<T> zoom(double all)
{
return new ZoomStream<T>(this, all, all, all);
}
default ProceduralStream<T> radial(double scale)
{
return new RadialStream<>(this, scale);
}
default ProceduralStream<T> radial()
{
return radial(1D);
}
default <V> ProceduralStream<V> select(V... types)
{
return new SelectionStream<V>(this, types);
}
default <V> ProceduralStream<V> select(List<V> types)
{
return new SelectionStream<V>(this, types);
}
@SuppressWarnings("unchecked")
default <V> ProceduralStream<V> selectRarity(V... types)
{
KList<V> rarityTypes = new KList<>();
for(V i : types)
{
rarityTypes.addMultiple(i, IRare.get(i));
}
return new SelectionStream<V>(this, rarityTypes);
}
default <V> ProceduralStream<V> selectRarity(List<V> types)
{
KList<V> rarityTypes = new KList<>();
types.forEach((i) -> rarityTypes.addMultiple(i, IRare.get(i)));
return new SelectionStream<V>(this, rarityTypes);
}
default ProceduralStream<T> clamp(double min, double max)
{
return new ClampedStream<T>(this, min, max);
}
default ProceduralStream<T> fit(double min, double max)
{
return new FittedStream<T>(this, min, max);
}
default ProceduralStream<T> fit(double inMin, double inMax, double min, double max)
{
return new FittedStream<T>(this, inMin, inMax, min, max);
}
default void fill(Hunk<T> h, double x, double y, double z, int parallelism)
{
h.compute3D(parallelism, (xx, yy, zz, hh) -> hh.iterate((xv, yv, zv) -> hh.set(xv, yv, zv, get(xx + xv + x, yy + yv + y, zz + zv + z))));
}
default <V> void fill2D(Hunk<V> h, double x, double z, V v, int parallelism)
{
h.compute2D(parallelism, (xx, __, zz, hh) ->
{
for(int i = 0; i < hh.getWidth(); i++)
{
for(int k = 0; k < hh.getDepth(); k++)
{
double n = getDouble(i + x + xx, k + z + zz);
for(int j = 0; j < Math.min(h.getHeight(), n); j++)
{
hh.set(i, j, k, v);
}
}
}
});
}
default <V> void fill2D(Hunk<V> h, double x, double z, ProceduralStream<V> v, int parallelism)
{
h.compute2D(parallelism, (xx, yy, zz, hh) ->
{
for(int i = 0; i < hh.getWidth(); i++)
{
for(int k = 0; k < hh.getDepth(); k++)
{
double n = getDouble(i + x + xx, k + z + zz);
for(int j = 0; j < Math.min(h.getHeight(), n); j++)
{
hh.set(i, j, k, v.get(i + x + xx, j + yy, k + z + zz));
}
}
}
});
}
default <V> void fill2DYLocked(Hunk<V> h, double x, double z, V v, int parallelism)
{
h.compute2D(parallelism, (xx, yy, zz, hh) ->
{
for(int i = 0; i < hh.getWidth(); i++)
{
for(int k = 0; k < hh.getDepth(); k++)
{
double n = getDouble(i + x + xx, k + z + zz);
for(int j = 0; j < Math.min(h.getHeight(), n); j++)
{
hh.set(i, j, k, v);
}
}
}
});
}
default <V> void fill2DYLocked(Hunk<V> h, double x, double z, ProceduralStream<V> v, int parallelism)
{
h.compute2D(parallelism, (xx, yy, zz, hh) ->
{
for(int i = 0; i < hh.getWidth(); i++)
{
for(int k = 0; k < hh.getDepth(); k++)
{
double n = getDouble(i + x + xx, k + z + zz);
for(int j = 0; j < Math.min(h.getHeight(), n); j++)
{
hh.set(i, j, k, v.get(i + x + xx, k + z + zz));
}
}
}
});
}
default <V> void fill3D(Hunk<V> h, double x, int y, double z, V v, int parallelism)
{
h.compute3D(parallelism, (xx, yy, zz, hh) -> hh.iterate((xv, yv, zv) ->
{
if(getDouble(xx + xv + x, yy + yv + y, zz + zv + z) > 0.5)
{
hh.set(xv, yv, zv, v);
}
}));
}
default <V> void fill3D(Hunk<V> h, double x, int y, double z, ProceduralStream<V> v, int parallelism)
{
h.compute3D(parallelism, (xx, yy, zz, hh) -> hh.iterate((xv, yv, zv) ->
{
if(getDouble(xx + xv + x, yy + yv + y, zz + zv + z) > 0.5)
{
hh.set(xv, yv, zv, v.get(xx + xv + x, yy + yv + y, zz + zv + z));
}
}));
}
default void fill(Hunk<T> h, double x, double y, double z)
{
fill(h, x, z, 4);
}
default <V> void fill2D(Hunk<V> h, double x, double z, V v)
{
fill2D(h, x, z, v, 4);
}
default <V> void fill2D(Hunk<V> h, double x, double z, ProceduralStream<V> v)
{
fill2D(h, x, z, v, 4);
}
default <V> void fill2DYLocked(Hunk<V> h, double x, double z, V v)
{
fill2DYLocked(h, x, z, v, 4);
}
default <V> void fill2DYLocked(Hunk<V> h, double x, double z, ProceduralStream<V> v)
{
fill2DYLocked(h, x, z, v, 4);
}
default <V> void fill3D(Hunk<V> h, double x, int y, double z, V v)
{
fill3D(h, x, y, z, v, 4);
}
default <V> void fill3D(Hunk<V> h, double x, int y, double z, ProceduralStream<V> v)
{
fill3D(h, x, y, z, v, 4);
}
default double getDouble(double x, double z)
{
return toDouble(get(x, z));
}
default double getDouble(double x, double y, double z)
{
return toDouble(get(x, y, z));
}
/** The upstream stream this stream decorates, with the same value type. */
public ProceduralStream<T> getTypedSource();
/** The upstream stream this stream decorates, with an unknown value type. */
public ProceduralStream<?> getSource();
/** Samples this stream's value at a 2D position. */
public T get(double x, double z);
/** Samples this stream's value at a 3D position. */
public T get(double x, double y, double z);
}

View File

@@ -0,0 +1,12 @@
package com.volmit.iris.scaffold.stream;
import com.volmit.iris.util.KList;
/**
 * Describes a weighted set of contributing types at some sample point —
 * presumably used to blend between generator results; confirm against callers.
 *
 * @param <T> the type being weighted
 */
public interface Significance<T>
{
	/** All types that contribute to this result. */
	public KList<T> getFactorTypes();

	/** The significance (weight) of the given type in this result. */
	public double getSignificance(T t);

	/** The single type with the greatest significance. */
	public T getMostSignificantType();
}

View File

@@ -0,0 +1,52 @@
package com.volmit.iris.scaffold.stream.arithmetic;
import com.volmit.iris.scaffold.stream.BasicStream;
import com.volmit.iris.scaffold.stream.ProceduralStream;
import com.volmit.iris.util.Function2;
import com.volmit.iris.util.Function3;
/**
 * A stream decorator that adds a positionally-derived offset to every value
 * produced by the wrapped stream.
 *
 * <p>The offset is computed by a {@link Function3} of (x, y, z); 2D lookups
 * evaluate the function with y fixed to 0.
 *
 * @param <T> the stream's value type
 */
public class AddingStream<T> extends BasicStream<T>
{
	private final Function3<Double, Double, Double, Double> add;

	/**
	 * Creates a stream offset by a coordinate-dependent amount.
	 *
	 * @param stream the wrapped source stream
	 * @param add yields the offset to add at a given (x, y, z)
	 */
	public AddingStream(ProceduralStream<T> stream, Function3<Double, Double, Double, Double> add)
	{
		super(stream);
		this.add = add;
	}

	/**
	 * Creates a stream offset by a 2D (x, z) function; the y coordinate is ignored.
	 */
	public AddingStream(ProceduralStream<T> stream, Function2<Double, Double, Double> add)
	{
		this(stream, (ax, ay, az) -> add.apply(ax, az));
	}

	/**
	 * Creates a stream offset by a fixed constant.
	 */
	public AddingStream(ProceduralStream<T> stream, double add)
	{
		this(stream, (ax, ay, az) -> add);
	}

	@Override
	public double toDouble(T t)
	{
		// Conversions are delegated to the wrapped stream's type mapping
		return getTypedSource().toDouble(t);
	}

	@Override
	public T fromDouble(double d)
	{
		return getTypedSource().fromDouble(d);
	}

	@Override
	public T get(double x, double z)
	{
		// 2D lookups evaluate the offset function with y = 0
		double offset = add.apply(x, 0D, z);
		return fromDouble(offset + getTypedSource().getDouble(x, z));
	}

	@Override
	public T get(double x, double y, double z)
	{
		double offset = add.apply(x, y, z);
		return fromDouble(offset + getTypedSource().getDouble(x, y, z));
	}
}

View File

@@ -0,0 +1,47 @@
package com.volmit.iris.scaffold.stream.arithmetic;
import com.volmit.iris.scaffold.stream.BasicStream;
import com.volmit.iris.scaffold.stream.ProceduralStream;
/**
 * A stream decorator that clamps the wrapped stream's values into the
 * inclusive range [min, max].
 *
 * @param <T> the stream's value type
 */
public class ClampedStream<T> extends BasicStream<T> implements ProceduralStream<T>
{
	private final double min;
	private final double max;

	/**
	 * @param stream the wrapped source stream
	 * @param min lower bound (inclusive)
	 * @param max upper bound (inclusive)
	 */
	public ClampedStream(ProceduralStream<T> stream, double min, double max)
	{
		super(stream);
		this.min = min;
		this.max = max;
	}

	@Override
	public double toDouble(T t)
	{
		// Conversions are delegated to the wrapped stream's type mapping
		return getTypedSource().toDouble(t);
	}

	@Override
	public T fromDouble(double d)
	{
		return getTypedSource().fromDouble(d);
	}

	// Cap at max first, then floor at min (equivalent to max(min(v, max), min))
	private double clamp(double v)
	{
		double bounded = v;

		if(bounded > max)
		{
			bounded = max;
		}

		if(bounded < min)
		{
			bounded = min;
		}

		return bounded;
	}

	@Override
	public T get(double x, double z)
	{
		return fromDouble(clamp(getTypedSource().getDouble(x, z)));
	}

	@Override
	public T get(double x, double y, double z)
	{
		return fromDouble(clamp(getTypedSource().getDouble(x, y, z)));
	}
}

View File

@@ -0,0 +1,40 @@
package com.volmit.iris.scaffold.stream.arithmetic;
import com.volmit.iris.scaffold.stream.BasicStream;
import com.volmit.iris.scaffold.stream.ProceduralStream;
/**
 * A stream decorator that scales lookup coordinates up by a power of two:
 * each coordinate is truncated to an int and shifted left by {@code amount}
 * bits before sampling the wrapped stream.
 *
 * @param <T> the stream's value type
 */
public class CoordinateBitShiftLeftStream<T> extends BasicStream<T> implements ProceduralStream<T>
{
	private final int bits;

	/**
	 * @param stream the wrapped source stream
	 * @param amount number of bits to shift each (truncated) coordinate left
	 */
	public CoordinateBitShiftLeftStream(ProceduralStream<T> stream, int amount)
	{
		super(stream);
		this.bits = amount;
	}

	@Override
	public double toDouble(T t)
	{
		// Conversions are delegated to the wrapped stream's type mapping
		return getTypedSource().toDouble(t);
	}

	@Override
	public T fromDouble(double d)
	{
		return getTypedSource().fromDouble(d);
	}

	@Override
	public T get(double x, double z)
	{
		int sx = (int) x << bits;
		int sz = (int) z << bits;
		return getTypedSource().get(sx, sz);
	}

	@Override
	public T get(double x, double y, double z)
	{
		int sx = (int) x << bits;
		int sy = (int) y << bits;
		int sz = (int) z << bits;
		return getTypedSource().get(sx, sy, sz);
	}
}

View File

@@ -0,0 +1,40 @@
package com.volmit.iris.scaffold.stream.arithmetic;
import com.volmit.iris.scaffold.stream.BasicStream;
import com.volmit.iris.scaffold.stream.ProceduralStream;
/**
 * A stream decorator that scales lookup coordinates down by a power of two:
 * each coordinate is truncated to an int and arithmetically shifted right by
 * {@code amount} bits before sampling the wrapped stream.
 *
 * @param <T> the stream's value type
 */
public class CoordinateBitShiftRightStream<T> extends BasicStream<T> implements ProceduralStream<T>
{
	private final int bits;

	/**
	 * @param stream the wrapped source stream
	 * @param amount number of bits to shift each (truncated) coordinate right
	 */
	public CoordinateBitShiftRightStream(ProceduralStream<T> stream, int amount)
	{
		super(stream);
		this.bits = amount;
	}

	@Override
	public double toDouble(T t)
	{
		// Conversions are delegated to the wrapped stream's type mapping
		return getTypedSource().toDouble(t);
	}

	@Override
	public T fromDouble(double d)
	{
		return getTypedSource().fromDouble(d);
	}

	@Override
	public T get(double x, double z)
	{
		int sx = (int) x >> bits;
		int sz = (int) z >> bits;
		return getTypedSource().get(sx, sz);
	}

	@Override
	public T get(double x, double y, double z)
	{
		int sx = (int) x >> bits;
		int sy = (int) y >> bits;
		int sz = (int) z >> bits;
		return getTypedSource().get(sx, sy, sz);
	}
}

View File

@@ -0,0 +1,51 @@
package com.volmit.iris.scaffold.stream.arithmetic;
import com.volmit.iris.scaffold.stream.BasicStream;
import com.volmit.iris.scaffold.stream.ProceduralStream;
import com.volmit.iris.util.Function2;
import com.volmit.iris.util.Function3;
/**
 * A stream decorator that divides the wrapped stream's value by a
 * coordinate-dependent divisor.
 *
 * <p>The divisor is computed by a {@link Function3} of (x, y, z); 2D lookups
 * evaluate the function with y fixed to 0. A divisor of 0 follows IEEE-754
 * semantics and yields infinity/NaN rather than throwing.
 *
 * @param <T> the stream's value type
 */
public class DividingStream<T> extends BasicStream<T> implements ProceduralStream<T>
{
	// Renamed from the copy-pasted "add" (AddingStream) — this is the divisor.
	private final Function3<Double, Double, Double, Double> divisor;

	/**
	 * Creates a stream divided by a coordinate-dependent amount.
	 *
	 * @param stream the wrapped source stream
	 * @param divisor yields the divisor for a given (x, y, z)
	 */
	public DividingStream(ProceduralStream<T> stream, Function3<Double, Double, Double, Double> divisor)
	{
		super(stream);
		this.divisor = divisor;
	}

	/**
	 * Creates a stream divided by a 2D (x, z) function; the y coordinate is ignored.
	 */
	public DividingStream(ProceduralStream<T> stream, Function2<Double, Double, Double> divisor)
	{
		this(stream, (x, y, z) -> divisor.apply(x, z));
	}

	/**
	 * Creates a stream divided by a fixed constant.
	 */
	public DividingStream(ProceduralStream<T> stream, double divisor)
	{
		this(stream, (x, y, z) -> divisor);
	}

	@Override
	public double toDouble(T t)
	{
		// Conversions are delegated to the wrapped stream's type mapping
		return getTypedSource().toDouble(t);
	}

	@Override
	public T fromDouble(double d)
	{
		return getTypedSource().fromDouble(d);
	}

	@Override
	public T get(double x, double z)
	{
		// 2D lookups evaluate the divisor function with y = 0
		return fromDouble(getTypedSource().getDouble(x, z) / divisor.apply(x, 0D, z));
	}

	@Override
	public T get(double x, double y, double z)
	{
		return fromDouble(getTypedSource().getDouble(x, y, z) / divisor.apply(x, y, z));
	}
}

View File

@@ -0,0 +1,56 @@
package com.volmit.iris.scaffold.stream.arithmetic;
import com.volmit.iris.scaffold.stream.BasicStream;
import com.volmit.iris.scaffold.stream.ProceduralStream;
/**
 * A stream decorator that linearly remaps the wrapped stream's values from
 * the input range [inMin, inMax] onto the output range [min, max].
 *
 * <p>NOTE(review): if {@code inMax == inMin} the remap divides by zero and
 * produces infinity/NaN — confirm callers never pass a degenerate input range.
 *
 * @param <T> the stream's value type
 */
public class FittedStream<T> extends BasicStream<T> implements ProceduralStream<T>
{
	private final double min;
	private final double max;
	private final double inMin;
	private final double inMax;

	/**
	 * Remaps values from [inMin, inMax] to [min, max].
	 *
	 * @param stream the wrapped source stream
	 * @param inMin expected lower bound of the source's values
	 * @param inMax expected upper bound of the source's values
	 * @param min output lower bound
	 * @param max output upper bound
	 */
	public FittedStream(ProceduralStream<T> stream, double inMin, double inMax, double min, double max)
	{
		super(stream);
		this.inMin = inMin;
		this.inMax = inMax;
		this.min = min;
		this.max = max;
	}

	/**
	 * Remaps values from the unit range [0, 1] to [min, max].
	 */
	public FittedStream(ProceduralStream<T> stream, double min, double max)
	{
		this(stream, 0, 1, min, max);
	}

	@Override
	public double toDouble(T t)
	{
		// Conversions are delegated to the wrapped stream's type mapping
		return getTypedSource().toDouble(t);
	}

	@Override
	public T fromDouble(double d)
	{
		return getTypedSource().fromDouble(d);
	}

	// Linear interpolation: normalize v into the input range, then scale into the output range
	private double dlerp(double v)
	{
		double t = (v - inMin) / (inMax - inMin);
		return min + (t * (max - min));
	}

	@Override
	public T get(double x, double z)
	{
		double raw = getTypedSource().getDouble(x, z);
		return fromDouble(dlerp(raw));
	}

	@Override
	public T get(double x, double y, double z)
	{
		double raw = getTypedSource().getDouble(x, y, z);
		return fromDouble(dlerp(raw));
	}
}

Some files were not shown because too many files have changed in this diff Show More