Daniel Mills 2020-11-06 09:20:29 -05:00
parent f4056a3fca
commit 5ea938a20b
40 changed files with 3525 additions and 35 deletions

View File

@ -17,6 +17,7 @@
<orderEntry type="library" scope="PROVIDED" name="Maven: org.yaml:snakeyaml:1.26" level="project" />
<orderEntry type="library" name="Maven: io.papermc:paperlib:1.0.5" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: org.bukkit.craftbukkit:cb-1.16.2:1.16.2" level="project" />
<orderEntry type="library" name="Maven: com.bergerkiller.bukkit:BKCommonLib:v1:1.16.4-v2" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: com.sk89q.worldedit:worldedit-bukkit:7.2.0-SNAPSHOT" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: com.sk89q.worldedit:worldedit-core:7.2.0-SNAPSHOT" level="project" />
<orderEntry type="library" scope="PROVIDED" name="Maven: com.sk89q.worldedit.worldedit-libs:core:7.2.0-SNAPSHOT" level="project" />

View File

@ -194,6 +194,12 @@
<scope>provided</scope>
</dependency>
<!-- Hooks -->
<dependency>
<groupId>com.bergerkiller.bukkit</groupId>
<artifactId>BKCommonLib</artifactId>
<version>1.16.4-v2</version>
<classifier>v1</classifier>
</dependency>
<dependency>
<groupId>com.sk89q.worldedit</groupId>
<artifactId>worldedit-bukkit</artifactId>

View File

@ -6,6 +6,7 @@ import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URL;
import com.volmit.iris.link.BKLink;
import org.bukkit.Bukkit;
import org.bukkit.World;
import org.bukkit.World.Environment;
@ -64,6 +65,7 @@ public class Iris extends VolmitPlugin
public static StructureManager struct;
public static EditManager edit;
public static IrisBoardManager board;
public static BKLink linkBK;
public static MultiverseCoreLink linkMultiverseCore;
public static MythicMobsLink linkMythicMobs;
public static CitizensLink linkCitizens;
@ -193,6 +195,7 @@ public class Iris extends VolmitPlugin
struct = new StructureManager();
board = new IrisBoardManager();
linkMultiverseCore = new MultiverseCoreLink();
linkBK = new BKLink();
linkMythicMobs = new MythicMobsLink();
edit = new EditManager();
J.a(() -> IO.delete(getTemp()));

View File

@ -0,0 +1,50 @@
package com.volmit.iris.link;
import com.bergerkiller.bukkit.common.utils.BlockUtil;
import com.bergerkiller.bukkit.common.utils.ChunkUtil;
import com.volmit.iris.util.KList;
import com.volmit.iris.v2.lighting.LightingChunk;
import com.volmit.iris.v2.lighting.LightingService;
import com.volmit.iris.v2.scaffold.parallel.MultiBurst;
import io.lumine.xikage.mythicmobs.MythicMobs;
import io.lumine.xikage.mythicmobs.mobs.MythicMob;
import org.bukkit.Bukkit;
import org.bukkit.Chunk;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.block.Block;
import org.bukkit.block.data.BlockData;
import org.bukkit.entity.Entity;
import org.bukkit.event.world.ChunkUnloadEvent;
import org.bukkit.plugin.Plugin;
public class BKLink
{
public BKLink()
{
}
public void updateBlock(Block b) {
BlockData d = b.getBlockData();
b.setType(Material.AIR, false);
b.setBlockData(d, true);
}
public boolean supported()
{
return getBK() != null;
}
public Plugin getBK()
{
Plugin p = Bukkit.getPluginManager().getPlugin("BKCommonLib");
if(p == null)
{
return null;
}
return p;
}
}
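For illustration only (not part of the commit): a minimal sketch of how a caller might use the BKLink hook above, guarded by supported() so nothing touches BKCommonLib when the plugin is absent. The class and method names here are hypothetical.

// Hypothetical call site - illustrates guarding the hook before use
import org.bukkit.block.Block;
import com.volmit.iris.Iris;
public class BKLinkUsageExample
{
    public void refreshBlock(Block block)
    {
        // Only call into the BKCommonLib-backed path when the plugin is present
        if(Iris.linkBK != null && Iris.linkBK.supported())
        {
            // Re-applies the block data so the block is re-sent/re-lit
            Iris.linkBK.updateBlock(block);
        }
    }
}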

View File

@ -1,6 +1,8 @@
package com.volmit.iris.manager;
import com.volmit.iris.util.J;
import com.volmit.iris.v2.scaffold.parallel.MultiBurst;
import net.minecraft.server.v1_16_R2.BlockSign;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
import org.bukkit.Location;
@ -9,9 +11,20 @@ import org.bukkit.block.Block;
import org.bukkit.block.data.BlockData;
import org.bukkit.entity.Entity;
import org.bukkit.entity.FallingBlock;
import org.bukkit.entity.Player;
import org.bukkit.util.Vector;
public class BlockSignal {
public static void of(Block block, int ticks)
{
new BlockSignal(block, ticks);
}
public static void of(Block block)
{
of(block, 100);
}
public BlockSignal(Block block, int ticks)
{
Location tg = block.getLocation().clone().add(0.5, 0, 0.5).clone();
@ -28,8 +41,13 @@ public class BlockSignal {
J.s(() -> {
e.remove();
BlockData type = block.getBlockData();
block.setType(Material.AIR, false);
block.setBlockData(type, false);
MultiBurst.burst.lazy(() -> {
for(Player i : block.getWorld().getPlayers())
{
i.sendBlockChange(block.getLocation(), block.getBlockData());
}
});
}, ticks);
}
}
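A minimal usage sketch (not from the commit) of the static BlockSignal.of helpers above, e.g. to flash a debug marker at a block and restore it after a delay. The surrounding class is hypothetical.

// Hypothetical debug call site
import org.bukkit.block.Block;
import com.volmit.iris.manager.BlockSignal;
public class BlockSignalExample
{
    public void highlight(Block block)
    {
        BlockSignal.of(block);     // default: restore after 100 ticks
        BlockSignal.of(block, 20); // or restore after roughly one second
    }
}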

View File

@ -2,7 +2,6 @@ package com.volmit.iris.noise;
import java.util.List;
import com.oracle.webservices.internal.api.databinding.DatabindingMode;
import com.volmit.iris.Iris;
import com.volmit.iris.v2.scaffold.stream.ProceduralStream;
import com.volmit.iris.v2.scaffold.stream.sources.CNGStream;

View File

@ -353,6 +353,11 @@ public class B
//@done
}
public static boolean isTrulyLit(BlockData mat)
{
return isLit(mat) || mat.getMaterial().equals(Material.LAVA);
}
public static boolean isLit(BlockData mat)
{
// @builder

View File

@ -1,25 +1,26 @@
package com.volmit.iris.v2.generator;
import com.sun.org.apache.xpath.internal.operations.Mult;
import com.volmit.iris.Iris;
import com.volmit.iris.link.BKLink;
import com.volmit.iris.manager.BlockSignal;
import com.volmit.iris.object.*;
import com.volmit.iris.util.*;
import com.volmit.iris.v2.scaffold.cache.Cache;
import com.volmit.iris.v2.scaffold.engine.Engine;
import com.volmit.iris.v2.scaffold.engine.EngineFramework;
import com.volmit.iris.v2.scaffold.engine.EngineTarget;
import com.volmit.iris.v2.scaffold.engine.EngineWorldManager;
import com.volmit.iris.v2.scaffold.hunk.Hunk;
import com.volmit.iris.v2.scaffold.parallel.MultiBurst;
import io.papermc.lib.PaperLib;
import lombok.Getter;
import lombok.Setter;
import net.minecraft.server.v1_16_R2.*;
import org.bukkit.Chunk;
import org.bukkit.Material;
import org.bukkit.World;
import org.bukkit.block.Biome;
import org.bukkit.block.Block;
import org.bukkit.block.data.BlockData;
import org.bukkit.craftbukkit.v1_16_R2.block.CraftBlock;
import org.bukkit.generator.BlockPopulator;
import org.bukkit.inventory.Inventory;
import org.bukkit.inventory.InventoryHolder;
@ -38,6 +39,9 @@ public class IrisEngine extends BlockPopulator implements Engine
@Getter
private final EngineFramework framework;
@Getter
private final EngineWorldManager worldManager;
@Setter
@Getter
private volatile int parallelism;
@ -51,9 +55,17 @@ public class IrisEngine extends BlockPopulator implements Engine
Iris.info("Initializing Engine: " + target.getWorld().getName() + "/" + target.getDimension().getLoadKey() + " (" + target.getHeight() + " height)");
this.target = target;
this.framework = new IrisEngineFramework(this);
worldManager = new IrisWorldManager(this);
minHeight = 0;
}
@Override
public void close()
{
getWorldManager().close();
getFramework().close();
}
@Override
public double modifyX(double x) {
return x / getDimension().getTerrainZoom();
@ -97,13 +109,30 @@ public class IrisEngine extends BlockPopulator implements Engine
if(B.isUpdatable(data))
{
getParallax().updateBlock(x,y,z);
getParallax().getMetaRW(x>>4, z>>4).setUpdates(true);
}
}
@Override
public void populate(@NotNull World world, @NotNull Random random, @NotNull Chunk c)
{
getWorldManager().spawnInitialEntities(c);
updateChunk(c);
}
public void updateChunk(Chunk c)
{
if(getParallax().getMetaR(c.getX(), c.getZ()).isUpdates())
{
Hunk<Boolean> b = getParallax().getUpdatesR(c.getX(), c.getZ());
b.iterateSync((x,y,z,v) -> {
if(v != null && v)
{
update(x,y,z, c, new RNG(Cache.key(c.getX(), c.getZ())));
}
});
}
}
private void update(int x, int y, int z, Chunk c, RNG rf)
@ -114,8 +143,6 @@ public class IrisEngine extends BlockPopulator implements Engine
if(B.isStorage(data))
{
RNG rx = rf.nextParallelRNG(x).nextParallelRNG(z).nextParallelRNG(y);
block.setType(Material.AIR, false);
block.setBlockData(data, true);
InventorySlotType slot = null;
if(B.isStorageChest(data))
@ -143,8 +170,7 @@ public class IrisEngine extends BlockPopulator implements Engine
else if(B.isLit(data))
{
block.setType(Material.AIR, false);
block.setBlockData(data, true);
Iris.linkBK.updateBlock(block);
}
}

View File

@ -20,7 +20,7 @@ public class IrisEngineFramework implements EngineFramework {
private final IrisComplex complex;
@Getter
final EngineParallax engineParallax;
final EngineParallaxManager engineParallax;
@Getter
private final EngineActuator<BlockData> terrainActuator;
@ -56,4 +56,17 @@ public class IrisEngineFramework implements EngineFramework {
this.caveModifier = new IrisCaveModifier(engine);
this.postModifier = new IrisPostModifier(engine);
}
@Override
public void close()
{
getEngineParallax().close();
getTerrainActuator().close();
getDecorantActuator().close();
getBiomeActuator().close();
getDepositModifier().close();
getRavineModifier().close();
getCaveModifier().close();
getPostModifier().close();
}
}

View File

@ -1,17 +1,16 @@
package com.volmit.iris.v2.generator;
import com.volmit.iris.v2.scaffold.engine.Engine;
import com.volmit.iris.v2.scaffold.engine.EngineFramework;
import com.volmit.iris.v2.scaffold.engine.EngineParallax;
import com.volmit.iris.v2.scaffold.engine.EngineStructure;
import com.volmit.iris.v2.scaffold.engine.EngineParallaxManager;
import com.volmit.iris.v2.scaffold.engine.EngineStructureManager;
import lombok.Getter;
public class IrisEngineParallax implements EngineParallax {
public class IrisEngineParallax implements EngineParallaxManager {
@Getter
private final Engine engine;
@Getter
private final EngineStructure structureManager;
private final EngineStructureManager structureManager;
@Getter
private final int parallaxSize;

View File

@ -1,9 +1,9 @@
package com.volmit.iris.v2.generator;
import com.volmit.iris.v2.scaffold.engine.Engine;
import com.volmit.iris.v2.scaffold.engine.EngineAssignedStructure;
import com.volmit.iris.v2.scaffold.engine.EngineAssignedStructureManager;
public class IrisEngineStructure extends EngineAssignedStructure {
public class IrisEngineStructure extends EngineAssignedStructureManager {
public IrisEngineStructure(Engine engine) {
super(engine);
}

View File

@ -0,0 +1,44 @@
package com.volmit.iris.v2.generator;
import com.volmit.iris.v2.scaffold.engine.Engine;
import com.volmit.iris.v2.scaffold.engine.EngineAssignedWorldManager;
import org.bukkit.Chunk;
import org.bukkit.event.block.BlockBreakEvent;
import org.bukkit.event.block.BlockPlaceEvent;
import org.bukkit.event.entity.EntitySpawnEvent;
public class IrisWorldManager extends EngineAssignedWorldManager {
public IrisWorldManager(Engine engine) {
super(engine);
}
@Override
public void onEntitySpawn(EntitySpawnEvent e) {
}
@Override
public void onTick() {
}
@Override
public void onSave() {
getEngine().getParallax().saveAll();
}
@Override
public void spawnInitialEntities(Chunk chunk) {
}
@Override
public void onBlockBreak(BlockBreakEvent e) {
}
@Override
public void onBlockPlace(BlockPlaceEvent e) {
}
}

View File

@ -0,0 +1,18 @@
package com.volmit.iris.v2.lighting;
import com.bergerkiller.bukkit.common.collections.BlockFaceSet;
/**
* Maps {@link BlockFaceSet} values to a 16x16x16 area of blocks
*/
public class BlockFaceSetSection {
private final byte[] _maskData = new byte[4096];
public void set(int x, int y, int z, BlockFaceSet faces) {
_maskData[(y << 8) | (z << 4) | x] = (byte) faces.mask();
}
public BlockFaceSet get(int x, int y, int z) {
return BlockFaceSet.byMask((int) _maskData[(y << 8) | (z << 4) | x]);
}
}
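The class packs a 16x16x16 cube into a flat 4096-byte array with the index (y << 8) | (z << 4) | x. A standalone illustration of that index math (no BKCommonLib types needed), added for clarity and not part of the commit:

// Standalone illustration of the (y << 8) | (z << 4) | x packing used above
public class CubeIndexExample
{
    public static int index(int x, int y, int z)
    {
        return (y << 8) | (z << 4) | x; // x, y, z each in 0..15
    }
    public static void main(String[] args)
    {
        // Every coordinate maps to a unique slot in the 4096-entry array
        System.out.println(index(0, 0, 0));    // 0
        System.out.println(index(15, 0, 0));   // 15
        System.out.println(index(0, 0, 15));   // 240
        System.out.println(index(0, 15, 0));   // 3840
        System.out.println(index(15, 15, 15)); // 4095
    }
}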

View File

@ -0,0 +1,153 @@
package com.volmit.iris.v2.lighting;
import java.util.Arrays;
import java.util.BitSet;
import java.util.stream.IntStream;
import org.bukkit.World;
import com.bergerkiller.bukkit.common.utils.WorldUtil;
/**
* Loads region information, storing whether or not
* the 32x32 (1024) chunks are available.
*/
public class FlatRegionInfo {
private static final int[] DEFAULT_RY_0 = new int[] {0}; // Optimization
public final World world;
public final int rx, rz;
public final int[] ry;
public final int cx, cz;
private final BitSet _chunks;
private boolean _loadedFromDisk;
public FlatRegionInfo(World world, int rx, int ry, int rz) {
this(world, rx, (ry==0) ? DEFAULT_RY_0 : new int[] {ry}, rz);
}
public FlatRegionInfo(World world, int rx, int[] ry, int rz) {
this.world = world;
this.rx = rx;
this.rz = rz;
this.ry = ry;
this.cx = (rx << 5);
this.cz = (rz << 5);
this._chunks = new BitSet(1024);
this._loadedFromDisk = false;
}
private FlatRegionInfo(FlatRegionInfo copy, int[] new_ry) {
this.world = copy.world;
this.rx = copy.rx;
this.ry = new_ry;
this.rz = copy.rz;
this.cx = copy.cx;
this.cz = copy.cz;
this._chunks = copy._chunks;
this._loadedFromDisk = copy._loadedFromDisk;
}
public void addChunk(int cx, int cz) {
cx -= this.cx;
cz -= this.cz;
if (cx < 0 || cx >= 32 || cz < 0 || cz >= 32) {
return;
}
this._chunks.set((cz << 5) | cx);
}
/**
* Gets the number of chunks known to exist in this region.
* Note that this does not load the region file; before load() is called, only chunks added via addChunk are counted.
*
* @return chunk count
*/
public int getChunkCount() {
return this._chunks.cardinality();
}
/**
* Gets the region Y-coordinates as a sorted, immutable distinct stream
*
* @return ry int stream
*/
public IntStream getRYStream() {
return IntStream.of(this.ry);
}
/**
* Loads the region information from the region file, determining which chunks are contained
*/
public void load() {
if (!this._loadedFromDisk) {
this._loadedFromDisk = true;
for (int ry : this.ry) {
this._chunks.or(WorldUtil.getWorldSavedRegionChunks3(this.world, this.rx, ry, this.rz));
}
}
}
/**
* Marks this region as already loaded, so that chunk information is never read from the region file
*/
public void ignoreLoad() {
this._loadedFromDisk = true;
}
/**
* Gets whether the chunk coordinates specified are within the range
* of coordinates of this region
*
* @param cx - chunk coordinates (world coordinates)
* @param cz - chunk coordinates (world coordinates)
* @return True if in range
*/
public boolean isInRange(int cx, int cz) {
cx -= this.cx;
cz -= this.cz;
return cx >= 0 && cz >= 0 && cx < 32 && cz < 32;
}
/**
* Gets whether a chunk is contained and exists inside this region
*
* @param cx - chunk coordinates (world coordinates)
* @param cz - chunk coordinates (world coordinates)
* @return True if the chunk is contained
*/
public boolean containsChunk(int cx, int cz) {
cx -= this.cx;
cz -= this.cz;
if (cx < 0 || cx >= 32 || cz < 0 || cz >= 32) {
return false;
}
// Load region file information the first time this is accessed
this.load();
// Check in bitset
return this._chunks.get((cz << 5) | cx);
}
/**
* Adds another Region Y-coordinate to the list.
* The set of chunks and other properties are copied.
*
* @param ry
* @return new flat region info object with updated ry
*/
public FlatRegionInfo addRegionYCoordinate(int ry) {
int index = Arrays.binarySearch(this.ry, ry);
if (index >= 0) {
return this; // Already contained
}
// Convert to the insertion point (binarySearch returned -(insertion point) - 1)
index = -index - 1;
int[] new_y_coordinates = new int[this.ry.length + 1];
System.arraycopy(this.ry, 0, new_y_coordinates, 0, index);
new_y_coordinates[index] = ry;
System.arraycopy(this.ry, index, new_y_coordinates, index+1, this.ry.length - index);
return new FlatRegionInfo(this, new_y_coordinates);
}
}
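A usage sketch (not from the commit) showing how a FlatRegionInfo might be built and queried; the world and coordinates are placeholders chosen for illustration.

// Hypothetical usage of FlatRegionInfo - coordinates are placeholders
import org.bukkit.Bukkit;
import org.bukkit.World;
import com.volmit.iris.v2.lighting.FlatRegionInfo;
public class FlatRegionInfoExample
{
    public void example()
    {
        World world = Bukkit.getWorlds().get(0);
        // Region (2, 3) covers chunks x 64..95, z 96..127
        FlatRegionInfo info = new FlatRegionInfo(world, 2, 0, 3);
        info.ignoreLoad();                              // skip reading the region file
        info.addChunk(70, 100);                         // register a chunk (world chunk coords)
        boolean present = info.containsChunk(70, 100);  // true
        int count = info.getChunkCount();               // 1
    }
}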

View File

@ -0,0 +1,188 @@
package com.volmit.iris.v2.lighting;
import java.util.Collection;
import java.util.Set;
import java.util.stream.IntStream;
import org.bukkit.Chunk;
import org.bukkit.World;
import com.bergerkiller.bukkit.common.bases.IntVector3;
import com.bergerkiller.bukkit.common.utils.MathUtil;
import com.bergerkiller.bukkit.common.utils.WorldUtil;
import com.bergerkiller.bukkit.common.wrappers.LongHashMap;
/**
* A map of region information
*/
public class FlatRegionInfoMap {
private final World _world;
private final LongHashMap<FlatRegionInfo> _regions;
private FlatRegionInfoMap(World world, LongHashMap<FlatRegionInfo> regions) {
this._world = world;
this._regions = regions;
}
public World getWorld() {
return this._world;
}
public int getRegionCount() {
return this._regions.size();
}
public Collection<FlatRegionInfo> getRegions() {
return this._regions.getValues();
}
public FlatRegionInfo getRegion(int rx, int rz) {
return this._regions.get(rx, rz);
}
public FlatRegionInfo getRegionAtChunk(int cx, int cz) {
return this._regions.get(cx >> 5, cz >> 5);
}
/**
* Gets whether a chunk exists
*
* @param cx
* @param cz
* @return True if the chunk exists
*/
public boolean containsChunk(int cx, int cz) {
FlatRegionInfo region = getRegionAtChunk(cx, cz);
return region != null && region.containsChunk(cx, cz);
}
/**
* Gets whether a chunk, and all neighbouring chunks within a two-chunk radius, exist
*
* @param cx
* @param cz
* @return True if the chunk and all its neighbours exist
*/
public boolean containsChunkAndNeighbours(int cx, int cz) {
FlatRegionInfo region = getRegionAtChunk(cx, cz);
if (region == null) {
return false;
}
for (int dx = -2; dx <= 2; dx++) {
for (int dz = -2; dz <= 2; dz++) {
int mx = cx + dx;
int mz = cz + dz;
if (region.isInRange(mx, mz)) {
if (!region.containsChunk(mx, mz)) {
return false;
}
} else {
if (!this.containsChunk(mx, mz)) {
return false;
}
}
}
}
return true;
}
/**
* Computes all the region Y-coordinates used by a region and its neighbouring 8 regions.
* The returned array is sorted in increasing order and is distinct (no duplicate values).
*
* @param region
* @return region and neighbouring regions' Y-coordinates
*/
public int[] getRegionYCoordinatesSelfAndNeighbours(FlatRegionInfo region) {
IntStream region_y_coord_stream = region.getRYStream();
for (int drx = -1; drx <= 1; drx++) {
for (int drz = -1; drz <= 1; drz++) {
if (drx == 0 && drz == 0) {
continue;
}
FlatRegionInfo neigh_region = this.getRegion(region.rx + drx, region.rz + drz);
if (neigh_region != null) {
region_y_coord_stream = IntStream.concat(region_y_coord_stream, neigh_region.getRYStream());
}
}
}
//TODO: There's technically a way to significantly speed up sorting two concatenated sorted streams
// Sadly, the java 8 SDK doesn't appear to do any optimizations here :(
return region_y_coord_stream.sorted().distinct().toArray();
}
/**
* Creates a region information mapping of all existing chunks of a world
* that are currently loaded. No further loading is required.
*
* @param world
* @return region info map
*/
public static FlatRegionInfoMap createLoaded(World world) {
LongHashMap<FlatRegionInfo> regions = new LongHashMap<FlatRegionInfo>();
for (Chunk chunk : world.getLoadedChunks()) {
int rx = WorldUtil.chunkToRegionIndex(chunk.getX());
int rz = WorldUtil.chunkToRegionIndex(chunk.getZ());
FlatRegionInfo prev_info = regions.get(rx, rz);
FlatRegionInfo new_info = prev_info;
if (new_info == null) {
new_info = new FlatRegionInfo(world, rx, 0, rz);
new_info.ignoreLoad();
}
// Refresh y-coordinates
for (Integer y_coord : WorldUtil.getLoadedSectionCoordinates(chunk)) {
new_info = new_info.addRegionYCoordinate(WorldUtil.chunkToRegionIndex(y_coord.intValue()));
}
// Add chunk to region bitset
new_info.addChunk(chunk.getX(), chunk.getZ());
// Store if new or changed
if (new_info != prev_info) {
regions.put(rx, rz, new_info);
}
}
return new FlatRegionInfoMap(world, regions);
}
/**
* Creates a region information mapping of all existing chunks of a world
*
* @param world
* @return region info map
*/
public static FlatRegionInfoMap create(World world) {
LongHashMap<FlatRegionInfo> regions = new LongHashMap<FlatRegionInfo>();
// Obtain the region coordinates in 3d space (vertical too!)
Set<IntVector3> regionCoordinates = WorldUtil.getWorldRegions3(world);
// For each region, create a RegionInfo entry
for (IntVector3 region : regionCoordinates) {
long key = MathUtil.longHashToLong(region.x, region.z);
FlatRegionInfo prev = regions.get(key);
if (prev != null) {
regions.put(key, prev.addRegionYCoordinate(region.y));
} else {
regions.put(key, new FlatRegionInfo(world, region.x, region.y, region.z));
}
}
// For all loaded chunks, add those chunks to their region up-front
// They may not yet have been saved to the region file
for (Chunk chunk : world.getLoadedChunks()) {
int rx = WorldUtil.chunkToRegionIndex(chunk.getX());
int rz = WorldUtil.chunkToRegionIndex(chunk.getZ());
FlatRegionInfo info = regions.get(rx, rz);
if (info != null) {
info.addChunk(chunk.getX(), chunk.getZ());
}
}
return new FlatRegionInfoMap(world, regions);
}
}
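A brief usage sketch (not from the commit) of the factory methods above, deciding whether a chunk is safe to relight because everything around it exists. The wrapper class is hypothetical.

// Hypothetical usage of FlatRegionInfoMap
import org.bukkit.Bukkit;
import org.bukkit.World;
import com.volmit.iris.v2.lighting.FlatRegionInfoMap;
public class FlatRegionInfoMapExample
{
    public boolean canRelight(int cx, int cz)
    {
        World world = Bukkit.getWorlds().get(0);
        // createLoaded() only considers chunks already in memory; create() also scans region files
        FlatRegionInfoMap map = FlatRegionInfoMap.createLoaded(world);
        // Only relight when the chunk and everything around it exists
        return map.containsChunkAndNeighbours(cx, cz);
    }
}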

View File

@ -0,0 +1,101 @@
package com.volmit.iris.v2.lighting;
import java.util.HashMap;
import com.volmit.iris.Iris;
import org.bukkit.World;
import com.bergerkiller.bukkit.common.Task;
import com.bergerkiller.bukkit.common.utils.WorldUtil;
import com.bergerkiller.bukkit.common.wrappers.LongHashSet;
/**
* Handles the automatic cleanup of chunk lighting when chunks are generated
*/
public class LightingAutoClean {
private static HashMap<World, LongHashSet> queues = new HashMap<World, LongHashSet>();
private static Task autoCleanTask = null;
/**
* Checks all neighbouring chunks to see whether they are now fully surrounded by available chunks, and
* schedules lighting repairs for those that are.
*
* @param world the chunk is in
* @param chunkX coordinate
* @param chunkZ coordinate
*/
public static void handleChunkGenerated(World world, int chunkX, int chunkZ) {
for (int dx = -1; dx <= 1; dx++) {
for (int dz = -1; dz <= 1; dz++) {
if (dx == 0 && dz == 0) {
continue;
}
if (!WorldUtil.isChunkAvailable(world, chunkX + dx, chunkZ + dz)) {
continue;
}
// Check that all chunks surrounding this chunk are all available
boolean allNeighboursLoaded = true;
for (int dx2 = -1; dx2 <= 1 && allNeighboursLoaded; dx2++) {
for (int dz2 = -1; dz2 <= 1 && allNeighboursLoaded; dz2++) {
if (dx2 == 0 && dz2 == 0) {
continue; // ignore self
}
if (dx2 == -dx && dz2 == -dz) {
continue; // ignore the original generated chunk
}
allNeighboursLoaded &= WorldUtil.isChunkAvailable(world, chunkX + dx + dx2, chunkZ + dz + dz2);
}
}
// If all neighbours are available, schedule it for fixing
if (allNeighboursLoaded) {
schedule(world, chunkX + dx, chunkZ + dz);
}
}
}
}
private static synchronized void processAutoClean() {
while (queues.size() > 0) {
World world = queues.keySet().iterator().next();
LongHashSet chunks = queues.remove(world);
LightingService.schedule(world, chunks);
}
}
public static void schedule(World world, int chunkX, int chunkZ) {
schedule(world, chunkX, chunkZ, 80);
}
public static synchronized void schedule(World world, int chunkX, int chunkZ, int tickDelay) {
LongHashSet queue = queues.get(world);
if (queue == null) {
queue = new LongHashSet(9);
queues.put(world, queue);
}
// Queue this chunk, and all its neighbours
for (int dx = -1; dx <= 1; dx++) {
for (int dz = -1; dz <= 1; dz++) {
queue.add(chunkX + dx, chunkZ + dz);
}
}
// Initialize clean task if it hasn't been yet
if (autoCleanTask == null) {
autoCleanTask = new Task(Iris.instance) {
@Override
public void run() {
processAutoClean();
}
};
}
// Postpone the tick task while there are less than 100 chunks in the queue
if (queue.size() < 100) {
autoCleanTask.stop().start(tickDelay);
}
}
}
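An illustrative call site (not how the commit itself wires this up): a generator notifying the auto-clean queue after a chunk finishes generating. The hook name is hypothetical.

// Illustrative call site only
import org.bukkit.Chunk;
import com.volmit.iris.v2.lighting.LightingAutoClean;
public class AutoCleanCallSiteExample
{
    public void onChunkGenerated(Chunk c)
    {
        // Queues any fully-surrounded neighbours for a lighting repair pass
        LightingAutoClean.handleChunkGenerated(c.getWorld(), c.getX(), c.getZ());
        // Or schedule this chunk directly with a custom tick delay
        LightingAutoClean.schedule(c.getWorld(), c.getX(), c.getZ(), 40);
    }
}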

View File

@ -0,0 +1,198 @@
package com.volmit.iris.v2.lighting;
import com.bergerkiller.bukkit.common.collections.BlockFaceSet;
/**
* Represents a category of light being processed. All conditional logic
* for this is handled by this class.
*/
public enum LightingCategory {
SKY() {
@Override
public String getName() {
return "Sky";
}
@Override
public void initialize(LightingChunk chunk) {
if (!chunk.hasSkyLight) {
return;
}
// Find out the highest possible Y-position
int x, y, z, light, height, opacity;
BlockFaceSet opaqueFaces;
LightingCube cube = null;
// Apply initial sky lighting from top to bottom
for (z = chunk.start.z; z <= chunk.end.z; z++) {
for (x = chunk.start.x; x <= chunk.end.x; x++) {
light = 15;
height = chunk.getHeight(x, z) + 1;
for (y = chunk.maxY; y >= chunk.minY; y--) {
if ((cube = chunk.nextCube(cube, y)) == null) {
// Skip the remaining 15: they are all inaccessible as well
y -= 15;
// If not full skylight, reset light level, assuming it dimmed out
if (light != 15) {
light = 0;
}
continue;
}
// Set quickly when light level is at 0, or we are above height level
if (y > height || light <= 0) {
cube.skyLight.set(x, y & 0xf, z, light);
continue;
}
// If opaque at the top, set light to 0 instantly
opaqueFaces = cube.getOpaqueFaces(x, y & 0xf, z);
if (opaqueFaces.up()) {
light = 0;
} else {
// Apply the opacity to the light level
opacity = cube.opacity.get(x, y & 0xf, z);
if (light < 15 && opacity == 0) {
opacity = 1;
}
if ((light -= opacity) <= 0) {
light = 0;
}
}
// Apply sky light to block
cube.skyLight.set(x, y & 0xf, z, light);
// If opaque at the bottom, reset light to 0 for next block
// The block itself is lit
if (opaqueFaces.down()) {
light = 0;
}
}
}
}
}
@Override
public int getStartY(LightingChunk chunk, int x, int z) {
return chunk.getHeight(x, z);
}
@Override
public void setDirty(LightingChunk chunk, boolean dirty) {
chunk.isSkyLightDirty = dirty;
}
@Override
public int get(LightingCube section, int x, int y, int z) {
return section.skyLight.get(x, y, z);
}
@Override
public void set(LightingCube section, int x, int y, int z, int level) {
section.skyLight.set(x, y, z, level);
}
},
BLOCK() {
@Override
public String getName() {
return "Block";
}
@Override
public void initialize(LightingChunk chunk) {
// Some blocks that emit light, also have opaque faces
// They still emit light through the opaque faces to other blocks
// To fix this, run an initial processing step that spreads all
// emitted light to the neighbouring blocks' block light, ignoring own opaque faces
int x, y, z;
for (LightingCube cube : chunk.getSections()) {
for (y = 0; y < 16; y++) {
for (z = chunk.start.z; z <= chunk.end.z; z++) {
for (x = chunk.start.x; x <= chunk.end.x; x++) {
cube.spreadBlockLight(x, y, z);
}
}
}
}
}
@Override
public int getStartY(LightingChunk chunk, int x, int z) {
return chunk.maxY;
}
@Override
public void setDirty(LightingChunk chunk, boolean dirty) {
chunk.isBlockLightDirty = dirty;
}
@Override
public int get(LightingCube section, int x, int y, int z) {
return section.blockLight.get(x, y, z);
}
@Override
public void set(LightingCube section, int x, int y, int z, int level) {
section.blockLight.set(x, y, z, level);
}
};
/**
* Gets the name of this type of light, used when logging
*
* @return category name
*/
public abstract String getName();
/**
* Initializes the lighting in the chunk for this category
*
* @param chunk
*/
public abstract void initialize(LightingChunk chunk);
/**
* Gets the y-coordinate to start processing from when spreading light around
*
* @param chunk
* @param x
* @param z
* @return start y-coordinate
*/
public abstract int getStartY(LightingChunk chunk, int x, int z);
/**
* Sets whether this category of light is dirty, indicating this category of light is all good,
* or that more work is needed spreading light around.
*
* @param chunk
* @param dirty
*/
public abstract void setDirty(LightingChunk chunk, boolean dirty);
/**
* Gets the light level in a section at the coordinates specified.
* No bounds checking is performed.
*
* @param section
* @param x
* @param y
* @param z
* @return light level
*/
public abstract int get(LightingCube section, int x, int y, int z);
/**
* Sets the light level in a section at the coordinates specified.
* No bounds checking is performed.
*
* @param section
* @param x
* @param y
* @param z
* @param level
*/
public abstract void set(LightingCube section, int x, int y, int z, int level);
}
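The enum hides the sky/block difference behind the same get/set/getStartY/setDirty operations, which is what lets LightingChunk.spread() run one algorithm for both categories. A sketch (not from the commit) of writing category-agnostic code against that interface; the LightingCube and LightingChunk instances are assumed to come from the surrounding lighting engine.

// Sketch of light-category-agnostic helpers written against the enum above
import com.volmit.iris.v2.lighting.LightingCategory;
import com.volmit.iris.v2.lighting.LightingChunk;
import com.volmit.iris.v2.lighting.LightingCube;
public class LightingCategoryExample
{
    // Copies one light value between two cubes for whichever category is passed in
    public void copyLight(LightingCategory category, LightingCube from, LightingCube to, int x, int y, int z)
    {
        int level = category.get(from, x, y, z);
        category.set(to, x, y, z, level);
    }
    // Marks both categories as needing another spread cycle
    public void markDirty(LightingChunk chunk)
    {
        LightingCategory.SKY.setDirty(chunk, true);
        LightingCategory.BLOCK.setDirty(chunk, true);
    }
}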

View File

@ -0,0 +1,458 @@
package com.volmit.iris.v2.lighting;
import com.bergerkiller.bukkit.common.bases.IntVector2;
import com.bergerkiller.bukkit.common.chunk.ForcedChunk;
import com.bergerkiller.bukkit.common.collections.BlockFaceSet;
import com.bergerkiller.bukkit.common.utils.ChunkUtil;
import com.bergerkiller.bukkit.common.utils.WorldUtil;
import com.bergerkiller.bukkit.common.wrappers.ChunkSection;
import com.bergerkiller.bukkit.common.wrappers.HeightMap;
import com.bergerkiller.bukkit.common.wrappers.IntHashMap;
import com.bergerkiller.generated.net.minecraft.server.ChunkHandle;
import com.volmit.iris.Iris;
import org.bukkit.Chunk;
import org.bukkit.World;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.logging.Level;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
/**
* Represents a single chunk full with lighting-relevant information.
* Initialization and use of this chunk in the process is as follows:<br>
* - New lighting chunks are created for all chunks to be processed<br>
* - notifyAccessible is called for all chunks, passing in all chunks<br>
* - fill/fillSection is called for all chunks, after which initLight is called<br>
* - spread is called on all chunks until all spreading is finished<br>
* - data from all LightingChunks/Sections is gathered and saved to chunks or region files<br>
* - possible chunk resends are performed
*/
public class LightingChunk {
public static final int OB = ~0xf; // Outside blocks
public static final int OC = ~0xff; // Outside chunk
public IntHashMap<LightingCube> sections;
public final LightingChunkNeighboring neighbors = new LightingChunkNeighboring();
public final int[] heightmap = new int[256];
public final World world;
public final int chunkX, chunkZ;
public boolean hasSkyLight = true;
public boolean isSkyLightDirty = true;
public boolean isBlockLightDirty = true;
public boolean isFilled = false;
public boolean isApplied = false;
public IntVector2 start = new IntVector2(1, 1);
public IntVector2 end = new IntVector2(14, 14);
public int minY = 0;
public int maxY = 0;
public final ForcedChunk forcedChunk = ForcedChunk.none();
public volatile boolean loadingStarted = false;
public LightingChunk(World world, int x, int z) {
this.world = world;
this.chunkX = x;
this.chunkZ = z;
}
/**
* Gets all the sections inside this chunk.
* Elements are never null.
*
* @return sections
*/
public Collection<LightingCube> getSections() {
return this.sections.values();
}
/**
* Efficiently iterates the vertical cubes of a chunk, only
* querying the lookup table every 16 blocks
*
* @param previous The previous cube we iterated
* @param y Block y-coordinate
* @return the cube at the Block y-coordinate, or null if this cube does not exist
*/
public LightingCube nextCube(LightingCube previous, int y) {
int cy = y >> 4;
if (previous != null && previous.cy == cy) {
return previous;
} else {
return this.sections.get(cy);
}
}
/**
* Notifies that a new chunk is accessible.
*
* @param chunk that is accessible
*/
public void notifyAccessible(LightingChunk chunk) {
final int dx = chunk.chunkX - this.chunkX;
final int dz = chunk.chunkZ - this.chunkZ;
// Only check neighbours, ignoring the corners and self
if (Math.abs(dx) > 1 || Math.abs(dz) > 1 || (dx != 0) == (dz != 0)) {
return;
}
// Results in -16, 16 or 0 for the x/z coordinates
neighbors.set(dx, dz, chunk);
// Update start/end coordinates
if (dx == 1) {
end = new IntVector2(15, end.z);
} else if (dx == -1) {
start = new IntVector2(0, start.z);
} else if (dz == 1) {
end = new IntVector2(end.x, 15);
} else if (dz == -1) {
start = new IntVector2(start.x, 0);
}
}
/**
* Initializes the neighboring cubes of all the cubes of this
* lighting chunk. This initializes the neighbors both within
* the same chunk (vertical) and for neighboring chunks (horizontal).
*/
public void detectCubeNeighbors() {
for (LightingCube cube : this.sections.values()) {
// Neighbors above and below
cube.neighbors.set(0, 1, 0, this.sections.get(cube.cy + 1));
cube.neighbors.set(0, -1, 0, this.sections.get(cube.cy - 1));
// Neighbors in neighboring chunks
cube.neighbors.set(-1, 0, 0, this.neighbors.getCube(-1, 0, cube.cy));
cube.neighbors.set( 1, 0, 0, this.neighbors.getCube( 1, 0, cube.cy));
cube.neighbors.set( 0, 0, -1, this.neighbors.getCube( 0, -1, cube.cy));
cube.neighbors.set( 0, 0, 1, this.neighbors.getCube( 0, 1, cube.cy));
}
}
public void fill(Chunk chunk, int[] region_y_coordinates) {
// Fill using chunk sections
hasSkyLight = WorldUtil.getDimensionType(chunk.getWorld()).hasSkyLight();
List<LightingCube> lightingChunkSectionList;
{
// First create a list of ChunkSection objects storing the data
// We must do this sequentially, because asynchronous access is not permitted
List<ChunkSection> chunkSectionList = IntStream.of(region_y_coordinates)
.map(WorldUtil::regionToChunkIndex)
.flatMap(base_cy -> IntStream.range(base_cy, base_cy + WorldUtil.CHUNKS_PER_REGION_AXIS))
.mapToObj(cy -> WorldUtil.getSection(chunk, cy))
.filter(section -> section != null)
.collect(Collectors.toList());
// Then process all the gathered chunk sections into a LightingChunkSection in parallel
lightingChunkSectionList = chunkSectionList.stream()
.parallel()
.map(section -> new LightingCube(this, section, hasSkyLight))
.collect(Collectors.toList());
}
// Add to mapping
this.sections = new IntHashMap<LightingCube>();
for (LightingCube lightingChunkSection : lightingChunkSectionList) {
this.sections.put(lightingChunkSection.cy, lightingChunkSection);
}
// Compute min/max y using sections that are available
// Make use of the fact that they are pre-sorted by y-coordinate
this.minY = 0;
this.maxY = 0;
if (!lightingChunkSectionList.isEmpty()) {
this.minY = lightingChunkSectionList.get(0).cy << 4;
this.maxY = (lightingChunkSectionList.get(lightingChunkSectionList.size()-1).cy << 4) + 15;
}
// Initialize and then load sky light heightmap information
if (this.hasSkyLight) {
HeightMap heightmap = ChunkUtil.getLightHeightMap(chunk, true);
for (int x = 0; x < 16; ++x) {
for (int z = 0; z < 16; ++z) {
this.heightmap[this.getHeightKey(x, z)] = Math.max(this.minY, heightmap.getHeight(x, z));
}
}
} else {
Arrays.fill(this.heightmap, this.maxY);
}
this.isFilled = true;
}
private int getHeightKey(int x, int z) {
return x | (z << 4);
}
/**
* Gets the height level (the top block that does not block light)
*
* @param x - coordinate
* @param z - coordinate
* @return height
*/
public int getHeight(int x, int z) {
return this.heightmap[getHeightKey(x, z)];
}
private final int getMaxLightLevel(LightingCube section, LightingCategory category, int lightLevel, int x, int y, int z) {
BlockFaceSet selfOpaqueFaces = section.getOpaqueFaces(x, y, z);
if (x >= 1 && z >= 1 && x <= 14 && z <= 14) {
// All within this chunk - simplified calculation
if (!selfOpaqueFaces.west()) {
lightLevel = section.getLightIfHigher(category, lightLevel,
BlockFaceSet.MASK_EAST, x - 1, y, z);
}
if (!selfOpaqueFaces.east()) {
lightLevel = section.getLightIfHigher(category, lightLevel,
BlockFaceSet.MASK_WEST, x + 1, y, z);
}
if (!selfOpaqueFaces.north()) {
lightLevel = section.getLightIfHigher(category, lightLevel,
BlockFaceSet.MASK_SOUTH, x, y, z - 1);
}
if (!selfOpaqueFaces.south()) {
lightLevel = section.getLightIfHigher(category, lightLevel,
BlockFaceSet.MASK_NORTH, x, y, z + 1);
}
// If dy is also within this section, we can simplify it
if (y >= 1 && y <= 14) {
if (!selfOpaqueFaces.down()) {
lightLevel = section.getLightIfHigher(category, lightLevel,
BlockFaceSet.MASK_UP, x, y - 1, z);
}
if (!selfOpaqueFaces.up()) {
lightLevel = section.getLightIfHigher(category, lightLevel,
BlockFaceSet.MASK_DOWN, x, y + 1, z);
}
return lightLevel;
}
} else {
// Crossing chunk boundaries - requires neighbor checks
if (!selfOpaqueFaces.west()) {
lightLevel = section.getLightIfHigherNeighbor(category, lightLevel,
BlockFaceSet.MASK_EAST, x - 1, y, z);
}
if (!selfOpaqueFaces.east()) {
lightLevel = section.getLightIfHigherNeighbor(category, lightLevel,
BlockFaceSet.MASK_WEST, x + 1, y, z);
}
if (!selfOpaqueFaces.north()) {
lightLevel = section.getLightIfHigherNeighbor(category, lightLevel,
BlockFaceSet.MASK_SOUTH, x, y, z - 1);
}
if (!selfOpaqueFaces.south()) {
lightLevel = section.getLightIfHigherNeighbor(category, lightLevel,
BlockFaceSet.MASK_NORTH, x, y, z + 1);
}
}
// Above and below, may need to check cube boundaries
// Below
if (!selfOpaqueFaces.down()) {
lightLevel = section.getLightIfHigherNeighbor(category, lightLevel,
BlockFaceSet.MASK_UP, x, y - 1, z);
}
// Above
if (!selfOpaqueFaces.up()) {
lightLevel = section.getLightIfHigherNeighbor(category, lightLevel,
BlockFaceSet.MASK_DOWN, x, y + 1, z);
}
return lightLevel;
}
/**
* Gets whether this lighting chunk has faults that need to be fixed
*
* @return True if there are faults, False if not
*/
public boolean hasFaults() {
return isSkyLightDirty || isBlockLightDirty;
}
public void forceSpreadBlocks()
{
spread(LightingCategory.BLOCK);
}
/**
* Spreads the light from sources to 'zero' light level blocks
*
* @return Number of processing loops executed. 0 indicates no faults were found.
*/
public int spread() {
if (hasFaults()) {
int count = 0;
if (isSkyLightDirty) {
count += spread(LightingCategory.SKY);
}
if (isBlockLightDirty) {
count += spread(LightingCategory.BLOCK);
}
return count;
} else {
return 0;
}
}
private int spread(LightingCategory category) {
if ((category == LightingCategory.SKY) && !hasSkyLight) {
this.isSkyLightDirty = false;
return 0;
}
int x, y, z, light, factor, startY, newlight;
int loops = 0;
int lasterrx = 0, lasterry = 0, lasterrz = 0;
boolean haserror;
boolean err_neigh_nx = false;
boolean err_neigh_px = false;
boolean err_neigh_nz = false;
boolean err_neigh_pz = false;
LightingCube cube = null;
// Keep spreading the light in this chunk until it is done
boolean mode = false;
IntVector2 loop_start, loop_end;
int loop_increment;
while (true) {
haserror = false;
// Alternate iterating positive and negative
// This allows proper optimized spreading in all directions
mode = !mode;
if (mode) {
loop_start = start;
loop_end = end.add(1, 1);
loop_increment = 1;
} else {
loop_start = end;
loop_end = start.subtract(1, 1);
loop_increment = -1;
}
// Go through all blocks, using the heightmap for sky light to skip a few
for (x = loop_start.x; x != loop_end.x; x += loop_increment) {
for (z = loop_start.z; z != loop_end.z; z += loop_increment) {
startY = category.getStartY(this, x, z);
for (y = startY; y >= this.minY; y--) {
if ((cube = nextCube(cube, y)) == null) {
// Skip this section entirely by setting y to the bottom of the section
y &= ~0xf;
continue;
}
// Take block opacity into account, skip if fully solid
factor = Math.max(1, cube.opacity.get(x, y & 0xf, z));
if (factor == 15) {
continue;
}
// Read the old light level and try to find a light level around it that exceeds
light = category.get(cube, x, y & 0xf, z);
newlight = light + factor;
if (newlight < 15) {
newlight = getMaxLightLevel(cube, category, newlight, x, y & 0xf, z);
}
newlight -= factor;
// pick the highest value
if (newlight > light) {
category.set(cube, x, y & 0xf, z, newlight);
lasterrx = x;
lasterry = y;
lasterrz = z;
err_neigh_nx |= (x == 0);
err_neigh_nz |= (z == 0);
err_neigh_px |= (x == 15);
err_neigh_pz |= (z == 15);
haserror = true;
}
}
}
}
if (!haserror) {
break;
} else if (++loops > 100) {
lasterrx += this.chunkX << 4;
lasterrz += this.chunkZ << 4;
StringBuilder msg = new StringBuilder();
msg.append("Failed to fix all " + category.getName() + " lighting at [");
msg.append(lasterrx).append('/').append(lasterry);
msg.append('/').append(lasterrz).append(']');
Iris.warn(msg.toString());
break;
}
}
// Set self as no longer dirty, all light is good
category.setDirty(this, false);
// When we change blocks at our chunk borders, neighbours have to do another spread cycle
if (err_neigh_nx) setNeighbourDirty(-1, 0, category);
if (err_neigh_px) setNeighbourDirty(1, 0, category);
if (err_neigh_nz) setNeighbourDirty(0, -1, category);
if (err_neigh_pz) setNeighbourDirty(0, 1, category);
return loops;
}
private void setNeighbourDirty(int dx, int dz, LightingCategory category) {
LightingChunk n = neighbors.get(dx, dz);
if (n != null) {
category.setDirty(n, true);
}
}
/**
* Applies the lighting information to a chunk. The returned completable future is called
* on the main thread when saving finishes.
*
* @param chunk to save to
* @return completable future completed when the chunk is saved,
* with value True passed when saving occurred, False otherwise
*/
@SuppressWarnings("unchecked")
public CompletableFuture<Boolean> saveToChunk(Chunk chunk) {
// Create futures for saving to all the chunk sections in parallel
List<LightingCube> sectionsToSave = this.sections.values();
final CompletableFuture<Boolean>[] futures = new CompletableFuture[sectionsToSave.size()];
{
int futureIndex = 0;
for (LightingCube sectionToSave : sectionsToSave) {
ChunkSection sectionToWriteTo = WorldUtil.getSection(chunk, sectionToSave.cy);
if (sectionToWriteTo == null) {
futures[futureIndex++] = CompletableFuture.completedFuture(Boolean.FALSE);
} else {
futures[futureIndex++] = sectionToSave.saveToChunk(sectionToWriteTo);
}
}
}
// When all of them complete, combine them into a single future
// If any changes were made to the chunk, return True as completed value
return CompletableFuture.allOf(futures).thenApply((o) -> {
isApplied = true;
try {
for (CompletableFuture<Boolean> future : futures) {
if (future.get().booleanValue()) {
ChunkHandle.fromBukkit(chunk).markDirty();
return Boolean.TRUE;
}
}
} catch (Throwable t) {
t.printStackTrace();
}
// None of the futures completed true
return Boolean.FALSE;
});
}
}
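The class javadoc spells out the processing order (create, notifyAccessible, fill, spread, save). A simplified single-chunk driver following that order, as a sketch only: the forced-chunk management, async scheduling and the initLight step that LightingService performs are omitted, and region Y-coordinate 0 is assumed.

// Simplified lifecycle sketch, assuming the chunk is already loaded
import org.bukkit.Chunk;
import org.bukkit.World;
import com.volmit.iris.v2.lighting.LightingChunk;
public class LightingChunkLifecycleExample
{
    public void relight(World world, Chunk bukkitChunk)
    {
        LightingChunk lc = new LightingChunk(world, bukkitChunk.getX(), bukkitChunk.getZ());
        // notifyAccessible would normally be called here with every neighbouring LightingChunk
        // Fill from the loaded chunk sections (region Y-coordinate 0 assumed)
        lc.fill(bukkitChunk, new int[] {0});
        lc.detectCubeNeighbors();
        // The initLight step described in the javadoc is omitted in this sketch;
        // spread until no more faults remain
        while (lc.hasFaults())
        {
            lc.spread();
        }
        // Write the computed light back to the chunk (asynchronous, returns a future)
        lc.saveToChunk(bukkitChunk);
    }
}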

View File

@ -0,0 +1,73 @@
package com.volmit.iris.v2.lighting;
/**
* Keeps track of the 4 x/z neighbors of chunks
*/
public class LightingChunkNeighboring {
public final LightingChunk[] values = new LightingChunk[4];
/**
* Generates a key ranging 0 - 3 for fixed x/z combinations<br>
* - Bit 1 is set to contain which of the two (x or z) is non-zero<br>
* - Bit 2 is set to contain whether x/z is 1 or -1<br><br>
* <p/>
* This system requires that the x/z pairs are one of the following:<br>
* (0, 1) | (0, -1) | (1, 0) | (-1, 0)
*
* @param x value
* @param z value
* @return key
*/
private static final int getIndexByChunk(int x, int z) {
return (x & 1) | ((x + z + 1) & 0x2);
}
/**
* Gets whether all 4 chunk neighbors are accessible
*
* @return True if all neighbors are accessible
*/
public boolean hasAll() {
for (int i = 0; i < 4; i++) {
if (values[i] == null) {
return false;
}
}
return true;
}
/**
* Gets the neighbor representing the given relative chunk
*
* @param deltaChunkX
* @param deltaChunkZ
* @return neighbor
*/
public LightingChunk get(int deltaChunkX, int deltaChunkZ) {
return values[getIndexByChunk(deltaChunkX, deltaChunkZ)];
}
/**
* Gets a relative neighboring chunk, and then a vertical cube in that chunk, if possible.
*
* @param deltaChunkX
* @param deltaChunkZ
* @param cy Cube absolute y-coordinate
* @return cube, null if the chunk or cube is not available
*/
public LightingCube getCube(int deltaChunkX, int deltaChunkZ, int cy) {
LightingChunk chunk = get(deltaChunkX, deltaChunkZ);
return (chunk == null) ? null : chunk.sections.get(cy);
}
/**
* Sets the neighbor representing the given relative chunk
*
* @param deltaChunkX
* @param deltaChunkZ
* @param neighbor to set to
*/
public void set(int deltaChunkX, int deltaChunkZ, LightingChunk neighbor) {
values[getIndexByChunk(deltaChunkX, deltaChunkZ)] = neighbor;
}
}
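The key formula (x & 1) | ((x + z + 1) & 0x2) deserves a worked check. A standalone snippet (added for clarity, not part of the commit) showing it maps the four allowed offsets to distinct indices 0-3:

// Standalone check of the neighbor-index formula used above
public class NeighborIndexExample
{
    private static int index(int x, int z)
    {
        return (x & 1) | ((x + z + 1) & 0x2);
    }
    public static void main(String[] args)
    {
        System.out.println(index(0, -1)); // 0
        System.out.println(index(-1, 0)); // 1
        System.out.println(index(0, 1));  // 2
        System.out.println(index(1, 0));  // 3
    }
}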

View File

@ -0,0 +1,317 @@
package com.volmit.iris.v2.lighting;
import java.util.concurrent.CompletableFuture;
import com.bergerkiller.bukkit.common.collections.BlockFaceSet;
import com.bergerkiller.bukkit.common.utils.WorldUtil;
import com.bergerkiller.bukkit.common.wrappers.BlockData;
import com.bergerkiller.bukkit.common.wrappers.ChunkSection;
import com.bergerkiller.generated.net.minecraft.server.NibbleArrayHandle;
/**
* A single 16x16x16 cube of stored block information
*/
public class LightingCube {
public static final int OOC = ~0xf; // Outside Of Cube
public final LightingChunk owner;
public final LightingCubeNeighboring neighbors = new LightingCubeNeighboring();
public final int cy;
public final NibbleArrayHandle skyLight;
public final NibbleArrayHandle blockLight;
public final NibbleArrayHandle emittedLight;
public final NibbleArrayHandle opacity;
private final BlockFaceSetSection opaqueFaces;
public LightingCube(LightingChunk owner, ChunkSection chunkSection, boolean hasSkyLight) {
this.owner = owner;
this.cy = chunkSection.getY();
if (owner.neighbors.hasAll()) {
// Block light data (is re-initialized in the fill operation below, no need to read)
this.blockLight = NibbleArrayHandle.createNew();
// Sky light data (is re-initialized using heightmap operation later, no need to read)
if (hasSkyLight) {
this.skyLight = NibbleArrayHandle.createNew();
} else {
this.skyLight = null;
}
} else {
// We need to load the original light data, because we have a border that we do not update
// Block light data
byte[] blockLightData = WorldUtil.getSectionBlockLight(owner.world,
owner.chunkX, this.cy, owner.chunkZ);
if (blockLightData != null) {
this.blockLight = NibbleArrayHandle.createNew(blockLightData);
} else {
this.blockLight = NibbleArrayHandle.createNew();
}
// Sky light data
if (hasSkyLight) {
byte[] skyLightData = WorldUtil.getSectionSkyLight(owner.world,
owner.chunkX, this.cy, owner.chunkZ);
if (skyLightData != null) {
this.skyLight = NibbleArrayHandle.createNew(skyLightData);
} else {
this.skyLight = NibbleArrayHandle.createNew();
}
} else {
this.skyLight = null;
}
}
// World coordinates
int worldX = owner.chunkX << 4;
int worldY = chunkSection.getYPosition();
int worldZ = owner.chunkZ << 4;
// Fill opacity and initial block lighting values
this.opacity = NibbleArrayHandle.createNew();
this.emittedLight = NibbleArrayHandle.createNew();
this.opaqueFaces = new BlockFaceSetSection();
int x, y, z, opacity, blockEmission;
BlockFaceSet opaqueFaces;
BlockData info;
for (z = owner.start.z; z <= owner.end.z; z++) {
for (x = owner.start.x; x <= owner.end.x; x++) {
for (y = 0; y < 16; y++) {
info = chunkSection.getBlockData(x, y, z);
blockEmission = info.getEmission();
opacity = info.getOpacity(owner.world, worldX+x, worldY+y, worldZ+z);
if (opacity >= 0xf) {
opacity = 0xf;
opaqueFaces = BlockFaceSet.ALL;
} else {
if (opacity < 0) {
opacity = 0;
}
opaqueFaces = info.getOpaqueFaces(owner.world, worldX+x, worldY+y, worldZ+z);
}
this.opacity.set(x, y, z, opacity);
this.emittedLight.set(x, y, z, blockEmission);
this.blockLight.set(x, y, z, blockEmission);
this.opaqueFaces.set(x, y, z, opaqueFaces);
}
}
}
}
/**
* Gets the opaque faces of a block
*
* @param x - coordinate
* @param y - coordinate
* @param z - coordinate
* @return opaque face set
*/
public BlockFaceSet getOpaqueFaces(int x, int y, int z) {
return this.opaqueFaces.get(x, y, z);
}
/**
* Reads the light level of a neighboring block.
* If it could be higher, also checks the opaque faces, and then returns the
* higher light value if all these tests pass.
* The x/y/z coordinates are allowed to check neighboring cubes.
*
* @param category
* @param old_light
* @param faceMask
* @param x The X-coordinate of the block (-1 to 16)
* @param y The Y-coordinate of the block (-1 to 16)
* @param z The Z-coordinate of the block (-1 to 16)
* @return higher light level if propagated, otherwise the old light value
*/
public int getLightIfHigherNeighbor(LightingCategory category, int old_light, int faceMask, int x, int y, int z) {
if ((x & OOC | y & OOC | z & OOC) == 0) {
return this.getLightIfHigher(category, old_light, faceMask, x, y, z);
} else {
LightingCube neigh = this.neighbors.get(x>>4, y>>4, z>>4);
if (neigh != null) {
return neigh.getLightIfHigher(category, old_light, faceMask, x & 0xf, y & 0xf, z & 0xf);
} else {
return old_light;
}
}
}
/**
* Reads the light level of a neighboring block.
* If it could be higher, also checks the opaque faces, and then returns the
* higher light value if all these tests pass.
* Requires the x/y/z coordinates to lie within this cube.
*
* @param category Category of light to check
* @param old_light Previous light value
* @param faceMask The BlockFaceSet mask indicating the light-traveling direction
* @param x The X-coordinate of the block (0 to 15)
* @param y The Y-coordinate of the block (0 to 15)
* @param z The Z-coordinate of the block (0 to 15)
* @return higher light level if propagated, otherwise the old light value
*/
public int getLightIfHigher(LightingCategory category, int old_light, int faceMask, int x, int y, int z) {
int new_light_level = category.get(this, x, y, z);
return (new_light_level > old_light && !this.getOpaqueFaces(x, y, z).get(faceMask))
? new_light_level : old_light;
}
/**
* Called during initialization of block light to spread the light emitted by a block
* to all neighboring blocks.
*
* @param x The X-coordinate of the block (0 to 15)
* @param y The Y-coordinate of the block (0 to 15)
* @param z The Z-coordinate of the block (0 to 15)
*/
public void spreadBlockLight(int x, int y, int z) {
int emitted = this.emittedLight.get(x, y, z);
if (emitted <= 1) {
return; // Skip if neighbouring blocks won't receive light from it
}
if (x >= 1 && z >= 1 && x <= 14 && z <= 14) {
trySpreadBlockLightWithin(emitted, BlockFaceSet.MASK_EAST, x-1, y, z);
trySpreadBlockLightWithin(emitted, BlockFaceSet.MASK_WEST, x+1, y, z);
trySpreadBlockLightWithin(emitted, BlockFaceSet.MASK_SOUTH, x, y, z-1);
trySpreadBlockLightWithin(emitted, BlockFaceSet.MASK_NORTH, x, y, z+1);
} else {
trySpreadBlockLight(emitted, BlockFaceSet.MASK_EAST, x-1, y, z);
trySpreadBlockLight(emitted, BlockFaceSet.MASK_WEST, x+1, y, z);
trySpreadBlockLight(emitted, BlockFaceSet.MASK_SOUTH, x, y, z-1);
trySpreadBlockLight(emitted, BlockFaceSet.MASK_NORTH, x, y, z+1);
}
if (y >= 1 && y <= 14) {
trySpreadBlockLightWithin(emitted, BlockFaceSet.MASK_UP, x, y-1, z);
trySpreadBlockLightWithin(emitted, BlockFaceSet.MASK_DOWN, x, y+1, z);
} else {
trySpreadBlockLight(emitted, BlockFaceSet.MASK_UP, x, y-1, z);
trySpreadBlockLight(emitted, BlockFaceSet.MASK_DOWN, x, y+1, z);
}
}
/**
* Tries to spread block light from an emitting block to one of the 6 sides.
* The block being spread to is allowed to be outside of the bounds of this cube,
* in which case neighboring cubes are spread to instead.
*
* @param emitted The light that is emitted by the block
* @param faceMask The BlockFaceSet mask indicating the light-traveling direction
* @param x The X-coordinate of the block to spread to (-1 to 16)
* @param y The Y-coordinate of the block to spread to (-1 to 16)
* @param z The Z-coordinate of the block to spread to (-1 to 16)
*/
public void trySpreadBlockLight(int emitted, int faceMask, int x, int y, int z) {
if ((x & OOC | y & OOC | z & OOC) == 0) {
this.trySpreadBlockLightWithin(emitted, faceMask, x, y, z);
} else {
LightingCube neigh = this.neighbors.get(x>>4, y>>4, z>>4);
if (neigh != null) {
neigh.trySpreadBlockLightWithin(emitted, faceMask, x & 0xf, y & 0xf, z & 0xf);
}
}
}
/**
* Tries to spread block light from an emitting block to one of the 6 sides.
* Assumes that the block being spread to is within this cube.
*
* @param emitted The light that is emitted by the block
* @param faceMask The BlockFaceSet mask indicating the light-traveling direction
* @param x The X-coordinate of the block to spread to (0 to 15)
* @param y The Y-coordinate of the block to spread to (0 to 15)
* @param z The Z-coordinate of the block to spread to (0 to 15)
*/
public void trySpreadBlockLightWithin(int emitted, int faceMask, int x, int y, int z) {
if (!this.getOpaqueFaces(x, y, z).get(faceMask)) {
int new_level = emitted - Math.max(1, this.opacity.get(x, y, z));
if (new_level > this.blockLight.get(x, y, z)) {
this.blockLight.set(x, y, z, new_level);
}
}
}
/**
* Applies the lighting information to a chunk section
*
* @param chunkSection to save to
* @return future completed when saving is finished. Future resolves to False if no changes occurred, True otherwise.
*/
public CompletableFuture<Boolean> saveToChunk(ChunkSection chunkSection) {
CompletableFuture<Void> blockLightFuture = null;
CompletableFuture<Void> skyLightFuture = null;
try {
if (this.blockLight != null) {
byte[] newBlockLight = this.blockLight.getData();
byte[] oldBlockLight = WorldUtil.getSectionBlockLight(owner.world,
owner.chunkX, this.cy, owner.chunkZ);
boolean blockLightChanged = false;
if (oldBlockLight == null || newBlockLight.length != oldBlockLight.length) {
blockLightChanged = true;
} else {
for (int i = 0; i < oldBlockLight.length; i++) {
if (oldBlockLight[i] != newBlockLight[i]) {
blockLightChanged = true;
break;
}
}
}
//TODO: Maybe do blockLightChanged check inside BKCommonLib?
if (blockLightChanged) {
blockLightFuture = WorldUtil.setSectionBlockLightAsync(owner.world,
owner.chunkX, this.cy, owner.chunkZ,
newBlockLight);
}
}
if (this.skyLight != null) {
byte[] newSkyLight = this.skyLight.getData();
byte[] oldSkyLight = WorldUtil.getSectionSkyLight(owner.world,
owner.chunkX, this.cy, owner.chunkZ);
boolean skyLightChanged = false;
if (oldSkyLight == null || newSkyLight.length != oldSkyLight.length) {
skyLightChanged = true;
} else {
for (int i = 0; i < oldSkyLight.length; i++) {
if (oldSkyLight[i] != newSkyLight[i]) {
skyLightChanged = true;
break;
}
}
}
//TODO: Maybe do skyLightChanged check inside BKCommonLib?
if (skyLightChanged) {
skyLightFuture = WorldUtil.setSectionSkyLightAsync(owner.world,
owner.chunkX, this.cy, owner.chunkZ,
newSkyLight);
}
}
} catch (Throwable t) {
CompletableFuture<Boolean> exceptionally = new CompletableFuture<Boolean>();
exceptionally.completeExceptionally(t);
return exceptionally;
}
// No updates performed
if (blockLightFuture == null && skyLightFuture == null) {
return CompletableFuture.completedFuture(Boolean.FALSE);
}
// Join both completable futures as one, if needed
CompletableFuture<Void> combined;
if (blockLightFuture == null) {
combined = skyLightFuture;
} else if (skyLightFuture == null) {
combined = blockLightFuture;
} else {
combined = CompletableFuture.allOf(blockLightFuture, skyLightFuture);
}
// When combined resolves, return one that returns True
return combined.thenApply((c) -> Boolean.TRUE);
}
}
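The boundary test (x & OOC | y & OOC | z & OOC) == 0 is what decides whether a coordinate stays inside the 16x16x16 cube or must be routed to a neighboring cube via x>>4, y>>4, z>>4. A standalone illustration (not part of the commit):

// Standalone illustration of the OOC (~0xf) in-cube test used above
public class CubeBoundsExample
{
    private static final int OOC = ~0xf;
    private static boolean insideCube(int x, int y, int z)
    {
        // Zero only when every coordinate is within 0..15
        return (x & OOC | y & OOC | z & OOC) == 0;
    }
    public static void main(String[] args)
    {
        System.out.println(insideCube(0, 15, 7)); // true
        System.out.println(insideCube(-1, 8, 8)); // false -> neighbor cube, since -1 >> 4 == -1
        System.out.println(insideCube(8, 16, 8)); // false -> neighbor cube, since 16 >> 4 == 1
    }
}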

View File

@ -0,0 +1,64 @@
package com.volmit.iris.v2.lighting;
/**
* Keeps track of the 6 x/y/z neighbors of cubes
*/
public class LightingCubeNeighboring {
public final LightingCube[] values = new LightingCube[6];
/**
* Generates a key ranging 0 - 5 for fixed x/y/z combinations<br>
* - Bit 1 is set to contain whether x/y/z is 1 or -1<br>
* - Bit 2 is set to 1 when the axis is x<br>
* - Bit 3 is set to 1 when the axis is z<br><br>
* <p/>
* This system requires that the x/y/z pairs are one of the following:<br>
* (0, 0, 1) | (0, 0, -1) | (0, 1, 0) | (0, -1, 0) | (1, 0, 0) | (-1, 0, 0)
*
* @param x value
* @param y value
* @param z value
* @return key
*/
private static final int getIndexByCube(int x, int y, int z) {
return (((x + y + z + 1) & 0x2) >> 1) | ((x & 0x1) << 1) | ((z & 0x1) << 2);
}
/**
* Gets whether all 6 cube neighbors are accessible
*
* @return True if all neighbors are accessible
*/
public boolean hasAll() {
for (int i = 0; i < 6; i++) {
if (values[i] == null) {
return false;
}
}
return true;
}
/**
* Gets the neighbor representing the given relative cube
*
* @param deltaCubeX
* @param deltaCubeY
* @param deltaCubeZ
* @return neighbor, null if no neighbor is available here
*/
public LightingCube get(int deltaCubeX, int deltaCubeY, int deltaCubeZ) {
return values[getIndexByCube(deltaCubeX, deltaCubeY, deltaCubeZ)];
}
/**
* Sets the neighbor representing the given relative cube
*
* @param deltaCubeX
* @param deltaCubeY
* @param deltaCubeZ
* @param neighbor to set to, is allowed to be null to set to 'none'
*/
public void set(int deltaCubeX, int deltaCubeY, int deltaCubeZ, LightingCube neighbor) {
values[getIndexByCube(deltaCubeX, deltaCubeY, deltaCubeZ)] = neighbor;
}
}
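Same treatment for the 6-way cube index: a standalone check (added for clarity, not part of the commit) showing the formula maps the six allowed offsets to distinct indices 0-5:

// Standalone check of the 6-way cube-neighbor index formula used above
public class CubeNeighborIndexExample
{
    private static int index(int x, int y, int z)
    {
        return (((x + y + z + 1) & 0x2) >> 1) | ((x & 0x1) << 1) | ((z & 0x1) << 2);
    }
    public static void main(String[] args)
    {
        System.out.println(index(0, -1, 0)); // 0
        System.out.println(index(0, 1, 0));  // 1
        System.out.println(index(-1, 0, 0)); // 2
        System.out.println(index(1, 0, 0));  // 3
        System.out.println(index(0, 0, -1)); // 4
        System.out.println(index(0, 0, 1));  // 5
    }
}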

View File

@ -0,0 +1,77 @@
package com.volmit.iris.v2.lighting;
import java.util.HashMap;
import java.util.Map;
import org.bukkit.World;
import com.bergerkiller.bukkit.common.chunk.ForcedChunk;
import com.bergerkiller.bukkit.common.utils.WorldUtil;
/**
* Shortly remembers the forced chunks it has kept loaded from a previous operation.
* Reduces chunk unloading-loading grind.
*/
public class LightingForcedChunkCache {
private static final Map<Key, ForcedChunk> _cache = new HashMap<Key, ForcedChunk>();
public static ForcedChunk get(World world, int x, int z) {
ForcedChunk cached;
synchronized (_cache) {
cached = _cache.get(new Key(world, x, z));
}
if (cached != null) {
return cached.clone();
} else {
return WorldUtil.forceChunkLoaded(world, x, z);
}
}
public static void store(ForcedChunk chunk) {
ForcedChunk prev;
synchronized (_cache) {
prev = _cache.put(new Key(chunk.getWorld(), chunk.getX(), chunk.getZ()), chunk.clone());
}
if (prev != null) {
prev.close();
}
}
public static void reset() {
synchronized (_cache) {
for (ForcedChunk chunk : _cache.values()) {
chunk.close();
}
_cache.clear();
}
}
private static final class Key {
public final World world;
public final int x;
public final int z;
public Key(World world, int x, int z) {
this.world = world;
this.x = x;
this.z = z;
}
@Override
public int hashCode() {
return this.x * 31 + this.z;
}
@Override
public boolean equals(Object o) {
if (o instanceof Key) {
Key other = (Key) o;
return other.x == this.x &&
other.z == this.z &&
other.world == this.world;
} else {
return false;
}
}
}
}
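A usage sketch (not from the commit) of the get -> work -> store pattern this cache enables: the ticket from a previous operation keeps the chunk loaded across consecutive lighting passes, and reset() later releases everything. The wrapper method is hypothetical.

// Hypothetical usage of the forced-chunk cache
import org.bukkit.World;
import com.bergerkiller.bukkit.common.chunk.ForcedChunk;
import com.volmit.iris.v2.lighting.LightingForcedChunkCache;
public class ForcedChunkCacheExample
{
    public void keepLoadedWhileWorking(World world, int cx, int cz)
    {
        // Re-uses a previously stored ticket when available, otherwise forces the chunk loaded
        ForcedChunk chunk = LightingForcedChunkCache.get(world, cx, cz);
        try
        {
            // ... perform lighting work on the chunk here ...
            // Remember the ticket so the next operation skips an unload/load cycle
            LightingForcedChunkCache.store(chunk);
        }
        finally
        {
            chunk.close(); // releases our own reference; the cache keeps its clone alive
        }
    }
}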

View File

@ -0,0 +1,650 @@
package com.volmit.iris.v2.lighting;
import com.bergerkiller.bukkit.common.AsyncTask;
import com.bergerkiller.bukkit.common.bases.IntVector2;
import com.bergerkiller.bukkit.common.bases.IntVector3;
import com.bergerkiller.bukkit.common.config.CompressedDataReader;
import com.bergerkiller.bukkit.common.config.CompressedDataWriter;
import com.bergerkiller.bukkit.common.permissions.NoPermissionException;
import com.bergerkiller.bukkit.common.utils.MathUtil;
import com.bergerkiller.bukkit.common.utils.ParseUtil;
import com.bergerkiller.bukkit.common.utils.StringUtil;
import com.bergerkiller.bukkit.common.utils.WorldUtil;
import com.bergerkiller.bukkit.common.wrappers.LongHashSet;
import com.bergerkiller.bukkit.common.wrappers.LongHashSet.LongIterator;
import com.volmit.iris.Iris;
import org.bukkit.*;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.logging.Level;
public class LightingService extends AsyncTask {
private static final Set<RecipientWhenDone> recipientsForDone = new HashSet<RecipientWhenDone>();
private static final LinkedList<LightingTask> tasks = new LinkedList<LightingTask>();
private static final int PENDING_WRITE_INTERVAL = 10;
private static AsyncTask fixThread = null;
private static int taskChunkCount = 0;
private static int taskCounter = 0;
private static boolean pendingFileInUse = false;
private static LightingTask currentTask;
private static boolean paused = false;
private static boolean lowOnMemory = false;
/**
* Gets whether this service is currently processing something
*
* @return True if processing, False if not
*/
public static boolean isProcessing() {
return fixThread != null;
}
/**
* Starts or stops the processing service.
* Stopping the service does not abort instantly; the current task is finished first.
*
* @param process True to start processing, False to stop
*/
public static void setProcessing(boolean process) {
if (process == isProcessing()) {
return;
}
if (process) {
fixThread = new LightingService().start(true);
} else {
// Fix thread is running, abort
AsyncTask.stop(fixThread);
fixThread = null;
}
}
/**
* Gets whether execution is paused, and pending tasks are not being processed
*
* @return True if paused
*/
public static boolean isPaused() {
return paused;
}
/**
* Sets whether execution is paused.
*
* @param pause state to set to
*/
public static void setPaused(boolean pause) {
if (paused != pause) {
paused = pause;
}
}
/**
* Gets the status of the currently processed task
*
* @return current task status
*/
public static String getCurrentStatus() {
final LightingTask current = currentTask;
if (lowOnMemory) {
return ChatColor.RED + "Too low on available memory (paused)";
} else if (current == null) {
return "Finished.";
} else {
return current.getStatus();
}
}
/**
* Gets the time the currently processing task was started. If no task is being processed,
* an empty result is returned. If processing didn't start yet, the value will be 0.
*
* @return time when the current task was started
*/
public static java.util.OptionalLong getCurrentStartTime() {
final LightingTask current = currentTask;
return (current == null) ? OptionalLong.empty() : OptionalLong.of(current.getTimeStarted());
}
public static void addRecipient(CommandSender sender) {
synchronized (recipientsForDone) {
recipientsForDone.add(new RecipientWhenDone(sender));
}
}
public static void scheduleWorld(final World world) {
ScheduleArguments args = new ScheduleArguments();
args.setWorld(world);
args.setEntireWorld();
schedule(args);
}
/**
* Schedules a square chunk area for lighting fixing
*
* @param world the chunks are in
* @param middleX middle chunk x-coordinate
* @param middleZ middle chunk z-coordinate
* @param radius radius around the middle, in chunks
*/
public static void scheduleArea(World world, int middleX, int middleZ, int radius) {
ScheduleArguments args = new ScheduleArguments();
args.setWorld(world);
args.setChunksAround(middleX, middleZ, radius);
schedule(args);
}
@Deprecated
public static void schedule(World world, Collection<IntVector2> chunks) {
ScheduleArguments args = new ScheduleArguments();
args.setWorld(world);
args.setChunks(chunks);
schedule(args);
}
public static void schedule(World world, LongHashSet chunks) {
ScheduleArguments args = new ScheduleArguments();
args.setWorld(world);
args.setChunks(chunks);
schedule(args);
}
public static void schedule(ScheduleArguments args) {
// World not allowed to be null
if (args.getWorld() == null) {
throw new IllegalArgumentException("Schedule arguments 'world' is null");
}
// If no chunks specified, entire world
if (args.isEntireWorld()) {
LightingTaskWorld task = new LightingTaskWorld(args.getWorld());
task.applyOptions(args);
schedule(task);
return;
}
// If 34x34 chunks or fewer are requested, schedule them as one task
// In that case, be sure to only schedule chunks that actually exist
// This prevents generating new chunks as part of this command
LongHashSet chunks = args.getChunks();
if (chunks.size() <= (34*34)) {
LongHashSet chunks_filtered = new LongHashSet(chunks.size());
Set<IntVector2> region_coords_filtered = new HashSet<IntVector2>();
LongIterator iter = chunks.longIterator();
if (args.getLoadedChunksOnly()) {
// Remove coordinates of chunks that aren't loaded
while (iter.hasNext()) {
long chunk = iter.next();
int cx = MathUtil.longHashMsw(chunk);
int cz = MathUtil.longHashLsw(chunk);
if (WorldUtil.isLoaded(args.getWorld(), cx, cz)) {
chunks_filtered.add(chunk);
region_coords_filtered.add(new IntVector2(
WorldUtil.chunkToRegionIndex(cx),
WorldUtil.chunkToRegionIndex(cz)));
}
}
} else if (true) {
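// Note: the 'if (true)' keeps the stricter neighbour-aware check below enabled;
// the plain single-chunk availability check in the final else-branch is currently unreachable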
// Remove coordinates of chunks that don't actually exist (avoid generating new chunks)
// isChunkAvailable isn't very fast, but fast enough below this threshold of chunks
// To check for border chunks, we verify that the chunk and all chunks in the surrounding 5x5 area are available
Map<IntVector2, Boolean> tmp = new HashMap<>();
while (iter.hasNext()) {
long chunk = iter.next();
int cx = MathUtil.longHashMsw(chunk);
int cz = MathUtil.longHashLsw(chunk);
boolean fully_loaded = true;
for (int dx = -2; dx <= 2 && fully_loaded; dx++) {
for (int dz = -2; dz <= 2 && fully_loaded; dz++) {
IntVector2 pos = new IntVector2(cx + dx, cz + dz);
fully_loaded &= tmp.computeIfAbsent(pos, p -> WorldUtil.isChunkAvailable(args.getWorld(), p.x, p.z)).booleanValue();
}
}
if (fully_loaded) {
chunks_filtered.add(chunk);
region_coords_filtered.add(new IntVector2(
WorldUtil.chunkToRegionIndex(cx),
WorldUtil.chunkToRegionIndex(cz)));
}
}
} else {
// Remove coordinates of chunks that don't actually exist (avoid generating new chunks)
// isChunkAvailable isn't very fast, but fast enough below this threshold of chunks
while (iter.hasNext()) {
long chunk = iter.next();
int cx = MathUtil.longHashMsw(chunk);
int cz = MathUtil.longHashLsw(chunk);
if (WorldUtil.isChunkAvailable(args.getWorld(), cx, cz)) {
chunks_filtered.add(chunk);
region_coords_filtered.add(new IntVector2(
WorldUtil.chunkToRegionIndex(cx),
WorldUtil.chunkToRegionIndex(cz)));
}
}
}
// For all filtered chunk coordinates, compute regions
int[] regionYCoordinates;
{
Set<IntVector3> regions = WorldUtil.getWorldRegions3ForXZ(args.getWorld(), region_coords_filtered);
// Simplify to just the unique Y-coordinates
regionYCoordinates = regions.stream().mapToInt(r -> r.y).sorted().distinct().toArray();
}
// Schedule it
if (!chunks_filtered.isEmpty()) {
LightingTaskBatch task = new LightingTaskBatch(args.getWorld(), regionYCoordinates, chunks_filtered);
task.applyOptions(args);
schedule(task);
}
return;
}
// Too many chunks requested. Separate the operations per region file with small overlap.
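// A region file covers 32x32 chunks; each batch below spans 34x34 chunks (-1 to 32 relative to the
// region origin), giving a one-chunk border of overlap with neighbouring regions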
FlatRegionInfoMap regions;
if (args.getLoadedChunksOnly()) {
regions = FlatRegionInfoMap.createLoaded(args.getWorld());
} else {
regions = FlatRegionInfoMap.create(args.getWorld());
}
LongIterator iter = chunks.longIterator();
LongHashSet scheduledRegions = new LongHashSet();
while (iter.hasNext()) {
long first_chunk = iter.next();
int first_chunk_x = MathUtil.longHashMsw(first_chunk);
int first_chunk_z = MathUtil.longHashLsw(first_chunk);
FlatRegionInfo region = regions.getRegionAtChunk(first_chunk_x, first_chunk_z);
if (region == null || scheduledRegions.contains(region.rx, region.rz)) {
continue; // Does not exist or already scheduled
}
if (!region.containsChunk(first_chunk_x, first_chunk_z)) {
continue; // Chunk does not exist in world (not generated yet) or isn't loaded (loaded chunks only option)
}
// Collect all the region Y coordinates used for this region and the neighbouring regions
// This makes sure we find all chunk slices we might need on an infinite height world
int[] region_y_coordinates = regions.getRegionYCoordinatesSelfAndNeighbours(region);
// Collect all chunks to process for this region.
// This is the intersection of the 34x34 chunk area around this region, the requested chunk set, and the chunks that exist in the region data
LongHashSet buffer = new LongHashSet();
int rdx, rdz;
for (rdx = -1; rdx < 33; rdx++) {
for (rdz = -1; rdz < 33; rdz++) {
int cx = region.cx + rdx;
int cz = region.cz + rdz;
long chunk_key = MathUtil.longHashToLong(cx, cz);
if (!chunks.contains(chunk_key)) {
continue;
}
if (true) {
// Check the chunk and the surrounding chunks are all present
if (!regions.containsChunkAndNeighbours(cx, cz)) {
continue;
}
} else {
// Only check chunk
if (!regions.containsChunk(cx, cz)) {
continue;
}
}
buffer.add(chunk_key);
}
}
// Schedule the region
if (!buffer.isEmpty()) {
scheduledRegions.add(region.rx, region.rz);
LightingTaskBatch task = new LightingTaskBatch(args.getWorld(), region_y_coordinates, buffer);
task.applyOptions(args);
schedule(task);
}
}
}
public static void schedule(LightingTask task) {
synchronized (tasks) {
tasks.offer(task);
taskChunkCount += task.getChunkCount();
}
setProcessing(true);
}
/**
* Loads the pending chunk batch operations from a save file.
* If such a file exists, those batches are scheduled for processing again.
*/
public static void loadPendingBatches() {
pendingFileInUse = false;
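// Pending-batch persistence is not implemented here; only the in-use flag is reset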
}
/**
* Saves all pending chunk batch operations to a save file.
* If the server, for whatever reason, crashes, it can restore using this file.
*/
public static void savePendingBatches() {
if (pendingFileInUse) {
return;
}
}
/**
* Clears all pending tasks and aborts the task currently being processed
*/
public static void clearTasks() {
synchronized (tasks) {
tasks.clear();
}
final LightingTask current = currentTask;
if (current != null) {
current.abort();
}
synchronized (tasks) {
tasks.clear();
}
currentTask = null;
taskChunkCount = 0;
LightingForcedChunkCache.reset();
}
/**
* Orders this service to abort all tasks, finishing the current task in an orderly fashion.
* This method can only be called from the main Thread.
*/
public static void abort() {
// Finish the current lighting task if available
final LightingTask current = currentTask;
final AsyncTask service = fixThread;
if (service != null && current != null) {
setProcessing(false);
current.abort();
}
// Clear lighting tasks
synchronized (tasks) {
if (current != null) {
tasks.addFirst(current);
}
savePendingBatches();
clearTasks();
}
}
/**
* Gets the amount of chunks that are still faulty
*
* @return faulty chunk count
*/
public static int getChunkFaults() {
final LightingTask current = currentTask;
return taskChunkCount + (current == null ? 0 : current.getChunkCount());
}
@Override
public void run() {
// While paused, do nothing
while (paused) {
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
e.printStackTrace();
}
synchronized (tasks) {
if (tasks.isEmpty()) {
break; // Stop processing.
}
}
if (fixThread.isStopRequested()) {
return;
}
}
synchronized (tasks) {
currentTask = tasks.poll();
}
if (currentTask == null) {
// No more tasks, end this thread
// Messages
final String message = ChatColor.GREEN + "All lighting operations are completed.";
synchronized (recipientsForDone) {
for (RecipientWhenDone recipient : recipientsForDone) {
CommandSender recip = recipient.player_name == null ?
Bukkit.getConsoleSender() : Bukkit.getPlayer(recipient.player_name);
if (recip != null) {
String timeStr = LightingUtil.formatDuration(System.currentTimeMillis() - recipient.timeStarted);
recip.sendMessage(message + ChatColor.WHITE + " (Took " + timeStr + ")");
}
}
recipientsForDone.clear();
}
// Stop task and abort
taskCounter = 0;
setProcessing(false);
LightingForcedChunkCache.reset();
savePendingBatches();
return;
} else {
// Write to file?
if (taskCounter++ >= PENDING_WRITE_INTERVAL) {
taskCounter = 0;
// Start saving on another thread (IO access is slow...)
new AsyncTask() {
public void run() {
savePendingBatches();
}
}.start();
// Save the world of the current task being processed
}
// Subtract task from the task count
taskChunkCount -= currentTask.getChunkCount();
// Process the task
try {
currentTask.process();
} catch (Throwable t) {
t.printStackTrace();
Iris.error("Failed to process task: " + currentTask.getStatus());
}
}
}
private static long calcAvailableMemory(Runtime runtime) {
long max = runtime.maxMemory();
if (max == Long.MAX_VALUE) {
return Long.MAX_VALUE;
} else {
long used = (runtime.totalMemory() - runtime.freeMemory());
return (max - used);
}
}
public static class ScheduleArguments {
private World world;
private String worldName;
private LongHashSet chunks;
private boolean debugMakeCorrupted = false;
private boolean loadedChunksOnly = false;
private int radius = Bukkit.getServer().getViewDistance();
public boolean getDebugMakeCorrupted() {
return this.debugMakeCorrupted;
}
public boolean getLoadedChunksOnly() {
return this.loadedChunksOnly;
}
public int getRadius() {
return this.radius;
}
public boolean isEntireWorld() {
return this.chunks == null;
}
public World getWorld() {
return this.world;
}
public String getWorldName() {
return this.worldName;
}
public LongHashSet getChunks() {
return this.chunks;
}
/**
* Sets the world itself. Automatically updates the world name.
*
* @param world world to perform operations on
* @return these arguments
*/
public ScheduleArguments setWorld(World world) {
this.world = world;
this.worldName = world.getName();
return this;
}
/**
* Sets the world name to perform operations on.
* If no world by this name exists, the stored world will be null.
*
* @param worldName name of the world to perform operations on
* @return these arguments
*/
public ScheduleArguments setWorldName(String worldName) {
this.world = Bukkit.getWorld(worldName);
this.worldName = worldName;
return this;
}
public ScheduleArguments setEntireWorld() {
this.chunks = null;
return this;
}
public ScheduleArguments setDebugMakeCorrupted(boolean debug) {
this.debugMakeCorrupted = debug;
return this;
}
public ScheduleArguments setLoadedChunksOnly(boolean loadedChunksOnly) {
this.loadedChunksOnly = loadedChunksOnly;
return this;
}
public ScheduleArguments setRadius(int radius) {
this.radius = radius;
return this;
}
public ScheduleArguments setChunksAround(Location location, int radius) {
this.setWorld(location.getWorld());
return this.setChunksAround(location.getBlockX()>>4, location.getBlockZ()>>4, radius);
}
public ScheduleArguments setChunksAround(int middleX, int middleZ, int radius) {
this.setRadius(radius);
LongHashSet chunks_hashset = new LongHashSet((2*radius)*(2*radius));
for (int a = -radius; a <= radius; a++) {
for (int b = -radius; b <= radius; b++) {
int cx = middleX + a;
int cz = middleZ + b;
chunks_hashset.add(cx, cz);
}
}
return this.setChunks(chunks_hashset);
}
/**
* Sets the chunks to a cuboid area of chunks.
* Make sure the minimum chunk coordinates are less than or equal to
* the maximum chunk coordinates.
*
* @param minChunkX Minimum chunk x-coordinate (inclusive)
* @param minChunkZ Minimum chunk z-coordinate (inclusive)
* @param maxChunkX Maximum chunk x-coordinate (inclusive)
* @param maxChunkZ Maximum chunk z-coordinate (inclusive)
* @return this
*/
public ScheduleArguments setChunkFromTo(int minChunkX, int minChunkZ, int maxChunkX, int maxChunkZ) {
int num_dx = (maxChunkX - minChunkX) + 1;
int num_dz = (maxChunkZ - minChunkZ) + 1;
if (num_dx <= 0 || num_dz <= 0) {
return this.setChunks(new LongHashSet()); // nothing
}
LongHashSet chunks_hashset = new LongHashSet(num_dx * num_dz);
for (int chunkX = minChunkX; chunkX <= maxChunkX; chunkX++) {
for (int chunkZ = minChunkZ; chunkZ <= maxChunkZ; chunkZ++) {
chunks_hashset.add(chunkX, chunkZ);
}
}
return this.setChunks(chunks_hashset);
}
public ScheduleArguments setChunks(Collection<IntVector2> chunks) {
LongHashSet chunks_hashset = new LongHashSet(chunks.size());
for (IntVector2 coord : chunks) {
chunks_hashset.add(coord.x, coord.z);
}
return this.setChunks(chunks_hashset);
}
public ScheduleArguments setChunks(LongHashSet chunks) {
this.chunks = chunks;
return this;
}
private boolean checkRadiusPermission(CommandSender sender, int radius) throws NoPermissionException {
return false;
}
/**
* Parses the arguments specified in a command
*
* @param sender who executed the command
* @param args arguments passed to the command
* @return false if the input is incorrect and operations may not proceed
* @throws NoPermissionException
*/
public boolean handleCommandInput(CommandSender sender, String[] args) throws NoPermissionException {
return true;
}
/**
* Creates a new ScheduleArguments instance ready to be configured
*
* @return args
*/
public static ScheduleArguments create()
{
return new ScheduleArguments();
}
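// Illustrative usage only (the 'player' variable is hypothetical, not part of the original code):
// LightingService.schedule(ScheduleArguments.create()
//         .setChunksAround(player.getLocation(), 5)
//         .setLoadedChunksOnly(true));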
}
private static class RecipientWhenDone {
public final String player_name;
public final long timeStarted;
public RecipientWhenDone(CommandSender sender) {
this.player_name = (sender instanceof Player) ? sender.getName() : null;
this.timeStarted = System.currentTimeMillis();
}
}
}

View File

@ -0,0 +1,61 @@
package com.volmit.iris.v2.lighting;
import org.bukkit.World;
/**
* A single task the Lighting Service can handle
*/
public interface LightingTask {
/**
* Gets the world this task is working on
*
* @return task world
*/
World getWorld();
/**
* Gets the amount of chunks this task is going to fix.
* This can be a wild estimate. While processing this amount should be
* updated as well.
*
* @return estimated total chunk count
*/
int getChunkCount();
/**
* Gets a descriptive status of the current task being processed
*
* @return status
*/
String getStatus();
/**
* Gets the timestamp (milliseconds since epoch) when this task was first started.
* If 0 is returned, then the task wasn't started yet.
*
* @return time this task was started
*/
long getTimeStarted();
/**
* Processes this task (called from another thread!)
*/
void process();
/**
* Orders this task to abort
*/
void abort();
/**
* Whether this task can be saved to PendingLight.dat
*
* @return True if it can be saved
*/
boolean canSave();
/**
* Applies additional options from the given schedule arguments
*
* @param args schedule arguments to apply
*/
void applyOptions(LightingService.ScheduleArguments args);
}

View File

@ -0,0 +1,566 @@
package com.volmit.iris.v2.lighting;
import com.bergerkiller.bukkit.common.bases.IntVector2;
import com.bergerkiller.bukkit.common.utils.CommonUtil;
import com.bergerkiller.bukkit.common.utils.LogicUtil;
import com.bergerkiller.bukkit.common.utils.MathUtil;
import com.bergerkiller.bukkit.common.utils.WorldUtil;
import com.bergerkiller.bukkit.common.wrappers.LongHashSet;
import java.util.Arrays;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.logging.Level;
import java.util.stream.Stream;
import com.volmit.iris.Iris;
import com.volmit.iris.IrisSettings;
import org.bukkit.Chunk;
import org.bukkit.World;
/**
* Contains all the chunk coordinates that have to be fixed,
* and handles the full process of this fixing.
* It is literally a batch of chunks being processed.
*/
public class LightingTaskBatch implements LightingTask {
private static boolean DEBUG_LOG = false; // logs performance stats
public final World world;
private final Object chunks_lock = new Object();
private final int[] region_y_coords;
private volatile LightingChunk[] chunks = null;
private volatile long[] chunks_coords;
private boolean done = false;
private boolean aborted = false;
private volatile long timeStarted = 0;
private int numBeingLoaded = 0;
private volatile Stage stage = Stage.LOADING;
private LightingService.ScheduleArguments options = new LightingService.ScheduleArguments();
public LightingTaskBatch(World world, int[] regionYCoordinates, long[] chunkCoordinates) {
this.world = world;
this.region_y_coords = regionYCoordinates;
this.chunks_coords = chunkCoordinates;
}
public LightingTaskBatch(World world, int[] regionYCoordinates, LongHashSet chunkCoordinates) {
this.world = world;
this.region_y_coords = regionYCoordinates;
// Turn contents of the long hash set into an easily sortable IntVector2[] array
IntVector2[] coordinates = new IntVector2[chunkCoordinates.size()];
{
LongHashSet.LongIterator iter = chunkCoordinates.longIterator();
for (int i = 0; iter.hasNext(); i++) {
long coord = iter.next();
coordinates[i] = new IntVector2(MathUtil.longHashMsw(coord), MathUtil.longHashLsw(coord));
}
}
// Sort the array along the axis. This makes chunk loading more efficient.
Arrays.sort(coordinates, (a, b) -> {
int comp = Integer.compare(a.x, b.x);
if (comp == 0) {
comp = Integer.compare(a.z, b.z);
}
return comp;
});
// Turn back into a long[] array for memory efficiency
this.chunks_coords = Stream.of(coordinates).mapToLong(c -> MathUtil.longHashToLong(c.x, c.z)).toArray();
}
@Override
public World getWorld() {
return world;
}
/**
* Gets the X and Z-coordinates of all the chunk columns to process.
* The coordinates are combined into a single Long, which can be decoded
* using {@link MathUtil#longHashMsw(long)} for X and {@link MathUtil#longHashLsw(long)} for Z.
*
* @return chunk coordinates
*/
public long[] getChunks() {
synchronized (this.chunks_lock) {
LightingChunk[] chunks = this.chunks;
if (chunks != null) {
long[] coords = new long[chunks.length];
for (int i = 0; i < chunks.length; i++) {
coords[i] = MathUtil.longHashToLong(chunks[i].chunkX, chunks[i].chunkZ);
}
return coords;
} else if (this.chunks_coords != null) {
return this.chunks_coords;
} else {
return new long[0];
}
}
}
/**
* Gets the Y-coordinates of all the regions to look for chunk data. A region stores 32 chunk
* slices vertically, and goes up/down 512 blocks every coordinate increase/decrease.
*
* @return region Y-coordinates
*/
public int[] getRegionYCoordinates() {
return this.region_y_coords;
}
@Override
public int getChunkCount() {
synchronized (this.chunks_lock) {
if (this.chunks == null) {
return this.done ? 0 : this.chunks_coords.length;
} else {
int faults = 0;
for (LightingChunk chunk : this.chunks) {
if (chunk.hasFaults()) {
faults++;
}
}
return faults;
}
}
}
@Override
public long getTimeStarted() {
return this.timeStarted;
}
private static final class BatchChunkInfo {
public final int cx;
public final int cz;
public final int count;
public BatchChunkInfo(int cx, int cz, int count) {
this.cx = cx;
this.cz = cz;
this.count = count;
}
}
public BatchChunkInfo getAverageChunk() {
int count = 0;
long cx = 0;
long cz = 0;
synchronized (this.chunks_lock) {
if (this.chunks != null) {
count = this.chunks.length;
for (LightingChunk chunk : this.chunks) {
cx += chunk.chunkX;
cz += chunk.chunkZ;
}
} else if (this.chunks_coords != null) {
count = this.chunks_coords.length;
for (long chunk : this.chunks_coords) {
cx += MathUtil.longHashMsw(chunk);
cz += MathUtil.longHashLsw(chunk);
}
} else {
return null;
}
}
if (count > 0) {
cx /= count;
cz /= count;
}
return new BatchChunkInfo((int) cx, (int) cz, count);
}
@Override
public String getStatus() {
BatchChunkInfo chunk = this.getAverageChunk();
if (chunk != null) {
String postfix = " chunks near " +
"x=" + (chunk.cx*16) + " z=" + (chunk.cz*16);
if (this.stage == Stage.LOADING) {
synchronized (this.chunks_lock) {
if (this.chunks != null) {
int num_loaded = 0;
for (LightingChunk lc : this.chunks) {
if (!lc.forcedChunk.isNone() && lc.forcedChunk.getChunkAsync().isDone()) {
num_loaded++;
}
}
return "Loaded " + num_loaded + "/" + chunk.count + postfix;
}
}
} else if (this.stage == Stage.APPLYING) {
synchronized (this.chunks_lock) {
if (this.chunks != null) {
int num_saved = 0;
for (LightingChunk lc : this.chunks) {
if (lc.isApplied) {
num_saved++;
}
}
return "Saved " + num_saved + "/" + chunk.count + postfix;
}
}
}
return "Cleaning " + chunk.count + postfix;
} else {
return done ? "Done" : "No Data";
}
}
private String getShortStatus() {
BatchChunkInfo chunk = this.getAverageChunk();
if (chunk != null) {
return "[x=" + (chunk.cx*16) + " z=" + (chunk.cz*16) + " count=" + chunk.count + "]";
} else {
return "[Unknown]";
}
}
private boolean waitForCheckAborted(CompletableFuture<?> future) {
while (!aborted) {
try {
future.get(200, TimeUnit.MILLISECONDS);
return true;
} catch (InterruptedException | TimeoutException e1) {
// Ignore
} catch (ExecutionException ex) {
ex.printStackTrace();
Iris.error("Error while processing");
return false;
}
}
return false;
}
private void tryLoadMoreChunks(final CompletableFuture<Void>[] chunkFutures) {
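// Keeps up to Iris.getThreadCount() chunk loads in flight; every completed load
// calls back into this method to start the next one until all chunks are loading or loaded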
if (this.aborted) {
return;
}
int i = 0;
while (true) {
// While synchronized, pick the next chunk to load
LightingChunk nextChunk = null;
CompletableFuture<Void> nextChunkFuture = null;
synchronized (chunks_lock) {
for (; i < chunks.length && numBeingLoaded < Iris.getThreadCount(); i++) {
LightingChunk lc = chunks[i];
if (lc.loadingStarted) {
continue; // Already (being) loaded
}
// Pick it
numBeingLoaded++;
lc.loadingStarted = true;
nextChunk = lc;
nextChunkFuture = chunkFutures[i];
break;
}
}
// No more chunks to load / capacity reached
if (nextChunk == null) {
break;
}
// This shouldn't happen, but check just in case
if (nextChunkFuture.isDone()) {
continue;
}
// Outside of the lock, start loading the next chunk
final CompletableFuture<Void> f_nextChunkFuture = nextChunkFuture;
nextChunk.forcedChunk.move(LightingForcedChunkCache.get(world, nextChunk.chunkX, nextChunk.chunkZ));
nextChunk.forcedChunk.getChunkAsync().whenComplete((chunk, t) -> {
synchronized (chunks_lock) {
numBeingLoaded--;
}
f_nextChunkFuture.complete(null);
tryLoadMoreChunks(chunkFutures);
});
}
}
@SuppressWarnings("unchecked")
private CompletableFuture<Void> loadChunks() {
// For every LightingChunk, make a completable future
// Once all these futures are resolved the returned completable future resolves
CompletableFuture<Void>[] chunkFutures;
synchronized (this.chunks_lock) {
chunkFutures = new CompletableFuture[this.chunks.length];
}
for (int i = 0; i < chunkFutures.length; i++) {
chunkFutures[i] = new CompletableFuture<Void>();
}
// Start loading up to Iris.getThreadCount() chunks right now
// When a callback for a chunk load completes, we start loading additional chunks
tryLoadMoreChunks(chunkFutures);
return CompletableFuture.allOf(chunkFutures);
}
@Override
public void process() {
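// Overall pipeline: force-load all chunks -> fill lighting data on the main thread ->
// link 16x16x16 cube neighbours -> fix (initialize + spread) -> apply results back to the world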
// Begin
this.stage = Stage.LOADING;
this.timeStarted = System.currentTimeMillis();
// Initialize lighting chunks
synchronized (this.chunks_lock) {
LightingChunk[] chunks_new = new LightingChunk[this.chunks_coords.length];
this.done = false;
int chunkIdx = 0;
for (long longCoord : this.chunks_coords) {
int x = MathUtil.longHashMsw(longCoord);
int z = MathUtil.longHashLsw(longCoord);
chunks_new[chunkIdx++] = new LightingChunk(this.world, x, z);
if (this.aborted) {
return;
}
}
// Update fields. We can remove the coordinates to free memory.
this.chunks = chunks_new;
this.chunks_coords = null;
}
// Check aborted
if (aborted) {
return;
}
// Load all the chunks. Wait for loading to finish.
// Regularly check that this task is not aborted
CompletableFuture<Void> loadChunksFuture = this.loadChunks();
if (!waitForCheckAborted(loadChunksFuture)) {
return;
}
// Causes all chunks in cache not used for this task to unload
// All chunks of this task are put into the cache, instead
LightingForcedChunkCache.reset();
for (LightingChunk lc : LightingTaskBatch.this.chunks) {
LightingForcedChunkCache.store(lc.forcedChunk);
}
// All chunks that can be loaded, are now loaded.
// Some chunks may have failed to be loaded, get rid of those now!
// To avoid massive spam, only show the average x/z coordinates of the chunks affected
synchronized (this.chunks_lock) {
long failed_chunk_avg_x = 0;
long failed_chunk_avg_z = 0;
int failed_chunk_count = 0;
LightingChunk[] new_chunks = this.chunks;
for (int i = new_chunks.length-1; i >= 0; i--) {
LightingChunk lc = new_chunks[i];
if (lc.forcedChunk.getChunkAsync().isCompletedExceptionally()) {
failed_chunk_avg_x += lc.chunkX;
failed_chunk_avg_z += lc.chunkZ;
failed_chunk_count++;
new_chunks = LogicUtil.removeArrayElement(new_chunks, i);
}
}
this.chunks = new_chunks;
// Tell all the (remaining) chunks about other neighbouring chunks before initialization
for (LightingChunk lc : new_chunks) {
for (LightingChunk neigh : new_chunks) {
lc.notifyAccessible(neigh);
}
}
// Log when chunks fail to be loaded
if (failed_chunk_count > 0) {
failed_chunk_avg_x = ((failed_chunk_avg_x / failed_chunk_count) << 4);
failed_chunk_avg_z = ((failed_chunk_avg_z / failed_chunk_count) << 4);
Iris.error("Failed to load " + failed_chunk_count + " chunks near " +
"world=" + world.getName() + " x=" + failed_chunk_avg_x + " z=" + failed_chunk_avg_z);
}
}
// Schedule, on the main thread, to fill all the loaded chunks with data
CompletableFuture<Void> chunkFillFuture = CompletableFuture.runAsync(() -> {
synchronized (this.chunks_lock) {
for (LightingChunk lc : chunks) {
lc.fill(lc.forcedChunk.getChunk(), region_y_coords);
}
}
}, CommonUtil.getPluginExecutor(Iris.instance));
if (!waitForCheckAborted(chunkFillFuture)) {
return;
}
// Now that all chunks we can process are filled, let all the 16x16x16 cubes know of their neighbors
// This neighboring data is only used during the fix() (initialize + spread) phase
synchronized (this.chunks_lock) {
for (LightingChunk lc : chunks) {
lc.detectCubeNeighbors();
}
}
// Fix
this.stage = Stage.FIXING;
fix();
if (this.aborted) {
return;
}
// Apply and wait for it to be finished
// Wait in 200ms intervals to allow for aborting
// After 2 minutes of inactivity, stop waiting and consider applying failed
this.stage = Stage.APPLYING;
try {
CompletableFuture<Void> future = apply();
int max_num_of_waits = (5*120);
while (true) {
if (--max_num_of_waits == 0) {
Iris.error("Failed to apply lighting data for " + getShortStatus() + ": Timeout");
break;
}
try {
future.get(200, TimeUnit.MILLISECONDS);
break;
} catch (TimeoutException e) {
if (this.aborted) {
return;
}
}
}
} catch (InterruptedException e) {
// Ignore
} catch (ExecutionException e) {
e.printStackTrace();
Iris.error("Failed to apply lighting data for " + getShortStatus());
}
this.done = true;
synchronized (this.chunks_lock) {
this.chunks = null;
}
}
@Override
public void abort() {
this.aborted = true;
// Close chunks kept loaded
LightingChunk[] chunks;
synchronized (this.chunks_lock) {
chunks = this.chunks;
}
if (chunks != null) {
for (LightingChunk lc : chunks) {
lc.forcedChunk.close();
}
}
}
/**
* Starts applying the new data to the world.
* This is done in several ticks on the main thread.
* The completable future is resolved when applying is finished.
*/
public CompletableFuture<Void> apply() {
// Apply data to chunks and unload if needed
LightingChunk[] chunks = LightingTaskBatch.this.chunks;
CompletableFuture<?>[] applyFutures = new CompletableFuture[chunks.length];
for (int i = 0; i < chunks.length; i++) {
LightingChunk lc = chunks[i];
Chunk bchunk = lc.forcedChunk.getChunk();
// Save to chunk
applyFutures[i] = lc.saveToChunk(bchunk).whenComplete((changed, t) -> {
if (t != null) {
t.printStackTrace();
} else if (changed.booleanValue()) {
WorldUtil.queueChunkSendLight(world, lc.chunkX, lc.chunkZ);
}
// Closes our forced chunk, may cause the chunk to now unload
lc.forcedChunk.close();
});
}
return CompletableFuture.allOf(applyFutures);
}
/**
* Performs the (slow) fixing procedure (call from another thread)
*/
public void fix() {
// Initialize light
for (LightingCategory category : LightingCategory.values()) {
for (LightingChunk chunk : chunks) {
category.initialize(chunk);
if (this.aborted) {
return;
}
}
}
// Skip spread phase when debug mode is active
if (this.options.getDebugMakeCorrupted()) {
return;
}
// Before spreading, change the opacity values to have a minimum of 1
// Spreading always loses at least one level of light per block
// This isn't done during initialization, because the original opacity
// matters there for finding the first opacity>0 block for sky light.
for (LightingChunk chunk : chunks) {
for (LightingCube section : chunk.getSections()) {
//TODO: Maybe build something into BKCommonLib for this
int x, y, z;
for (y = 0; y < 16; y++) {
for (z = 0; z < 16; z++) {
for (x = 0; x < 16; x++) {
if (section.opacity.get(x, y, z) == 0) {
section.opacity.set(x, y, z, 1);
}
}
}
}
}
}
// Spread (timed, for debug)
boolean hasFaults;
long startTime = System.currentTimeMillis();
int totalLoops = 0;
do {
hasFaults = false;
for (LightingChunk chunk : chunks) {
int count = chunk.spread();
totalLoops += count;
hasFaults |= count > 0;
}
} while (hasFaults && !this.aborted);
long duration = System.currentTimeMillis() - startTime;
if (DEBUG_LOG) {
System.out.println("Processed " + totalLoops + " in " + duration + " ms");
}
}
@Override
public void applyOptions(LightingService.ScheduleArguments args) {
this.options = args;
}
@Override
public boolean canSave() {
return !this.options.getLoadedChunksOnly() && !this.options.getDebugMakeCorrupted();
}
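// Processing stages, in order: LOADING (force-load chunks), FIXING (initialize and spread light),
// APPLYING (write the results back and queue light updates to players)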
private static enum Stage {
LOADING, FIXING, APPLYING
}
}

View File

@ -0,0 +1,173 @@
package com.volmit.iris.v2.lighting;
import com.bergerkiller.bukkit.common.utils.CommonUtil;
import com.bergerkiller.bukkit.common.wrappers.LongHashSet;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import org.bukkit.World;
public class LightingTaskWorld implements LightingTask {
private static final int ASSUMED_CHUNKS_PER_REGION = 34 * 34;
private final World world;
private volatile FlatRegionInfoMap regions = null;
private volatile int regionCountLoaded;
private volatile int chunkCount;
private volatile long timeStarted;
private volatile boolean aborted;
private LightingService.ScheduleArguments options = new LightingService.ScheduleArguments();
public LightingTaskWorld(World world) {
this.world = world;
this.regionCountLoaded = 0;
this.aborted = false;
this.chunkCount = 0;
this.timeStarted = 0;
}
@Override
public World getWorld() {
return this.world;
}
@Override
public int getChunkCount() {
return chunkCount;
}
@Override
public long getTimeStarted() {
return this.timeStarted;
}
@Override
public String getStatus() {
if (regions == null) {
return "Reading available regions from world " + getWorld().getName();
} else {
return "Reading available chunks from world " + getWorld().getName() + " (region " + (regionCountLoaded+1) + "/" + regions.getRegionCount() + ")";
}
}
@Override
public void process() {
// Load regions on the main thread
// TODO: Can use main thread executor instead
this.timeStarted = System.currentTimeMillis();
final CompletableFuture<Void> regionsLoadedFuture = new CompletableFuture<Void>();
CommonUtil.nextTick(() -> {
try {
if (this.options.getLoadedChunksOnly()) {
this.regions = FlatRegionInfoMap.createLoaded(this.getWorld());
this.regionCountLoaded = this.regions.getRegionCount();
this.chunkCount = 0;
for (FlatRegionInfo region : this.regions.getRegions()) {
this.chunkCount += region.getChunkCount();
}
} else {
this.regions = FlatRegionInfoMap.create(this.getWorld());
this.regionCountLoaded = 0;
this.chunkCount = this.regions.getRegionCount() * ASSUMED_CHUNKS_PER_REGION;
}
regionsLoadedFuture.complete(null);
} catch (Throwable ex) {
regionsLoadedFuture.completeExceptionally(ex);
}
});
// Wait until region list is loaded synchronously
try {
regionsLoadedFuture.get();
} catch (InterruptedException ex) {
// Ignore
} catch (ExecutionException ex) {
throw new RuntimeException("Failed to load regions", ex.getCause());
}
// Check aborted
if (this.aborted) {
return;
}
// Start loading all chunks contained in the regions
if (!this.options.getLoadedChunksOnly()) {
for (FlatRegionInfo region : this.regions.getRegions()) {
// Abort handling
if (this.aborted) {
return;
}
// Load and update stats
region.load();
this.chunkCount -= ASSUMED_CHUNKS_PER_REGION - region.getChunkCount();
this.regionCountLoaded++;
}
}
// We now know of all the regions to be processed, convert all of them into tasks
// Use a slightly larger area to avoid cross-region errors
for (FlatRegionInfo region : regions.getRegions()) {
// Abort handling
if (this.aborted) {
return;
}
// If empty, skip
if (region.getChunkCount() == 0) {
continue;
}
// Find region Y-coordinates for this 34x34 section of chunks
int[] region_y_coordinates = regions.getRegionYCoordinatesSelfAndNeighbours(region);
// Reduce count, schedule and clear the buffer
// Put the coordinates that are available
final LongHashSet buffer = new LongHashSet(34*34);
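// The 'if (true)' below keeps the neighbour-aware check enabled; the chunk-only
// check in the else-branch is currently unreachable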
if (true) {
int dx, dz;
for (dx = -1; dx < 33; dx++) {
for (dz = -1; dz < 33; dz++) {
int cx = region.cx + dx;
int cz = region.cz + dz;
if (this.regions.containsChunkAndNeighbours(cx, cz)) {
buffer.add(cx, cz);
}
}
}
} else {
int dx, dz;
for (dx = -1; dx < 33; dx++) {
for (dz = -1; dz < 33; dz++) {
int cx = region.cx + dx;
int cz = region.cz + dz;
if (this.regions.containsChunk(cx, cz)) {
buffer.add(cx, cz);
}
}
}
}
// Schedule and return amount of chunks
this.chunkCount -= buffer.size();
LightingTaskBatch batch_task = new LightingTaskBatch(this.getWorld(), region_y_coordinates, buffer);
batch_task.applyOptions(this.options);
LightingService.schedule(batch_task);
}
}
@Override
public void abort() {
this.aborted = true;
}
@Override
public void applyOptions(LightingService.ScheduleArguments args) {
this.options = args;
}
@Override
public boolean canSave() {
return false;
}
}

View File

@ -0,0 +1,30 @@
package com.volmit.iris.v2.lighting;
import com.bergerkiller.bukkit.common.utils.MathUtil;
/**
* Just some utilities ported from Light Cleaner
*/
public class LightingUtil {
private static TimeDurationFormat timeFormat_hh_mm = new TimeDurationFormat("HH 'hours' mm 'minutes'");
private static TimeDurationFormat timeFormat_mm_ss = new TimeDurationFormat("mm 'minutes' ss 'seconds'");
private static final long SECOND_MILLIS = 1000L;
private static final long MINUTE_MILLIS = 60L * SECOND_MILLIS;
private static final long HOUR_MILLIS = 60L * MINUTE_MILLIS;
private static final long DAY_MILLIS = 24L * HOUR_MILLIS;
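// Illustrative expected outputs (not from the original source):
// formatDuration(5_000)     -> "5.0 seconds"
// formatDuration(90_000)    -> "01 minutes 30 seconds"
// formatDuration(7_500_000) -> "02 hours 05 minutes"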
public static String formatDuration(long duration) {
if (duration < MINUTE_MILLIS) {
return MathUtil.round((double) duration / (double) SECOND_MILLIS, 2) + " seconds";
} else if (duration < HOUR_MILLIS) {
return timeFormat_mm_ss.format(duration);
} else if (duration < DAY_MILLIS) { // the HH token wraps at 24 hours, so switch to the day format from one day onwards
return timeFormat_hh_mm.format(duration);
} else {
long num_days = duration / DAY_MILLIS;
long num_hours = (duration % DAY_MILLIS) / HOUR_MILLIS;
return num_days + " days " + num_hours + " hours";
}
}
}

View File

@ -0,0 +1,44 @@
package com.volmit.iris.v2.lighting;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Locale;
import java.util.TimeZone;
/**
* Formatter for a duration String.
* Can represent a duration in milliseconds as a String.
* Taken from Traincarts (permission granted by same author)<br>
* <br>
* https://github.com/bergerhealer/TrainCarts/blob/master/src/main/java/com/bergerkiller/bukkit/tc/utils/TimeDurationFormat.java
*/
public class TimeDurationFormat {
private final TimeZone timeZone;
private final SimpleDateFormat sdf;
/**
* Creates a new time duration format. The format accepts the same formatting
* tokens as the Date formatter does.
*
* @param format time format string, e.g. "HH 'hours' mm 'minutes'"
* @throws IllegalArgumentException if the input format is invalid
*/
public TimeDurationFormat(String format) {
if (format == null) {
throw new IllegalArgumentException("Input format should not be null");
}
this.timeZone = TimeZone.getTimeZone("GMT+0");
this.sdf = new SimpleDateFormat(format, Locale.getDefault());
this.sdf.setTimeZone(this.timeZone);
}
/**
* Formats the duration
*
* @param durationMillis duration in milliseconds to format
* @return formatted string
*/
public String format(long durationMillis) {
return this.sdf.format(new Date(durationMillis - this.timeZone.getRawOffset()));
}
}

View File

@ -14,6 +14,10 @@ import com.volmit.iris.v2.scaffold.parallax.ParallaxAccess;
public interface Engine extends DataProvider
{
public void close();
public EngineWorldManager getWorldManager();
public void setParallelism(int parallelism);
public int getParallelism();

View File

@ -1,7 +0,0 @@
package com.volmit.iris.v2.scaffold.engine;
public abstract class EngineAssignedStructure extends EngineAssignedComponent implements EngineStructure {
public EngineAssignedStructure(Engine engine) {
super(engine, "Structure");
}
}

View File

@ -0,0 +1,7 @@
package com.volmit.iris.v2.scaffold.engine;
public abstract class EngineAssignedStructureManager extends EngineAssignedComponent implements EngineStructureManager {
public EngineAssignedStructureManager(Engine engine) {
super(engine, "Structure");
}
}

View File

@ -0,0 +1,73 @@
package com.volmit.iris.v2.scaffold.engine;
import com.volmit.iris.Iris;
import org.bukkit.Bukkit;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.block.BlockBreakEvent;
import org.bukkit.event.block.BlockPlaceEvent;
import org.bukkit.event.entity.EntitySpawnEvent;
import org.bukkit.event.world.WorldSaveEvent;
import org.bukkit.event.world.WorldUnloadEvent;
public abstract class EngineAssignedWorldManager extends EngineAssignedComponent implements EngineWorldManager, Listener {
private final int taskId;
public EngineAssignedWorldManager(Engine engine) {
super(engine, "World");
Iris.instance.registerListener(this);
taskId = Bukkit.getScheduler().scheduleSyncRepeatingTask(Iris.instance, this::onTick, 0, 0);
}
@EventHandler
public void on(WorldSaveEvent e)
{
if(e.getWorld().equals(getTarget().getWorld()))
{
onSave();
}
}
@EventHandler
public void on(WorldUnloadEvent e)
{
if(e.getWorld().equals(getTarget().getWorld()))
{
getEngine().close();
}
}
@EventHandler
public void on(EntitySpawnEvent e)
{
if(e.getEntity().getWorld().equals(getTarget().getWorld()))
{
onEntitySpawn(e);
}
}
@EventHandler
public void on(BlockBreakEvent e)
{
if(e.getPlayer().getWorld().equals(getTarget().getWorld()))
{
onBlockBreak(e);
}
}
@EventHandler
public void on(BlockPlaceEvent e)
{
if(e.getPlayer().getWorld().equals(getTarget().getWorld()))
{
onBlockPlace(e);
}
}
@Override
public void close() {
super.close();
Iris.instance.unregisterListener(this);
Bukkit.getScheduler().cancelTask(taskId);
}
}

View File

@ -1,10 +1,13 @@
package com.volmit.iris.v2.scaffold.engine;
import com.volmit.iris.Iris;
import com.volmit.iris.manager.IrisDataManager;
import com.volmit.iris.object.IrisDimension;
import com.volmit.iris.util.RollingSequence;
import com.volmit.iris.v2.generator.IrisComplex;
import com.volmit.iris.v2.scaffold.parallax.ParallaxAccess;
import org.bukkit.Bukkit;
import org.bukkit.event.Listener;
public interface EngineComponent {
public Engine getEngine();
@ -13,6 +16,22 @@ public interface EngineComponent {
public String getName();
default void close()
{
try
{
if(this instanceof Listener)
{
Iris.instance.unregisterListener((Listener) this);
}
}
catch(Throwable ignored)
{
}
}
default double modX(double x)
{
return getEngine().modifyX(x);

View File

@ -1,7 +1,6 @@
package com.volmit.iris.v2.scaffold.engine;
import com.volmit.iris.util.M;
import com.volmit.iris.v2.generator.modifier.IrisCaveModifier;
import com.volmit.iris.v2.scaffold.parallel.MultiBurst;
import org.bukkit.block.Biome;
import org.bukkit.block.data.BlockData;
@ -16,7 +15,7 @@ public interface EngineFramework extends DataProvider
public IrisComplex getComplex();
public EngineParallax getEngineParallax();
public EngineParallaxManager getEngineParallax();
default IrisDataManager getData() {
return getComplex().getData();
@ -46,4 +45,6 @@ public interface EngineFramework extends DataProvider
public EngineModifier<BlockData> getDepositModifier();
public EngineModifier<BlockData> getPostModifier();
void close();
}

View File

@ -1,10 +1,7 @@
package com.volmit.iris.v2.scaffold.engine;
import java.lang.reflect.Parameter;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import com.volmit.iris.gen.ParallaxTerrainProvider;
import com.volmit.iris.object.*;
import com.volmit.iris.util.*;
import com.volmit.iris.v2.generator.actuator.IrisTerrainActuator;
@ -23,7 +20,7 @@ import com.volmit.iris.v2.scaffold.parallax.ParallaxAccess;
import com.volmit.iris.v2.scaffold.parallel.BurstExecutor;
import com.volmit.iris.v2.scaffold.parallel.MultiBurst;
public interface EngineParallax extends DataProvider, IObjectPlacer
public interface EngineParallaxManager extends DataProvider, IObjectPlacer
{
public static final BlockData AIR = B.get("AIR");
@ -31,7 +28,7 @@ public interface EngineParallax extends DataProvider, IObjectPlacer
public int getParallaxSize();
public EngineStructure getStructureManager();
public EngineStructureManager getStructureManager();
default EngineFramework getFramework()
{
@ -438,4 +435,9 @@ public interface EngineParallax extends DataProvider, IObjectPlacer
default boolean isDebugSmartBore() {
return getEngine().getDimension().isDebugSmartBore();
}
default void close()
{
}
}

View File

@ -9,7 +9,7 @@ import com.volmit.iris.util.KSet;
import com.volmit.iris.util.RNG;
import com.volmit.iris.v2.scaffold.parallax.ParallaxChunkMeta;
public interface EngineStructure extends EngineComponent
public interface EngineStructureManager extends EngineComponent
{
default void placeStructure(IrisStructurePlacement structure, RNG rngno, int cx, int cz)
{

View File

@ -0,0 +1,23 @@
package com.volmit.iris.v2.scaffold.engine;
import org.bukkit.Chunk;
import org.bukkit.event.block.BlockBreakEvent;
import org.bukkit.event.block.BlockPlaceEvent;
import org.bukkit.event.entity.EntitySpawnEvent;
public interface EngineWorldManager
{
public void close();
public void onEntitySpawn(EntitySpawnEvent e);
public void onTick();
public void onSave();
public void spawnInitialEntities(Chunk chunk);
public void onBlockBreak(BlockBreakEvent e);
public void onBlockPlace(BlockPlaceEvent e);
}

View File

@ -597,6 +597,22 @@ public interface Hunk<T>
return this;
}
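// Iterates every x/y/z position of this hunk on the calling thread and passes the stored value to the consumer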
default Hunk<T> iterateSync(Consumer4<Integer, Integer, Integer, T> c)
{
for(int i = 0; i < getWidth(); i++)
{
for(int j = 0; j < getHeight(); j++)
{
for(int k = 0; k < getDepth(); k++)
{
c.accept(i, j, k, get(i,j,k));
}
}
}
return this;
}
default Hunk<T> iterate(int parallelism, Consumer3<Integer, Integer, Integer> c)
{
compute3D(parallelism, (x, y, z, h) ->

View File

@ -1,5 +1,6 @@
package com.volmit.iris.v2.scaffold.hunk.storage;
import com.volmit.iris.util.Consumer4;
import com.volmit.iris.v2.scaffold.hunk.Hunk;
import com.volmit.iris.util.BlockPosition;
import com.volmit.iris.util.KMap;
@ -7,6 +8,8 @@ import com.volmit.iris.util.KMap;
import lombok.Data;
import lombok.EqualsAndHashCode;
import java.util.Map;
@Data
@EqualsAndHashCode(callSuper = false)
public class MappedHunk<T> extends StorageHunk<T> implements Hunk<T>
@ -25,6 +28,17 @@ public class MappedHunk<T> extends StorageHunk<T> implements Hunk<T>
data.put(new BlockPosition(x, y, z), t);
}
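// Sparse override: only positions that actually hold a value are visited,
// unlike the dense x/y/z iteration of the default implementation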
@Override
public Hunk<T> iterateSync(Consumer4<Integer, Integer, Integer, T> c)
{
for(Map.Entry<BlockPosition, T> g : data.entrySet())
{
c.accept(g.getKey().getX(), g.getKey().getY(), g.getKey().getZ(), g.getValue());
}
return this;
}
@Override
public T getRaw(int x, int y, int z)
{

View File

@ -17,6 +17,7 @@ public class ParallaxChunkMeta {
public static final Function<CompoundTag, HunkIOAdapter<ParallaxChunkMeta>> adapter = (c) -> new PaletteHunkIOAdapter<ParallaxChunkMeta>() {
@Override
public void write(ParallaxChunkMeta parallaxChunkMeta, DataOutputStream dos) throws IOException {
dos.writeBoolean(parallaxChunkMeta.isUpdates());
dos.writeBoolean(parallaxChunkMeta.isGenerated());
dos.writeBoolean(parallaxChunkMeta.isParallaxGenerated());
dos.writeBoolean(parallaxChunkMeta.isObjects());
@ -30,15 +31,17 @@ public class ParallaxChunkMeta {
@Override
public ParallaxChunkMeta read(DataInputStream din) throws IOException {
boolean bb = din.readBoolean();
boolean g = din.readBoolean();
boolean p = din.readBoolean();
boolean o = din.readBoolean();
int min = o ? din.readByte() - Byte.MIN_VALUE : -1;
int max = o ? din.readByte() - Byte.MIN_VALUE : -1;
return new ParallaxChunkMeta(g, p, o, min, max);
return new ParallaxChunkMeta(bb, g, p, o, min, max);
}
};
private boolean updates;
private boolean generated;
private boolean parallaxGenerated;
private boolean objects;
@ -47,6 +50,6 @@ public class ParallaxChunkMeta {
public ParallaxChunkMeta()
{
this(false, false, false, -1, -1);
this(false, false, false, false, -1, -1);
}
}