mirror of https://github.com/VolmitSoftware/Iris.git
synced 2025-07-04 00:46:08 +00:00

implement version header for tectonic plates

This commit is contained in:
parent 90c6457d37
commit f0476fea9b
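
At its core, the commit adds a small version header to each serialized tectonic plate: the writer emits an unsigned varint version right after the plate coordinates, and readers only consume that varint when the file is known to be versioned (the new "pv." files), reporting MISSING for legacy "p." files. Below is a minimal, self-contained sketch of that read/write pattern; the varint helpers stand in for the project's Varint class, and the class name is illustrative only.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    // Sketch of the new plate header. The varint helpers mimic the project's Varint
    // class; PlateHeaderSketch itself is hypothetical and not part of the commit.
    final class PlateHeaderSketch {
        static final int MISSING = -1; // reported for legacy "p." files with no header
        static final int CURRENT = 0;  // version written by the new writer

        static void writeUnsignedVarInt(int value, DataOutputStream out) throws IOException {
            while ((value & ~0x7F) != 0) {            // 7 data bits per byte, high bit = "more"
                out.writeByte((value & 0x7F) | 0x80);
                value >>>= 7;
            }
            out.writeByte(value);
        }

        static int readUnsignedVarInt(DataInputStream in) throws IOException {
            int value = 0, shift = 0, b;
            do {
                b = in.readUnsignedByte();
                value |= (b & 0x7F) << shift;
                shift += 7;
            } while ((b & 0x80) != 0);
            return value;
        }

        static void writeHeader(DataOutputStream dos, int x, int z) throws IOException {
            dos.writeInt(x);
            dos.writeInt(z);
            writeUnsignedVarInt(CURRENT, dos);        // the new version header
        }

        static int readVersion(DataInputStream din, boolean versioned) throws IOException {
            din.readInt();                            // x
            din.readInt();                            // z
            return versioned ? readUnsignedVarInt(din) : MISSING;
        }

        public static void main(String[] args) throws IOException {
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            writeHeader(new DataOutputStream(bytes), 3, -7);
            DataInputStream din = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
            System.out.println(readVersion(din, true)); // prints 0 (CURRENT)
        }
    }

With the version read up front, MantleChunk can then decide per chunk how many flag booleans to expect, as the hunks below show.
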
@@ -113,6 +113,11 @@ shadowJar {
     relocate 'net.kyori', 'com.volmit.iris.util.kyori'
     relocate 'org.bstats', 'com.volmit.util.metrics'
     archiveFileName.set("Iris-${project.version}.jar")
+
+    dependencies {
+        exclude(dependency("org.ow2.asm:asm:"))
+        exclude(dependency("org.jetbrains:"))
+    }
 }
 
 dependencies {
@@ -191,6 +191,7 @@ public class IrisSettings {
         public boolean DoomsdayAnnihilationSelfDestructMode = false;
         public boolean commandSounds = true;
         public boolean debug = false;
+        public boolean dumpMantleOnError = false;
         public boolean disableNMS = false;
         public boolean pluginMetrics = true;
         public boolean splashLogoStartup = true;
@@ -166,7 +166,7 @@ public class CommandDeveloper implements DecreeExecutor {
 
         File tectonicplates = new File(folder, "mantle");
         for (File i : Objects.requireNonNull(tectonicplates.listFiles())) {
-            TectonicPlate.read(maxHeight, i);
+            TectonicPlate.read(maxHeight, i, true);
            c++;
             Iris.info("Loaded count: " + c );
@@ -272,7 +272,8 @@ public class CommandDeveloper implements DecreeExecutor {
             @Param(description = "base IrisWorld") World world,
             @Param(description = "raw TectonicPlate File") String path,
             @Param(description = "Algorithm to Test") String algorithm,
-            @Param(description = "Amount of Tests") int amount) {
+            @Param(description = "Amount of Tests") int amount,
+            @Param(description = "Is versioned", defaultValue = "false") boolean versioned) {
         if (!IrisToolbelt.isIrisWorld(world)) {
             sender().sendMessage(C.RED + "This is not an Iris world. Iris worlds: " + String.join(", ", Bukkit.getServer().getWorlds().stream().filter(IrisToolbelt::isIrisWorld).map(World::getName).toList()));
             return;
@@ -289,7 +290,7 @@ public class CommandDeveloper implements DecreeExecutor {
         service.submit(() -> {
             try {
                 CountingDataInputStream raw = CountingDataInputStream.wrap(new FileInputStream(file));
-                TectonicPlate plate = new TectonicPlate(height, raw);
+                TectonicPlate plate = new TectonicPlate(height, raw, versioned);
                 raw.close();
 
                 double d1 = 0;
@@ -308,7 +309,7 @@ public class CommandDeveloper implements DecreeExecutor {
                     size = tmp.length();
                     start = System.currentTimeMillis();
                     CountingDataInputStream din = createInput(tmp, algorithm);
-                    new TectonicPlate(height, din);
+                    new TectonicPlate(height, din, true);
                     din.close();
                     d2 += System.currentTimeMillis() - start;
                     tmp.delete();
@@ -31,11 +31,11 @@ import com.volmit.iris.util.data.DoubleArrayUtils;
  */
 public class AtomicAverage {
     protected final AtomicDoubleArray values;
-    protected int cursor;
-    private double average;
-    private double lastSum;
-    private boolean dirty;
-    private boolean brandNew;
+    protected transient int cursor;
+    private transient double average;
+    private transient double lastSum;
+    private transient boolean dirty;
+    private transient boolean brandNew;
 
     /**
      * Create an average holder
@@ -57,7 +57,7 @@ public class AtomicAverage {
      *
      * @param i the value
      */
-    public void put(double i) {
+    public synchronized void put(double i) {
 
         try {
             dirty = true;
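
The put() change above guards a compound update: a rolling average has to read the sample being evicted, adjust the running sum, and advance the cursor as one unit, so two unsynchronized callers can interleave those steps and corrupt the sum even though the backing array is atomic (the bookkeeping fields are also marked transient in the real class). A stripped-down sketch of that pattern, assuming a plain double[] in place of AtomicDoubleArray; the class name is illustrative:

    // Simplified rolling average showing why put() must be synchronized: the
    // sum/cursor bookkeeping spans several dependent steps.
    class RollingAverageSketch {
        private final double[] values;
        private int cursor;
        private double lastSum;
        private boolean brandNew = true;

        RollingAverageSketch(int size) {
            values = new double[size];
        }

        public synchronized void put(double i) {
            if (brandNew) {                                // first sample fills the whole window
                java.util.Arrays.fill(values, i);
                lastSum = i * values.length;
                brandNew = false;
                return;
            }
            lastSum = (lastSum - values[cursor]) + i;      // evict the oldest sample, add the new one
            values[cursor] = i;
            cursor = (cursor + 1) % values.length;         // advance the ring-buffer position
        }

        public synchronized double getAverage() {
            return lastSum / values.length;
        }
    }
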
@@ -18,29 +18,67 @@
 
 package com.volmit.iris.util.collection;
 
-import java.util.Collection;
-import java.util.HashSet;
+import org.jetbrains.annotations.NotNull;
 
-public class KSet<T> extends HashSet<T> {
+import java.io.Serializable;
+import java.util.AbstractSet;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.concurrent.ConcurrentHashMap;
+
+public class KSet<T> extends AbstractSet<T> implements Serializable {
     private static final long serialVersionUID = 1L;
+    private final ConcurrentHashMap<T, Boolean> map;
 
     public KSet() {
-        super();
+        map = new ConcurrentHashMap<>();
     }
 
     public KSet(Collection<? extends T> c) {
-        super(c);
+        this();
+        addAll(c);
     }
 
     public KSet(int initialCapacity, float loadFactor) {
-        super(initialCapacity, loadFactor);
+        map = new ConcurrentHashMap<>(initialCapacity, loadFactor);
     }
 
     public KSet(int initialCapacity) {
-        super(initialCapacity);
+        map = new ConcurrentHashMap<>(initialCapacity);
+    }
+
+    @Override
+    public int size() {
+        return map.size();
+    }
+
+    @Override
+    public boolean contains(Object o) {
+        return map.containsKey(o);
+    }
+
+    @Override
+    public boolean add(T t) {
+        return map.putIfAbsent(t, Boolean.TRUE) == null;
+    }
+
+    @Override
+    public boolean remove(Object o) {
+        return map.remove(o) != null;
+    }
+
+    @Override
+    public void clear() {
+        map.clear();
+    }
+
+    @NotNull
+    @Override
+    public Iterator<T> iterator() {
+        return map.keySet().iterator();
     }
 
     public KSet<T> copy() {
-        return new KSet<T>(this);
+        return new KSet<>(this);
     }
 }
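
KSet is no longer a plain HashSet; it now delegates to a ConcurrentHashMap key set via AbstractSet, so concurrent add/remove/iteration is safe and putIfAbsent preserves the usual "add returns false on duplicates" contract. A condensed sketch of the same idea follows; the demo class and harness are hypothetical, and the real KSet keeps its extra constructors and copy().

    import java.util.AbstractSet;
    import java.util.Iterator;
    import java.util.concurrent.ConcurrentHashMap;

    // Condensed version of the rewritten KSet, for illustration only.
    class ConcurrentSetSketch<T> extends AbstractSet<T> {
        private final ConcurrentHashMap<T, Boolean> map = new ConcurrentHashMap<>();

        @Override public int size() { return map.size(); }
        @Override public boolean contains(Object o) { return map.containsKey(o); }
        @Override public boolean add(T t) { return map.putIfAbsent(t, Boolean.TRUE) == null; }
        @Override public boolean remove(Object o) { return map.remove(o) != null; }
        @Override public Iterator<T> iterator() { return map.keySet().iterator(); }

        public static void main(String[] args) throws InterruptedException {
            ConcurrentSetSketch<Integer> set = new ConcurrentSetSketch<>();
            Runnable writer = () -> { for (int i = 0; i < 10_000; i++) set.add(i % 100); };
            Thread a = new Thread(writer), b = new Thread(writer);
            a.start(); b.start(); a.join(); b.join();
            System.out.println(set.size()); // 100, with no corruption under concurrent adds
        }
    }

The same behavior is also available from ConcurrentHashMap.newKeySet(); wrapping the map explicitly keeps KSet's existing constructor signatures and copy() intact.
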
@@ -103,7 +103,7 @@ public class Mantle {
      * @return the file
      */
     public static File fileForRegion(File folder, int x, int z) {
-        return fileForRegion(folder, key(x, z));
+        return fileForRegion(folder, key(x, z), true);
     }
 
     /**
@@ -113,12 +113,28 @@ public class Mantle {
      * @param key the region key
      * @return the file
      */
-    public static File fileForRegion(File folder, Long key) {
-        File f = new File(folder, "p." + key + ".ttp.lz4b");
-        if (!f.getParentFile().exists()) {
-            f.getParentFile().mkdirs();
+    public static File fileForRegion(File folder, Long key, boolean convert) {
+        File f = oldFileForRegion(folder, key);
+        File fv = new File(folder, "pv." + key + ".ttp.lz4b");
+        if (f.exists() && !fv.exists() && convert)
+            return f;
+
+        if (!fv.getParentFile().exists()) {
+            fv.getParentFile().mkdirs();
         }
-        return f;
+        return fv;
+    }
+
+
+    /**
+     * Get the old file for the given region
+     *
+     * @param folder the data folder
+     * @param key the region key
+     * @return the file
+     */
+    public static File oldFileForRegion(File folder, Long key) {
+        return new File(folder, "p." + key + ".ttp.lz4b");
     }
 
     /**
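
The hunk above is the heart of the on-disk migration: legacy plates stay at "p.<key>.ttp.lz4b" while new versioned plates are written to "pv.<key>.ttp.lz4b", and the convert flag controls whether an existing legacy file should still be handed back for reading. A small standalone sketch of that resolution logic (class name hypothetical, naming convention as shown in the diff):

    import java.io.File;

    // Sketch of the legacy-vs-versioned plate file resolution added to Mantle.
    final class PlateFileSketch {
        static File oldFileForRegion(File folder, long key) {
            return new File(folder, "p." + key + ".ttp.lz4b");    // legacy, headerless layout
        }

        static File fileForRegion(File folder, long key, boolean convert) {
            File legacy = oldFileForRegion(folder, key);
            File versioned = new File(folder, "pv." + key + ".ttp.lz4b");
            // For reads (convert == true) an existing legacy plate wins so it can still
            // be parsed with the old layout; writes always target the versioned name.
            if (legacy.exists() && !versioned.exists() && convert) {
                return legacy;
            }
            if (!versioned.getParentFile().exists()) {
                versioned.getParentFile().mkdirs();
            }
            return versioned;
        }
    }

Writers pass convert = false and then delete the legacy file (see the hunks below), so each region migrates to the versioned name the first time it is saved.
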
@@ -210,7 +226,7 @@ public class Mantle {
     @RegionCoordinates
     public boolean hasTectonicPlate(int x, int z) {
         Long k = key(x, z);
-        return loadedRegions.containsKey(k) || fileForRegion(dataFolder, k).exists();
+        return loadedRegions.containsKey(k) || fileForRegion(dataFolder, k, true).exists();
     }
 
     /**
@@ -364,7 +380,8 @@ public class Mantle {
             loadedRegions.forEach((i, plate) -> b.queue(() -> {
                 try {
                     plate.close();
-                    plate.write(fileForRegion(dataFolder, i));
+                    plate.write(fileForRegion(dataFolder, i, false));
+                    oldFileForRegion(dataFolder, i).delete();
                 } catch (Throwable e) {
                     Iris.error("Failed to write Tectonic Plate " + C.DARK_GREEN + Cache.keyX(i) + " " + Cache.keyZ(i));
                     e.printStackTrace();
@@ -479,7 +496,8 @@ public class Mantle {
                 return;
             }
             try {
-                m.write(fileForRegion(dataFolder, id));
+                m.write(fileForRegion(dataFolder, id, false));
+                oldFileForRegion(dataFolder, id).delete();
                 loadedRegions.remove(id);
                 lastUse.remove(id);
                 if (disableClear) toUnload.remove(id);
@@ -577,7 +595,7 @@ public class Mantle {
         if (file.exists()) {
             try {
                 Iris.addPanic("reading.tectonic-plate", file.getAbsolutePath());
-                region = TectonicPlate.read(worldHeight, file);
+                region = TectonicPlate.read(worldHeight, file, file.getName().startsWith("pv."));
 
                 if (region.getX() != x || region.getZ() != z) {
                     Iris.warn("Loaded Tectonic Plate " + x + "," + z + " but read it as " + region.getX() + "," + region.getZ() + "... Assuming " + x + "," + z);
@@ -19,6 +19,7 @@
 package com.volmit.iris.util.mantle;
 
 import com.volmit.iris.Iris;
+import com.volmit.iris.util.data.Varint;
 import com.volmit.iris.util.documentation.ChunkCoordinates;
 import com.volmit.iris.util.function.Consumer4;
 import com.volmit.iris.util.io.CountingDataInputStream;
@@ -74,11 +75,12 @@ public class MantleChunk {
      * @throws IOException shit happens
      * @throws ClassNotFoundException shit happens
      */
-    public MantleChunk(int sectionHeight, CountingDataInputStream din) throws IOException {
+    public MantleChunk(int version, int sectionHeight, CountingDataInputStream din) throws IOException {
         this(sectionHeight, din.readByte(), din.readByte());
         int s = din.readByte();
+        int l = version < 0 ? flags.length() : Varint.readUnsignedVarInt(din);
 
-        for (int i = 0; i < flags.length(); i++) {
+        for (int i = 0; i < flags.length() && i < l; i++) {
             flags.set(i, din.readBoolean() ? 1 : 0);
         }
 
@@ -87,6 +89,10 @@ public class MantleChunk {
             long size = din.readInt();
             if (size == 0) continue;
             long start = din.count();
+            if (i >= sectionHeight) {
+                din.skipTo(start + size);
+                continue;
+            }
 
             try {
                 sections.set(i, Matter.readDin(din));
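
Because every chunk section is length-prefixed, the reader added above can jump over sections that fall outside the current section height by seeking to start + size instead of parsing them. A self-contained sketch of that skip-forward loop over a plain DataInputStream, with manual byte counting standing in for CountingDataInputStream.count()/skipTo():

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    // Sketch of the length-prefixed "skip what you can't use" read loop.
    final class SectionSkipSketch {
        public static void main(String[] args) throws IOException {
            // Write two length-prefixed sections: [size][payload]...
            ByteArrayOutputStream bytes = new ByteArrayOutputStream();
            DataOutputStream dos = new DataOutputStream(bytes);
            for (int i = 0; i < 2; i++) {
                byte[] payload = new byte[]{1, 2, 3, 4};
                dos.writeInt(payload.length);
                dos.write(payload);
            }

            DataInputStream din = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()));
            long count = 0;                        // bytes consumed so far (what din.count() tracks)
            int sectionHeight = 1;                 // pretend only one section fits the world height
            for (int i = 0; i < 2; i++) {
                long size = din.readInt();
                count += 4;
                long start = count;
                if (i >= sectionHeight) {          // out of range: jump over the payload entirely
                    din.skipBytes((int) ((start + size) - count));
                    count = start + size;
                    continue;
                }
                byte[] payload = new byte[(int) size];
                din.readFully(payload);
                count += size;
                System.out.println("read section " + i + " of " + size + " bytes");
            }
        }
    }
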
@@ -210,6 +216,7 @@ public class MantleChunk {
         dos.writeByte(x);
         dos.writeByte(z);
         dos.writeByte(sections.length());
+        Varint.writeUnsignedVarInt(flags.length(), dos);
 
         for (int i = 0; i < flags.length(); i++) {
             dos.writeBoolean(flags.get(i) == 1);
@@ -19,9 +19,10 @@
 package com.volmit.iris.util.mantle;
 
 import com.volmit.iris.Iris;
+import com.volmit.iris.core.IrisSettings;
 import com.volmit.iris.engine.EnginePanic;
 import com.volmit.iris.engine.data.cache.Cache;
-import com.volmit.iris.util.collection.KSet;
+import com.volmit.iris.util.data.Varint;
 import com.volmit.iris.util.documentation.ChunkCoordinates;
 import com.volmit.iris.util.format.C;
 import com.volmit.iris.util.format.Form;
@@ -44,7 +45,9 @@ import java.util.concurrent.atomic.AtomicReferenceArray;
 * Tectonic Plates are fully atomic & thread safe
 */
 public class TectonicPlate {
-    private static final KSet<Thread> errors = new KSet<>();
+    private static final ThreadLocal<Boolean> errors = ThreadLocal.withInitial(() -> false);
+    public static final int MISSING = -1;
+    public static final int CURRENT = 0;
 
     private final int sectionHeight;
     private final AtomicReferenceArray<MantleChunk> chunks;
@@ -74,11 +77,12 @@ public class TectonicPlate {
      * @param din the data input
      * @throws IOException shit happens yo
      */
-    public TectonicPlate(int worldHeight, CountingDataInputStream din) throws IOException {
+    public TectonicPlate(int worldHeight, CountingDataInputStream din, boolean versioned) throws IOException {
         this(worldHeight, din.readInt(), din.readInt());
         if (!din.markSupported())
             throw new IOException("Mark not supported!");
 
+        int v = versioned ? Varint.readUnsignedVarInt(din) : MISSING;
         for (int i = 0; i < chunks.length(); i++) {
             long size = din.readInt();
             if (size == 0) continue;
@@ -86,7 +90,7 @@ public class TectonicPlate {
 
             try {
                 Iris.addPanic("read-chunk", "Chunk[" + i + "]");
-                chunks.set(i, new MantleChunk(sectionHeight, din));
+                chunks.set(i, new MantleChunk(v, sectionHeight, din));
                 EnginePanic.saveLast();
             } catch (Throwable e) {
                 long end = start + size;
@@ -103,7 +107,7 @@ public class TectonicPlate {
         }
     }
 
-    public static TectonicPlate read(int worldHeight, File file) throws IOException {
+    public static TectonicPlate read(int worldHeight, File file, boolean versioned) throws IOException {
         try (FileChannel fc = FileChannel.open(file.toPath(), StandardOpenOption.READ, StandardOpenOption.WRITE, StandardOpenOption.SYNC)) {
             fc.lock();
 
@@ -111,10 +115,10 @@ public class TectonicPlate {
             LZ4BlockInputStream lz4 = new LZ4BlockInputStream(fin);
             BufferedInputStream bis = new BufferedInputStream(lz4);
             try (CountingDataInputStream din = CountingDataInputStream.wrap(bis)) {
-                return new TectonicPlate(worldHeight, din);
+                return new TectonicPlate(worldHeight, din, versioned);
             }
         } finally {
-            if (errors.remove(Thread.currentThread())) {
+            if (IrisSettings.get().getGeneral().isDumpMantleOnError() && errors.get()) {
                 File dump = Iris.instance.getDataFolder("dump", file.getName() + ".bin");
                 try (FileChannel fc = FileChannel.open(file.toPath(), StandardOpenOption.READ, StandardOpenOption.WRITE, StandardOpenOption.SYNC)) {
                     fc.lock();
@@ -124,6 +128,7 @@ public class TectonicPlate {
                     Files.copy(lz4, dump.toPath(), StandardCopyOption.REPLACE_EXISTING);
                 }
             }
+            errors.remove();
         }
     }
 
@@ -222,7 +227,7 @@ public class TectonicPlate {
             write(dos);
         }
         Files.move(temp.toPath(), file.toPath(), StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.ATOMIC_MOVE);
-        Iris.debug("Saved Tectonic Plate " + C.DARK_GREEN + file.getName().split("\\Q.\\E")[0] + C.RED + " in " + Form.duration(p.getMilliseconds(), 2));
+        Iris.debug("Saved Tectonic Plate " + C.DARK_GREEN + file.getName() + C.RED + " in " + Form.duration(p.getMilliseconds(), 2));
         temp.delete();
     }
 
@@ -235,6 +240,7 @@ public class TectonicPlate {
     public void write(DataOutputStream dos) throws IOException {
         dos.writeInt(x);
         dos.writeInt(z);
+        Varint.writeUnsignedVarInt(CURRENT, dos);
 
         var bytes = new ByteArrayOutputStream(8192);
         var sub = new DataOutputStream(bytes);
@@ -256,6 +262,6 @@ public class TectonicPlate {
     }
 
     public static void addError() {
-        errors.add(Thread.currentThread());
+        errors.set(true);
     }
 }
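
Error tracking during a plate read also changes shape in the hunks above and below: instead of a shared KSet<Thread>, each worker thread now keeps a boolean flag, Matter.readDin() raises it when a slice fails, and TectonicPlate.read() checks it afterwards (gated by the new dumpMantleOnError setting) before clearing it with remove(). A minimal sketch of that ThreadLocal flag pattern; the class, decode(), and dump message are illustrative only:

    // Sketch of the per-thread error flag that replaces the shared KSet<Thread>.
    final class ErrorFlagSketch {
        private static final ThreadLocal<Boolean> errors = ThreadLocal.withInitial(() -> false);

        static void addError() {              // called by the decoder when a slice fails to read
            errors.set(true);
        }

        static void readPlate(boolean dumpMantleOnError) {
            try {
                decode();                     // may call addError() on partial failures
            } finally {
                if (dumpMantleOnError && errors.get()) {
                    System.out.println("would dump the raw plate for inspection here");
                }
                errors.remove();              // always reset so pooled threads start clean
            }
        }

        private static void decode() {
            addError();                       // simulate a bad slice for the demo
        }

        public static void main(String[] args) {
            readPlate(true);
        }
    }
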
@@ -154,15 +154,16 @@ public interface Matter {
                 matter.putSlice(type, slice);
             } catch (Throwable e) {
                 long end = start + size;
-                Iris.error("Failed to read matter slice, skipping it.");
-                Iris.addPanic("read.byte.range", start + " " + end);
-                Iris.addPanic("read.byte.current", din.count() + "");
-                Iris.reportError(e);
-                e.printStackTrace();
-                Iris.panic();
+                if (!(e instanceof ClassNotFoundException)) {
+                    Iris.error("Failed to read matter slice, skipping it.");
+                    Iris.addPanic("read.byte.range", start + " " + end);
+                    Iris.addPanic("read.byte.current", din.count() + "");
+                    Iris.reportError(e);
+                    e.printStackTrace();
+                    Iris.panic();
+                    TectonicPlate.addError();
+                }
                 din.skipTo(end);
-                TectonicPlate.addError();
             }
         }
 