mirror of https://github.com/VolmitSoftware/Iris.git
synced 2025-07-18 18:23:06 +00:00

Fix loot

commit 200e576ba8 (parent ed6fe5631a)
@@ -752,7 +752,7 @@ public class ProjectManager
 
         try
         {
-            JSONObject ws = newWorkspaceConfig();
+            JSONObject ws = newWorkspaceConfig(Iris.instance.getDataFolder("packs", newName));
             IO.writeAll(Iris.instance.getDataFile("packs", newName, newName + ".code-workspace"), ws.toString(0));
         }
 
@@ -878,7 +878,7 @@ public class ProjectManager
 
         try
         {
-            JSONObject ws = newWorkspaceConfig();
+            JSONObject ws = newWorkspaceConfig(Iris.instance.getDataFolder("packs", dimension.getLoadKey()));
             IO.writeAll(Iris.instance.getDataFile("packs", dimension.getLoadKey(), "dimensions", dimension.getLoadKey() + ".json"), new JSONObject(new Gson().toJson(dimension)).toString(4));
             IO.writeAll(Iris.instance.getDataFile("packs", dimension.getLoadKey(), "regions", exampleRegion.getLoadKey() + ".json"), new JSONObject(new Gson().toJson(exampleRegion)).toString(4));
             IO.writeAll(Iris.instance.getDataFile("packs", dimension.getLoadKey(), "biomes", exampleLand1.getLoadKey() + ".json"), new JSONObject(new Gson().toJson(exampleLand1)).toString(4));
@@ -899,7 +899,7 @@ public class ProjectManager
         return true;
     }
 
-    private JSONObject newWorkspaceConfig()
+    private JSONObject newWorkspaceConfig(File pack)
     {
         Iris.globaldata.clearLists();
         JSONObject ws = new JSONObject();
@@ -934,7 +934,7 @@ public class ProjectManager
         settings.put("[json]", jc);
         settings.put("json.maxItemsComputed", 15000);
 
-        JSONArray schemas = buildSchemas(Iris.globaldata);
+        JSONArray schemas = buildSchemas(Iris.globaldata, pack);
         settings.put("json.schemas", schemas);
         ws.put("settings", settings);
 
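For orientation, the settings object assembled in this hunk becomes the "settings" block of the generated .code-workspace file, with the schema list stored under "json.schemas". Below is a minimal, runnable sketch of that shape using plain org.json types as a stand-in for Iris's bundled JSONObject/JSONArray; the "fileMatch" field in the schema entry is an assumption based on VSCode's documented json.schemas format, not something shown in this diff.

import org.json.JSONArray;
import org.json.JSONObject;

// Illustrative only: approximates the "settings" section written into the
// .code-workspace file. The keys "[json]", "json.maxItemsComputed" and
// "json.schemas" appear in the diff; the per-schema "fileMatch" layout is an
// assumption about how VSCode's json.schemas entries are usually structured.
public class WorkspaceSettingsSketch
{
    public static void main(String[] args)
    {
        JSONObject settings = new JSONObject();
        settings.put("[json]", new JSONObject()); // editor settings for JSON files (contents elided)
        settings.put("json.maxItemsComputed", 15000);

        JSONArray schemas = new JSONArray();
        JSONObject dimensionSchema = new JSONObject();
        dimensionSchema.put("fileMatch", new JSONArray().put("/dimensions/*.json"));
        schemas.put(dimensionSchema);
        settings.put("json.schemas", schemas);

        JSONObject ws = new JSONObject();
        ws.put("settings", settings);
        System.out.println(ws.toString(4));
    }
}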
@@ -952,7 +952,7 @@ public class ProjectManager
         {
             Iris.info("Updating Workspace: " + ws.getPath());
             J.attemptAsync(() -> writeDocs(ws.getParentFile()));
-            JSONObject j = newWorkspaceConfig();
+            JSONObject j = newWorkspaceConfig(ws.getParentFile());
             IO.writeAll(ws, j.toString(4));
             Iris.info("Updated Workspace: " + ws.getPath());
         }
@@ -963,7 +963,7 @@ public class ProjectManager
 
         try
        {
-            IO.writeAll(ws, newWorkspaceConfig());
+            IO.writeAll(ws, newWorkspaceConfig(ws.getParentFile()));
         }
 
         catch(IOException e1)
@@ -973,25 +973,45 @@ public class ProjectManager
         }
     }
 
-    private void ex(JSONArray schemas, Class<?> c, IrisDataManager dat, String v)
+    private void ex(JSONArray schemas, Class<?> c, IrisDataManager dat, String v, File pack)
     {
         JSONObject o = getSchemaEntry(c, dat, v);
         lock.lock();
         schemas.put(o);
         lock.unlock();
+
+        J.a(() ->
+        {
+            File f = new File(pack, "_docs/schema/" + c.getSimpleName().replaceAll("\\QIris\\E", "").toLowerCase() + ".json");
+            f.getParentFile().mkdirs();
+            try
+            {
+                IO.writeAll(f, o.toString(4));
+            }
+
+            catch(JSONException e)
+            {
+                e.printStackTrace();
+            }
+
+            catch(IOException e)
+            {
+                e.printStackTrace();
+            }
+        });
     }
 
-    private JSONArray buildSchemas(IrisDataManager dat)
+    private JSONArray buildSchemas(IrisDataManager dat, File pack)
     {
         JSONArray schemas = new JSONArray();
         TaskGroup g = tx.startWork();
-        g.queue(() -> ex(schemas, IrisDimension.class, dat, "/dimensions/*.json"));
-        g.queue(() -> ex(schemas, IrisEntity.class, dat, "/entities/*.json"));
-        g.queue(() -> ex(schemas, IrisBiome.class, dat, "/biomes/*.json"));
-        g.queue(() -> ex(schemas, IrisRegion.class, dat, "/regions/*.json"));
-        g.queue(() -> ex(schemas, IrisGenerator.class, dat, "/generators/*.json"));
-        g.queue(() -> ex(schemas, IrisStructure.class, dat, "/structures/*.json"));
-        g.queue(() -> ex(schemas, IrisLootTable.class, dat, "/loot/*.json"));
+        g.queue(() -> ex(schemas, IrisDimension.class, dat, "/dimensions/*.json", pack));
+        g.queue(() -> ex(schemas, IrisEntity.class, dat, "/entities/*.json", pack));
+        g.queue(() -> ex(schemas, IrisBiome.class, dat, "/biomes/*.json", pack));
+        g.queue(() -> ex(schemas, IrisRegion.class, dat, "/regions/*.json", pack));
+        g.queue(() -> ex(schemas, IrisGenerator.class, dat, "/generators/*.json", pack));
+        g.queue(() -> ex(schemas, IrisStructure.class, dat, "/structures/*.json", pack));
+        g.queue(() -> ex(schemas, IrisLootTable.class, dat, "/loot/*.json", pack));
         g.execute();
 
         return schemas;
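The substantive addition in this hunk is that each generated schema entry, besides being collected into the json.schemas array, is now also exported asynchronously to <pack>/_docs/schema/<type>.json, where <type> is the class's simple name with the "Iris" prefix stripped and lowercased. Here is a self-contained sketch of that export step; a plain Thread stands in for Iris's J.a(...) async helper and Files.writeString for IO.writeAll, so the example runs without the Iris utilities.

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

// Illustrative sketch of the schema-export step added in ex(...). The real code
// uses Iris's J.a(...) executor and IO.writeAll(...); here a plain Thread and
// Files.writeString are used so the example is runnable on its own (Java 11+).
public class SchemaExportSketch
{
    public static void exportSchema(File pack, Class<?> type, String schemaJson)
    {
        // IrisDimension -> "dimension.json", IrisLootTable -> "loottable.json", etc.
        String name = type.getSimpleName().replaceAll("\\QIris\\E", "").toLowerCase();
        File out = new File(pack, "_docs/schema/" + name + ".json");

        new Thread(() ->
        {
            out.getParentFile().mkdirs();
            try
            {
                Files.writeString(out.toPath(), schemaJson, StandardCharsets.UTF_8);
            }
            catch(IOException e)
            {
                e.printStackTrace();
            }
        }).start();
    }

    public static void main(String[] args)
    {
        // String.class is only a placeholder type for the demo; it writes string.json.
        exportSchema(new File("packs/overworld"), String.class, "{}");
    }
}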
@@ -1,54 +0,0 @@
-package com.volmit.iris.object;
-
-import com.volmit.iris.util.Desc;
-import com.volmit.iris.util.DontObfuscate;
-import com.volmit.iris.util.MinNumber;
-import com.volmit.iris.util.Required;
-
-import lombok.AllArgsConstructor;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-import lombok.experimental.Accessors;
-
-@Accessors(chain = true)
-@NoArgsConstructor
-@AllArgsConstructor
-@Desc("Represents a carving that slices through the surface")
-@Data
-public class IrisRavineLayer
-{
-
-    @Required
-    @DontObfuscate
-    @Desc("The vertical slope this cave layer follows typically you would set both the min and max values to negative values so the ravine is always under the surface.")
-    private IrisShapedGeneratorStyle verticalSlope = new IrisShapedGeneratorStyle(new IrisGeneratorStyle(NoiseStyle.IRIS_THICK), -19, -11);
-
-    @Required
-    @DontObfuscate
-    @Desc("The horizontal slope this cave layer follows. This affects if the ravine is straight or curves or even whirls around")
-    private IrisShapedGeneratorStyle horizontalSlope = new IrisShapedGeneratorStyle(new IrisGeneratorStyle(NoiseStyle.IRIS), -30, 30);
-
-    @DontObfuscate
-    @Desc("If defined, a cave fluid will fill this cave below (or above) the specified fluidHeight in this object.")
-    private IrisCaveFluid fluid = new IrisCaveFluid();
-
-    @MinNumber(0.001)
-    @DontObfuscate
-    @Desc("The cave zoom. Higher values makes caves spread out further and branch less often, but are thicker.")
-    private double caveZoom = 1D;
-
-    @MinNumber(0.001)
-    @DontObfuscate
-    @Desc("The ravine thickness.")
-    private double ravineThickness = 1D;
-
-    @MinNumber(1)
-    @DontObfuscate
-    @Desc("The ravine rarity as 1 in rarity chance.")
-    private int rarity = 12;
-
-    @MinNumber(0.001)
-    @DontObfuscate
-    @Desc("The ravine rarity zoom is how large of a check area at a time iris will do. For example, with higher zooms ravines will have the same effective rarity, but when you actually find a ravine, it will be near a whole patch of ravines. Setting a lower zoom such as 0.25 will make the check density higher resulting in a more uniform distribution of ravines. A zoom that is too small may also reduce the ravine sizes.")
-    private double rarityZoom = 1;
-}
@@ -29,8 +29,9 @@ public class B
         return getBlockData(bd);
     }
 
-    public static Material getMaterial(String bd)
+    public static Material getMaterial(String bdx)
     {
+        String bd = bdx.trim().toUpperCase();
         return types.compute(bd, (k, v) ->
         {
             if(k != null && v != null)
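The getMaterial change normalizes the raw name (trim plus upper-case) before it is used as the key into the cached types map, so material names read from data files such as loot tables resolve regardless of casing or stray whitespace; the rest of the method is cut off in this view. A minimal sketch of the same pattern follows, using computeIfAbsent on a ConcurrentHashMap instead of the original compute for brevity, and a stubbed Material enum so it compiles without Bukkit; the AIR fallback is an assumption, since the diff does not show what the real method returns for unknown names.

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Illustrative sketch of the lookup-normalization pattern introduced in
// B.getMaterial: clean the raw name once, then use the cleaned key for the
// cached lookup so "  diamond_block " and "DIAMOND_BLOCK" share one entry.
public class MaterialLookupSketch
{
    enum Material { DIAMOND, DIAMOND_BLOCK, AIR }

    private static final Map<String, Material> types = new ConcurrentHashMap<>();

    public static Material getMaterial(String bdx)
    {
        String bd = bdx.trim().toUpperCase();

        return types.computeIfAbsent(bd, k ->
        {
            try
            {
                return Material.valueOf(k);
            }
            catch(IllegalArgumentException e)
            {
                return Material.AIR; // assumed fallback; not shown in the diff
            }
        });
    }

    public static void main(String[] args)
    {
        System.out.println(getMaterial(" diamond_block "));
        System.out.println(getMaterial("DIAMOND_BLOCK"));
    }
}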