Mirror of https://github.com/PolyhedralDev/Terra.git (synced 2026-02-16 10:30:42 +00:00)
if statements with code blocks
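This commit reworks the structure-script parser: Parser.parse() now returns a Block of Items instead of a flat function list, the tokenizer gains BOOLEAN ("true"/"false"), BOOLEAN_OPERATOR ("==") and ASSIGNMENT ("=") tokens, and an if(... == ...) { ... } construct is parsed into an IfKeyword wrapping an EqualsStatement and a nested Block. A minimal sketch of how the reworked parser is meant to be driven, assuming a function-registration method on Parser (called addFunction below purely for illustration; its real name is not visible in this diff) and an existing Location:

    // Hedged usage sketch, not part of the commit.
    static void runScript(String source, Location origin, FunctionBuilder<? extends Function<?>> testBuilder) throws ParseException {
        Parser parser = new Parser(source);
        parser.addFunction("test", testBuilder); // illustrative name; the registration method is not shown in this diff
        Block script = parser.parse();           // tokenizes everything, then parseBlock() builds the Item tree
        for(Item<?> item : script.getItems()) System.out.println(item); // mirrors the updated ParserTest
        script.apply(origin);                    // IfKeyword items only run their Block when their Statement holds
    }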
@@ -1,12 +0,0 @@
-package com.dfsek.terra.api.structures.parser;
-
-import com.dfsek.terra.api.math.vector.Location;
-import com.dfsek.terra.api.platform.world.Chunk;
-
-public interface Function<T> {
-    T apply(Location location);
-
-    T apply(Location location, Chunk chunk);
-
-    String name();
-}
@@ -1,6 +1,8 @@
 package com.dfsek.terra.api.structures.parser;
 
 import com.dfsek.terra.api.structures.parser.exceptions.ParseException;
+import com.dfsek.terra.api.structures.parser.lang.Argument;
+import com.dfsek.terra.api.structures.parser.lang.Function;
 
 import java.util.List;
 
@@ -1,6 +1,13 @@
 package com.dfsek.terra.api.structures.parser;
 
 import com.dfsek.terra.api.structures.parser.exceptions.ParseException;
+import com.dfsek.terra.api.structures.parser.lang.Block;
+import com.dfsek.terra.api.structures.parser.lang.Function;
+import com.dfsek.terra.api.structures.parser.lang.Item;
+import com.dfsek.terra.api.structures.parser.lang.Keyword;
+import com.dfsek.terra.api.structures.parser.lang.Statement;
+import com.dfsek.terra.api.structures.parser.lang.keywords.IfKeyword;
+import com.dfsek.terra.api.structures.parser.lang.statements.EqualsStatement;
 import com.dfsek.terra.api.structures.tokenizer.Token;
 import com.dfsek.terra.api.structures.tokenizer.Tokenizer;
 import com.dfsek.terra.api.structures.tokenizer.exceptions.TokenizerException;
@@ -16,6 +23,8 @@ import java.util.stream.Collectors;
 public class Parser {
     private final String data;
     private final Map<String, FunctionBuilder<? extends Function<?>>> functions = new HashMap<>();
+    private final Set<String> keywords = Sets.newHashSet("if");
+
     Set<Token.Type> allowedArguments = Sets.newHashSet(Token.Type.STRING, Token.Type.NUMBER, Token.Type.IDENTIFIER);
 
     public Parser(String data) {
@@ -27,49 +36,94 @@ public class Parser {
         return this;
     }
 
-    public List<Function<?>> parse() throws ParseException {
+    public Block parse() throws ParseException {
         Tokenizer tokenizer = new Tokenizer(data);
-        List<Function<?>> builtFunctions = new GlueList<>();
-        List<Token> functionBuilder = new GlueList<>();
-        Token token = null;
-        while(tokenizer.hasNext()) {
-            try {
-                token = tokenizer.fetch();
-                functionBuilder.add(token);
-
-                if(token.getType().equals(Token.Type.STATEMENT_END)) {
-                    Token identifier = functionBuilder.remove(0);
-                    checkType(identifier, Token.Type.IDENTIFIER); // First token must be identifier
-
-                    if(!functions.containsKey(identifier.getContent()))
-                        throw new ParseException("No such function " + identifier.getContent() + ": " + identifier.getStart());
-
-                    checkType(functionBuilder.remove(0), Token.Type.BODY_BEGIN); // Second is body begin
-
-                    List<Token> args = getArgs(functionBuilder); // Extract arguments, consume the rest.
-
-                    functionBuilder.remove(0); // Remove body end
-
-                    checkType(functionBuilder.remove(0), Token.Type.STATEMENT_END);
-
-                    List<String> arg = args.stream().map(Token::getContent).collect(Collectors.toList());
-
-                    FunctionBuilder<?> builder = functions.get(identifier.getContent());
-                    if(arg.size() != builder.getArguments().size())
-                        throw new ParseException("Expected " + builder.getArguments().size() + " arguments, found " + arg.size() + ": " + identifier.getStart());
-
-                    builtFunctions.add(functions.get(identifier.getContent()).build(arg));
-
-                    functionBuilder.clear();
-                }
-            } catch(TokenizerException e) {
-                throw new ParseException("Failed to tokenize input", e);
-            }
-        }
-        if(token != null) checkType(token, Token.Type.STATEMENT_END);
-        return builtFunctions;
+        List<Token> tokens = new GlueList<>();
+        try {
+            while(tokenizer.hasNext()) tokens.add(tokenizer.fetch());
+        } catch(TokenizerException e) {
+            throw new ParseException("Failed to tokenize input", e);
+        }
+
+        return parseBlock(tokens);
+    }
+
+    private Keyword<?> parseKeyword(List<Token> tokens, List<Token> functionAndArguments) throws ParseException {
+        Token identifier = functionAndArguments.remove(0);
+        System.out.println("Parsing keyword at " + identifier.getStart());
+        checkType(identifier, Token.Type.IDENTIFIER);
+        if(!keywords.contains(identifier.getContent()))
+            throw new ParseException("No such keyword " + identifier.getContent() + ": " + identifier.getStart());
+        Keyword<?> k = null;
+        if(identifier.getContent().equals("if")) {
+            checkType(functionAndArguments.remove(0), Token.Type.BODY_BEGIN);
+
+            Function<?> left = parseFunction(functionAndArguments, false);
+
+            Statement statement = null;
+            Token comparator = functionAndArguments.remove(0);
+            checkType(comparator, Token.Type.BOOLEAN_OPERATOR);
+
+            Function<?> right = parseFunction(functionAndArguments, false);
+
+            checkType(functionAndArguments.remove(0), Token.Type.BODY_END);
+            if(comparator.getContent().equals("==")) {
+                statement = new EqualsStatement(left, right);
+            }
+
+            k = new IfKeyword(parseBlock(tokens), statement);
+        }
+        return k;
     }
 
+    private Block parseBlock(List<Token> tokens) throws ParseException {
+        List<Item<?>> parsedItems = new GlueList<>();
+        List<Token> functionArgs = new GlueList<>();
+
+        while(tokens.size() > 0) {
+            Token token = tokens.remove(0);
+            System.out.println(token);
+            if(token.getType().equals(Token.Type.BLOCK_END)) break;
+            functionArgs.add(token);
+            if(token.getType().equals(Token.Type.STATEMENT_END)) {
+                parsedItems.add(parseFunction(functionArgs, true));
+                functionArgs.clear();
+            } else if(token.getType().equals(Token.Type.BLOCK_BEGIN)) {
+                parsedItems.add(parseKeyword(tokens, functionArgs));
+                functionArgs.clear();
+            }
+        }
+        return new Block(parsedItems);
+    }
+
+    private Function<?> parseFunction(List<Token> functionAndArguments, boolean fullStatement) throws ParseException {
+        Token identifier = functionAndArguments.remove(0);
+        System.out.println("Parsing function at " + identifier.getStart());
+        checkType(identifier, Token.Type.IDENTIFIER); // First token must be identifier
+
+        if(!functions.containsKey(identifier.getContent()))
+            throw new ParseException("No such function " + identifier.getContent() + ": " + identifier.getStart());
+
+        checkType(functionAndArguments.remove(0), Token.Type.BODY_BEGIN); // Second is body begin
+
+        List<Token> args = getArgs(functionAndArguments); // Extract arguments, consume the rest.
+
+        functionAndArguments.remove(0); // Remove body end
+
+        if(fullStatement) checkType(functionAndArguments.remove(0), Token.Type.STATEMENT_END);
+
+        List<String> arg = args.stream().map(Token::getContent).collect(Collectors.toList());
+
+        FunctionBuilder<?> builder = functions.get(identifier.getContent());
+        if(arg.size() != builder.getArguments().size())
+            throw new ParseException("Expected " + builder.getArguments().size() + " arguments, found " + arg.size() + ": " + identifier.getStart());
+        return functions.get(identifier.getContent()).build(arg);
+    }
+
     private List<Token> getArgs(List<Token> functionBuilder) throws ParseException {
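Read together, the control flow is now: parse() tokenizes the whole script and hands the token list to parseBlock(); parseBlock() buffers tokens until it hits STATEMENT_END (a plain function call, passed to parseFunction()) or BLOCK_BEGIN (a keyword such as if, passed to parseKeyword()); parseKeyword() parses both sides of the comparison with parseFunction(), wraps them in an EqualsStatement, and recurses into parseBlock() for the braced body. A hand-written trace for the script at the bottom of this diff (an approximation, not program output):

    // if(test("hello", 1) == test("hello", 1)) { test("hello", 1); }
    // parseBlock(tokens)
    //   BLOCK_BEGIN reached -> parseKeyword(remaining tokens, buffered "if(...) {" tokens)
    //     parseFunction(...)           -> left  = test("hello", 1)
    //     BOOLEAN_OPERATOR "=="        -> comparator
    //     parseFunction(...)           -> right = test("hello", 1)
    //     new EqualsStatement(left, right)
    //     parseBlock(remaining tokens) -> Block holding the inner test("hello", 1), stopping at BLOCK_END
    //   result: new IfKeyword(block, statement)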
@@ -1,4 +1,4 @@
-package com.dfsek.terra.api.structures.parser;
+package com.dfsek.terra.api.structures.parser.lang;
 
 public interface Argument<T> {
     T parse(String input);
@@ -1,4 +1,30 @@
 package com.dfsek.terra.api.structures.parser.lang;
 
-public class Block {
+import com.dfsek.terra.api.math.vector.Location;
+import com.dfsek.terra.api.platform.world.Chunk;
+
+import java.util.List;
+
+public class Block implements Item<Void> {
+    private final List<Item<?>> items;
+
+    public Block(List<Item<?>> items) {
+        this.items = items;
+    }
+
+    public List<Item<?>> getItems() {
+        return items;
+    }
+
+    @Override
+    public Void apply(Location location) {
+        items.forEach(item -> item.apply(location));
+        return null;
+    }
+
+    @Override
+    public Void apply(Location location, Chunk chunk) {
+        items.forEach(item -> item.apply(location, chunk));
+        return null;
+    }
 }
@@ -1,4 +1,4 @@
 package com.dfsek.terra.api.structures.parser.lang;
 
-public class Expression {
+public interface Expression<T> extends Item<T> {
 }
@@ -0,0 +1,6 @@
+package com.dfsek.terra.api.structures.parser.lang;
+
+public interface Function<T> extends Expression<T> {
+
+    String name();
+}
@@ -1,4 +1,10 @@
 package com.dfsek.terra.api.structures.parser.lang;
 
-public interface Item {
+import com.dfsek.terra.api.math.vector.Location;
+import com.dfsek.terra.api.platform.world.Chunk;
+
+public interface Item<T> {
+    T apply(Location location);
+
+    T apply(Location location, Chunk chunk);
 }
@@ -0,0 +1,4 @@
+package com.dfsek.terra.api.structures.parser.lang;
+
+public interface Keyword<T> extends Expression<T> {
+}
@@ -0,0 +1,4 @@
+package com.dfsek.terra.api.structures.parser.lang;
+
+public interface Statement extends Item<Boolean> {
+}
@@ -0,0 +1,29 @@
+package com.dfsek.terra.api.structures.parser.lang.keywords;
+
+import com.dfsek.terra.api.math.vector.Location;
+import com.dfsek.terra.api.platform.world.Chunk;
+import com.dfsek.terra.api.structures.parser.lang.Block;
+import com.dfsek.terra.api.structures.parser.lang.Keyword;
+import com.dfsek.terra.api.structures.parser.lang.Statement;
+
+public class IfKeyword implements Keyword<Void> {
+    private final Block conditional;
+    private final Statement statement;
+
+    public IfKeyword(Block conditional, Statement statement) {
+        this.conditional = conditional;
+        this.statement = statement;
+    }
+
+    @Override
+    public Void apply(Location location) {
+        if(statement.apply(location)) conditional.apply(location);
+        return null;
+    }
+
+    @Override
+    public Void apply(Location location, Chunk chunk) {
+        if(statement.apply(location, chunk)) conditional.apply(location, chunk);
+        return null;
+    }
+}
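As an illustration, the object graph that parseKeyword() assembles for an if statement can be written out by hand using only the types added in this commit (the helper class below is illustrative and not part of the commit):

    import com.dfsek.terra.api.structures.parser.lang.Block;
    import com.dfsek.terra.api.structures.parser.lang.Function;
    import com.dfsek.terra.api.structures.parser.lang.Item;
    import com.dfsek.terra.api.structures.parser.lang.Keyword;
    import com.dfsek.terra.api.structures.parser.lang.Statement;
    import com.dfsek.terra.api.structures.parser.lang.keywords.IfKeyword;
    import com.dfsek.terra.api.structures.parser.lang.statements.EqualsStatement;

    import java.util.List;

    final class IfExample {
        // Equivalent of: if(left() == right()) { ...body... }
        static Keyword<Void> buildIf(Function<?> left, Function<?> right, List<Item<?>> body) {
            Statement condition = new EqualsStatement(left, right); // true when both sides apply() to equal values
            return new IfKeyword(new Block(body), condition);       // apply() runs the Block only when the condition holds
        }
    }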
@@ -0,0 +1,17 @@
+package com.dfsek.terra.api.structures.parser.lang.keywords;
+
+import com.dfsek.terra.api.math.vector.Location;
+import com.dfsek.terra.api.platform.world.Chunk;
+import com.dfsek.terra.api.structures.parser.lang.Keyword;
+
+public class ReturnKeyword implements Keyword<Void> {
+    @Override
+    public Void apply(Location location) {
+        return null;
+    }
+
+    @Override
+    public Void apply(Location location, Chunk chunk) {
+        return null;
+    }
+}
@@ -0,0 +1,26 @@
+package com.dfsek.terra.api.structures.parser.lang.statements;
+
+import com.dfsek.terra.api.math.vector.Location;
+import com.dfsek.terra.api.platform.world.Chunk;
+import com.dfsek.terra.api.structures.parser.lang.Item;
+import com.dfsek.terra.api.structures.parser.lang.Statement;
+
+public class EqualsStatement implements Statement {
+    private final Item<?> left;
+    private final Item<?> right;
+
+    public EqualsStatement(Item<?> left, Item<?> right) {
+        this.left = left;
+        this.right = right;
+    }
+
+    @Override
+    public Boolean apply(Location location) {
+        return left.apply(location).equals(right.apply(location));
+    }
+
+    @Override
+    public Boolean apply(Location location, Chunk chunk) {
+        return left.apply(location, chunk).equals(right.apply(location, chunk));
+    }
+}
@@ -0,0 +1,4 @@
+package com.dfsek.terra.api.structures.script;
+
+public class StructureScript {
+}
@@ -11,6 +11,6 @@ public class Position {
 
     @Override
     public String toString() {
-        return (line + 1) + ":" + (index + 1);
+        return (line + 1) + ":" + index;
    }
 }
@@ -29,6 +29,53 @@ public class Token {
     }
 
     public enum Type {
-        IDENTIFIER, NUMBER, STRING, BOOLEAN, BODY_BEGIN, BODY_END, STATEMENT_END, SEPARATOR, BLOCK_BEGIN, BLOCK_END
+        /**
+         * Function identifier or language keyword
+         */
+        IDENTIFIER,
+        /**
+         * Numeric literal
+         */
+        NUMBER,
+        /**
+         * String literal
+         */
+        STRING,
+        /**
+         * Boolean literal
+         */
+        BOOLEAN,
+        /**
+         * Beginning of function body
+         */
+        BODY_BEGIN,
+        /**
+         * Ending of function body
+         */
+        BODY_END,
+        /**
+         * End of statement
+         */
+        STATEMENT_END,
+        /**
+         * Argument separator
+         */
+        SEPARATOR,
+        /**
+         * Beginning of code block
+         */
+        BLOCK_BEGIN,
+        /**
+         * End of code block
+         */
+        BLOCK_END,
+        /**
+         * Assignment operator
+         */
+        ASSIGNMENT,
+        /**
+         * Boolean operator
+         */
+        BOOLEAN_OPERATOR
     }
 }
@@ -11,7 +11,7 @@ import java.util.Set;
 public class Tokenizer {
     private final Lookahead reader;
 
-    private final Set<Character> syntaxSignificant = Sets.newHashSet(';', '(', ')', '"', ',', '\\', // Currently used chars
+    private final Set<Character> syntaxSignificant = Sets.newHashSet(';', '(', ')', '"', ',', '\\', '=', // Currently used chars
             '{', '}'); // Reserved chars
 
 
@@ -32,6 +32,13 @@ public class Tokenizer {
 
         if(reader.matches("/*", true)) skipTo("*/"); // Skip multi line comment
 
+        if(reader.matches("true", true))
+            return new Token("true", Token.Type.BOOLEAN, new Position(reader.getLine(), reader.getIndex()));
+        if(reader.matches("false", true))
+            return new Token("false", Token.Type.BOOLEAN, new Position(reader.getLine(), reader.getIndex()));
+        if(reader.matches("==", true))
+            return new Token("==", Token.Type.BOOLEAN_OPERATOR, new Position(reader.getLine(), reader.getIndex()));
+
         if(isNumberStart()) {
             StringBuilder num = new StringBuilder();
             while(!reader.current().isEOF() && isNumberLike()) {
@@ -55,6 +62,7 @@ public class Tokenizer {
             string.append(reader.consume());
         }
         reader.consume(); // Consume last quote
+
         return new Token(string.toString(), Token.Type.STRING, new Position(reader.getLine(), reader.getIndex()));
     }
 
@@ -70,6 +78,8 @@ public class Tokenizer {
             return new Token(reader.consume().toString(), Token.Type.BLOCK_BEGIN, new Position(reader.getLine(), reader.getIndex()));
         if(reader.current().is('}'))
             return new Token(reader.consume().toString(), Token.Type.BLOCK_END, new Position(reader.getLine(), reader.getIndex()));
+        if(reader.current().is('='))
+            return new Token(reader.consume().toString(), Token.Type.ASSIGNMENT, new Position(reader.getLine(), reader.getIndex()));
 
         StringBuilder token = new StringBuilder();
         while(!reader.current().isEOF() && !isSyntaxSignificant(reader.current().getCharacter())) {
@@ -77,7 +87,10 @@ public class Tokenizer {
             if(!c.isWhitespace()) token.append(c);
         }
 
-        return new Token(token.toString(), Token.Type.IDENTIFIER, new Position(reader.getLine(), reader.getIndex()));
+        String tokenString = token.toString();
+
+
+        return new Token(tokenString, Token.Type.IDENTIFIER, new Position(reader.getLine(), reader.getIndex()));
     }
 
     private boolean isNumberLike() {
@@ -2,11 +2,12 @@ package structure;
 
 import com.dfsek.terra.api.math.vector.Location;
 import com.dfsek.terra.api.platform.world.Chunk;
-import com.dfsek.terra.api.structures.parser.Argument;
-import com.dfsek.terra.api.structures.parser.Function;
 import com.dfsek.terra.api.structures.parser.FunctionBuilder;
 import com.dfsek.terra.api.structures.parser.Parser;
 import com.dfsek.terra.api.structures.parser.exceptions.ParseException;
+import com.dfsek.terra.api.structures.parser.lang.Argument;
+import com.dfsek.terra.api.structures.parser.lang.Function;
+import com.dfsek.terra.api.structures.parser.lang.Item;
 import org.apache.commons.io.IOUtils;
 import org.junit.jupiter.api.Test;
 
@@ -32,11 +33,11 @@ public class ParserTest {
         });
 
         long l = System.nanoTime();
-        List<Function<?>> functions = parser.parse();
+        List<Item<?>> functions = parser.parse().getItems();
         long t = System.nanoTime() - l;
         System.out.println("Took " + (double) t / 1000000);
 
-        for(Function<?> f : functions) System.out.println(f);
+        for(Item<?> f : functions) System.out.println(f);
     }
 
     private static class Test1 implements Function<Void> {
@@ -1,2 +1,8 @@
 test("hello", 1);
-test("ghgj{}()\"\\hgjhgj", 3.4);
+
+if(test("hello", 1) == test("hello", 1)) {
+    test("hello", 1);
+}
+
+test("ghgj{}()\"\\hgjhgj", 3.4);
+
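With the tokenizer changes above, the new if line in this script should come out as roughly the following token-type sequence (hand-traced from fetch(), so treat it as an approximation; the '(' / ')' and ',' mappings are inferred rather than shown in this diff):

    // if(test("hello", 1) == test("hello", 1)) {
    // IDENTIFIER, BODY_BEGIN, IDENTIFIER, BODY_BEGIN, STRING, SEPARATOR, NUMBER, BODY_END,
    // BOOLEAN_OPERATOR, IDENTIFIER, BODY_BEGIN, STRING, SEPARATOR, NUMBER, BODY_END,
    // BODY_END, BLOCK_BEGIN
    // ...then the inner statement's tokens, and BLOCK_END for the closing brace.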