diff --git a/common/src/main/java/com/dfsek/terra/api/structures/parser/Parser.java b/common/src/main/java/com/dfsek/terra/api/structures/parser/Parser.java
index 84de91448..a0c5cf800 100644
--- a/common/src/main/java/com/dfsek/terra/api/structures/parser/Parser.java
+++ b/common/src/main/java/com/dfsek/terra/api/structures/parser/Parser.java
@@ -87,7 +87,11 @@ public class Parser {
         TokenHolder tokens = new TokenHolder();

         try {
-            while(tokenizer.hasNext()) tokens.add(tokenizer.fetch());
+            Token t = tokenizer.fetch();
+            while(t != null) {
+                tokens.add(t);
+                t = tokenizer.fetch();
+            }
         } catch(TokenizerException e) {
             throw new ParseException("Failed to tokenize input", new Position(0, 0), e);
         }
diff --git a/common/src/main/java/com/dfsek/terra/api/structures/tokenizer/Tokenizer.java b/common/src/main/java/com/dfsek/terra/api/structures/tokenizer/Tokenizer.java
index b5335f1c9..cd25b52f4 100644
--- a/common/src/main/java/com/dfsek/terra/api/structures/tokenizer/Tokenizer.java
+++ b/common/src/main/java/com/dfsek/terra/api/structures/tokenizer/Tokenizer.java
@@ -18,23 +18,14 @@ public class Tokenizer {
         reader = new Lookahead(new StringReader(data + '\0'));
     }

-    public boolean hasNext() {
-
-        int whiteEnd = 0;
-
-        while(!reader.next(whiteEnd).isEOF() && reader.next(whiteEnd).isWhitespace()) whiteEnd++; // Consume whitespace.
-
-        return !reader.next(whiteEnd).isEOF();
-    }
-
     public Token fetch() throws TokenizerException {
         while(!reader.current().isEOF() && reader.current().isWhitespace()) reader.consume();

-        if(reader.current().isEOF()) return null; // EOF
-        if(reader.matches("//", true)) skipLine(); // Skip line if comment
+        while(reader.matches("//", true)) skipLine(); // Skip line if comment
         if(reader.matches("/*", true)) skipTo("*/"); // Skip multi line comment

+        if(reader.current().isEOF()) return null; // EOF
+
         if(reader.matches("==", true)) return new Token("==", Token.Type.EQUALS_OPERATOR, new Position(reader.getLine(), reader.getIndex()));