streamline parsing and evaluation structure
gnembon committed Feb 16, 2025
1 parent 14be0ae commit 29e1397
Showing 3 changed files with 36 additions and 44 deletions.
2 changes: 1 addition & 1 deletion src/main/java/carpet/script/CarpetScriptHost.java
@@ -754,7 +754,7 @@ public Value callLegacy(CommandSourceStack source, String call, List<Integer> co
}
}
String sign = "";
- for (Token tok : Tokenizer.simplepass(arg))
+ for (Token tok : Tokenizer.simple(arg).parseTokens())
{
switch (tok.type)
{
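The call-site change above is the whole of the CarpetScriptHost edit: the one-shot static helper Tokenizer.simplepass(String) is replaced by Tokenizer.simple(String), which builds a bare Tokenizer, followed by an explicit parseTokens() call. A minimal sketch of the new shape, using only the signatures visible in this diff (the argument value and local names are illustrative):

// Old: tokenization and collection in a single static helper.
// for (Token tok : Tokenizer.simplepass(arg)) { ... }

// New: construct a context-free Tokenizer, then materialize the tokens.
// Splitting the two steps lets callers reuse or post-process the token list.
List<Token> toks = Tokenizer.simple("~-2").parseTokens();
for (Token tok : toks)
{
    // inspect tok.type and tok.surface exactly as before
}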
29 changes: 16 additions & 13 deletions src/main/java/carpet/script/Expression.java
@@ -860,18 +860,13 @@ public Expression(String expression)
}


- private List<Token> shuntingYard(Context c)
+ private List<Token> shuntingYard(Context c, List<Token> tokens)
{
List<Token> outputQueue = new ArrayList<>();
Stack<Token> stack = new ObjectArrayList<>();

- Tokenizer tokenizer = new Tokenizer(c, this, expression, allowComments, allowNewlineSubstitutions);
- // stripping lousy but acceptable semicolons
- List<Token> cleanedTokens = tokenizer.postProcess();

Token lastFunction = null;
Token previousToken = null;
- for (Token token : cleanedTokens)
+ for (Token token : tokens)
{
switch (token.type)
{
@@ -1188,7 +1183,11 @@ private ExpressionNode RPNToParseTree(List<Token> tokens, Context context)

private LazyValue getAST(Context context)
{
- List<Token> rpn = shuntingYard(context);
+ Tokenizer tokenizer = new Tokenizer(context, this, expression, allowComments, allowNewlineSubstitutions);
+ // stripping lousy but acceptable semicolons
+ List<Token> cleanedTokens = Tokenizer.postProcess(tokenizer.parseTokens());
+
+ List<Token> rpn = shuntingYard(context, cleanedTokens);
validate(context, rpn);
ExpressionNode root = RPNToParseTree(rpn, context);
if (!Vanilla.ScriptServer_scriptOptimizations(((CarpetScriptServer)context.scriptServer()).server))
@@ -1197,7 +1196,12 @@ private LazyValue getAST(Context context)
}

Context optimizeOnlyContext = new Context.ContextForErrorReporting(context);
- boolean scriptsDebugging = Vanilla.ScriptServer_scriptDebugging(((CarpetScriptServer)context.scriptServer()).server);
+ optimizeTree(context, root, optimizeOnlyContext);
+ return extractOp(optimizeOnlyContext, root, Context.Type.NONE);
+ }
+
+ private void optimizeTree(Context context, ExpressionNode root, Context optimizeOnlyContext) {
+ boolean scriptsDebugging = Vanilla.ScriptServer_scriptDebugging(((CarpetScriptServer) context.scriptServer()).server);
if (scriptsDebugging)
{
CarpetScriptServer.LOG.info("Input code size for " + getModuleName() + ": " + treeSize(root) + " nodes, " + treeDepth(root) + " deep");
@@ -1236,7 +1240,7 @@ private LazyValue getAST(Context context)
prevTreeSize = treeSize(root);
prevTreeDepth = treeDepth(root);
}
- boolean optimized = optimizeTree(optimizeOnlyContext, root, Context.Type.NONE, 0, scriptsDebugging);
+ boolean optimized = optimizeConstantsAndPureFunctions(optimizeOnlyContext, root, Context.Type.NONE, 0, scriptsDebugging);
if (!optimized)
{
break;
@@ -1248,7 +1252,6 @@ private LazyValue getAST(Context context)
}
}
}
- return extractOp(optimizeOnlyContext, root, Context.Type.NONE);
}

public Value explain(Context context)
@@ -1378,7 +1381,7 @@ else if (rop.equals("return"))
return optimized;
}

- private boolean optimizeTree(Context ctx, ExpressionNode node, Context.Type expectedType, int indent, boolean scriptsDebugging)
+ private boolean optimizeConstantsAndPureFunctions(Context ctx, ExpressionNode node, Context.Type expectedType, int indent, boolean scriptsDebugging)
{
// ctx is just to report errors, not values evaluation
boolean optimized = false;
@@ -1400,7 +1403,7 @@ private boolean optimizeTree(Context ctx, ExpressionNode node, Context.Type expe
Context.Type requestedType = operation.staticType(expectedType);
for (ExpressionNode arg : node.args)
{
- if (optimizeTree(ctx, arg, requestedType, indent + 1, scriptsDebugging))
+ if (optimizeConstantsAndPureFunctions(ctx, arg, requestedType, indent + 1, scriptsDebugging))
{
optimized = true;
}
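Read together, the Expression.java changes separate the stages of getAST: the Tokenizer is now driven in getAST itself, shuntingYard takes the cleaned token list as an argument instead of re-tokenizing, the iterative optimization loop moves into a new optimizeTree(Context, ExpressionNode, Context) driver, and the old recursive optimizeTree pass is renamed optimizeConstantsAndPureFunctions to free the name. A condensed sketch of the resulting flow, assembled from the lines shown above (the early exit for disabled script optimizations and the debug logging are omitted here):

private LazyValue getAST(Context context)
{
    // 1. Tokenize the raw expression text.
    Tokenizer tokenizer = new Tokenizer(context, this, expression, allowComments, allowNewlineSubstitutions);
    // 2. Drop the "lousy but acceptable" semicolons and comment markers.
    List<Token> cleanedTokens = Tokenizer.postProcess(tokenizer.parseTokens());
    // 3. Convert to RPN; shuntingYard no longer builds its own Tokenizer.
    List<Token> rpn = shuntingYard(context, cleanedTokens);
    validate(context, rpn);
    // 4. Build the parse tree and hand it to the extracted optimization driver.
    ExpressionNode root = RPNToParseTree(rpn, context);
    Context optimizeOnlyContext = new Context.ContextForErrorReporting(context);
    optimizeTree(context, root, optimizeOnlyContext);
    // 5. extractOp is now the single exit point of getAST.
    return extractOp(optimizeOnlyContext, root, Context.Type.NONE);
}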
49 changes: 19 additions & 30 deletions src/main/java/carpet/script/Tokenizer.java
@@ -1,20 +1,16 @@
package carpet.script;

import carpet.script.exception.ExpressionException;
- import carpet.script.exception.InternalExpressionException;

import java.util.ArrayList;
import java.util.Collections;
- import java.util.Iterator;
import java.util.List;
- import java.util.stream.Collectors;
- import java.util.stream.StreamSupport;

/**
* Expression tokenizer that allows to iterate over a {@link String}
* expression token by token. Blank characters will be skipped.
*/
- public class Tokenizer implements Iterator<Token>
+ public class Tokenizer
{
/**
* What character to use for decimal separators.
@@ -53,15 +49,18 @@ public class Tokenizer implements Iterator<Token>
this.newLinesMarkers = allowNewLineMakers;
}

- public List<Token> postProcess()
+ public static Tokenizer simple(String input)
{
+ return new Tokenizer(null, null, input, false, false);
+ }
+
+ public static List<Token> postProcess(List<Token> originalTokens)
+ {
- Iterable<Token> iterable = () -> this;
- List<Token> originalTokens = StreamSupport.stream(iterable.spliterator(), false).collect(Collectors.toList());
List<Token> cleanedTokens = new ArrayList<>();
Token last = null;
- while (!originalTokens.isEmpty())
+ for (int i = originalTokens.size() - 1; i >= 0; i--)
{
- Token current = originalTokens.remove(originalTokens.size() - 1);
+ Token current = originalTokens.get(i);
if (current.type == Token.TokenType.MARKER && current.surface.startsWith("//"))
{
continue;
@@ -104,7 +103,16 @@ else if ("}".equals(current.surface) || "]".equals(current.surface))
return cleanedTokens;
}

- @Override
+ public List<Token> parseTokens()
+ {
+ List<Token> tokens = new ArrayList<>();
+ while (hasNext())
+ {
+ tokens.add(next());
+ }
+ return tokens;
+ }
+
public boolean hasNext()
{
return (pos < input.length());
@@ -132,18 +140,6 @@ private static boolean isSemicolon(Token tok)
|| (tok.type == Token.TokenType.UNARY_OPERATOR && tok.surface.equals(";u"));
}

- public static List<Token> simplepass(String input)
- {
- Tokenizer tok = new Tokenizer(null, null, input, false, false);
- List<Token> res = new ArrayList<>();
- while (tok.hasNext())
- {
- res.add(tok.next());
- }
- return res;
- }
-
- @Override
public Token next()
{
Token token = new Token();
@@ -414,11 +410,4 @@ else if (ch == ',')
}
return previousToken = token;
}

- @Override
- public void remove()
- {
- throw new InternalExpressionException("remove() not supported");
- }

}
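The net effect on Tokenizer: it no longer implements Iterator<Token> (so the @Override on next() and the stub remove() go away), parseTokens() materializes the token stream into a list, postProcess becomes a static transformation over a caller-supplied list that is read back-to-front by index rather than drained destructively, and simple(input) replaces the old simplepass helper. A rough usage sketch follows; the class name and expression text are hypothetical, and the file is placed alongside these classes in the carpet.script package so Token and Tokenizer resolve without extra imports:

package carpet.script;

import java.util.List;

public class TokenizerUsageSketch
{
    public static void main(String[] args)
    {
        // Context-free tokenization, the same shape CarpetScriptHost now uses.
        List<Token> raw = Tokenizer.simple("x + 1;").parseTokens();

        // Strip trailing semicolons and comment markers; in the lines shown in
        // this diff the input list is only read, not mutated.
        List<Token> cleaned = Tokenizer.postProcess(raw);

        System.out.println(raw.size() + " raw tokens, " + cleaned.size() + " after post-processing");
    }
}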
