path: root/src/passes/OptimizeInstructions.cpp
author     Alon Zakai <azakai@google.com>  2021-04-19 16:15:25 -0700
committer  GitHub <noreply@github.com>     2021-04-19 16:15:25 -0700
commit     f180f6c11d2ddf9acc806333c0a452bb57f8acf9 (patch)
tree       2163cef56d35724cfc70bb7ed4ab44400bdb60cf /src/passes/OptimizeInstructions.cpp
parent     069c9b8034f965023a4db0449e6bf6f5215b6199 (diff)
[Wasm GC] Reorder ref.as_non_null with tee and cast (#3820)
In both cases, doing the ref.as_non_null last is beneficial, as we have optimizations that can remove it based on where it is consumed.
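As a rough illustration (not part of the commit), the local.tee case in WebAssembly text form, with hypothetical locals $x and $y of some nullable reference type:

  ;; before: the non-null assertion feeds the tee
  (local.tee $x
    (ref.as_non_null
      (local.get $y)))

  ;; after: the tee runs first, and ref.as_non_null becomes the outermost
  ;; instruction, where later optimizations may be able to drop it depending
  ;; on how the value is consumed (valid only while $x is nullable)
  (ref.as_non_null
    (local.tee $x
      (local.get $y)))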
Diffstat (limited to 'src/passes/OptimizeInstructions.cpp')
-rw-r--r--  src/passes/OptimizeInstructions.cpp  51
1 file changed, 51 insertions(+), 0 deletions(-)
diff --git a/src/passes/OptimizeInstructions.cpp b/src/passes/OptimizeInstructions.cpp
index 3c29f7042..609caf588 100644
--- a/src/passes/OptimizeInstructions.cpp
+++ b/src/passes/OptimizeInstructions.cpp
@@ -917,6 +917,27 @@ struct OptimizeInstructions
     }
   }
 
+  void visitLocalSet(LocalSet* curr) {
+    // (local.tee (ref.as_non_null ..))
+    // can be reordered to
+    // (ref.as_non_null (local.tee ..))
+    // if the local is nullable (which it must be until some form of let is
+    // added). The reordering allows the ref.as to be potentially optimized
+    // further based on where the value flows to.
+    if (curr->isTee()) {
+      if (auto* as = curr->value->dynCast<RefAs>()) {
+        if (as->op == RefAsNonNull &&
+            getFunction()->getLocalType(curr->index).isNullable()) {
+          curr->value = as->value;
+          curr->finalize();
+          as->value = curr;
+          as->finalize();
+          replaceCurrent(as);
+        }
+      }
+    }
+  }
+
   void visitBreak(Break* curr) {
     if (curr->condition) {
       curr->condition = optimizeBoolean(curr->condition);
@@ -1072,6 +1093,36 @@ struct OptimizeInstructions
         Builder builder(*getModule());
         replaceCurrent(
           builder.makeSequence(builder.makeDrop(curr->rtt), curr->ref));
+        return;
+      }
+    }
+
+    // ref.cast can be reordered with ref.as_non_null,
+    //
+    //   (ref.cast (ref.as_non_null ..))
+    // =>
+    //   (ref.as_non_null (ref.cast ..))
+    //
+    // This is valid because both pass through the value if they do not trap,
+    // and so reordering does not change whether a trap happens (and reordering
+    // traps is allowed), and does not change the value flowing out at the end.
+    // It is better to have the ref.as_non_null on the outside since it allows
+    // outer instructions to potentially optimize it away (should we find
+    // optimizations that can fold away a ref.cast on an outer instruction, that
+    // might motivate changing this).
+    //
+    // Note that other ref.as* methods, like ref.as_func, are not obviously
+    // worth reordering with ref.cast. For example, the type of ref.as_data is
+    // (ref data), which is less specific than what ref.cast would have.
+    // TODO optimize ref.cast of ref.as_[func|data|i31] in other ways.
+    if (auto* as = curr->ref->dynCast<RefAs>()) {
+      if (as->op == RefAsNonNull) {
+        curr->ref = as->value;
+        curr->finalize();
+        as->value = curr;
+        as->finalize();
+        replaceCurrent(as);
+        return;
       }
     }
   }
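As a similar rough sketch of the ref.cast case described in the comment in the second hunk above (the rtt operand of ref.cast is elided here, as it is in that comment, and $y is a hypothetical local of a nullable reference type):

  ;; before: the cast consumes the result of the non-null assertion
  (ref.cast
    (ref.as_non_null
      (local.get $y)))

  ;; after: the cast runs first; a trap still happens in exactly the same
  ;; cases, but with ref.as_non_null outermost an enclosing consumer has a
  ;; chance to optimize it away
  (ref.as_non_null
    (ref.cast
      (local.get $y)))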