summaryrefslogtreecommitdiff
path: root/src/passes/OptimizeInstructions.cpp
diff options
context:
space:
mode:
authorAlon Zakai <azakai@google.com>2021-04-12 18:36:46 -0700
committerGitHub <noreply@github.com>2021-04-12 18:36:46 -0700
commit5c8dcfe744d759b9436a04b0267978f42e4725ab (patch)
tree74d1529167b3580578672267c7cdcee7dcc0c6bb /src/passes/OptimizeInstructions.cpp
parent0d7dac2923f9164ca7f1450b6f394ef8fa9d8a68 (diff)
downloadbinaryen-5c8dcfe744d759b9436a04b0267978f42e4725ab.tar.gz
binaryen-5c8dcfe744d759b9436a04b0267978f42e4725ab.tar.bz2
binaryen-5c8dcfe744d759b9436a04b0267978f42e4725ab.zip
[Wasm GC] Optimize away unnecessary non-null assertions (#3800)
ref.as_non_null is not needed if the value flows into a place that traps on null anyhow. We replace a trap on one instruction with a trap on another, but we allow such things (and even changing trap types, which does not happen here).
Diffstat (limited to 'src/passes/OptimizeInstructions.cpp')
-rw-r--r--src/passes/OptimizeInstructions.cpp27
1 file changed, 27 insertions, 0 deletions
diff --git a/src/passes/OptimizeInstructions.cpp b/src/passes/OptimizeInstructions.cpp
index e71506e28..1f55c0a4b 100644
--- a/src/passes/OptimizeInstructions.cpp
+++ b/src/passes/OptimizeInstructions.cpp
@@ -992,20 +992,45 @@ struct OptimizeInstructions
}
}
+ // If an instruction traps on a null input, there is no need for a
+ // ref.as_non_null on that input: we will trap either way (and the binaryen
+ // optimizer does not differentiate traps).
+ //
+ // Mutates |input| in place, replacing it with the cast's operand. Loops so
+ // that a chain of nested ref.as_non_null casts is removed entirely, not
+ // just the outermost one.
+ void skipNonNullCast(Expression*& input) {
+ while (1) {
+ if (auto* as = input->dynCast<RefAs>()) {
+ if (as->op == RefAsNonNull) {
+ // Skip this cast: point directly at its operand and re-check it.
+ input = as->value;
+ continue;
+ }
+ }
+ // Not a ref.as_non_null — nothing more to strip.
+ break;
+ }
+ }
+
+ // struct.get traps on a null reference anyhow, so a ref.as_non_null on the
+ // reference is redundant and can be skipped.
+ void visitStructGet(StructGet* curr) { skipNonNullCast(curr->ref); }
+
void visitStructSet(StructSet* curr) {
+ // struct.set traps on a null reference anyhow, so a ref.as_non_null on
+ // the reference is redundant and can be skipped.
+ skipNonNullCast(curr->ref);
+
// The store only keeps getByteSize() bytes of the written field, so the
// stored value may be simplifiable (see optimizeStoredValue). Skip
// unreachable refs, whose heap type cannot be inspected.
if (curr->ref->type != Type::unreachable && curr->value->type.isInteger()) {
const auto& fields = curr->ref->type.getHeapType().getStruct().fields;
optimizeStoredValue(curr->value, fields[curr->index].getByteSize());
}
}
+ // array.get traps on a null reference anyhow, so a ref.as_non_null on the
+ // reference is redundant and can be skipped.
+ void visitArrayGet(ArrayGet* curr) { skipNonNullCast(curr->ref); }
+
void visitArraySet(ArraySet* curr) {
+ // array.set traps on a null reference anyhow, so a ref.as_non_null on
+ // the reference is redundant and can be skipped.
+ skipNonNullCast(curr->ref);
+
// The store only keeps getByteSize() bytes of the array element, so the
// stored value may be simplifiable (see optimizeStoredValue). Skip
// unreachable refs, whose heap type cannot be inspected.
if (curr->ref->type != Type::unreachable && curr->value->type.isInteger()) {
auto element = curr->ref->type.getHeapType().getArray().element;
optimizeStoredValue(curr->value, element.getByteSize());
}
}
+ // array.len traps on a null reference anyhow, so a ref.as_non_null on the
+ // reference is redundant and can be skipped.
+ void visitArrayLen(ArrayLen* curr) { skipNonNullCast(curr->ref); }
+
void visitRefCast(RefCast* curr) {
if (curr->type == Type::unreachable) {
return;
@@ -1110,6 +1135,8 @@ struct OptimizeInstructions
return;
}
+ skipNonNullCast(curr->value);
+
// Check if the type is the kind we are checking for.
auto result = GCTypeUtils::evaluateKindCheck(curr);