Diffstat (limited to 'src/passes/OptimizeInstructions.cpp')
 src/passes/OptimizeInstructions.cpp | 27
 1 file changed, 27 insertions(+), 0 deletions(-)
diff --git a/src/passes/OptimizeInstructions.cpp b/src/passes/OptimizeInstructions.cpp
index e71506e28..1f55c0a4b 100644
--- a/src/passes/OptimizeInstructions.cpp
+++ b/src/passes/OptimizeInstructions.cpp
@@ -992,20 +992,45 @@ struct OptimizeInstructions
     }
   }
 
+  // If an instruction traps on a null input, there is no need for a
+  // ref.as_non_null on that input: we will trap either way (and the binaryen
+  // optimizer does not differentiate traps).
+  void skipNonNullCast(Expression*& input) {
+    while (1) {
+      if (auto* as = input->dynCast<RefAs>()) {
+        if (as->op == RefAsNonNull) {
+          input = as->value;
+          continue;
+        }
+      }
+      break;
+    }
+  }
+
+  void visitStructGet(StructGet* curr) { skipNonNullCast(curr->ref); }
+
   void visitStructSet(StructSet* curr) {
+    skipNonNullCast(curr->ref);
+
     if (curr->ref->type != Type::unreachable && curr->value->type.isInteger()) {
       const auto& fields = curr->ref->type.getHeapType().getStruct().fields;
       optimizeStoredValue(curr->value, fields[curr->index].getByteSize());
     }
   }
 
+  void visitArrayGet(ArrayGet* curr) { skipNonNullCast(curr->ref); }
+
   void visitArraySet(ArraySet* curr) {
+    skipNonNullCast(curr->ref);
+
     if (curr->ref->type != Type::unreachable && curr->value->type.isInteger()) {
       auto element = curr->ref->type.getHeapType().getArray().element;
       optimizeStoredValue(curr->value, element.getByteSize());
     }
   }
 
+  void visitArrayLen(ArrayLen* curr) { skipNonNullCast(curr->ref); }
+
   void visitRefCast(RefCast* curr) {
     if (curr->type == Type::unreachable) {
       return;
@@ -1110,6 +1135,8 @@ struct OptimizeInstructions
       return;
     }
+    skipNonNullCast(curr->value);
+
     // Check if the type is the kind we are checking for.
     auto result = GCTypeUtils::evaluateKindCheck(curr);
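
For illustration only (not part of the commit above): a minimal standalone C++ sketch of the idea behind skipNonNullCast, using toy Expr/LocalGet/RefAsNonNull classes rather than Binaryen's real IR. The point it demonstrates is that when the consuming instruction traps on a null reference anyway, a chain of ref.as_non_null casts feeding it can be peeled off without changing observable behavior.

// Minimal standalone sketch (toy classes, not Binaryen's real IR) of the
// peeling loop added above: if the consumer traps on a null reference anyway,
// any chain of non-null casts feeding it can be removed, since the trap
// still happens at the consumer.
#include <iostream>

struct Expr {
  virtual ~Expr() = default;
};

// Stand-in for (local.get $i): some reference-producing expression.
struct LocalGet : Expr {
  int index;
  explicit LocalGet(int i) : index(i) {}
};

// Stand-in for (ref.as_non_null value).
struct RefAsNonNull : Expr {
  Expr* value;
  explicit RefAsNonNull(Expr* v) : value(v) {}
};

// Mirrors the shape of skipNonNullCast in the patch: keep unwrapping
// non-null casts until the input is something else.
void skipNonNullCast(Expr*& input) {
  while (auto* as = dynamic_cast<RefAsNonNull*>(input)) {
    input = as->value;
  }
}

int main() {
  // Builds (ref.as_non_null (ref.as_non_null (local.get 0))) as the operand
  // of a hypothetical trapping consumer such as struct.get.
  LocalGet local(0);
  RefAsNonNull inner(&local);
  RefAsNonNull outer(&inner);
  Expr* ref = &outer;

  skipNonNullCast(ref);

  // Both casts are gone; the consumer would now read (local.get 0) directly
  // and would still trap if it is null at runtime.
  std::cout << (ref == &local ? "casts removed" : "unexpected") << '\n';
}

The patch applies this peeling to the reference operands of struct.get/struct.set, array.get/array.set/array.len, and to the value checked by the kind-check code in the second hunk; instructions that do not trap on a null input are left untouched.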