-rw-r--r--  src/passes/RemoveUnusedBrs.cpp                      | 176
-rw-r--r--  test/lit/passes/remove-unused-brs-gc.wast           | 449
-rw-r--r--  test/lit/passes/remove-unused-brs_all-features.wast | 121
3 files changed, 638 insertions(+), 108 deletions(-)
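
This commit teaches RemoveUnusedBrs to use fallthrough type information (via Properties::getFallthroughType) when optimizing br_on_null, br_on_non_null, br_on_cast, and br_on_cast_fail: it refines the cast target type toward the greatest lower bound with the fallthrough type, and when a branch becomes unconditional it inserts a ref.cast or ref.as_non_null where needed to keep the IR valid. A minimal sketch of the central rewrite, using names taken from the tests below:

    ;; Before: the fallthrough value is a (ref $struct), so the cast is known to
    ;; succeed, even though curr->ref itself only has type anyref.
    (br_on_cast $block anyref (ref $struct)
      (local.tee $any (struct.new $struct))
    )
    ;; After: an unconditional br, with a ref.cast added for validity.
    (br $block
      (ref.cast $struct
        (local.tee $any (struct.new $struct))
      )
    )
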
diff --git a/src/passes/RemoveUnusedBrs.cpp b/src/passes/RemoveUnusedBrs.cpp
index dc7741c0e..4fda7afb2 100644
--- a/src/passes/RemoveUnusedBrs.cpp
+++ b/src/passes/RemoveUnusedBrs.cpp
@@ -699,8 +699,11 @@ struct RemoveUnusedBrs : public WalkerPass<PostWalker<RemoveUnusedBrs>> {
}
struct Optimizer : public PostWalker<Optimizer> {
+ PassOptions& passOptions;
bool worked = false;
+ Optimizer(PassOptions& passOptions) : passOptions(passOptions) {}
+
void visitBrOn(BrOn* curr) {
// Ignore unreachable BrOns which we cannot improve anyhow. Note that
// we must check the ref field manually, as we may be changing types as
@@ -712,56 +715,159 @@ struct RemoveUnusedBrs : public WalkerPass<PostWalker<RemoveUnusedBrs>> {
return;
}
- // First, check for a possible null which would prevent optimizations on
- // null checks.
- // TODO: Use the fallthrough to determine in more cases that we
- // definitely have a null.
- auto refType = curr->ref->type;
- if (refType.isNullable() &&
- (curr->op == BrOnNull || curr->op == BrOnNonNull)) {
- return;
- }
+ Builder builder(*getModule());
+
+ Type refType =
+ Properties::getFallthroughType(curr->ref, passOptions, *getModule());
+ assert(refType.isRef());
+
+ // When we optimize based on all the fallthrough type information
+ // available, we may need to insert a cast to maintain validity. For
+ // example, in this case we know the cast will succeed, but it would be
+ // invalid to send curr->ref directly:
+ //
+ // (br_on_cast $l anyref i31ref
+ // (block (result anyref)
+ // (i31.new ...)))
+ //
+ // We could just always do the cast and leave removing the casts to
+ // OptimizeInstructions, but it's simple enough to avoid unnecessary
+ // casting here.
+ auto maybeCast = [&](Expression* expr, Type type) -> Expression* {
+ assert(expr->type.isRef() && type.isRef());
+ if (Type::isSubType(expr->type, type)) {
+ return expr;
+ }
+ if (HeapType::isSubType(expr->type.getHeapType(),
+ type.getHeapType())) {
+ return builder.makeRefAs(RefAsNonNull, expr);
+ }
+ return builder.makeRefCast(expr, type);
+ };
if (curr->op == BrOnNull) {
- assert(refType.isNonNullable());
- // This cannot be null, so the br is never taken, and the non-null
- // value flows through.
- replaceCurrent(curr->ref);
- worked = true;
+ if (refType.isNull()) {
+ // The branch will definitely be taken.
+ replaceCurrent(builder.makeSequence(builder.makeDrop(curr->ref),
+ builder.makeBreak(curr->name)));
+ worked = true;
+ return;
+ }
+ if (refType.isNonNullable()) {
+ // The branch will definitely not be taken.
+ replaceCurrent(maybeCast(curr->ref, curr->type));
+ worked = true;
+ return;
+ }
return;
}
+
if (curr->op == BrOnNonNull) {
- assert(refType.isNonNullable());
- // This cannot be null, so the br is always taken.
- replaceCurrent(
- Builder(*getModule()).makeBreak(curr->name, curr->ref));
- worked = true;
+ if (refType.isNull()) {
+ // Definitely not taken.
+ replaceCurrent(builder.makeDrop(curr->ref));
+ worked = true;
+ return;
+ }
+ if (refType.isNonNullable()) {
+ // Definitely taken.
+ replaceCurrent(builder.makeBreak(
+ curr->name, maybeCast(curr->ref, curr->getSentType())));
+ worked = true;
+ return;
+ }
return;
}
- // Check if the type is the kind we are checking for.
+ // Improve the cast target type as much as possible given what we know
+ // about the input. Unlike in BrOn::finalize(), we consider type
+ // information from all the fallthrough values here. We can continue to
+ // further optimizations after this, and those optimizations might even
+ // benefit from this improvement.
+ auto glb = Type::getGreatestLowerBound(curr->castType, refType);
+ if (glb != Type::unreachable && glb != curr->castType) {
+ curr->castType = glb;
+ curr->finalize();
+ worked = true;
+ }
+
+ // Depending on what we know about the cast results, we may be able to
+ // optimize.
auto result = GCTypeUtils::evaluateCastCheck(refType, curr->castType);
if (curr->op == BrOnCastFail) {
result = GCTypeUtils::flipEvaluationResult(result);
}
- if (result == GCTypeUtils::Success) {
- // The cast succeeds, so we can switch from BrOn to a simple br that
- // is always taken.
- replaceCurrent(
- Builder(*getModule()).makeBreak(curr->name, curr->ref));
- worked = true;
- } else if (result == GCTypeUtils::Failure ||
- result == GCTypeUtils::Unreachable) {
- // The cast fails, so the branch is never taken, and the value just
- // flows through. Or, the cast cannot even be reached, so it does not
- // matter what we do, and we can handle it as a failure.
- replaceCurrent(curr->ref);
- worked = true;
+ switch (result) {
+ case GCTypeUtils::Unknown:
+ // Anything could happen, so we cannot optimize.
+ return;
+ case GCTypeUtils::Success: {
+ replaceCurrent(builder.makeBreak(
+ curr->name, maybeCast(curr->ref, curr->getSentType())));
+ worked = true;
+ return;
+ }
+ case GCTypeUtils::Failure: {
+ replaceCurrent(maybeCast(curr->ref, curr->type));
+ worked = true;
+ return;
+ }
+ case GCTypeUtils::SuccessOnlyIfNull: {
+ // TODO: optimize this case using the following replacement, which
+ // avoids using any scratch locals and only does a single null
+ // check, but does require generating a fresh label:
+ //
+ // (br_on_cast $l (ref null $X) (ref null $Y)
+ // (...)
+ // )
+ // =>
+ // (block $l' (result (ref $X))
+ // (br_on_non_null $l' ;; reuses `curr`
+ // (...)
+ // )
+ // (br $l
+ // (ref.null bot<X>)
+ // )
+ // )
+ return;
+ }
+ case GCTypeUtils::SuccessOnlyIfNonNull: {
+ // Perform this replacement:
+ //
+ // (br_on_cast $l (ref null $X') (ref $X)
+ // (...)
+ // )
+ // =>
+ // (block (result (ref bot<X>))
+ // (br_on_non_null $l ;; reuses `curr`
+ // (...)
+ // )
+ // (ref.null bot<X>)
+ // )
+ curr->ref = maybeCast(
+ curr->ref, Type(curr->getSentType().getHeapType(), Nullable));
+ curr->op = BrOnNonNull;
+ curr->castType = Type::none;
+ curr->type = Type::none;
+
+ assert(curr->ref->type.isRef());
+ auto* refNull = builder.makeRefNull(curr->ref->type.getHeapType());
+ replaceCurrent(builder.makeBlock({curr, refNull}, refNull->type));
+ worked = true;
+ return;
+ }
+ case GCTypeUtils::Unreachable: {
+ // The cast is never executed, possibly because its input type is
+ // uninhabitable. Replace it with unreachable.
+ auto* drop = builder.makeDrop(curr->ref);
+ auto* unreachable = ExpressionManipulator::unreachable(curr);
+ replaceCurrent(builder.makeBlock({drop, unreachable}));
+ worked = true;
+ return;
+ }
}
- // TODO: Handle SuccessOnlyIfNull and SuccessOnlyIfNonNull.
}
- } optimizer;
+ } optimizer(getPassOptions());
optimizer.setModule(getModule());
optimizer.doWalkFunction(func);
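
Of the new cast-evaluation results handled above, SuccessOnlyIfNonNull reuses the BrOn as a br_on_non_null. The shape of that rewrite, as exercised by the updated br_on_cast_fail_unrelated test below (types and names are from that test):

    ;; Before: $struct2 is unrelated to $struct, so the cast to (ref null $struct)
    ;; can only succeed on a null value; for br_on_cast_fail that means the branch
    ;; is taken exactly when the value is non-null.
    (br_on_cast_fail $block anyref (ref null $struct)
      (local.get $nullable-struct2)
    )
    ;; After: a plain null check; if the branch is not taken the value must be
    ;; null, so a nullref flows out of the wrapping block.
    (block (result nullref)
      (br_on_non_null $block
        (local.get $nullable-struct2)
      )
      (ref.null none)
    )
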
diff --git a/test/lit/passes/remove-unused-brs-gc.wast b/test/lit/passes/remove-unused-brs-gc.wast
index 94dbabc6b..0d0a94cd8 100644
--- a/test/lit/passes/remove-unused-brs-gc.wast
+++ b/test/lit/passes/remove-unused-brs-gc.wast
@@ -6,9 +6,11 @@
(rec
;; CHECK: (rec
;; CHECK-NEXT: (type $struct (struct ))
- (type $struct (struct))
+ (type $struct (sub (struct)))
;; CHECK: (type $struct2 (struct ))
(type $struct2 (struct))
+ ;; CHECK: (type $substruct (sub $struct (struct )))
+ (type $substruct (sub $struct (struct)))
)
;; CHECK: (func $br_on-if (type $ref|struct|_=>_none) (param $0 (ref struct))
@@ -43,21 +45,49 @@
)
;; CHECK: (func $br_on_cast (type $none_=>_ref|$struct|) (result (ref $struct))
+ ;; CHECK-NEXT: (local $struct (ref null $struct))
;; CHECK-NEXT: (block $block (result (ref $struct))
;; CHECK-NEXT: (drop
;; CHECK-NEXT: (br $block
;; CHECK-NEXT: (struct.new_default $struct)
;; CHECK-NEXT: )
;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (block (result nullref)
+ ;; CHECK-NEXT: (br_on_non_null $block
+ ;; CHECK-NEXT: (local.get $struct)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (ref.null none)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (br_on_cast $block (ref $struct) (ref $substruct)
+ ;; CHECK-NEXT: (struct.new_default $struct)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
;; CHECK-NEXT: (unreachable)
;; CHECK-NEXT: )
;; CHECK-NEXT: )
(func $br_on_cast (result (ref $struct))
+ (local $struct (ref null $struct))
(block $block (result (ref $struct))
(drop
- ;; This static cast can be computed at compile time: it will definitely be
- ;; taken, so we can turn it into a normal br.
+ ;; This cast can be computed at compile time: it will definitely be taken,
+ ;; so we can turn it into a normal br.
+ (br_on_cast $block anyref (ref $struct)
+ (struct.new $struct)
+ )
+ )
+ (drop
+ ;; This cast can be partially computed at compile time, but we still need to
+ ;; do a null check.
(br_on_cast $block anyref (ref $struct)
+ (local.get $struct)
+ )
+ )
+ (drop
+ ;; This cast cannot be optimized at all.
+ (br_on_cast $block anyref (ref $substruct)
(struct.new $struct)
)
)
@@ -65,6 +95,69 @@
)
)
+ ;; CHECK: (func $br_on_cast-fallthrough (type $none_=>_ref|$struct|) (result (ref $struct))
+ ;; CHECK-NEXT: (local $struct (ref null $struct))
+ ;; CHECK-NEXT: (local $any anyref)
+ ;; CHECK-NEXT: (block $block (result (ref $struct))
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (br $block
+ ;; CHECK-NEXT: (ref.cast $struct
+ ;; CHECK-NEXT: (local.tee $any
+ ;; CHECK-NEXT: (struct.new_default $struct)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (block (result nullref)
+ ;; CHECK-NEXT: (br_on_non_null $block
+ ;; CHECK-NEXT: (ref.cast null $struct
+ ;; CHECK-NEXT: (local.tee $any
+ ;; CHECK-NEXT: (local.get $struct)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (ref.null none)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (br_on_cast $block anyref (ref $substruct)
+ ;; CHECK-NEXT: (local.tee $any
+ ;; CHECK-NEXT: (struct.new_default $struct)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (unreachable)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ (func $br_on_cast-fallthrough (result (ref $struct))
+ ;; Same as above, but now the type information comes from fallthrough values.
+ (local $struct (ref null $struct))
+ (local $any anyref)
+ (block $block (result (ref $struct))
+ (drop
+ ;; Definitely taken, but will need a cast for validity.
+ (br_on_cast $block anyref (ref $struct)
+ (local.tee $any (struct.new $struct))
+ )
+ )
+ (drop
+ ;; Needs a null check and cast for validity.
+ (br_on_cast $block anyref (ref $struct)
+ (local.tee $any (local.get $struct))
+ )
+ )
+ (drop
+ ;; This cannot be optimized, but at least it still doesn't need an
+ ;; additional cast.
+ (br_on_cast $block anyref (ref $substruct)
+ (local.tee $any (struct.new $struct))
+ )
+ )
+ (unreachable)
+ )
+ )
+
;; CHECK: (func $nested_br_on_cast (type $none_=>_i31ref) (result i31ref)
;; CHECK-NEXT: (block $label$1 (result (ref i31))
;; CHECK-NEXT: (drop
@@ -136,8 +229,9 @@
)
)
(drop
- ;; But if both are nullable, then we can't optimize because the cast would
- ;; succeed if the value is a null.
+ ;; But if both are nullable, then the cast will succeed only if the value is
+ ;; null, so we can partially optimize.
+ ;; TODO: Optimize this.
(br_on_cast $block anyref (ref null $struct)
(local.get $nullable-struct2)
)
@@ -146,6 +240,172 @@
)
)
+ ;; CHECK: (func $br_on_cast_unrelated-fallthrough (type $none_=>_ref?|$struct|) (result (ref null $struct))
+ ;; CHECK-NEXT: (local $any anyref)
+ ;; CHECK-NEXT: (local $nullable-struct2 (ref null $struct2))
+ ;; CHECK-NEXT: (block $block (result nullref)
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (local.tee $any
+ ;; CHECK-NEXT: (struct.new_default $struct2)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (local.tee $any
+ ;; CHECK-NEXT: (struct.new_default $struct2)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (local.tee $any
+ ;; CHECK-NEXT: (local.get $nullable-struct2)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (br_on_cast $block anyref nullref
+ ;; CHECK-NEXT: (local.tee $any
+ ;; CHECK-NEXT: (local.get $nullable-struct2)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (unreachable)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ (func $br_on_cast_unrelated-fallthrough (result (ref null $struct))
+ ;; Same as above, but now all the type information comes from fallthrough values.
+ (local $any anyref)
+ (local $nullable-struct2 (ref null $struct2))
+ (block $block (result (ref null $struct))
+ (drop
+ ;; Definitely not taken.
+ (br_on_cast $block anyref (ref $struct)
+ (local.tee $any (struct.new $struct2))
+ )
+ )
+ (drop
+ ;; Still not taken.
+ (br_on_cast $block anyref (ref null $struct)
+ (local.tee $any (struct.new $struct2))
+ )
+ )
+ (drop
+ ;; Also not taken.
+ (br_on_cast $block anyref (ref $struct)
+ (local.tee $any (local.get $nullable-struct2))
+ )
+ )
+ (drop
+ ;; Taken only if null.
+ ;; TODO: Optimize this.
+ (br_on_cast $block anyref (ref null $struct)
+ (local.tee $any (local.get $nullable-struct2))
+ )
+ )
+ (unreachable)
+ )
+ )
+
+ ;; CHECK: (func $br_on_cast_fail (type $none_=>_anyref) (result anyref)
+ ;; CHECK-NEXT: (local $struct (ref null $struct))
+ ;; CHECK-NEXT: (block $block (result (ref null $struct))
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (struct.new_default $struct)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (br_on_cast_fail $block (ref null $struct) (ref $struct)
+ ;; CHECK-NEXT: (local.get $struct)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (br_on_cast_fail $block (ref $struct) (ref $substruct)
+ ;; CHECK-NEXT: (struct.new_default $struct)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (unreachable)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ (func $br_on_cast_fail (result anyref)
+ (local $struct (ref null $struct))
+ (block $block (result anyref)
+ (drop
+ ;; This cast can be computed at compile time: it will definitely succeed, so
+ ;; the branch will not be taken.
+ (br_on_cast_fail $block anyref (ref $struct)
+ (struct.new $struct)
+ )
+ )
+ (drop
+ ;; This cast can be partially computed at compile time, but we still need to
+ ;; do a null check.
+ ;; TODO: optimize this.
+ (br_on_cast_fail $block anyref (ref $struct)
+ (local.get $struct)
+ )
+ )
+ (drop
+ ;; This cast cannot be optimized at all.
+ (br_on_cast_fail $block anyref (ref $substruct)
+ (struct.new $struct)
+ )
+ )
+ (unreachable)
+ )
+ )
+
+ ;; CHECK: (func $br_on_cast_fail-fallthrough (type $none_=>_anyref) (result anyref)
+ ;; CHECK-NEXT: (local $any anyref)
+ ;; CHECK-NEXT: (local $struct (ref null $struct))
+ ;; CHECK-NEXT: (block $block (result anyref)
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (ref.cast $struct
+ ;; CHECK-NEXT: (local.tee $any
+ ;; CHECK-NEXT: (struct.new_default $struct)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (br_on_cast_fail $block anyref (ref $struct)
+ ;; CHECK-NEXT: (local.tee $any
+ ;; CHECK-NEXT: (local.get $struct)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (br_on_cast_fail $block anyref (ref $substruct)
+ ;; CHECK-NEXT: (local.tee $any
+ ;; CHECK-NEXT: (struct.new_default $struct)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (unreachable)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ (func $br_on_cast_fail-fallthrough (result anyref)
+ ;; Same as above, but now the type information comes from fallthrough values.
+ (local $any anyref)
+ (local $struct (ref null $struct))
+ (block $block (result anyref)
+ (drop
+ ;; This cast will succeed. We will need a cast for validity.
+ (br_on_cast_fail $block anyref (ref $struct)
+ (local.tee $any (struct.new $struct))
+ )
+ )
+ (drop
+ ;; We will still need a null check.
+ ;; TODO: optimize this.
+ (br_on_cast_fail $block anyref (ref $struct)
+ (local.tee $any (local.get $struct))
+ )
+ )
+ (drop
+ ;; This cast cannot be optimized at all.
+ (br_on_cast_fail $block anyref (ref $substruct)
+ (local.tee $any (struct.new $struct))
+ )
+ )
+ (unreachable)
+ )
+ )
+
;; CHECK: (func $br_on_cast_fail_unrelated (type $none_=>_anyref) (result anyref)
;; CHECK-NEXT: (local $nullable-struct2 (ref null $struct2))
;; CHECK-NEXT: (block $block (result (ref null $struct2))
@@ -165,8 +425,11 @@
;; CHECK-NEXT: )
;; CHECK-NEXT: )
;; CHECK-NEXT: (drop
- ;; CHECK-NEXT: (br_on_cast_fail $block (ref null $struct2) nullref
- ;; CHECK-NEXT: (local.get $nullable-struct2)
+ ;; CHECK-NEXT: (block (result nullref)
+ ;; CHECK-NEXT: (br_on_non_null $block
+ ;; CHECK-NEXT: (local.get $nullable-struct2)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (ref.null none)
;; CHECK-NEXT: )
;; CHECK-NEXT: )
;; CHECK-NEXT: (unreachable)
@@ -195,8 +458,8 @@
)
)
(drop
- ;; But if both are nullable, then we can't optimize because the cast would
- ;; succeed if the value is a null.
+ ;; But if both are nullable, then we can only partially optimize because we
+ ;; still have to do a null check.
(br_on_cast_fail $block anyref (ref null $struct)
(local.get $nullable-struct2)
)
@@ -205,70 +468,130 @@
)
)
- ;; CHECK: (func $br_on_cast_no (type $none_=>_ref|$struct|) (result (ref $struct))
- ;; CHECK-NEXT: (local $struct (ref null $struct))
- ;; CHECK-NEXT: (block $block (result (ref $struct))
+ ;; CHECK: (func $br_on_cast_fail_unrelated-fallthrough (type $none_=>_anyref) (result anyref)
+ ;; CHECK-NEXT: (local $any anyref)
+ ;; CHECK-NEXT: (local $nullable-struct2 (ref null $struct2))
+ ;; CHECK-NEXT: (block $block (result anyref)
;; CHECK-NEXT: (drop
- ;; CHECK-NEXT: (br_on_cast $block (ref null $struct) (ref $struct)
- ;; CHECK-NEXT: (local.get $struct)
+ ;; CHECK-NEXT: (br $block
+ ;; CHECK-NEXT: (local.tee $any
+ ;; CHECK-NEXT: (struct.new_default $struct2)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (br $block
+ ;; CHECK-NEXT: (local.tee $any
+ ;; CHECK-NEXT: (struct.new_default $struct2)
+ ;; CHECK-NEXT: )
;; CHECK-NEXT: )
;; CHECK-NEXT: )
- ;; CHECK-NEXT: (unreachable)
- ;; CHECK-NEXT: )
- ;; CHECK-NEXT: )
- (func $br_on_cast_no (result (ref $struct))
- (local $struct (ref null $struct))
- (block $block (result (ref $struct))
- (drop
- (br_on_cast $block anyref (ref $struct)
- ;; As above, but now the type is nullable, so we cannot infer anything.
- (local.get $struct)
- )
- )
- (unreachable)
- )
- )
-
- ;; CHECK: (func $br_on_cast_nullable (type $none_=>_ref?|$struct|) (result (ref null $struct))
- ;; CHECK-NEXT: (block $block (result nullref)
;; CHECK-NEXT: (drop
;; CHECK-NEXT: (br $block
+ ;; CHECK-NEXT: (local.tee $any
+ ;; CHECK-NEXT: (local.get $nullable-struct2)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (block (result nullref)
+ ;; CHECK-NEXT: (br_on_non_null $block
+ ;; CHECK-NEXT: (local.tee $any
+ ;; CHECK-NEXT: (local.get $nullable-struct2)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
;; CHECK-NEXT: (ref.null none)
;; CHECK-NEXT: )
;; CHECK-NEXT: )
;; CHECK-NEXT: (unreachable)
;; CHECK-NEXT: )
;; CHECK-NEXT: )
- (func $br_on_cast_nullable (result (ref null $struct))
- (block $block (result (ref null $struct))
+ (func $br_on_cast_fail_unrelated-fallthrough (result anyref)
+ ;; Same as above, but now type information comes from fallthrough values.
+ (local $any anyref)
+ (local $nullable-struct2 (ref null $struct2))
+ (block $block (result anyref)
(drop
- (br_on_cast $block anyref (ref null $struct)
- ;; As above, but now the cast allows nulls, so we can optimize.
- (ref.null $struct)
+ ;; Will definitely take the branch.
+ (br_on_cast_fail $block anyref (ref $struct)
+ (local.tee $any (struct.new $struct2))
+ )
+ )
+ (drop
+ ;; Ditto.
+ (br_on_cast_fail $block anyref (ref null $struct)
+ (local.tee $any (struct.new $struct2))
+ )
+ )
+ (drop
+ ;; Ditto.
+ (br_on_cast_fail $block anyref (ref $struct)
+ (local.tee $any (local.get $nullable-struct2))
+ )
+ )
+ (drop
+ ;; Still has to do a null check.
+ (br_on_cast_fail $block anyref (ref null $struct)
+ (local.tee $any (local.get $nullable-struct2))
)
)
(unreachable)
)
)
- ;; CHECK: (func $br_on_cast_fail (type $none_=>_ref|$struct|) (result (ref $struct))
+ ;; CHECK: (func $br_on_cast-unreachable (type $i31ref_=>_anyref) (param $i31ref i31ref) (result anyref)
;; CHECK-NEXT: (block $block
;; CHECK-NEXT: (drop
- ;; CHECK-NEXT: (struct.new_default $struct)
+ ;; CHECK-NEXT: (block
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (block (result (ref none))
+ ;; CHECK-NEXT: (ref.cast none
+ ;; CHECK-NEXT: (block (result i31ref)
+ ;; CHECK-NEXT: (local.get $i31ref)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (unreachable)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (block
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (block (result (ref none))
+ ;; CHECK-NEXT: (ref.cast none
+ ;; CHECK-NEXT: (block (result i31ref)
+ ;; CHECK-NEXT: (local.get $i31ref)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (unreachable)
;; CHECK-NEXT: )
- ;; CHECK-NEXT: (unreachable)
;; CHECK-NEXT: )
;; CHECK-NEXT: )
- (func $br_on_cast_fail (result (ref $struct))
- (block $block (result (ref $struct))
+ (func $br_on_cast-unreachable (param $i31ref i31ref) (result anyref)
+ ;; Optimize out br_on_cast* where the input is uninhabitable.
+ (block $block (result anyref)
(drop
- ;; As $br_on_cast, but this checks for a failing cast, so we know it will
- ;; *not* be taken.
- (br_on_cast_fail $block anyref (ref $struct)
- (struct.new $struct)
+ (br_on_cast $block anyref (ref i31)
+ (block (result anyref)
+ (ref.cast struct
+ (block (result anyref)
+ (local.get $i31ref)
+ )
+ )
+ )
+ )
+ )
+ (br_on_cast_fail $block anyref (ref i31)
+ (block (result anyref)
+ (ref.cast struct
+ (block (result anyref)
+ (local.get $i31ref)
+ )
+ )
)
)
- (unreachable)
)
)
@@ -284,7 +607,7 @@
;; CHECK-NEXT: )
;; CHECK-NEXT: )
;; CHECK-NEXT: (drop
- ;; CHECK-NEXT: (if (result anyref)
+ ;; CHECK-NEXT: (if (result nullref)
;; CHECK-NEXT: (local.get $x)
;; CHECK-NEXT: (ref.null none)
;; CHECK-NEXT: (ref.cast null none
@@ -293,26 +616,34 @@
;; CHECK-NEXT: )
;; CHECK-NEXT: )
;; CHECK-NEXT: (drop
- ;; CHECK-NEXT: (if (result anyref)
- ;; CHECK-NEXT: (local.get $x)
- ;; CHECK-NEXT: (block $something (result anyref)
- ;; CHECK-NEXT: (drop
- ;; CHECK-NEXT: (br_on_cast $something (ref null $struct) (ref $struct)
- ;; CHECK-NEXT: (local.get $struct)
+ ;; CHECK-NEXT: (select (result (ref null $struct))
+ ;; CHECK-NEXT: (block (result (ref null $struct))
+ ;; CHECK-NEXT: (block $something (result (ref null $struct))
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (block (result nullref)
+ ;; CHECK-NEXT: (br_on_non_null $something
+ ;; CHECK-NEXT: (local.get $struct)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (ref.null none)
+ ;; CHECK-NEXT: )
;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (ref.null none)
;; CHECK-NEXT: )
- ;; CHECK-NEXT: (ref.null none)
;; CHECK-NEXT: )
;; CHECK-NEXT: (ref.null none)
+ ;; CHECK-NEXT: (local.get $x)
;; CHECK-NEXT: )
;; CHECK-NEXT: )
;; CHECK-NEXT: (drop
- ;; CHECK-NEXT: (select (result anyref)
- ;; CHECK-NEXT: (block (result anyref)
+ ;; CHECK-NEXT: (select (result nullref)
+ ;; CHECK-NEXT: (block (result nullref)
;; CHECK-NEXT: (block $nothing
;; CHECK-NEXT: (drop
- ;; CHECK-NEXT: (br_on_null $nothing
- ;; CHECK-NEXT: (ref.null none)
+ ;; CHECK-NEXT: (block
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (ref.null none)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (br $nothing)
;; CHECK-NEXT: )
;; CHECK-NEXT: )
;; CHECK-NEXT: )
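
The final test file covers the plain null checks. With fallthrough information, a br_on_null whose input is known to be null becomes an unconditional branch (after dropping the value), and a br_on_non_null with a known-null input reduces to just a drop. For example, from the expected output below:

    ;; A null reference of a bottom type is definitely null, so the branch is
    ;; always taken:
    (br_on_null $null (ref.null nofunc))
    ;; becomes
    (block
      (drop (ref.null nofunc))
      (br $null)
    )
    ;; while a br_on_non_null of the same value never branches:
    (br_on_non_null $non-null (ref.null nofunc))
    ;; becomes
    (drop (ref.null nofunc))
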
diff --git a/test/lit/passes/remove-unused-brs_all-features.wast b/test/lit/passes/remove-unused-brs_all-features.wast
index c93f25df2..3482afc29 100644
--- a/test/lit/passes/remove-unused-brs_all-features.wast
+++ b/test/lit/passes/remove-unused-brs_all-features.wast
@@ -10,8 +10,6 @@
(type $struct (struct (field (ref null $vector))))
;; CHECK: (type $i32_=>_none (func (param i32)))
- ;; CHECK: (type $none_=>_funcref (func (result funcref)))
-
;; CHECK: (type $none_=>_ref?|$struct| (func (result (ref null $struct))))
;; CHECK: (type $none_=>_f64 (func (result f64)))
@@ -20,8 +18,14 @@
;; CHECK: (type $i32_=>_funcref (func (param i32) (result funcref)))
+ ;; CHECK: (type $funcref_=>_none (func (param funcref)))
+
;; CHECK: (type $none_=>_none (func))
+ ;; CHECK: (type $funcref_=>_funcref (func (param funcref) (result funcref)))
+
+ ;; CHECK: (type $none_=>_funcref (func (result funcref)))
+
;; CHECK: (import "out" "log" (func $log (type $i32_=>_none) (param i32)))
(import "out" "log" (func $log (param i32)))
;; CHECK: (elem declare func $br_on_non_null $br_on_null $i32_=>_none $none_=>_i32)
@@ -118,22 +122,29 @@
)
)
- ;; CHECK: (func $br_on_null (type $none_=>_none)
+ ;; CHECK: (func $br_on_null (type $funcref_=>_none) (param $x funcref)
;; CHECK-NEXT: (block $null
;; CHECK-NEXT: (drop
- ;; CHECK-NEXT: (br_on_null $null
- ;; CHECK-NEXT: (ref.null nofunc)
+ ;; CHECK-NEXT: (block
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (ref.null nofunc)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (br $null)
;; CHECK-NEXT: )
;; CHECK-NEXT: )
;; CHECK-NEXT: (drop
;; CHECK-NEXT: (ref.func $br_on_null)
;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (br_on_null $null
+ ;; CHECK-NEXT: (local.get $x)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
;; CHECK-NEXT: )
;; CHECK-NEXT: )
- (func $br_on_null
+ (func $br_on_null (param $x funcref)
(block $null
;; A null reference to bottom is definitely null, and the br is always taken.
- ;; TODO: Optimize this.
(drop
(br_on_null $null (ref.null nofunc))
)
@@ -142,21 +153,66 @@
(drop
(br_on_null $null (ref.func $br_on_null))
)
+ ;; If we don't know whether the input is null, we can't optimize.
+ (drop
+ (br_on_null $null (local.get $x))
+ )
)
)
- ;; CHECK: (func $br_on_non_null (type $none_=>_funcref) (result funcref)
- ;; CHECK-NEXT: (block $non-null (result (ref $none_=>_funcref))
+ ;; CHECK: (func $br_on_null-fallthrough (type $none_=>_none)
+ ;; CHECK-NEXT: (local $x funcref)
+ ;; CHECK-NEXT: (block $null
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (block
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (local.tee $x
+ ;; CHECK-NEXT: (ref.null nofunc)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (br $null)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (ref.as_non_null
+ ;; CHECK-NEXT: (local.tee $x
+ ;; CHECK-NEXT: (ref.func $br_on_null)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ (func $br_on_null-fallthrough
+ ;; This is the same as above, but now the necessary type information comes
+ ;; from fallthrough values.
+ (local $x funcref)
+ (block $null
+ ;; Definitely taken.
+ (drop
+ (br_on_null $null (local.tee $x (ref.null nofunc)))
+ )
+ ;; Definitely not taken. Optimizable, but still requires a cast for validity.
+ (drop
+ (br_on_null $null (local.tee $x (ref.func $br_on_null)))
+ )
+ )
+ )
+
+ ;; CHECK: (func $br_on_non_null (type $funcref_=>_funcref) (param $x funcref) (result funcref)
+ ;; CHECK-NEXT: (block $non-null (result (ref func))
;; CHECK-NEXT: (br $non-null
;; CHECK-NEXT: (ref.func $br_on_non_null)
;; CHECK-NEXT: )
- ;; CHECK-NEXT: (br_on_non_null $non-null
+ ;; CHECK-NEXT: (drop
;; CHECK-NEXT: (ref.null nofunc)
;; CHECK-NEXT: )
- ;; CHECK-NEXT: (ref.func $br_on_non_null)
+ ;; CHECK-NEXT: (br_on_non_null $non-null
+ ;; CHECK-NEXT: (local.get $x)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (unreachable)
;; CHECK-NEXT: )
;; CHECK-NEXT: )
- (func $br_on_non_null (result funcref)
+ (func $br_on_non_null (param $x funcref) (result funcref)
(block $non-null (result (ref func))
;; A non-null reference is not null, and the br is always taken.
(br_on_non_null $non-null
@@ -164,11 +220,48 @@
)
;; On the other hand, if we know the input is null, the branch will never be
;; taken.
- ;; TODO: Optimize this.
(br_on_non_null $non-null
(ref.null nofunc)
)
- (ref.func $br_on_non_null)
+ ;; If we don't know whether the input is null, we can't optimize.
+ (br_on_non_null $non-null
+ (local.get $x)
+ )
+ (unreachable)
+ )
+ )
+
+ ;; CHECK: (func $br_on_non_null-fallthrough (type $none_=>_funcref) (result funcref)
+ ;; CHECK-NEXT: (local $x funcref)
+ ;; CHECK-NEXT: (block $non-null (result (ref func))
+ ;; CHECK-NEXT: (br $non-null
+ ;; CHECK-NEXT: (ref.as_non_null
+ ;; CHECK-NEXT: (local.tee $x
+ ;; CHECK-NEXT: (ref.func $br_on_non_null)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (drop
+ ;; CHECK-NEXT: (local.tee $x
+ ;; CHECK-NEXT: (ref.null nofunc)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: (unreachable)
+ ;; CHECK-NEXT: )
+ ;; CHECK-NEXT: )
+ (func $br_on_non_null-fallthrough (result funcref)
+ ;; Same as above, but now using fallthrough values.
+ (local $x funcref)
+ (block $non-null (result (ref func))
+ ;; Definitely taken. Requires cast.
+ (br_on_non_null $non-null
+ (local.tee $x (ref.func $br_on_non_null))
+ )
+ ;; Definitely not taken.
+ (br_on_non_null $non-null
+ (local.tee $x (ref.null nofunc))
+ )
+ (unreachable)
)
)
)
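
One further case handled by the pass: when the input to a br_on_cast* is uninhabitable, the instruction can never execute, so it is replaced by a drop of the input followed by unreachable (see the $br_on_cast-unreachable test above). A simplified sketch of that shape; the actual printed output keeps some extra wrapping blocks and a refined cast type:

    ;; The cast of an i31ref to struct can never produce a value, so this
    ;; br_on_cast can never execute.
    (br_on_cast $block anyref (ref i31)
      (ref.cast struct (local.get $i31ref))
    )
    ;; becomes (roughly)
    (block
      (drop (ref.cast struct (local.get $i31ref)))
      (unreachable)
    )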