author    Alon Zakai <azakai@google.com>    2024-06-25 09:05:05 -0700
committer GitHub <noreply@github.com>    2024-06-25 09:05:05 -0700
commit    4e07d867f8c4cbcf0180ad439a08c46353bc3751 (patch)
tree      125d432f88ba51d94f89832ddb79ad114552489c /src/passes/Heap2Local.cpp
parent    78b3c40310e886fb93480008fc55cdcd4fffa52c (diff)
[NFC] Remove a minor compile-time optimization in Heap2Local (#6699)
We tracked which expressions each allocated struct/array reached, and exited early when a second allocation reached an expression we had already seen (when two allocations mix, we can optimize neither). It turns out this helps very little in actual measurements - the gain appears to be within noise, likely because we already rule out the un-optimizable cases early anyhow. The complexity it adds is also a problem for an improvement I want to make to the pass, so remove it.
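For context, a minimal sketch of the mechanism this commit removes (illustrative only; SharedSeen and crossesPreviousAllocation are hypothetical names for this note, not Binaryen API):

    #include <unordered_set>

    struct Expression; // stands in for Binaryen's IR node type

    // One instance was shared across all EscapeAnalyzer runs in a function.
    struct SharedSeen {
      std::unordered_set<Expression*> seen;

      // True if an earlier allocation's analysis already visited |expr|:
      // any overlap between two allocations' flows proves neither is
      // exclusive, so both are unoptimizable and we can exit early.
      bool crossesPreviousAllocation(Expression* expr) {
        // emplace().second is false when |expr| was already in the set.
        return !seen.emplace(expr).second;
      }
    };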
Diffstat (limited to 'src/passes/Heap2Local.cpp')
-rw-r--r--  src/passes/Heap2Local.cpp | 49
1 file changed, 6 insertions, 43 deletions
diff --git a/src/passes/Heap2Local.cpp b/src/passes/Heap2Local.cpp
index 9d1763ddc..519c45fdc 100644
--- a/src/passes/Heap2Local.cpp
+++ b/src/passes/Heap2Local.cpp
@@ -170,16 +170,8 @@ namespace {
// Core analysis that provides an escapes() method to check if an allocation
// escapes in a way that prevents optimizing it away as described above. It also
// stashes information about the relevant expressions as it goes, which helps
-// optimization later (|seen| and |reached|).
+// optimization later (|reached|).
struct EscapeAnalyzer {
- // All the expressions that have already been seen by the optimizer, see the
- // comment above on exclusivity: once we have seen something when analyzing
- // one allocation, if we reach it again then we can exit early since seeing it
- // a second time proves we lost exclusivity. We must track this across
- // multiple instances of EscapeAnalyzer as each handles a particular
- // allocation.
- std::unordered_set<Expression*>& seen;
-
// To find what escapes, we need to follow where values flow, both up to
// parents, and via branches, and through locals.
// TODO: for efficiency, only scan reference types in LocalGraph
@@ -190,14 +182,13 @@ struct EscapeAnalyzer {
const PassOptions& passOptions;
Module& wasm;
- EscapeAnalyzer(std::unordered_set<Expression*>& seen,
- const LocalGraph& localGraph,
+ EscapeAnalyzer(const LocalGraph& localGraph,
const Parents& parents,
const BranchUtils::BranchTargets& branchTargets,
const PassOptions& passOptions,
Module& wasm)
- : seen(seen), localGraph(localGraph), parents(parents),
- branchTargets(branchTargets), passOptions(passOptions), wasm(wasm) {}
+ : localGraph(localGraph), parents(parents), branchTargets(branchTargets),
+ passOptions(passOptions), wasm(wasm) {}
// We must track all the local.sets that write the allocation, to verify
// exclusivity.
@@ -261,30 +252,6 @@ struct EscapeAnalyzer {
assert(interaction == ParentChildInteraction::FullyConsumes ||
interaction == ParentChildInteraction::Flows);
- // If we've already seen an expression, stop since we cannot optimize
- // things that overlap in any way (see the notes on exclusivity, above).
- // Note that we use a nonrepeating queue here, so we already do not visit
- // the same thing more than once; what this check does is verify we don't
- // look at something that another allocation reached, which would be in a
- // different call to this function and use a different queue (any overlap
- // between calls would prove non-exclusivity).
- //
- // Note that we do this after the check for Escapes/Mixes above: it is
- // possible for a parent to receive two children and handle them
- // differently:
- //
- // (struct.set
- // (local.get $ref)
- // (local.get $value)
- // )
- //
- // The value escapes, but the ref does not, and might be optimized. If we
- // added the parent to |seen| for both children, the reference would get
- // blocked from being optimized.
- if (!seen.emplace(parent).second) {
- return true;
- }
-
// We can proceed, as the parent interacts with us properly, and we are
// the only allocation to get here.
@@ -1027,10 +994,6 @@ struct Heap2Local {
// flow to.
localGraph.computeSetInfluences();
- // All the expressions we have already looked at. We use this to avoid
- // repeated work, see above.
- std::unordered_set<Expression*> seen;
-
// Find all the relevant allocations in the function: StructNew, ArrayNew,
// ArrayNewFixed.
struct AllocationFinder : public PostWalker<AllocationFinder> {
@@ -1090,7 +1053,7 @@ struct Heap2Local {
}
EscapeAnalyzer analyzer(
- seen, localGraph, parents, branchTargets, passOptions, wasm);
+ localGraph, parents, branchTargets, passOptions, wasm);
if (!analyzer.escapes(allocation)) {
// Convert the allocation and all its uses into a struct. Then convert
// the struct into locals.
@@ -1110,7 +1073,7 @@ struct Heap2Local {
// Check for escaping, noting relevant information as we go. If this does
// not escape, optimize it into locals.
EscapeAnalyzer analyzer(
- seen, localGraph, parents, branchTargets, passOptions, wasm);
+ localGraph, parents, branchTargets, passOptions, wasm);
if (!analyzer.escapes(allocation)) {
Struct2Local(allocation, analyzer, func, wasm);
}
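For reference, the simplified per-allocation call shape after this change looks roughly like the following (a sketch assembled from the hunks above; allocations, func, and the surrounding setup are assumed context, not a verbatim excerpt):

    // Each allocation now gets a fully independent analysis; no state is
    // shared between EscapeAnalyzer instances.
    for (Expression* allocation : allocations) {
      EscapeAnalyzer analyzer(
        localGraph, parents, branchTargets, passOptions, wasm);
      if (!analyzer.escapes(allocation)) {
        Struct2Local(allocation, analyzer, func, wasm);
      }
    }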