summary refs log tree commit diff
path: root/src
diff options
context:
space:
mode:
author    Alon Zakai <azakai@google.com>  2020-11-02 15:03:58 -0800
committer GitHub <noreply@github.com>     2020-11-02 15:03:58 -0800
commit   cdc2d2c006bc0406c93509ed0d7a2dfbb6f51ea5 (patch)
tree     3d2e775e6fc3d37de8461a7a21649c234093980d /src
parent   ab68c1b2ffd034f8c30d2a4820c5c708491679cb (diff)
download binaryen-cdc2d2c006bc0406c93509ed0d7a2dfbb6f51ea5.tar.gz
binaryen-cdc2d2c006bc0406c93509ed0d7a2dfbb6f51ea5.tar.bz2
binaryen-cdc2d2c006bc0406c93509ed0d7a2dfbb6f51ea5.zip
MemoryPacking: Properly notice zeroFilledMemory (#3306)
We can only pack memory if we know it is zero-filled before us.
Diffstat (limited to 'src')
-rw-r--r--  src/passes/MemoryPacking.cpp | 28
-rw-r--r--  src/wasm.h                   |  2
2 files changed, 20 insertions, 10 deletions
diff --git a/src/passes/MemoryPacking.cpp b/src/passes/MemoryPacking.cpp
index d4d75e6d6..b6ee7acf9 100644
--- a/src/passes/MemoryPacking.cpp
+++ b/src/passes/MemoryPacking.cpp
@@ -97,7 +97,7 @@ struct MemoryPacking : public Pass {
uint32_t maxSegments;
void run(PassRunner* runner, Module* module) override;
- bool canOptimize(const std::vector<Memory::Segment>& segments);
+ bool canOptimize(const Memory& memory, const PassOptions& passOptions);
void optimizeBulkMemoryOps(PassRunner* runner, Module* module);
void getSegmentReferrers(Module* module, std::vector<Referrers>& referrers);
void dropUnusedSegments(std::vector<Memory::Segment>& segments,
@@ -122,13 +122,7 @@ struct MemoryPacking : public Pass {
};
void MemoryPacking::run(PassRunner* runner, Module* module) {
- if (!module->memory.exists) {
- return;
- }
-
- auto& segments = module->memory.segments;
-
- if (!canOptimize(segments)) {
+ if (!canOptimize(module->memory, runner->options)) {
return;
}
@@ -136,6 +130,8 @@ void MemoryPacking::run(PassRunner* runner, Module* module) {
? 63
: uint32_t(WebLimitations::MaxDataSegments);
+ auto& segments = module->memory.segments;
+
// For each segment, a list of bulk memory instructions that refer to it
std::vector<Referrers> referrers(segments.size());
@@ -182,7 +178,21 @@ void MemoryPacking::run(PassRunner* runner, Module* module) {
}
}
-bool MemoryPacking::canOptimize(const std::vector<Memory::Segment>& segments) {
+bool MemoryPacking::canOptimize(const Memory& memory,
+ const PassOptions& passOptions) {
+ if (!memory.exists) {
+ return false;
+ }
+
+ // We must optimize under the assumption that memory has been initialized to
+ // zero. That is the case for a memory declared in the module, but for a
+ // memory that is imported, we must be told that it is zero-initialized.
+ if (memory.imported() && !passOptions.zeroFilledMemory) {
+ return false;
+ }
+
+ auto& segments = memory.segments;
+
// One segment is always ok to optimize, as it does not have the potential
// problems handled below.
if (segments.size() <= 1) {
diff --git a/src/wasm.h b/src/wasm.h
index feaa51e40..2f655fa7f 100644
--- a/src/wasm.h
+++ b/src/wasm.h
@@ -1391,7 +1391,7 @@ struct Importable {
// If these are set, then this is an import, as module.base
Name module, base;
- bool imported() { return module.is(); }
+ bool imported() const { return module.is(); }
void setName(Name name_, bool hasExplicitName_) {
name = name_;