Diffstat (limited to 'src')
-rw-r--r--  src/ir/bits.h                             24
-rw-r--r--  src/passes/ConstantFieldPropagation.cpp    9
-rw-r--r--  src/passes/Heap2Local.cpp                 35
3 files changed, 34 insertions(+), 34 deletions(-)
diff --git a/src/ir/bits.h b/src/ir/bits.h
index 25d80fba7..15168ca66 100644
--- a/src/ir/bits.h
+++ b/src/ir/bits.h
@@ -107,6 +107,30 @@ inline Expression* makeSignExt(Expression* value, Index bytes, Module& wasm) {
}
}
+// Given a value that will replace a StructGet/ArrayGet whose result we have
+// inferred, together with the field info and the signedness of the get: if we
+// are doing a signed read of a packed field then sign-extend the value, and if
+// it is an unsigned read then truncate it. This fixes up cases where we can
+// replace the StructGet/ArrayGet with the value we know must be there, without
+// assuming anything about |value| (in particular, we do not assume it has
+// already been truncated).
+inline Expression* makePackedFieldGet(Expression* value,
+ const Field& field,
+ bool signed_,
+ Module& wasm) {
+ if (!field.isPacked()) {
+ return value;
+ }
+
+ if (signed_) {
+ return makeSignExt(value, field.getByteSize(), wasm);
+ }
+
+ Builder builder(wasm);
+ auto mask = Bits::lowBitMask(field.getByteSize() * 8);
+ return builder.makeBinary(AndInt32, value, builder.makeConst(int32_t(mask)));
+}
+
// getMaxBits() helper that has pessimistic results for the bits used in locals.
struct DummyLocalInfoProvider {
Index getMaxBitsForLocal(LocalGet* get) {
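As a standalone sketch (not part of this commit, and not Binaryen IR), the value-level effect of Bits::makePackedFieldGet on a packed i8/i16 field looks roughly like the following, assuming the usual shift-based sign extension that makeSignExt emits; the real helpers build IR through Builder, and the names below are illustrative only.

#include <cassert>
#include <cstdint>

// Illustrative only: a value-level model of what Bits::makePackedFieldGet
// arranges in IR. The function and parameter names here are hypothetical.
static uint32_t lowBitMask(uint32_t bits) {
  // All-ones mask of the low |bits| bits, e.g. 0xff for an i8 field.
  return bits == 32 ? 0xffffffffu : (1u << bits) - 1;
}

static int32_t packedFieldGet(int32_t value, uint32_t fieldByteSize,
                              bool signed_) {
  uint32_t bits = fieldByteSize * 8;
  if (signed_) {
    // Signed read of a packed field: sign-extend from the field width (shift
    // the field's sign bit up to bit 31, then shift back down arithmetically),
    // as a shift-based makeSignExt would emit.
    uint32_t shift = 32 - bits;
    return (int32_t)((uint32_t)value << shift) >> shift;
  }
  // Unsigned read: truncate to the field width by masking.
  return (int32_t)((uint32_t)value & lowBitMask(bits));
}

int main() {
  // 0x1ff does not fit in an i8 field: an unsigned get must see only 0xff.
  assert(packedFieldGet(0x1ff, 1, /*signed_=*/false) == 0xff);
  // A signed get of 0xff in an i8 field sign-extends to -1.
  assert(packedFieldGet(0xff, 1, /*signed_=*/true) == -1);
  // Non-packed (i32) fields pass through unchanged.
  assert(packedFieldGet(-5, 4, /*signed_=*/true) == -5);
}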
diff --git a/src/passes/ConstantFieldPropagation.cpp b/src/passes/ConstantFieldPropagation.cpp
index 2b7dae147..e94da0ade 100644
--- a/src/passes/ConstantFieldPropagation.cpp
+++ b/src/passes/ConstantFieldPropagation.cpp
@@ -125,13 +125,8 @@ struct FunctionOptimizer : public WalkerPass<PostWalker<FunctionOptimizer>> {
Expression* value = info.makeExpression(*getModule());
auto field = GCTypeUtils::getField(type, curr->index);
assert(field);
- if (field->isPacked()) {
- // We cannot just pass through a value that is packed, as the input gets
- // truncated.
- auto mask = Bits::lowBitMask(field->getByteSize() * 8);
- value =
- builder.makeBinary(AndInt32, value, builder.makeConst(int32_t(mask)));
- }
+ value =
+ Bits::makePackedFieldGet(value, *field, curr->signed_, *getModule());
replaceCurrent(builder.makeSequence(
builder.makeDrop(builder.makeRefAs(RefAsNonNull, curr->ref)), value));
changed = true;
diff --git a/src/passes/Heap2Local.cpp b/src/passes/Heap2Local.cpp
index b77775462..9d1763ddc 100644
--- a/src/passes/Heap2Local.cpp
+++ b/src/passes/Heap2Local.cpp
@@ -649,21 +649,6 @@ struct Struct2Local : PostWalker<Struct2Local> {
curr->finalize();
}
- // Add a mask for packed fields. We add masks on sets rather than on gets
- // because gets tend to be more numerous both in code appearances and in
- // runtime execution. As a result of masking on sets, the value in the local
- // is always the masked unsigned value (which is also nice for debugging,
- // incidentally).
- Expression* addMask(Expression* value, const Field& field) {
- if (!field.isPacked()) {
- return value;
- }
-
- auto mask = Bits::lowBitMask(field.getByteSize() * 8);
- return builder.makeBinary(
- AndInt32, value, builder.makeConst(int32_t(mask)));
- }
-
void visitStructNew(StructNew* curr) {
if (curr != allocation) {
return;
@@ -710,9 +695,7 @@ struct Struct2Local : PostWalker<Struct2Local> {
// Copy them to the normal ones.
for (Index i = 0; i < tempIndexes.size(); i++) {
auto* value = builder.makeLocalGet(tempIndexes[i], fields[i].type);
- // Add a mask on the values we write.
- contents.push_back(
- builder.makeLocalSet(localIndexes[i], addMask(value, fields[i])));
+ contents.push_back(builder.makeLocalSet(localIndexes[i], value));
}
// TODO Check if the nondefault case does not increase code size in some
@@ -724,8 +707,6 @@ struct Struct2Local : PostWalker<Struct2Local> {
//
// Note that we must assign the defaults because we might be in a loop,
// that is, there might be a previous value.
- //
- // Note there is no need to mask as these are zeros anyhow.
for (Index i = 0; i < localIndexes.size(); i++) {
contents.push_back(builder.makeLocalSet(
localIndexes[i],
@@ -786,8 +767,7 @@ struct Struct2Local : PostWalker<Struct2Local> {
// write the data to the local instead of the heap allocation.
replaceCurrent(builder.makeSequence(
builder.makeDrop(curr->ref),
- builder.makeLocalSet(localIndexes[curr->index],
- addMask(curr->value, fields[curr->index]))));
+ builder.makeLocalSet(localIndexes[curr->index], curr->value)));
}
void visitStructGet(StructGet* curr) {
@@ -813,11 +793,12 @@ struct Struct2Local : PostWalker<Struct2Local> {
refinalize = true;
}
Expression* value = builder.makeLocalGet(localIndexes[curr->index], type);
- if (field.isPacked() && curr->signed_) {
- // The value in the local is the masked unsigned value, which we must
- // sign-extend.
- value = Bits::makeSignExt(value, field.getByteSize(), wasm);
- }
+ // Note that in theory we could try to do better here than to fix up the
+ // packing and signedness on gets: we could truncate on sets. That would be
+ // more efficient if all gets are unsigned, as gets outnumber sets in
+ // general. However, signed gets make that more complicated, so leave this
+ // for other opts to handle.
+ value = Bits::makePackedFieldGet(value, field, curr->signed_, wasm);
replaceCurrent(builder.makeSequence(builder.makeDrop(curr->ref), value));
}
};
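The trade-off mentioned in the new comment in visitStructGet (truncating on sets versus fixing up on gets) can be seen in a small standalone sketch, not part of the commit: even with set-side masking, signed gets still need a sign extension on the get side, which is why the patch leaves that optimization for other passes. The names below are illustrative, not Binaryen APIs.

#include <cassert>
#include <cstdint>

static int32_t signExt8(int32_t v) {
  // Shift-based sign extension from 8 bits, as makeSignExt would emit.
  return (int32_t)((uint32_t)v << 24) >> 24;
}

int main() {
  int32_t written = 0x1ff; // does not fit in an i8 field

  // Strategy of the removed addMask: truncate when writing the local.
  // Unsigned gets can then read the local directly, but signed gets still
  // need a sign extension.
  int32_t maskedOnSet = written & 0xff;
  assert(maskedOnSet == 0xff);          // unsigned get: no extra work
  assert(signExt8(maskedOnSet) == -1);  // signed get: still needs a fixup

  // Strategy after this patch: store the raw value and fix up packing and
  // signedness on each get, as Bits::makePackedFieldGet does.
  int32_t rawOnSet = written;
  assert((rawOnSet & 0xff) == 0xff);    // unsigned get: mask
  assert(signExt8(rawOnSet) == -1);     // signed get: sign-extend
}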