author     Thomas Lively <tlively@google.com>    2024-04-16 10:43:50 -0700
committer  GitHub <noreply@github.com>           2024-04-16 10:43:50 -0700
commit     359d5aa30ca8349fd38e6968350e7ab4280c1cbb (patch)
tree       c2e32021ae74c2dfa706dc4d4ebd4257d19ad2db /src/wasm/wasm-ir-builder.cpp
parent     e29d77848b89846cf3519491fee581b507e3b194 (diff)
[Parser] Pop past unreachables where possible (#6489)
We previously would eagerly drop all concretely typed expressions on the stack
when pushing an unreachable instruction. This was semantically correct and
closely modeled the semantics of unreachable instructions, which implicitly
drop the entire stack and start a new polymorphic stack. However, it also
meant that the structure of the parsed IR did not match the structure of the
folded input, which meant that tests involving unreachable children would not
parse as intended, preventing the tests from testing the intended behavior.

For example, this wat:

```wasm
(i32.add
  (i32.const 0)
  (unreachable)
)
```

Would previously parse into this IR:

```wasm
(drop
  (i32.const 0)
)
(i32.add
  (unreachable)
  (unreachable)
)
```

To fix this problem, we need to stop eagerly dropping stack values when
encountering an unreachable instruction so we can still pop expressions pushed
before the unreachable as direct children of later instructions. In the
example above, we need to keep the `i32.const 0` on the stack so it is
available to be popped and become a child of the `i32.add`.

However, the naive solution of simply popping past unreachables would produce
invalid IR in some cases. For example, consider this wat:

```wasm
f32.const 0
unreachable
i32.add
```

The naive solution would parse this wat into this IR:

```wasm
(i32.add
  (f32.const 0)
  (unreachable)
)
```

But we do not want to parse an `i32.add` with an `f32`-typed child. Neither do
we want to reject this input, since it is a perfectly valid Wasm fragment. In
this case, we actually want the old behavior of dropping the `f32.const` and
replacing it with another `unreachable` as the first child of the `i32.add`.

To both match the input structure where possible and also gracefully fall back
to the old behavior of dropping expressions prior to the unreachable, collect
constraints on the types of each child for each kind of expression and compare
them to the types of the available expressions on the stack when an
unreachable instruction will be popped. When the constraints are satisfied,
pop expressions normally, even after popping the unreachable instruction.
Otherwise, drop the instructions that precede the unreachable instruction to
ensure we parse valid IR.

To collect the constraints, add a new `ChildTyper` utility that calls a
different callback for each kind of possible type constraint for each child.
In the future, this utility can be used to simplify the validator as well.
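For reference, a sketch of the parses the new approach should produce for the
two examples above. This is illustrative only, derived from the behavior
described in this message rather than copied from actual parser output:

```wasm
;; First example: the i32.const satisfies i32.add's child constraint, so it is
;; popped normally past the unreachable and the folded input structure is kept.
(i32.add
  (i32.const 0)
  (unreachable)
)

;; Second example: the f32.const cannot satisfy i32.add's i32 constraint, so we
;; fall back to dropping it and filling both children with unreachables.
(drop
  (f32.const 0)
)
(i32.add
  (unreachable)
  (unreachable)
)
```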
Diffstat (limited to 'src/wasm/wasm-ir-builder.cpp')
-rw-r--r--  src/wasm/wasm-ir-builder.cpp | 957
1 file changed, 497 insertions(+), 460 deletions(-)
diff --git a/src/wasm/wasm-ir-builder.cpp b/src/wasm/wasm-ir-builder.cpp
index 7b88d345f..bee858435 100644
--- a/src/wasm/wasm-ir-builder.cpp
+++ b/src/wasm/wasm-ir-builder.cpp
@@ -16,6 +16,7 @@
#include <cassert>
+#include "ir/child-typer.h"
#include "ir/names.h"
#include "ir/properties.h"
#include "ir/utils.h"
@@ -140,13 +141,6 @@ Result<> IRBuilder::packageHoistedValue(const HoistedVal& hoisted,
void IRBuilder::push(Expression* expr) {
auto& scope = getScope();
if (expr->type == Type::unreachable) {
- // We want to avoid popping back past this most recent unreachable
- // instruction. Drop all prior instructions so they won't be consumed by
- // later instructions but will still be emitted for their side effects, if
- // any.
- for (auto& expr : scope.exprStack) {
- expr = builder.dropIfConcretelyTyped(expr);
- }
scope.unreachable = true;
}
scope.exprStack.push_back(expr);
@@ -157,44 +151,6 @@ void IRBuilder::push(Expression* expr) {
DBG(dump());
}
-Result<Expression*> IRBuilder::pop(size_t size) {
- assert(size >= 1);
- auto& scope = getScope();
-
- // Find the suffix of expressions that do not produce values.
- auto hoisted = hoistLastValue();
- CHECK_ERR(hoisted);
- if (!hoisted) {
- // There are no expressions that produce values.
- if (scope.unreachable) {
- return builder.makeUnreachable();
- }
- return Err{"popping from empty stack"};
- }
-
- CHECK_ERR(packageHoistedValue(*hoisted, size));
-
- auto* ret = scope.exprStack.back();
- // If the top value has the correct size, we can pop it and be done.
- // Unreachable values satisfy any size.
- if (ret->type.size() == size || ret->type == Type::unreachable) {
- scope.exprStack.pop_back();
- return ret;
- }
-
- // The last value-producing expression did not produce exactly the right
- // number of values, so we need to construct a tuple piecewise instead.
- assert(size > 1);
- std::vector<Expression*> elems;
- elems.resize(size);
- for (int i = size - 1; i >= 0; --i) {
- auto elem = pop();
- CHECK_ERR(elem);
- elems[i] = *elem;
- }
- return builder.makeTupleMake(elems);
-}
-
Result<Expression*> IRBuilder::build() {
if (scopeStack.empty()) {
return builder.makeNop();
@@ -292,417 +248,424 @@ void IRBuilder::dump() {
#endif // IR_BUILDER_DEBUG
}
-Result<> IRBuilder::visit(Expression* curr) {
- // Call either `visitExpression` or an expression-specific override.
- auto val = UnifiedExpressionVisitor<IRBuilder, Result<>>::visit(curr);
- CHECK_ERR(val);
- if (auto* block = curr->dynCast<Block>()) {
- block->finalize(block->type);
- } else {
- // TODO: Call more efficient versions of finalize() that take the known type
- // for other kinds of nodes as well, as done above.
- ReFinalizeNode{}.visit(curr);
- }
- push(curr);
- return Ok{};
-}
+struct IRBuilder::ChildPopper
+ : UnifiedExpressionVisitor<ChildPopper, Result<>> {
+ struct Subtype {
+ Type bound;
+ };
-// Handle the common case of instructions with a constant number of children
-// uniformly.
-Result<> IRBuilder::visitExpression(Expression* curr) {
- if (Properties::isControlFlowStructure(curr)) {
- // Control flow structures (besides `if`, handled separately) do not consume
- // stack values.
- return Ok{};
- }
+ struct AnyType {};
-#define DELEGATE_ID curr->_id
-#define DELEGATE_START(id) [[maybe_unused]] auto* expr = curr->cast<id>();
-#define DELEGATE_GET_FIELD(id, field) expr->field
-#define DELEGATE_FIELD_CHILD(id, field) \
- auto field = pop(); \
- CHECK_ERR(field); \
- expr->field = *field;
-#define DELEGATE_FIELD_SCOPE_NAME_DEF(id, field) \
- if (labelDepths.count(expr->field)) { \
- return Err{"repeated label"}; \
- }
-#define DELEGATE_END(id)
+ struct AnyReference {};
-#define DELEGATE_FIELD_OPTIONAL_CHILD(id, field) \
- WASM_UNREACHABLE("should have called visit" #id " because " #id \
- " has optional child " #field);
-#define DELEGATE_FIELD_CHILD_VECTOR(id, field) \
- WASM_UNREACHABLE("should have called visit" #id " because " #id \
- " has child vector " #field);
+ struct AnyTuple {
+ size_t arity;
+ };
-#define DELEGATE_FIELD_INT(id, field)
-#define DELEGATE_FIELD_LITERAL(id, field)
-#define DELEGATE_FIELD_NAME(id, field)
-#define DELEGATE_FIELD_SCOPE_NAME_USE(id, field)
+ struct Constraint : std::variant<Subtype, AnyType, AnyReference, AnyTuple> {
+ std::optional<Type> getSubtype() const {
+ if (auto* subtype = std::get_if<Subtype>(this)) {
+ return subtype->bound;
+ }
+ return std::nullopt;
+ }
+ bool isAnyType() const { return std::get_if<AnyType>(this); }
+ bool isAnyReference() const { return std::get_if<AnyReference>(this); }
+ std::optional<size_t> getAnyTuple() const {
+ if (auto* tuple = std::get_if<AnyTuple>(this)) {
+ return tuple->arity;
+ }
+ return std::nullopt;
+ }
+ size_t size() const {
+ if (auto type = getSubtype()) {
+ return type->size();
+ }
+ if (auto arity = getAnyTuple()) {
+ return *arity;
+ }
+ return 1;
+ }
+ Constraint operator[](size_t i) const {
+ if (auto type = getSubtype()) {
+ return {Subtype{(*type)[i]}};
+ }
+ if (getAnyTuple()) {
+ return {AnyType{}};
+ }
+ return *this;
+ }
+ };
-#define DELEGATE_FIELD_TYPE(id, field)
-#define DELEGATE_FIELD_HEAPTYPE(id, field)
-#define DELEGATE_FIELD_ADDRESS(id, field)
+ struct Child {
+ Expression** childp;
+ Constraint constraint;
+ };
-#include "wasm-delegations-fields.def"
+ struct ConstraintCollector : ChildTyper<ConstraintCollector> {
+ IRBuilder& builder;
+ std::vector<Child>& children;
- return Ok{};
-}
+ ConstraintCollector(IRBuilder& builder, std::vector<Child>& children)
+ : ChildTyper(builder.wasm, builder.func), builder(builder),
+ children(children) {}
-Result<> IRBuilder::visitDrop(Drop* curr, std::optional<uint32_t> arity) {
- // Multivalue drops must remain multivalue drops.
- if (!arity) {
- arity = curr->value->type.size();
- }
- if (*arity >= 2) {
- auto val = pop(*arity);
- CHECK_ERR(val);
- curr->value = *val;
+ void noteSubtype(Expression** childp, Type type) {
+ children.push_back({childp, {Subtype{type}}});
+ }
+
+ void noteAnyType(Expression** childp) {
+ children.push_back({childp, {AnyType{}}});
+ }
+
+ void noteAnyReferenceType(Expression** childp) {
+ children.push_back({childp, {AnyReference{}}});
+ }
+
+ void noteAnyTupleType(Expression** childp, size_t arity) {
+ children.push_back({childp, {AnyTuple{arity}}});
+ }
+
+ Type getLabelType(Name label) {
+ WASM_UNREACHABLE("labels should be explicitly provided");
+ };
+
+ void visitIf(If* curr) {
+ // Skip the control flow children because we only want to pop the
+ // condition.
+ children.push_back({&curr->condition, {Subtype{Type::i32}}});
+ }
+ };
+
+ IRBuilder& builder;
+
+ ChildPopper(IRBuilder& builder) : builder(builder) {}
+
+private:
+ [[nodiscard]] Result<> popConstrainedChildren(std::vector<Child>& children) {
+ auto& scope = builder.getScope();
+
+ // Two-part indices into the stack of available expressions and the vector
+ // of requirements, allowing them to move independently with the granularity
+ // of a single tuple element.
+ size_t stackIndex = scope.exprStack.size();
+ size_t stackTupleIndex = 0;
+ size_t childIndex = children.size();
+ size_t childTupleIndex = 0;
+
+ // The index of the shallowest unreachable instruction on the stack.
+ std::optional<size_t> unreachableIndex;
+
+ // Whether popping the children past the unreachable would produce a type
+ // mismatch or try to pop from an empty stack.
+ bool needUnreachableFallback = false;
+
+ if (!scope.unreachable) {
+ // We only need to check requirements if there is an unreachable.
+ // Otherwise the validator will catch any problems.
+ goto pop;
+ }
+
+ // Check whether the values on the stack will be able to meet the given
+ // requirements.
+ while (true) {
+ // Advance to the next requirement.
+ if (childTupleIndex > 0) {
+ --childTupleIndex;
+ } else {
+ if (childIndex == 0) {
+ // We have examined all the requirements.
+ break;
+ }
+ --childIndex;
+ childTupleIndex = children[childIndex].constraint.size() - 1;
+ }
+
+ // Advance to the next available value on the stack.
+ while (true) {
+ if (stackTupleIndex > 0) {
+ --stackTupleIndex;
+ } else {
+ if (stackIndex == 0) {
+ // No more available values. This is fine iff we are reaching past
+ // an unreachable. Any error will be caught later when we are
+ // popping.
+ goto pop;
+ }
+ --stackIndex;
+ stackTupleIndex = scope.exprStack[stackIndex]->type.size() - 1;
+ }
+
+ // Skip expressions that don't produce values.
+ if (scope.exprStack[stackIndex]->type == Type::none) {
+ stackTupleIndex = 0;
+ continue;
+ }
+ break;
+ }
+
+ // We have an available type and a constraint. Only check constraints if
+ // we are past an unreachable, since otherwise we can leave problems to be
+ // caught by the validator later.
+ auto type = scope.exprStack[stackIndex]->type[stackTupleIndex];
+ if (unreachableIndex) {
+ auto constraint = children[childIndex].constraint[childTupleIndex];
+ if (constraint.isAnyType()) {
+ // Always succeeds.
+ } else if (constraint.isAnyReference()) {
+ if (!type.isRef() && type != Type::unreachable) {
+ needUnreachableFallback = true;
+ break;
+ }
+ } else if (auto bound = constraint.getSubtype()) {
+ if (!Type::isSubType(type, *bound)) {
+ needUnreachableFallback = true;
+ break;
+ }
+ } else {
+ WASM_UNREACHABLE("unexpected constraint");
+ }
+ }
+
+ // No problems for children after this unreachable.
+ if (type == Type::unreachable) {
+ assert(!needUnreachableFallback);
+ unreachableIndex = stackIndex;
+ }
+ }
+
+ pop:
+ // We have checked all the constraints, so we are ready to pop children.
+ for (int i = children.size() - 1; i >= 0; --i) {
+ if (needUnreachableFallback &&
+ scope.exprStack.size() == *unreachableIndex + 1) {
+ // The expressions remaining on the stack may be executed, but they do
+ // not satisfy the requirements to be children of the current parent.
+ // Explicitly drop them so they will still be executed for their side
+ // effects and so the remaining children will be filled with
+ // unreachables.
+ assert(scope.exprStack.back()->type == Type::unreachable);
+ for (auto& expr : scope.exprStack) {
+ expr = Builder(builder.wasm).dropIfConcretelyTyped(expr);
+ }
+ }
+
+ auto val = pop(children[i].constraint.size());
+ CHECK_ERR(val);
+ *children[i].childp = *val;
+ }
return Ok{};
}
- return visitExpression(curr);
-}
-Result<> IRBuilder::visitIf(If* curr) {
- // Only the condition is popped from the stack. The ifTrue and ifFalse are
- // self-contained so we do not modify them.
- auto cond = pop();
- CHECK_ERR(cond);
- curr->condition = *cond;
- return Ok{};
-}
+ Result<Expression*> pop(size_t size) {
+ assert(size >= 1);
+ auto& scope = builder.getScope();
-Result<> IRBuilder::visitReturn(Return* curr) {
- if (!func) {
- return Err{"cannot return outside of a function"};
- }
- size_t n = func->getResults().size();
- if (n == 0) {
- curr->value = nullptr;
- } else {
- auto val = pop(n);
- CHECK_ERR(val);
- curr->value = *val;
- }
- return Ok{};
-}
+ // Find the suffix of expressions that do not produce values.
+ auto hoisted = builder.hoistLastValue();
+ CHECK_ERR(hoisted);
+ if (!hoisted) {
+ // There are no expressions that produce values.
+ if (scope.unreachable) {
+ return builder.builder.makeUnreachable();
+ }
+ return Err{"popping from empty stack"};
+ }
+
+ CHECK_ERR(builder.packageHoistedValue(*hoisted, size));
+
+ auto* ret = scope.exprStack.back();
+ // If the top value has the correct size, we can pop it and be done.
+ // Unreachable values satisfy any size.
+ if (ret->type.size() == size || ret->type == Type::unreachable) {
+ scope.exprStack.pop_back();
+ return ret;
+ }
-Result<> IRBuilder::visitStructNew(StructNew* curr) {
- for (size_t i = 0, n = curr->operands.size(); i < n; ++i) {
- auto val = pop();
- CHECK_ERR(val);
- curr->operands[n - 1 - i] = *val;
+ // The last value-producing expression did not produce exactly the right
+ // number of values, so we need to construct a tuple piecewise instead.
+ assert(size > 1);
+ std::vector<Expression*> elems;
+ elems.resize(size);
+ for (int i = size - 1; i >= 0; --i) {
+ auto elem = pop(1);
+ CHECK_ERR(elem);
+ elems[i] = *elem;
+ }
+ return builder.builder.makeTupleMake(elems);
}
- return Ok{};
-}
-Result<> IRBuilder::visitArrayNew(ArrayNew* curr) {
- auto size = pop();
- CHECK_ERR(size);
- curr->size = *size;
- if (!curr->isWithDefault()) {
- auto init = pop();
- CHECK_ERR(init);
- curr->init = *init;
+public:
+ Result<> visitExpression(Expression* expr) {
+ std::vector<Child> children;
+ ConstraintCollector{builder, children}.visit(expr);
+ return popConstrainedChildren(children);
}
- return Ok{};
-}
-Result<> IRBuilder::visitArrayNewFixed(ArrayNewFixed* curr) {
- for (size_t i = 0, size = curr->values.size(); i < size; ++i) {
- auto val = pop();
- CHECK_ERR(val);
- curr->values[size - i - 1] = *val;
+ Result<> visitAtomicCmpxchg(AtomicCmpxchg* curr,
+ std::optional<Type> type = std::nullopt) {
+ std::vector<Child> children;
+ ConstraintCollector{builder, children}.visitAtomicCmpxchg(curr, type);
+ return popConstrainedChildren(children);
}
- return Ok{};
-}
-Result<Expression*> IRBuilder::getBranchValue(Expression* curr,
- Name labelName,
- std::optional<Index> label) {
- // As new branch instructions are added, one of the existing branch visit*
- // functions is likely to be copied, along with its call to getBranchValue().
- // This assert serves as a reminder to also add an implementation of
- // visit*WithType() for new branch instructions.
- assert(curr->is<Break>() || curr->is<Switch>());
- if (!label) {
- auto index = getLabelIndex(labelName);
- CHECK_ERR(index);
- label = *index;
+ Result<> visitStructGet(StructGet* curr,
+ std::optional<HeapType> ht = std::nullopt) {
+ std::vector<Child> children;
+ ConstraintCollector{builder, children}.visitStructGet(curr, ht);
+ return popConstrainedChildren(children);
}
- auto scope = getScope(*label);
- CHECK_ERR(scope);
- // Loops would receive their input type rather than their output type, if we
- // supported that.
- size_t numValues = (*scope)->getLoop() ? 0 : (*scope)->getResultType().size();
- return numValues == 0 ? nullptr : pop(numValues);
-}
-Result<> IRBuilder::visitBreak(Break* curr, std::optional<Index> label) {
- if (curr->condition) {
- auto cond = pop();
- CHECK_ERR(cond);
- curr->condition = *cond;
+ Result<> visitStructSet(StructSet* curr,
+ std::optional<HeapType> ht = std::nullopt) {
+ std::vector<Child> children;
+ ConstraintCollector{builder, children}.visitStructSet(curr, ht);
+ return popConstrainedChildren(children);
}
- auto value = getBranchValue(curr, curr->name, label);
- CHECK_ERR(value);
- curr->value = *value;
- return Ok{};
-}
-Result<> IRBuilder::visitBreakWithType(Break* curr, Type type) {
- if (curr->condition) {
- auto cond = pop();
- CHECK_ERR(cond);
- curr->condition = *cond;
+ Result<> visitArrayGet(ArrayGet* curr,
+ std::optional<HeapType> ht = std::nullopt) {
+ std::vector<Child> children;
+ ConstraintCollector{builder, children}.visitArrayGet(curr, ht);
+ return popConstrainedChildren(children);
}
- if (type == Type::none) {
- curr->value = nullptr;
- } else {
- auto value = pop(type.size());
- CHECK_ERR(value)
- curr->value = *value;
+
+ Result<> visitArraySet(ArraySet* curr,
+ std::optional<HeapType> ht = std::nullopt) {
+ std::vector<Child> children;
+ ConstraintCollector{builder, children}.visitArraySet(curr, ht);
+ return popConstrainedChildren(children);
}
- curr->finalize();
- push(curr);
- return Ok{};
-}
-Result<> IRBuilder::visitSwitch(Switch* curr,
- std::optional<Index> defaultLabel) {
- auto cond = pop();
- CHECK_ERR(cond);
- curr->condition = *cond;
- auto value = getBranchValue(curr, curr->default_, defaultLabel);
- CHECK_ERR(value);
- curr->value = *value;
- return Ok{};
-}
+ Result<> visitArrayCopy(ArrayCopy* curr,
+ std::optional<HeapType> dest = std::nullopt,
+ std::optional<HeapType> src = std::nullopt) {
+ std::vector<Child> children;
+ ConstraintCollector{builder, children}.visitArrayCopy(curr, dest, src);
+ return popConstrainedChildren(children);
+ }
-Result<> IRBuilder::visitSwitchWithType(Switch* curr, Type type) {
- auto cond = pop();
- CHECK_ERR(cond);
- curr->condition = *cond;
- if (type == Type::none) {
- curr->value = nullptr;
- } else {
- auto value = pop(type.size());
- CHECK_ERR(value)
- curr->value = *value;
+ Result<> visitArrayFill(ArrayFill* curr,
+ std::optional<HeapType> ht = std::nullopt) {
+ std::vector<Child> children;
+ ConstraintCollector{builder, children}.visitArrayFill(curr, ht);
+ return popConstrainedChildren(children);
}
- curr->finalize();
- push(curr);
- return Ok{};
-}
-Result<> IRBuilder::visitCall(Call* curr) {
- auto numArgs = wasm.getFunction(curr->target)->getNumParams();
- curr->operands.resize(numArgs);
- for (size_t i = 0; i < numArgs; ++i) {
- auto arg = pop();
- CHECK_ERR(arg);
- curr->operands[numArgs - 1 - i] = *arg;
+ Result<> visitArrayInitData(ArrayInitData* curr,
+ std::optional<HeapType> ht = std::nullopt) {
+ std::vector<Child> children;
+ ConstraintCollector{builder, children}.visitArrayInitData(curr, ht);
+ return popConstrainedChildren(children);
}
- return Ok{};
-}
-Result<> IRBuilder::visitCallIndirect(CallIndirect* curr) {
- auto target = pop();
- CHECK_ERR(target);
- curr->target = *target;
- auto numArgs = curr->heapType.getSignature().params.size();
- curr->operands.resize(numArgs);
- for (size_t i = 0; i < numArgs; ++i) {
- auto arg = pop();
- CHECK_ERR(arg);
- curr->operands[numArgs - 1 - i] = *arg;
+ Result<> visitArrayInitElem(ArrayInitElem* curr,
+ std::optional<HeapType> ht = std::nullopt) {
+ std::vector<Child> children;
+ ConstraintCollector{builder, children}.visitArrayInitElem(curr, ht);
+ return popConstrainedChildren(children);
}
- return Ok{};
-}
-Result<> IRBuilder::visitCallRef(CallRef* curr) {
- auto target = pop();
- CHECK_ERR(target);
- curr->target = *target;
- for (size_t i = 0, numArgs = curr->operands.size(); i < numArgs; ++i) {
- auto arg = pop();
- CHECK_ERR(arg);
- curr->operands[numArgs - 1 - i] = *arg;
+ Result<> visitStringNew(StringNew* curr,
+ std::optional<HeapType> ht = std::nullopt) {
+ std::vector<Child> children;
+ ConstraintCollector{builder, children}.visitStringNew(curr, ht);
+ return popConstrainedChildren(children);
}
- return Ok{};
-}
-Result<> IRBuilder::visitLocalSet(LocalSet* curr) {
- auto type = func->getLocalType(curr->index);
- auto val = pop(type.size());
- CHECK_ERR(val);
- curr->value = *val;
- return Ok{};
-}
+ Result<> visitStringEncode(StringEncode* curr,
+ std::optional<HeapType> ht = std::nullopt) {
+ std::vector<Child> children;
+ ConstraintCollector{builder, children}.visitStringEncode(curr, ht);
+ return popConstrainedChildren(children);
+ }
-Result<> IRBuilder::visitGlobalSet(GlobalSet* curr) {
- auto type = wasm.getGlobal(curr->name)->type;
- auto val = pop(type.size());
- CHECK_ERR(val);
- curr->value = *val;
- return Ok{};
-}
-Result<> IRBuilder::visitThrow(Throw* curr) {
- auto numArgs = wasm.getTag(curr->tag)->sig.params.size();
- curr->operands.resize(numArgs);
- for (size_t i = 0; i < numArgs; ++i) {
- auto arg = pop();
- CHECK_ERR(arg);
- curr->operands[numArgs - 1 - i] = *arg;
+ Result<> visitCallRef(CallRef* curr,
+ std::optional<HeapType> ht = std::nullopt) {
+ std::vector<Child> children;
+ ConstraintCollector{builder, children}.visitCallRef(curr, ht);
+ return popConstrainedChildren(children);
}
- return Ok{};
-}
-Result<> IRBuilder::visitStringNew(StringNew* curr) {
- switch (curr->op) {
- case StringNewUTF8:
- case StringNewWTF8:
- case StringNewLossyUTF8:
- case StringNewWTF16: {
- auto len = pop();
- CHECK_ERR(len);
- curr->length = *len;
- break;
- }
- case StringNewUTF8Array:
- case StringNewWTF8Array:
- case StringNewLossyUTF8Array:
- case StringNewWTF16Array: {
- auto end = pop();
- CHECK_ERR(end);
- curr->end = *end;
- auto start = pop();
- CHECK_ERR(start);
- curr->start = *start;
- break;
- }
- case StringNewFromCodePoint:
- break;
+ Result<> visitBreak(Break* curr,
+ std::optional<Type> labelType = std::nullopt) {
+ std::vector<Child> children;
+ ConstraintCollector{builder, children}.visitBreak(curr, labelType);
+ return popConstrainedChildren(children);
}
- auto ptr = pop();
- CHECK_ERR(ptr);
- curr->ptr = *ptr;
- return Ok{};
-}
-Result<> IRBuilder::visitStringEncode(StringEncode* curr) {
- switch (curr->op) {
- case StringEncodeUTF8Array:
- case StringEncodeLossyUTF8Array:
- case StringEncodeWTF8Array:
- case StringEncodeWTF16Array: {
- auto start = pop();
- CHECK_ERR(start);
- curr->start = *start;
- }
- [[fallthrough]];
- case StringEncodeUTF8:
- case StringEncodeLossyUTF8:
- case StringEncodeWTF8:
- case StringEncodeWTF16: {
- auto ptr = pop();
- CHECK_ERR(ptr);
- curr->ptr = *ptr;
- auto ref = pop();
- CHECK_ERR(ref);
- curr->ref = *ref;
- return Ok{};
- }
+ Result<> visitSwitch(Switch* curr,
+ std::optional<Type> labelType = std::nullopt) {
+ std::vector<Child> children;
+ ConstraintCollector{builder, children}.visitSwitch(curr, labelType);
+ return popConstrainedChildren(children);
}
- WASM_UNREACHABLE("unexpected op");
-}
-Result<> IRBuilder::visitContBind(ContBind* curr) {
- auto cont = pop();
- CHECK_ERR(cont);
- curr->cont = *cont;
+ Result<> visitDrop(Drop* curr, std::optional<Index> arity = std::nullopt) {
+ std::vector<Child> children;
+ ConstraintCollector{builder, children}.visitDrop(curr, arity);
+ return popConstrainedChildren(children);
+ }
- size_t paramsBefore =
- curr->contTypeBefore.getContinuation().type.getSignature().params.size();
- size_t paramsAfter =
- curr->contTypeAfter.getContinuation().type.getSignature().params.size();
- if (paramsBefore < paramsAfter) {
- return Err{"incompatible continuation types in cont.bind: source type " +
- curr->contTypeBefore.toString() +
- " has fewer parameters than destination " +
- curr->contTypeAfter.toString()};
+ Result<> visitTupleExtract(TupleExtract* curr,
+ std::optional<Index> arity = std::nullopt) {
+ std::vector<Child> children;
+ ConstraintCollector{builder, children}.visitTupleExtract(curr, arity);
+ return popConstrainedChildren(children);
}
- size_t numArgs = paramsBefore - paramsAfter;
+};
- curr->operands.resize(numArgs);
- for (size_t i = 0; i < numArgs; ++i) {
- auto val = pop();
- CHECK_ERR(val);
- curr->operands[numArgs - i - 1] = *val;
+Result<> IRBuilder::visit(Expression* curr) {
+ // Call either `visitExpression` or an expression-specific override.
+ auto val = UnifiedExpressionVisitor<IRBuilder, Result<>>::visit(curr);
+ CHECK_ERR(val);
+ if (auto* block = curr->dynCast<Block>()) {
+ block->finalize(block->type);
+ } else {
+ // TODO: Call more efficient versions of finalize() that take the known type
+ // for other kinds of nodes as well, as done above.
+ ReFinalizeNode{}.visit(curr);
}
+ push(curr);
return Ok{};
}
-Result<> IRBuilder::visitResume(Resume* curr) {
- auto cont = pop();
- CHECK_ERR(cont);
- curr->cont = *cont;
-
- auto sig = curr->contType.getContinuation().type.getSignature();
- auto size = sig.params.size();
- curr->operands.resize(size);
- for (size_t i = 0; i < size; ++i) {
- auto val = pop();
- CHECK_ERR(val);
- curr->operands[size - i - 1] = *val;
+// Handle the common case of instructions with a constant number of children
+// uniformly.
+Result<> IRBuilder::visitExpression(Expression* curr) {
+ if (Properties::isControlFlowStructure(curr) && !curr->is<If>()) {
+ // Control flow structures (besides `if`, handled separately) do not consume
+ // stack values.
+ return Ok{};
}
- return Ok{};
+ return ChildPopper{*this}.visit(curr);
}
-Result<> IRBuilder::visitSuspend(Suspend* curr) {
- auto tag = wasm.getTag(curr->tag);
- auto sig = tag->sig;
- auto size = sig.params.size();
- curr->operands.resize(size);
- for (size_t i = 0; i < size; ++i) {
- auto val = pop();
- CHECK_ERR(val);
- curr->operands[size - i - 1] = *val;
- }
- return Ok{};
+Result<Type> IRBuilder::getLabelType(Index label) {
+ auto scope = getScope(label);
+ CHECK_ERR(scope);
+ // Loops would receive their input type rather than their output type, if we
+ // supported that.
+ return (*scope)->getLoop() ? Type::none : (*scope)->getResultType();
}
-Result<> IRBuilder::visitTupleMake(TupleMake* curr) {
- assert(curr->operands.size() >= 2);
- for (size_t i = 0, size = curr->operands.size(); i < size; ++i) {
- auto elem = pop();
- CHECK_ERR(elem);
- curr->operands[size - 1 - i] = *elem;
- }
- return Ok{};
+Result<Type> IRBuilder::getLabelType(Name labelName) {
+ auto label = getLabelIndex(labelName);
+ CHECK_ERR(label);
+ return getLabelType(*label);
}
-Result<> IRBuilder::visitTupleExtract(TupleExtract* curr,
- std::optional<uint32_t> arity) {
- if (!arity) {
- if (curr->tuple->type == Type::unreachable) {
- // Fallback to an arbitrary valid arity.
- arity = 2;
- } else {
- arity = curr->tuple->type.size();
- }
- }
- assert(*arity >= 2);
- auto tuple = pop(*arity);
- CHECK_ERR(tuple);
- curr->tuple = *tuple;
+Result<> IRBuilder::visitBreakWithType(Break* curr, Type type) {
+ CHECK_ERR(ChildPopper{*this}.visitBreak(curr, type));
+ curr->finalize();
+ push(curr);
return Ok{};
}
-Result<> IRBuilder::visitPop(Pop*) {
- // Do not actually push this pop onto the stack since we generate our own pops
- // as necessary when visiting the beginnings of try blocks.
+Result<> IRBuilder::visitSwitchWithType(Switch* curr, Type type) {
+ CHECK_ERR(ChildPopper{*this}.visitSwitch(curr, type));
+ curr->finalize();
+ push(curr);
return Ok{};
}
@@ -727,9 +690,7 @@ Result<> IRBuilder::visitBlockStart(Block* curr) {
Result<> IRBuilder::visitIfStart(If* iff, Name label) {
applyDebugLoc(iff);
- auto cond = pop();
- CHECK_ERR(cond);
- iff->condition = *cond;
+ CHECK_ERR(visitIf(iff));
pushScope(ScopeCtx::makeIf(iff, label));
return Ok{};
}
@@ -769,52 +730,36 @@ Result<Expression*> IRBuilder::finishScope(Block* block) {
auto& scope = scopeStack.back();
auto type = scope.getResultType();
- if (type.isTuple()) {
- if (scope.unreachable) {
- // We may not have enough concrete values on the stack to construct the
- // full tuple, and if we tried to fill out the beginning of a tuple.make
- // with additional popped `unreachable`s, that could cause a trap to
- // happen before important side effects. Instead, just drop everything on
- // the stack and finish with a single unreachable.
- //
- // TODO: Validate that the available expressions are a correct suffix of
- // the expected type, since this will no longer be caught by normal
- // validation?
- for (auto& expr : scope.exprStack) {
- expr = builder.dropIfConcretelyTyped(expr);
- }
- if (scope.exprStack.back()->type != Type::unreachable) {
- scope.exprStack.push_back(builder.makeUnreachable());
- }
- } else {
- auto hoisted = hoistLastValue();
- CHECK_ERR(hoisted);
- if (!hoisted) {
- return Err{"popping from empty stack"};
- }
- auto hoistedType = scope.exprStack.back()->type;
- if (hoistedType.size() != type.size()) {
- // We cannot propagate the hoisted value directly because it does not
- // have the correct number of elements. Break it up if necessary and
- // construct our returned tuple from parts.
- CHECK_ERR(packageHoistedValue(*hoisted));
- std::vector<Expression*> elems(type.size());
- for (size_t i = 0; i < elems.size(); ++i) {
- auto elem = pop();
- CHECK_ERR(elem);
- elems[elems.size() - 1 - i] = *elem;
- }
- scope.exprStack.push_back(builder.makeTupleMake(std::move(elems)));
+
+ if (scope.unreachable) {
+ // Drop everything before the last unreachable.
+ bool sawUnreachable = false;
+ for (int i = scope.exprStack.size() - 1; i >= 0; --i) {
+ if (sawUnreachable) {
+ scope.exprStack[i] = builder.dropIfConcretelyTyped(scope.exprStack[i]);
+ } else if (scope.exprStack[i]->type == Type::unreachable) {
+ sawUnreachable = true;
}
}
- } else if (type.isConcrete()) {
- // If the value is buried in none-typed expressions, we have to bring it to
- // the top.
+ }
+
+ if (type.isConcrete()) {
auto hoisted = hoistLastValue();
CHECK_ERR(hoisted);
if (!hoisted) {
return Err{"popping from empty stack"};
}
+
+ if (type.isTuple()) {
+ auto hoistedType = scope.exprStack.back()->type;
+ if (hoistedType != Type::unreachable &&
+ hoistedType.size() != type.size()) {
+ // We cannot propagate the hoisted value directly because it does not
+ // have the correct number of elements. Repackage it.
+ CHECK_ERR(packageHoistedValue(*hoisted, hoistedType.size()));
+ CHECK_ERR(makeTupleMake(type.size()));
+ }
+ }
}
Expression* ret = nullptr;
@@ -1124,44 +1069,58 @@ Result<> IRBuilder::makeLoop(Name label, Type type) {
Result<> IRBuilder::makeBreak(Index label, bool isConditional) {
auto name = getLabelName(label);
CHECK_ERR(name);
+ auto labelType = getLabelType(label);
+ CHECK_ERR(labelType);
+
Break curr;
curr.name = *name;
// Use a dummy condition value if we need to pop a condition.
curr.condition = isConditional ? &curr : nullptr;
- CHECK_ERR(visitBreak(&curr, label));
+ CHECK_ERR(ChildPopper{*this}.visitBreak(&curr, *labelType));
push(builder.makeBreak(curr.name, curr.value, curr.condition));
return Ok{};
}
Result<> IRBuilder::makeSwitch(const std::vector<Index>& labels,
Index defaultLabel) {
+ auto defaultType = getLabelType(defaultLabel);
+ CHECK_ERR(defaultType);
+
std::vector<Name> names;
names.reserve(labels.size());
+ Type glbLabelType = *defaultType;
for (auto label : labels) {
auto name = getLabelName(label);
CHECK_ERR(name);
names.push_back(*name);
+ auto type = getLabelType(label);
+ CHECK_ERR(type);
+ glbLabelType = Type::getGreatestLowerBound(glbLabelType, *type);
}
+
auto defaultName = getLabelName(defaultLabel);
CHECK_ERR(defaultName);
+
Switch curr(wasm.allocator);
- CHECK_ERR(visitSwitch(&curr, defaultLabel));
+ CHECK_ERR(ChildPopper{*this}.visitSwitch(&curr, glbLabelType));
push(builder.makeSwitch(names, *defaultName, curr.condition, curr.value));
return Ok{};
}
Result<> IRBuilder::makeCall(Name func, bool isReturn) {
+ auto sig = wasm.getFunction(func)->getSig();
Call curr(wasm.allocator);
curr.target = func;
+ curr.operands.resize(sig.params.size());
CHECK_ERR(visitCall(&curr));
- auto type = wasm.getFunction(func)->getResults();
- push(builder.makeCall(curr.target, curr.operands, type, isReturn));
+ push(builder.makeCall(curr.target, curr.operands, sig.results, isReturn));
return Ok{};
}
Result<> IRBuilder::makeCallIndirect(Name table, HeapType type, bool isReturn) {
CallIndirect curr(wasm.allocator);
curr.heapType = type;
+ curr.operands.resize(type.getSignature().params.size());
CHECK_ERR(visitCallIndirect(&curr));
push(builder.makeCallIndirect(
table, curr.target, curr.operands, type, isReturn));
@@ -1209,6 +1168,7 @@ Result<> IRBuilder::makeLoad(unsigned bytes,
Type type,
Name mem) {
Load curr;
+ curr.memory = mem;
CHECK_ERR(visitLoad(&curr));
push(builder.makeLoad(bytes, signed_, offset, align, curr.ptr, type, mem));
return Ok{};
@@ -1217,6 +1177,8 @@ Result<> IRBuilder::makeLoad(unsigned bytes,
Result<> IRBuilder::makeStore(
unsigned bytes, Address offset, unsigned align, Type type, Name mem) {
Store curr;
+ curr.memory = mem;
+ curr.valueType = type;
CHECK_ERR(visitStore(&curr));
push(
builder.makeStore(bytes, offset, align, curr.ptr, curr.value, type, mem));
@@ -1226,6 +1188,7 @@ Result<> IRBuilder::makeStore(
Result<>
IRBuilder::makeAtomicLoad(unsigned bytes, Address offset, Type type, Name mem) {
Load curr;
+ curr.memory = mem;
CHECK_ERR(visitLoad(&curr));
push(builder.makeAtomicLoad(bytes, offset, curr.ptr, type, mem));
return Ok{};
@@ -1236,6 +1199,8 @@ Result<> IRBuilder::makeAtomicStore(unsigned bytes,
Type type,
Name mem) {
Store curr;
+ curr.memory = mem;
+ curr.valueType = type;
CHECK_ERR(visitStore(&curr));
push(builder.makeAtomicStore(bytes, offset, curr.ptr, curr.value, type, mem));
return Ok{};
@@ -1244,6 +1209,8 @@ Result<> IRBuilder::makeAtomicStore(unsigned bytes,
Result<> IRBuilder::makeAtomicRMW(
AtomicRMWOp op, unsigned bytes, Address offset, Type type, Name mem) {
AtomicRMW curr;
+ curr.memory = mem;
+ curr.type = type;
CHECK_ERR(visitAtomicRMW(&curr));
push(
builder.makeAtomicRMW(op, bytes, offset, curr.ptr, curr.value, type, mem));
@@ -1255,7 +1222,8 @@ Result<> IRBuilder::makeAtomicCmpxchg(unsigned bytes,
Type type,
Name mem) {
AtomicCmpxchg curr;
- CHECK_ERR(visitAtomicCmpxchg(&curr));
+ curr.memory = mem;
+ CHECK_ERR(ChildPopper{*this}.visitAtomicCmpxchg(&curr, type));
push(builder.makeAtomicCmpxchg(
bytes, offset, curr.ptr, curr.expected, curr.replacement, type, mem));
return Ok{};
@@ -1263,6 +1231,8 @@ Result<> IRBuilder::makeAtomicCmpxchg(unsigned bytes,
Result<> IRBuilder::makeAtomicWait(Type type, Address offset, Name mem) {
AtomicWait curr;
+ curr.memory = mem;
+ curr.expectedType = type;
CHECK_ERR(visitAtomicWait(&curr));
push(builder.makeAtomicWait(
curr.ptr, curr.expected, curr.timeout, type, offset, mem));
@@ -1271,6 +1241,7 @@ Result<> IRBuilder::makeAtomicWait(Type type, Address offset, Name mem) {
Result<> IRBuilder::makeAtomicNotify(Address offset, Name mem) {
AtomicNotify curr;
+ curr.memory = mem;
CHECK_ERR(visitAtomicNotify(&curr));
push(builder.makeAtomicNotify(curr.ptr, curr.notifyCount, offset, mem));
return Ok{};
@@ -1290,6 +1261,7 @@ Result<> IRBuilder::makeSIMDExtract(SIMDExtractOp op, uint8_t lane) {
Result<> IRBuilder::makeSIMDReplace(SIMDReplaceOp op, uint8_t lane) {
SIMDReplace curr;
+ curr.op = op;
CHECK_ERR(visitSIMDReplace(&curr));
push(builder.makeSIMDReplace(op, curr.vec, lane, curr.value));
return Ok{};
@@ -1321,6 +1293,7 @@ Result<> IRBuilder::makeSIMDLoad(SIMDLoadOp op,
unsigned align,
Name mem) {
SIMDLoad curr;
+ curr.memory = mem;
CHECK_ERR(visitSIMDLoad(&curr));
push(builder.makeSIMDLoad(op, offset, align, curr.ptr, mem));
return Ok{};
@@ -1332,6 +1305,7 @@ Result<> IRBuilder::makeSIMDLoadStoreLane(SIMDLoadStoreLaneOp op,
uint8_t lane,
Name mem) {
SIMDLoadStoreLane curr;
+ curr.memory = mem;
CHECK_ERR(visitSIMDLoadStoreLane(&curr));
push(builder.makeSIMDLoadStoreLane(
op, offset, align, lane, curr.ptr, curr.vec, mem));
@@ -1340,6 +1314,7 @@ Result<> IRBuilder::makeSIMDLoadStoreLane(SIMDLoadStoreLaneOp op,
Result<> IRBuilder::makeMemoryInit(Name data, Name mem) {
MemoryInit curr;
+ curr.memory = mem;
CHECK_ERR(visitMemoryInit(&curr));
push(builder.makeMemoryInit(data, curr.dest, curr.offset, curr.size, mem));
return Ok{};
@@ -1352,6 +1327,8 @@ Result<> IRBuilder::makeDataDrop(Name data) {
Result<> IRBuilder::makeMemoryCopy(Name destMem, Name srcMem) {
MemoryCopy curr;
+ curr.destMemory = destMem;
+ curr.sourceMemory = srcMem;
CHECK_ERR(visitMemoryCopy(&curr));
push(
builder.makeMemoryCopy(curr.dest, curr.source, curr.size, destMem, srcMem));
@@ -1360,6 +1337,7 @@ Result<> IRBuilder::makeMemoryCopy(Name destMem, Name srcMem) {
Result<> IRBuilder::makeMemoryFill(Name mem) {
MemoryFill curr;
+ curr.memory = mem;
CHECK_ERR(visitMemoryFill(&curr));
push(builder.makeMemoryFill(curr.dest, curr.value, curr.size, mem));
return Ok{};
@@ -1372,6 +1350,7 @@ Result<> IRBuilder::makeConst(Literal val) {
Result<> IRBuilder::makeUnary(UnaryOp op) {
Unary curr;
+ curr.op = op;
CHECK_ERR(visitUnary(&curr));
push(builder.makeUnary(op, curr.value));
return Ok{};
@@ -1379,6 +1358,7 @@ Result<> IRBuilder::makeUnary(UnaryOp op) {
Result<> IRBuilder::makeBinary(BinaryOp op) {
Binary curr;
+ curr.op = op;
CHECK_ERR(visitBinary(&curr));
push(builder.makeBinary(op, curr.left, curr.right));
return Ok{};
@@ -1399,7 +1379,7 @@ Result<> IRBuilder::makeSelect(std::optional<Type> type) {
Result<> IRBuilder::makeDrop() {
Drop curr;
- CHECK_ERR(visitDrop(&curr, 1));
+ CHECK_ERR(ChildPopper{*this}.visitDrop(&curr, 1));
push(builder.makeDrop(curr.value));
return Ok{};
}
@@ -1418,6 +1398,7 @@ Result<> IRBuilder::makeMemorySize(Name mem) {
Result<> IRBuilder::makeMemoryGrow(Name mem) {
MemoryGrow curr;
+ curr.memory = mem;
CHECK_ERR(visitMemoryGrow(&curr));
push(builder.makeMemoryGrow(curr.delta, mem));
return Ok{};
@@ -1480,6 +1461,7 @@ Result<> IRBuilder::makeTableGet(Name table) {
Result<> IRBuilder::makeTableSet(Name table) {
TableSet curr;
+ curr.table = table;
CHECK_ERR(visitTableSet(&curr));
push(builder.makeTableSet(table, curr.index, curr.value));
return Ok{};
@@ -1492,6 +1474,7 @@ Result<> IRBuilder::makeTableSize(Name table) {
Result<> IRBuilder::makeTableGrow(Name table) {
TableGrow curr;
+ curr.table = table;
CHECK_ERR(visitTableGrow(&curr));
push(builder.makeTableGrow(table, curr.value, curr.delta));
return Ok{};
@@ -1499,6 +1482,7 @@ Result<> IRBuilder::makeTableGrow(Name table) {
Result<> IRBuilder::makeTableFill(Name table) {
TableFill curr;
+ curr.table = table;
CHECK_ERR(visitTableFill(&curr));
push(builder.makeTableFill(table, curr.dest, curr.value, curr.size));
return Ok{};
@@ -1539,6 +1523,7 @@ Result<> IRBuilder::makeTryTable(Name label,
Result<> IRBuilder::makeThrow(Name tag) {
Throw curr(wasm.allocator);
curr.tag = tag;
+ curr.operands.resize(wasm.getTag(tag)->sig.params.size());
CHECK_ERR(visitThrow(&curr));
push(builder.makeThrow(tag, curr.operands));
return Ok{};
@@ -1578,7 +1563,7 @@ Result<> IRBuilder::makeTupleExtract(uint32_t arity, uint32_t index) {
return Err{"tuple arity must be at least 2"};
}
TupleExtract curr;
- CHECK_ERR(visitTupleExtract(&curr, arity));
+ CHECK_ERR(ChildPopper{*this}.visitTupleExtract(&curr, arity));
push(builder.makeTupleExtract(curr.tuple, index));
return Ok{};
}
@@ -1588,7 +1573,7 @@ Result<> IRBuilder::makeTupleDrop(uint32_t arity) {
return Err{"tuple arity must be at least 2"};
}
Drop curr;
- CHECK_ERR(visitDrop(&curr, arity));
+ CHECK_ERR(ChildPopper{*this}.visitDrop(&curr, arity));
push(builder.makeDrop(curr.value));
return Ok{};
}
@@ -1614,7 +1599,7 @@ Result<> IRBuilder::makeCallRef(HeapType type, bool isReturn) {
}
auto sig = type.getSignature();
curr.operands.resize(type.getSignature().params.size());
- CHECK_ERR(visitCallRef(&curr));
+ CHECK_ERR(ChildPopper{*this}.visitCallRef(&curr, type));
CHECK_ERR(validateTypeAnnotation(type, curr.target));
push(builder.makeCallRef(curr.target, curr.operands, sig.results, isReturn));
return Ok{};
@@ -1622,6 +1607,7 @@ Result<> IRBuilder::makeCallRef(HeapType type, bool isReturn) {
Result<> IRBuilder::makeRefTest(Type type) {
RefTest curr;
+ curr.castType = type;
CHECK_ERR(visitRefTest(&curr));
push(builder.makeRefTest(curr.ref, type));
return Ok{};
@@ -1629,6 +1615,7 @@ Result<> IRBuilder::makeRefTest(Type type) {
Result<> IRBuilder::makeRefCast(Type type) {
RefCast curr;
+ curr.type = type;
CHECK_ERR(visitRefCast(&curr));
push(builder.makeRefCast(curr.ref, type));
return Ok{};
@@ -1636,6 +1623,8 @@ Result<> IRBuilder::makeRefCast(Type type) {
Result<> IRBuilder::makeBrOn(Index label, BrOnOp op, Type in, Type out) {
BrOn curr;
+ curr.op = op;
+ curr.castType = out;
CHECK_ERR(visitBrOn(&curr));
if (out != Type::none) {
if (!Type::isSubType(out, in)) {
@@ -1653,6 +1642,7 @@ Result<> IRBuilder::makeBrOn(Index label, BrOnOp op, Type in, Type out) {
Result<> IRBuilder::makeStructNew(HeapType type) {
StructNew curr(wasm.allocator);
+ curr.type = Type(type, NonNullable);
// Differentiate from struct.new_default with a non-empty expression list.
curr.operands.resize(type.getStruct().fields.size());
CHECK_ERR(visitStructNew(&curr));
@@ -1668,7 +1658,7 @@ Result<> IRBuilder::makeStructNewDefault(HeapType type) {
Result<> IRBuilder::makeStructGet(HeapType type, Index field, bool signed_) {
const auto& fields = type.getStruct().fields;
StructGet curr;
- CHECK_ERR(visitStructGet(&curr));
+ CHECK_ERR(ChildPopper{*this}.visitStructGet(&curr, type));
CHECK_ERR(validateTypeAnnotation(type, curr.ref));
push(builder.makeStructGet(field, curr.ref, fields[field].type, signed_));
return Ok{};
@@ -1676,7 +1666,8 @@ Result<> IRBuilder::makeStructGet(HeapType type, Index field, bool signed_) {
Result<> IRBuilder::makeStructSet(HeapType type, Index field) {
StructSet curr;
- CHECK_ERR(visitStructSet(&curr));
+ curr.index = field;
+ CHECK_ERR(ChildPopper{*this}.visitStructSet(&curr, type));
CHECK_ERR(validateTypeAnnotation(type, curr.ref));
push(builder.makeStructSet(field, curr.ref, curr.value));
return Ok{};
@@ -1684,6 +1675,7 @@ Result<> IRBuilder::makeStructSet(HeapType type, Index field) {
Result<> IRBuilder::makeArrayNew(HeapType type) {
ArrayNew curr;
+ curr.type = Type(type, NonNullable);
// Differentiate from array.new_default with dummy initializer.
curr.init = (Expression*)0x01;
CHECK_ERR(visitArrayNew(&curr));
@@ -1693,6 +1685,7 @@ Result<> IRBuilder::makeArrayNew(HeapType type) {
Result<> IRBuilder::makeArrayNewDefault(HeapType type) {
ArrayNew curr;
+ curr.init = nullptr;
CHECK_ERR(visitArrayNew(&curr));
push(builder.makeArrayNew(type, curr.size));
return Ok{};
@@ -1714,6 +1707,7 @@ Result<> IRBuilder::makeArrayNewElem(HeapType type, Name elem) {
Result<> IRBuilder::makeArrayNewFixed(HeapType type, uint32_t arity) {
ArrayNewFixed curr(wasm.allocator);
+ curr.type = Type(type, NonNullable);
curr.values.resize(arity);
CHECK_ERR(visitArrayNewFixed(&curr));
push(builder.makeArrayNewFixed(type, curr.values));
@@ -1722,7 +1716,7 @@ Result<> IRBuilder::makeArrayNewFixed(HeapType type, uint32_t arity) {
Result<> IRBuilder::makeArrayGet(HeapType type, bool signed_) {
ArrayGet curr;
- CHECK_ERR(visitArrayGet(&curr));
+ CHECK_ERR(ChildPopper{*this}.visitArrayGet(&curr, type));
CHECK_ERR(validateTypeAnnotation(type, curr.ref));
push(builder.makeArrayGet(
curr.ref, curr.index, type.getArray().element.type, signed_));
@@ -1731,7 +1725,7 @@ Result<> IRBuilder::makeArrayGet(HeapType type, bool signed_) {
Result<> IRBuilder::makeArraySet(HeapType type) {
ArraySet curr;
- CHECK_ERR(visitArraySet(&curr));
+ CHECK_ERR(ChildPopper{*this}.visitArraySet(&curr, type));
CHECK_ERR(validateTypeAnnotation(type, curr.ref));
push(builder.makeArraySet(curr.ref, curr.index, curr.value));
return Ok{};
@@ -1746,7 +1740,7 @@ Result<> IRBuilder::makeArrayLen() {
Result<> IRBuilder::makeArrayCopy(HeapType destType, HeapType srcType) {
ArrayCopy curr;
- CHECK_ERR(visitArrayCopy(&curr));
+ CHECK_ERR(ChildPopper{*this}.visitArrayCopy(&curr, destType, srcType));
CHECK_ERR(validateTypeAnnotation(destType, curr.destRef));
CHECK_ERR(validateTypeAnnotation(srcType, curr.srcRef));
push(builder.makeArrayCopy(
@@ -1756,7 +1750,7 @@ Result<> IRBuilder::makeArrayCopy(HeapType destType, HeapType srcType) {
Result<> IRBuilder::makeArrayFill(HeapType type) {
ArrayFill curr;
- CHECK_ERR(visitArrayFill(&curr));
+ CHECK_ERR(ChildPopper{*this}.visitArrayFill(&curr, type));
CHECK_ERR(validateTypeAnnotation(type, curr.ref));
push(builder.makeArrayFill(curr.ref, curr.index, curr.value, curr.size));
return Ok{};
@@ -1764,7 +1758,7 @@ Result<> IRBuilder::makeArrayFill(HeapType type) {
Result<> IRBuilder::makeArrayInitData(HeapType type, Name data) {
ArrayInitData curr;
- CHECK_ERR(visitArrayInitData(&curr));
+ CHECK_ERR(ChildPopper{*this}.visitArrayInitData(&curr, type));
CHECK_ERR(validateTypeAnnotation(type, curr.ref));
push(builder.makeArrayInitData(
data, curr.ref, curr.index, curr.offset, curr.size));
@@ -1773,7 +1767,7 @@ Result<> IRBuilder::makeArrayInitData(HeapType type, Name data) {
Result<> IRBuilder::makeArrayInitElem(HeapType type, Name elem) {
ArrayInitElem curr;
- CHECK_ERR(visitArrayInitElem(&curr));
+ CHECK_ERR(ChildPopper{*this}.visitArrayInitElem(&curr, type));
CHECK_ERR(validateTypeAnnotation(type, curr.ref));
push(builder.makeArrayInitElem(
elem, curr.ref, curr.index, curr.offset, curr.size));
@@ -1782,6 +1776,7 @@ Result<> IRBuilder::makeArrayInitElem(HeapType type, Name elem) {
Result<> IRBuilder::makeRefAs(RefAsOp op) {
RefAs curr;
+ curr.op = op;
CHECK_ERR(visitRefAs(&curr));
push(builder.makeRefAs(op, curr.value));
return Ok{};
@@ -1790,22 +1785,28 @@ Result<> IRBuilder::makeRefAs(RefAsOp op) {
Result<> IRBuilder::makeStringNew(StringNewOp op, bool try_, Name mem) {
StringNew curr;
curr.op = op;
- CHECK_ERR(visitStringNew(&curr));
// TODO: Store the memory in the IR.
switch (op) {
case StringNewUTF8:
case StringNewWTF8:
case StringNewLossyUTF8:
case StringNewWTF16:
+ CHECK_ERR(visitStringNew(&curr));
push(builder.makeStringNew(op, curr.ptr, curr.length, try_));
return Ok{};
case StringNewUTF8Array:
case StringNewWTF8Array:
case StringNewLossyUTF8Array:
case StringNewWTF16Array:
+ // There's no type annotation on these instructions due to a bug in the
+ // stringref proposal, so we just fudge it and pass `array` instead of a
+ // defined heap type. This will allow us to pop a child with an invalid
+ // array type, but that's just too bad.
+ CHECK_ERR(ChildPopper{*this}.visitStringNew(&curr, HeapType::array));
push(builder.makeStringNew(op, curr.ptr, curr.start, curr.end, try_));
return Ok{};
case StringNewFromCodePoint:
+ CHECK_ERR(visitStringNew(&curr));
push(builder.makeStringNew(op, curr.ptr, nullptr, try_));
return Ok{};
}
@@ -1819,6 +1820,7 @@ Result<> IRBuilder::makeStringConst(Name string) {
Result<> IRBuilder::makeStringMeasure(StringMeasureOp op) {
StringMeasure curr;
+ curr.op = op;
CHECK_ERR(visitStringMeasure(&curr));
push(builder.makeStringMeasure(op, curr.ref));
return Ok{};
@@ -1827,10 +1829,30 @@ Result<> IRBuilder::makeStringMeasure(StringMeasureOp op) {
Result<> IRBuilder::makeStringEncode(StringEncodeOp op, Name mem) {
StringEncode curr;
curr.op = op;
- CHECK_ERR(visitStringEncode(&curr));
// TODO: Store the memory in the IR.
- push(builder.makeStringEncode(op, curr.ref, curr.ptr, curr.start));
- return Ok{};
+ switch (op) {
+ case StringEncodeUTF8:
+ case StringEncodeLossyUTF8:
+ case StringEncodeWTF8:
+ case StringEncodeWTF16: {
+ CHECK_ERR(visitStringEncode(&curr));
+ push(builder.makeStringEncode(op, curr.ref, curr.ptr, curr.start));
+ return Ok{};
+ }
+ case StringEncodeUTF8Array:
+ case StringEncodeLossyUTF8Array:
+ case StringEncodeWTF8Array:
+ case StringEncodeWTF16Array: {
+ // There's no type annotation on these instructions due to a bug in the
+ // stringref proposal, so we just fudge it and pass `array` instead of a
+ // defined heap type. This will allow us to pop a child with an invalid
+ // array type, but that's just too bad.
+ CHECK_ERR(ChildPopper{*this}.visitStringEncode(&curr, HeapType::array));
+ push(builder.makeStringEncode(op, curr.ref, curr.ptr, curr.start));
+ return Ok{};
+ }
+ }
+ WASM_UNREACHABLE("unexpected op");
}
Result<> IRBuilder::makeStringConcat() {
@@ -1884,6 +1906,7 @@ Result<> IRBuilder::makeStringIterMove(StringIterMoveOp op) {
Result<> IRBuilder::makeStringSliceWTF(StringSliceWTFOp op) {
StringSliceWTF curr;
+ curr.op = op;
CHECK_ERR(visitStringSliceWTF(&curr));
push(builder.makeStringSliceWTF(op, curr.ref, curr.start, curr.end));
return Ok{};
@@ -1904,6 +1927,17 @@ Result<> IRBuilder::makeContBind(HeapType contTypeBefore,
ContBind curr(wasm.allocator);
curr.contTypeBefore = contTypeBefore;
curr.contTypeAfter = contTypeAfter;
+ size_t paramsBefore =
+ contTypeBefore.getContinuation().type.getSignature().params.size();
+ size_t paramsAfter =
+ contTypeAfter.getContinuation().type.getSignature().params.size();
+ if (paramsBefore < paramsAfter) {
+ return Err{"incompatible continuation types in cont.bind: source type " +
+ contTypeBefore.toString() +
+ " has fewer parameters than destination " +
+ contTypeAfter.toString()};
+ }
+ curr.operands.resize(paramsBefore - paramsAfter);
CHECK_ERR(visitContBind(&curr));
std::vector<Expression*> operands(curr.operands.begin(), curr.operands.end());
@@ -1917,6 +1951,7 @@ Result<> IRBuilder::makeContNew(HeapType ct) {
return Err{"expected continuation type"};
}
ContNew curr;
+ curr.contType = ct;
CHECK_ERR(visitContNew(&curr));
push(builder.makeContNew(ct, curr.func));
@@ -1931,6 +1966,7 @@ Result<> IRBuilder::makeResume(HeapType ct,
}
Resume curr(wasm.allocator);
curr.contType = ct;
+ curr.operands.resize(ct.getContinuation().type.getSignature().params.size());
CHECK_ERR(visitResume(&curr));
std::vector<Name> labelNames;
@@ -1948,6 +1984,7 @@ Result<> IRBuilder::makeResume(HeapType ct,
Result<> IRBuilder::makeSuspend(Name tag) {
Suspend curr(wasm.allocator);
curr.tag = tag;
+ curr.operands.resize(wasm.getTag(tag)->sig.params.size());
CHECK_ERR(visitSuspend(&curr));
std::vector<Expression*> operands(curr.operands.begin(), curr.operands.end());