From 92bef101c7b7957e3ad67b82c2bf0ed301fb3d3c Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Wed, 15 Oct 2025 10:26:51 -0700 Subject: [PATCH 01/33] start --- src/wasm/wasm-validator.cpp | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/wasm/wasm-validator.cpp b/src/wasm/wasm-validator.cpp index 576cc476934..6441ca6824e 100644 --- a/src/wasm/wasm-validator.cpp +++ b/src/wasm/wasm-validator.cpp @@ -2999,6 +2999,13 @@ void FunctionValidator::visitRefCast(RefCast* curr) { "[--enable-custom-descriptors]"); } + shouldBeFalse(curr->ref->type.isContinuation(), + curr, + "continuations cannot be cast from"); + shouldBeFalse(curr->type.isContinuation(), + curr, + "continuations cannot be cast to"); + if (!curr->desc) { return; } From 441a6ccee2f9f7bdfc6c8f20a5aaca0f85125a40 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Wed, 15 Oct 2025 10:29:36 -0700 Subject: [PATCH 02/33] start --- src/wasm/wasm-validator.cpp | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/src/wasm/wasm-validator.cpp b/src/wasm/wasm-validator.cpp index 6441ca6824e..fc0004f3398 100644 --- a/src/wasm/wasm-validator.cpp +++ b/src/wasm/wasm-validator.cpp @@ -2939,6 +2939,10 @@ void FunctionValidator::visitRefTest(RefTest* curr) { "ref.test of exact type requires custom descriptors " "[--enable-custom-descriptors]"); } + + shouldBeFalse(curr->ref->type.isContinuation(), + curr, + "ref.test cannot cast continuation"); } void FunctionValidator::visitRefCast(RefCast* curr) { @@ -3001,10 +3005,10 @@ void FunctionValidator::visitRefCast(RefCast* curr) { shouldBeFalse(curr->ref->type.isContinuation(), curr, - "continuations cannot be cast from"); + "ref.cast cannot cast continuation"); shouldBeFalse(curr->type.isContinuation(), curr, - "continuations cannot be cast to"); + "ref.cast cannot cast to continuation"); if (!curr->desc) { return; @@ -3129,6 +3133,12 @@ void FunctionValidator::visitBrOn(BrOn* curr) { "br_on_cast* to exact type requires custom descriptors " "[--enable-custom-descriptors]"); } + shouldBeFalse(curr->ref->type.isContinuation(), + curr, + "br_on cannot cast continuation"); + shouldBeFalse(curr->castType.isContinuation(), + curr, + "br_on cannot cast to continuation"); break; } } From dd5bc7a9129fd131c6f35b76bedef5d81f23de2f Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Wed, 15 Oct 2025 12:02:42 -0700 Subject: [PATCH 03/33] castble --- src/tools/fuzzing.h | 1 + src/tools/fuzzing/fuzzing.cpp | 31 +++++++++++++++++++++++++------ src/wasm-type.h | 3 +++ 3 files changed, 29 insertions(+), 6 deletions(-) diff --git a/src/tools/fuzzing.h b/src/tools/fuzzing.h index 161ccd6d139..96454470e8e 100644 --- a/src/tools/fuzzing.h +++ b/src/tools/fuzzing.h @@ -540,6 +540,7 @@ class TranslateToFuzzReader { // Getters for Types Type getSingleConcreteType(); Type getReferenceType(); + Type getCastableReferenceType(); Type getEqReferenceType(); Type getMVPType(); Type getTupleType(); diff --git a/src/tools/fuzzing/fuzzing.cpp b/src/tools/fuzzing/fuzzing.cpp index 8f3775f3a10..80118737779 100644 --- a/src/tools/fuzzing/fuzzing.cpp +++ b/src/tools/fuzzing/fuzzing.cpp @@ -4970,8 +4970,8 @@ Expression* TranslateToFuzzReader::makeRefTest(Type type) { switch (upTo(3)) { case 0: // Totally random. - refType = getReferenceType(); - castType = getReferenceType(); + refType = getCastableReferenceType(); + castType = getCastableReferenceType(); // They must share a bottom type in order to validate. 
if (refType.getHeapType().getBottom() == castType.getHeapType().getBottom()) { @@ -4982,12 +4982,12 @@ Expression* TranslateToFuzzReader::makeRefTest(Type type) { [[fallthrough]]; case 1: // Cast is a subtype of ref. - refType = getReferenceType(); + refType = getCastableReferenceType(); castType = getSubType(refType); break; case 2: // Ref is a subtype of cast. - castType = getReferenceType(); + castType = getCastableReferenceType(); refType = getSubType(castType); break; default: @@ -5011,7 +5011,7 @@ Expression* TranslateToFuzzReader::makeRefCast(Type type) { switch (upTo(3)) { case 0: // Totally random. - refType = getReferenceType(); + refType = getCastableReferenceType(); // They must share a bottom type in order to validate. if (refType.getHeapType().getBottom() == type.getHeapType().getBottom()) { break; @@ -5116,7 +5116,11 @@ Expression* TranslateToFuzzReader::makeBrOn(Type type) { // We are sending a reference type to the target. All other BrOn variants can // do that. assert(targetType.isRef()); - auto op = pick(BrOnNonNull, BrOnCast, BrOnCastFail); + // BrOnNonNull can handle sending any reference. The casts are more limited. + auto op = BrOnNonNull; + if (targetType.isCastable()) { + op = pick(BrOnNonNull, BrOnCast, BrOnCastFail); + } Type castType = Type::none; Type refType; switch (op) { @@ -5561,6 +5565,21 @@ Type TranslateToFuzzReader::getReferenceType() { Type(HeapType::string, NonNullable))); } +Type TranslateToFuzzReader::getCastableReferenceType() { + auto type = getReferenceType(); + if (!type.isCastable()) { + // Avoid continuations in a simple way (this is rare, so being precise is + // not crucial). + assert(type.isContinuation()); + if (oneIn(4)) { + type = getSubType(Type(HeapType::func, Nullable)); + } else { + type = getEqReferenceType(); + } + } + return type; +} + Type TranslateToFuzzReader::getEqReferenceType() { if (oneIn(2) && !interestingHeapTypes.empty()) { // Try to find an interesting eq-compatible type. diff --git a/src/wasm-type.h b/src/wasm-type.h index d90385b1c67..58dd5ca70ca 100644 --- a/src/wasm-type.h +++ b/src/wasm-type.h @@ -184,6 +184,8 @@ class HeapType { return isBasic() && getBasic(Unshared) == type; } + bool isCastable() { return !isContinuation(); } + Signature getSignature() const; Continuation getContinuation() const; @@ -415,6 +417,7 @@ class Type { return isRef() && getHeapType().isContinuation(); } bool isDefaultable() const; + bool isCastable() { return isRef() && !isContinuation(); } // TODO: Allow this only for reference types. 
Nullability getNullability() const { From 27dd48b8613eda1dcbf8a722787263920332cbd1 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Wed, 15 Oct 2025 13:04:55 -0700 Subject: [PATCH 04/33] fix --- src/wasm/wasm-validator.cpp | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/src/wasm/wasm-validator.cpp b/src/wasm/wasm-validator.cpp index fc0004f3398..ad920efca9a 100644 --- a/src/wasm/wasm-validator.cpp +++ b/src/wasm/wasm-validator.cpp @@ -2940,9 +2940,9 @@ void FunctionValidator::visitRefTest(RefTest* curr) { "[--enable-custom-descriptors]"); } - shouldBeFalse(curr->ref->type.isContinuation(), - curr, - "ref.test cannot cast continuation"); + shouldBeTrue(curr->ref->type.isCastable(), + curr, + "ref.test cannot cast invalid type"); } void FunctionValidator::visitRefCast(RefCast* curr) { @@ -3003,12 +3003,12 @@ void FunctionValidator::visitRefCast(RefCast* curr) { "[--enable-custom-descriptors]"); } - shouldBeFalse(curr->ref->type.isContinuation(), - curr, - "ref.cast cannot cast continuation"); - shouldBeFalse(curr->type.isContinuation(), - curr, - "ref.cast cannot cast to continuation"); + shouldBeTrue(curr->ref->type.isCastable(), + curr, + "ref.cast cannot cast invalid type"); + shouldBeTrue(curr->type.isCastable(), + curr, + "ref.cast cannot cast to invalid type"); if (!curr->desc) { return; @@ -3133,12 +3133,12 @@ void FunctionValidator::visitBrOn(BrOn* curr) { "br_on_cast* to exact type requires custom descriptors " "[--enable-custom-descriptors]"); } - shouldBeFalse(curr->ref->type.isContinuation(), - curr, - "br_on cannot cast continuation"); - shouldBeFalse(curr->castType.isContinuation(), - curr, - "br_on cannot cast to continuation"); + shouldBeTrue(curr->ref->type.isCastable(), + curr, + "br_on cannot cast invalid type"); + shouldBeTrue(curr->castType.isCastable(), + curr, + "br_on cannot cast to invalid type"); break; } } From 3dcbbbe4f6e5952585b5d296c887a0564731da23 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Wed, 15 Oct 2025 13:13:06 -0700 Subject: [PATCH 05/33] nicer --- src/tools/fuzzing/fuzzing.cpp | 2 +- src/wasm-type.h | 4 ++-- src/wasm/wasm-type.cpp | 8 ++++++++ 3 files changed, 11 insertions(+), 3 deletions(-) diff --git a/src/tools/fuzzing/fuzzing.cpp b/src/tools/fuzzing/fuzzing.cpp index 80118737779..15f473145b5 100644 --- a/src/tools/fuzzing/fuzzing.cpp +++ b/src/tools/fuzzing/fuzzing.cpp @@ -5570,12 +5570,12 @@ Type TranslateToFuzzReader::getCastableReferenceType() { if (!type.isCastable()) { // Avoid continuations in a simple way (this is rare, so being precise is // not crucial). - assert(type.isContinuation()); if (oneIn(4)) { type = getSubType(Type(HeapType::func, Nullable)); } else { type = getEqReferenceType(); } + assert(type.isCastable()); } return type; } diff --git a/src/wasm-type.h b/src/wasm-type.h index 58dd5ca70ca..d6dc254734a 100644 --- a/src/wasm-type.h +++ b/src/wasm-type.h @@ -184,7 +184,7 @@ class HeapType { return isBasic() && getBasic(Unshared) == type; } - bool isCastable() { return !isContinuation(); } + bool isCastable(); Signature getSignature() const; Continuation getContinuation() const; @@ -417,7 +417,7 @@ class Type { return isRef() && getHeapType().isContinuation(); } bool isDefaultable() const; - bool isCastable() { return isRef() && !isContinuation(); } + bool isCastable(); // TODO: Allow this only for reference types. 
Nullability getNullability() const { diff --git a/src/wasm/wasm-type.cpp b/src/wasm/wasm-type.cpp index 795bf410cbf..191a75791c4 100644 --- a/src/wasm/wasm-type.cpp +++ b/src/wasm/wasm-type.cpp @@ -623,6 +623,10 @@ bool Type::isDefaultable() const { return isConcrete() && !isNonNullable(); } +bool Type::isCastable() { + return isRef() && getHeapType().isCastable(); +} + unsigned Type::getByteSize() const { // TODO: alignment? auto getSingleByteSize = [](Type t) { @@ -889,6 +893,10 @@ Shareability HeapType::getShared() const { } } +bool HeapType::isCastable() { + return !isContinuation() && !isMaybeShared(HeapType::cont); +} + Signature HeapType::getSignature() const { assert(isSignature()); return getHeapTypeInfo(*this)->signature; From a4d826d1ebcdcef2a1c175dd1ca96d6440421da7 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Wed, 15 Oct 2025 13:19:57 -0700 Subject: [PATCH 06/33] test --- test/spec/cont-validation.wast | 105 +++++++++++++++++++++++++++++++++ 1 file changed, 105 insertions(+) create mode 100644 test/spec/cont-validation.wast diff --git a/test/spec/cont-validation.wast b/test/spec/cont-validation.wast new file mode 100644 index 00000000000..9a625e21e1c --- /dev/null +++ b/test/spec/cont-validation.wast @@ -0,0 +1,105 @@ +;; Part of stack-switching/validation.wast + +;; Illegal casts + +(assert_invalid + (module + (func (drop (ref.test contref (unreachable)))) + ) + "invalid cast" +) +(assert_invalid + (module + (func (drop (ref.test nullcontref (unreachable)))) + ) + "invalid cast" +) +(assert_invalid + (module + (type $f (func)) + (type $c (cont $f)) + (func (drop (ref.test (ref $c) (unreachable)))) + ) + "invalid cast" +) + +(assert_invalid + (module + (func (drop (ref.cast contref (unreachable)))) + ) + "invalid cast" +) +(assert_invalid + (module + (func (drop (ref.cast nullcontref (unreachable)))) + ) + "invalid cast" +) +(assert_invalid + (module + (type $f (func)) + (type $c (cont $f)) + (func (drop (ref.cast (ref $c) (unreachable)))) + ) + "invalid cast" +) + +(assert_invalid + (module + (func + (block (result contref) (br_on_cast 0 contref contref (unreachable))) + (drop) + ) + ) + "invalid cast" +) +(assert_invalid + (module + (func + (block (result contref) (br_on_cast 0 nullcontref nullcontref (unreachable))) + (drop) + ) + ) + "invalid cast" +) +(assert_invalid + (module + (type $f (func)) + (type $c (cont $f)) + (func + (block (result contref) (br_on_cast 0 (ref $c) (ref $c) (unreachable))) + (drop) + ) + ) + "invalid cast" +) + +(assert_invalid + (module + (func + (block (result contref) (br_on_cast_fail 0 contref contref (unreachable))) + (drop) + ) + ) + "invalid cast" +) +(assert_invalid + (module + (func + (block (result contref) (br_on_cast_fail 0 nullcontref nullcontref (unreachable))) + (drop) + ) + ) + "invalid cast" +) +(assert_invalid + (module + (type $f (func)) + (type $c (cont $f)) + (func + (block (result contref) (br_on_cast_fail 0 (ref $c) (ref $c) (unreachable))) + (drop) + ) + ) + "invalid cast" +) From 1b25d7bb159635283fe874d9fda565f417667f00 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Wed, 15 Oct 2025 13:27:25 -0700 Subject: [PATCH 07/33] finis --- src/wasm/wasm-type.cpp | 3 +- src/wasm/wasm-validator.cpp | 5 +++ test/spec/cont-validation.wast | 80 ---------------------------------- 3 files changed, 7 insertions(+), 81 deletions(-) diff --git a/src/wasm/wasm-type.cpp b/src/wasm/wasm-type.cpp index 191a75791c4..55351e85ba8 100644 --- a/src/wasm/wasm-type.cpp +++ b/src/wasm/wasm-type.cpp @@ -894,7 +894,8 @@ Shareability 
HeapType::getShared() const { } bool HeapType::isCastable() { - return !isContinuation() && !isMaybeShared(HeapType::cont); + return !isContinuation() && !isMaybeShared(HeapType::cont) && + !isMaybeShared(HeapType::nocont); } Signature HeapType::getSignature() const { diff --git a/src/wasm/wasm-validator.cpp b/src/wasm/wasm-validator.cpp index ad920efca9a..e995a3d1b38 100644 --- a/src/wasm/wasm-validator.cpp +++ b/src/wasm/wasm-validator.cpp @@ -2910,6 +2910,11 @@ void FunctionValidator::visitI31Get(I31Get* curr) { void FunctionValidator::visitRefTest(RefTest* curr) { shouldBeTrue( getModule()->features.hasGC(), curr, "ref.test requires gc [--enable-gc]"); + + shouldBeTrue(curr->castType.isCastable(), + curr, + "ref.test cannot cast to invalid type"); + if (curr->ref->type == Type::unreachable) { return; } diff --git a/test/spec/cont-validation.wast b/test/spec/cont-validation.wast index 9a625e21e1c..47d5df15eac 100644 --- a/test/spec/cont-validation.wast +++ b/test/spec/cont-validation.wast @@ -23,83 +23,3 @@ "invalid cast" ) -(assert_invalid - (module - (func (drop (ref.cast contref (unreachable)))) - ) - "invalid cast" -) -(assert_invalid - (module - (func (drop (ref.cast nullcontref (unreachable)))) - ) - "invalid cast" -) -(assert_invalid - (module - (type $f (func)) - (type $c (cont $f)) - (func (drop (ref.cast (ref $c) (unreachable)))) - ) - "invalid cast" -) - -(assert_invalid - (module - (func - (block (result contref) (br_on_cast 0 contref contref (unreachable))) - (drop) - ) - ) - "invalid cast" -) -(assert_invalid - (module - (func - (block (result contref) (br_on_cast 0 nullcontref nullcontref (unreachable))) - (drop) - ) - ) - "invalid cast" -) -(assert_invalid - (module - (type $f (func)) - (type $c (cont $f)) - (func - (block (result contref) (br_on_cast 0 (ref $c) (ref $c) (unreachable))) - (drop) - ) - ) - "invalid cast" -) - -(assert_invalid - (module - (func - (block (result contref) (br_on_cast_fail 0 contref contref (unreachable))) - (drop) - ) - ) - "invalid cast" -) -(assert_invalid - (module - (func - (block (result contref) (br_on_cast_fail 0 nullcontref nullcontref (unreachable))) - (drop) - ) - ) - "invalid cast" -) -(assert_invalid - (module - (type $f (func)) - (type $c (cont $f)) - (func - (block (result contref) (br_on_cast_fail 0 (ref $c) (ref $c) (unreachable))) - (drop) - ) - ) - "invalid cast" -) From 1b7135ae707a3da7e8a0edad443348f3c745a537 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Wed, 15 Oct 2025 14:50:45 -0700 Subject: [PATCH 08/33] fix --- src/tools/fuzzing/fuzzing.cpp | 41 +++++++++++++++++++++++++++++++---- 1 file changed, 37 insertions(+), 4 deletions(-) diff --git a/src/tools/fuzzing/fuzzing.cpp b/src/tools/fuzzing/fuzzing.cpp index 15f473145b5..2f1560593bf 100644 --- a/src/tools/fuzzing/fuzzing.cpp +++ b/src/tools/fuzzing/fuzzing.cpp @@ -1760,6 +1760,12 @@ void TranslateToFuzzReader::recombine(Function* func) { // be placed as either of the children of the i32.add. This tramples the // existing content there. Returns true if we found a place. static bool replaceChildWith(Expression* expr, Expression* with) { + // Casts can get broken if we replace with something not castable. + if (!with->type.isCastable()) { + if (expr->is() || expr->is() || expr->is()) { + return false; // TODO verify + } + } for (auto*& child : ChildIterator(expr)) { // To replace, we must have an appropriate type, and we cannot replace a // Pop under any circumstances. 
@@ -2024,6 +2030,8 @@ void TranslateToFuzzReader::fixAfterChanges(Function* func) { Fixer(Module& wasm, TranslateToFuzzReader& parent) : wasm(wasm), parent(parent) {} + using Super = ExpressionStackWalker>; + // Track seen names to find duplication, which is invalid. std::set seen; @@ -2116,6 +2124,29 @@ void TranslateToFuzzReader::fixAfterChanges(Function* func) { i--; } } + + // Fix up casts: Our changes may have put an uncastable type in a cast. + void visitRefCast(RefCast* curr) { + fixCast(curr, curr->ref); + } + void visitRefTest(RefTest* curr) { + fixCast(curr, curr->ref); + } + void visitBrOn(BrOn* curr) { + if (curr->op == BrOnCast || + curr->op == BrOnCastFail || + curr->op == BrOnCastDesc || + curr->op == BrOnCastDescFail) { + fixCast(curr, curr->ref); + } + } + void fixCast(Expression* curr, Expression* ref) { + if (!ref->type.isCastable()) { + replaceCurrent(parent.makeTrivial(curr->type)); // TODO verify + } else { + Super::visitExpression(curr); + } + } }; Fixer fixer(wasm, *this); fixer.walk(func->body); @@ -2347,10 +2378,12 @@ Expression* TranslateToFuzzReader::_makeConcrete(Type type) { options.add(FeatureSet::ReferenceTypes | FeatureSet::GC, &Self::makeCompoundRef); } - // Exact casts are only allowed with custom descriptors enabled. - if (type.isInexact() || wasm.features.hasCustomDescriptors()) { - options.add(FeatureSet::ReferenceTypes | FeatureSet::GC, - &Self::makeRefCast); + if (type.isCastable()) { + // Exact casts are only allowed with custom descriptors enabled. + if (type.isInexact() || wasm.features.hasCustomDescriptors()) { + options.add(FeatureSet::ReferenceTypes | FeatureSet::GC, + &Self::makeRefCast); + } } if (heapType.getDescribedType()) { options.add(FeatureSet::ReferenceTypes | FeatureSet::GC, From 50f721844ff5a9c33446f4fa4a3dee05a7773628 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Wed, 15 Oct 2025 16:07:53 -0700 Subject: [PATCH 09/33] test --- src/tools/fuzzing/fuzzing.cpp | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/tools/fuzzing/fuzzing.cpp b/src/tools/fuzzing/fuzzing.cpp index 2f1560593bf..992a055eff9 100644 --- a/src/tools/fuzzing/fuzzing.cpp +++ b/src/tools/fuzzing/fuzzing.cpp @@ -2138,6 +2138,8 @@ void TranslateToFuzzReader::fixAfterChanges(Function* func) { curr->op == BrOnCastDesc || curr->op == BrOnCastDescFail) { fixCast(curr, curr->ref); + } else { + Super::visitExpression(curr); } } void fixCast(Expression* curr, Expression* ref) { From 6c40c53228eb64caae5c085ef0fd3d9d413c36b1 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Wed, 15 Oct 2025 16:10:01 -0700 Subject: [PATCH 10/33] test --- src/tools/fuzzing/fuzzing.cpp | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/tools/fuzzing/fuzzing.cpp b/src/tools/fuzzing/fuzzing.cpp index 992a055eff9..e3ad3e60a59 100644 --- a/src/tools/fuzzing/fuzzing.cpp +++ b/src/tools/fuzzing/fuzzing.cpp @@ -2030,8 +2030,6 @@ void TranslateToFuzzReader::fixAfterChanges(Function* func) { Fixer(Module& wasm, TranslateToFuzzReader& parent) : wasm(wasm), parent(parent) {} - using Super = ExpressionStackWalker>; - // Track seen names to find duplication, which is invalid. 
std::set seen; @@ -2139,14 +2137,14 @@ void TranslateToFuzzReader::fixAfterChanges(Function* func) { curr->op == BrOnCastDescFail) { fixCast(curr, curr->ref); } else { - Super::visitExpression(curr); + visitExpression(curr); } } void fixCast(Expression* curr, Expression* ref) { if (!ref->type.isCastable()) { replaceCurrent(parent.makeTrivial(curr->type)); // TODO verify } else { - Super::visitExpression(curr); + visitExpression(curr); } } }; From b384541f65e7be2619942cc31b43f6725b8d3741 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Wed, 15 Oct 2025 16:22:47 -0700 Subject: [PATCH 11/33] fuzz --- scripts/fuzz_opt.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/fuzz_opt.py b/scripts/fuzz_opt.py index e0950b58c9c..eb2085b5153 100755 --- a/scripts/fuzz_opt.py +++ b/scripts/fuzz_opt.py @@ -154,7 +154,7 @@ def randomize_feature_opts(): # Same with strings. Relaxed SIMD's nondeterminism disables much but not # all of our V8 fuzzing, so avoid it too. Stack Switching, as well, is # not yet ready in V8. - if random.random() < 0.9: + if random.random() < 0.45: # XXX fuzz continuations more, in this PR FEATURE_OPTS.append('--disable-shared-everything') FEATURE_OPTS.append('--disable-strings') FEATURE_OPTS.append('--disable-relaxed-simd') From b02da8d34822ee5bc7ff5f565aba05347a3ed7b6 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Thu, 16 Oct 2025 11:25:55 -0700 Subject: [PATCH 12/33] eh --- src/wasm-stack.h | 4 +- src/wasm/wasm-stack.cpp | 125 +++++++++++++++++++++++++--------------- 2 files changed, 79 insertions(+), 50 deletions(-) diff --git a/src/wasm-stack.h b/src/wasm-stack.h index f97c9c7acc7..4d5f04a552c 100644 --- a/src/wasm-stack.h +++ b/src/wasm-stack.h @@ -175,8 +175,8 @@ class BinaryInstWriter : public OverriddenVisitor { // Each br_if present as a key here is mapped to the unrefined type for it. // That is, the br_if has a type in Binaryen IR that is too refined, and the // map contains the unrefined one (which we need to know the local types, as - // we'll stash the unrefined values and then cast them). - std::unordered_map brIfsNeedingHandling; + // we'll stash the unrefined values and then cast them). XXX + std::unordered_set brIfsNeedingHandling; }; // Takes binaryen IR and converts it to something else (binary or stack IR) diff --git a/src/wasm/wasm-stack.cpp b/src/wasm/wasm-stack.cpp index 529bb1db19c..db326c97bf6 100644 --- a/src/wasm/wasm-stack.cpp +++ b/src/wasm/wasm-stack.cpp @@ -63,56 +63,78 @@ void BinaryInstWriter::visitLoop(Loop* curr) { } void BinaryInstWriter::visitBreak(Break* curr) { + auto type = curr->type; + + // See comment on |brIfsNeedingHandling| for the extra handling we need to + // emit here for certain br_ifs. + bool needScratchLocals = false; + // If we need this handling, + // We must track how many scratch locals we've used from each type as we + // go, as a type might appear multiple times in the tuple. We allocated + // enough for each, in a contiguous range, so we just increment as we go. + std::unordered_map scratchTypeUses; + auto stashStack = [&](const std::vector& types) { + for (Index i = 0; i < types.size(); i++) { + auto t = types[types.size() - i - 1]; + assert(scratchLocals.find(t) != scratchLocals.end()); + auto localIndex = scratchLocals[t] + scratchTypeUses[t]++; + o << int8_t(BinaryConsts::LocalSet) << U32LEB(localIndex); + } + }; + auto restoreStack = [&](const std::vector& types) { + // Use a copy of this data, as we will restore twice. 
+ auto currScratchTypeUses = scratchTypeUses; + for (Index i = 0; i < types.size(); i++) { + auto t = types[i]; + auto localIndex = scratchLocals[t] + --currScratchTypeUses[t]; + o << int8_t(BinaryConsts::LocalGet) << U32LEB(localIndex); + } + }; + + auto needHandling = brIfsNeedingHandling.count(curr); + if (needHandling) { // TODO make it a set + // Tuples always need scratch locals. Uncastable types do as well (see + // below). + needScratchLocals = type->isTuple() || !type.isCastable(); + if (needScratchLocals) { + // Stash all the values on the stack to those locals, then reload them. + // After this, we can reload the refined values after the br_if. + // + // We must track how many scratch locals we've used from each type as we + // go, as a type might appear multiple times in the tuple. We allocated + // enough for each, in a contiguous range, so we just increment as we go. + // + // Work on the types on the stack: the value + condition. + std::vector typesOnStack; + if (type.isTuple()) { + typesOnStack = type.getTuple(); + } else { + typesOnStack.push_back(type); + } + typesOnStack.push_back(Type::i32); + stashStack(typesOnStack); + restoreStack(typesOnStack); + // The stack is now as if we didn't change anything, but we + } + } + o << int8_t(curr->condition ? BinaryConsts::BrIf : BinaryConsts::Br) << U32LEB(getBreakIndex(curr->name)); - // See comment on |brIfsNeedingHandling| for the extra casts we need to emit - // here for certain br_ifs. - auto iter = brIfsNeedingHandling.find(curr); - if (iter != brIfsNeedingHandling.end()) { - auto unrefinedType = iter->second; - auto type = curr->type; - assert(type.size() == unrefinedType.size()); - - assert(curr->type.hasRef()); - - auto emitCast = [&](Type to) { + if (needHandling) { // TODO make it a set + if (!needScratchLocals) { + // We can just cast here, avoiding scratch locals. + // // Shim a tiny bit of IR, just enough to get visitRefCast to see what we // are casting, and to emit the proper thing. RefCast cast; cast.type = to; cast.ref = cast.desc = nullptr; visitRefCast(&cast); - }; - - if (!type.isTuple()) { - // Simple: Just emit a cast, and then the type matches Binaryen IR's. - emitCast(type); } else { - // Tuples are trickier to handle, and we need to use scratch locals. Stash - // all the values on the stack to those locals, then reload them, casting - // as we go. - // - // We must track how many scratch locals we've used from each type as we - // go, as a type might appear multiple times in the tuple. We allocated - // enough for each, in a contiguous range, so we just increment as we go. - std::unordered_map scratchTypeUses; - for (Index i = 0; i < unrefinedType.size(); i++) { - auto t = unrefinedType[unrefinedType.size() - i - 1]; - assert(scratchLocals.find(t) != scratchLocals.end()); - auto localIndex = scratchLocals[t] + scratchTypeUses[t]++; - o << int8_t(BinaryConsts::LocalSet) << U32LEB(localIndex); - } - for (Index i = 0; i < unrefinedType.size(); i++) { - auto t = unrefinedType[i]; - auto localIndex = scratchLocals[t] + --scratchTypeUses[t]; - o << int8_t(BinaryConsts::LocalGet) << U32LEB(localIndex); - if (t.isRef()) { - // Note that we cast all types here, when perhaps only some of the - // tuple's lanes need that. This is simpler. - emitCast(type[i]); - } - } + // We need locals. Earlier we stashed the stack, so we just need to + // restore the value from there (note we don't restore the condition). 
+ restoreStack(curr->type); } } } @@ -3094,8 +3116,9 @@ InsertOrderedMap<Type, Index> BinaryInstWriter::countScratchLocals() { : writer(writer), finder(finder) {} void visitBreak(Break* curr) { + auto type = curr->type; // See if this is one of the dangerous br_ifs we must handle. - if (!curr->type.hasRef()) { + if (!type.hasRef()) { // Not even a reference. return; } @@ -3106,7 +3129,7 @@ InsertOrderedMap<Type, Index> BinaryInstWriter::countScratchLocals() { return; } if (auto* cast = parent->dynCast<RefCast>()) { - if (Type::isSubType(cast->type, curr->type)) { + if (Type::isSubType(cast->type, type)) { // It is cast to the same type or a better one. In particular this // handles the case of repeated roundtripping: After the first // roundtrip we emit a cast that we'll identify here, and not emit @@ -3117,23 +3140,29 @@ InsertOrderedMap<Type, Index> BinaryInstWriter::countScratchLocals() { } auto* breakTarget = findBreakTarget(curr->name); auto unrefinedType = breakTarget->type; - if (unrefinedType == curr->type) { + if (unrefinedType == type) { // It has the proper type anyhow. return; } // Mark the br_if as needing handling, and add the type to the set of // types we need scratch tuple locals for (if relevant). - writer.brIfsNeedingHandling[curr] = unrefinedType; - - if (unrefinedType.isTuple()) { - // We must allocate enough scratch locals for this tuple. Note that we + writer.brIfsNeedingHandling[curr] = unrefinedType; // XXX make it a set + + if (type->isTuple() || !type.isCastable()) { + // We must allocate enough scratch locals for this tuple, plus the i32 + // of the condition, as we will stash it all so that we can restore the + // fully refined value after the br_if. + // + // Note that we // may need more than one per type in the tuple, if a type appears more // than once, so we count their appearances. InsertOrderedMap<Type, Index> scratchTypeUses; - for (auto t : unrefinedType) { + for (auto t : type) { scratchTypeUses[t]++; } + // The condition. + scratchTypeUses[Type::i32]++; for (auto& [type, uses] : scratchTypeUses) { auto& count = finder.scratches[type]; count = std::max(count, uses); From 5ab33bf8e30ac58f7f2edd1d609bd39e94cce054 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Thu, 16 Oct 2025 11:27:22 -0700 Subject: [PATCH 13/33] fi --- src/wasm/wasm-stack.cpp | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/wasm/wasm-stack.cpp b/src/wasm/wasm-stack.cpp index db326c97bf6..a1b0f717183 100644 --- a/src/wasm/wasm-stack.cpp +++ b/src/wasm/wasm-stack.cpp @@ -95,7 +95,7 @@ void BinaryInstWriter::visitBreak(Break* curr) { if (needHandling) { // TODO make it a set // Tuples always need scratch locals. Uncastable types do as well (see // below). - needScratchLocals = type->isTuple() || !type.isCastable(); + needScratchLocals = type.isTuple() || !type.isCastable(); if (needScratchLocals) { // Stash all the values on the stack to those locals, then reload them. // After this, we can reload the refined values after the br_if. @@ -128,7 +128,7 @@ void BinaryInstWriter::visitBreak(Break* curr) { // Shim a tiny bit of IR, just enough to get visitRefCast to see what we // are casting, and to emit the proper thing. RefCast cast; - cast.type = to; + cast.type = type; cast.ref = cast.desc = nullptr; visitRefCast(&cast); } else { // We need locals. Earlier we stashed the stack, so we just need to // restore the value from there (note we don't restore the condition). @@ -3147,9 +3147,9 @@ InsertOrderedMap<Type, Index> BinaryInstWriter::countScratchLocals() { // Mark the br_if as needing handling, and add the type to the set of // types we need scratch tuple locals for (if relevant).
- writer.brIfsNeedingHandling[curr] = unrefinedType; // XXX make it a set + writer.brIfsNeedingHandling.insert(curr); - if (type->isTuple() || !type.isCastable()) { + if (type.isTuple() || !type.isCastable()) { // We must allocate enough scratch locals for this tuple, plus the i32 // of the condition, as we will stash it all so that we can restore the // fully refined value after the br_if. From 97bd9f459c341e5b6cf698977297e0174755fe00 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Thu, 16 Oct 2025 11:31:14 -0700 Subject: [PATCH 14/33] fix --- src/wasm/wasm-stack.cpp | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/wasm/wasm-stack.cpp b/src/wasm/wasm-stack.cpp index a1b0f717183..63eb2658b60 100644 --- a/src/wasm/wasm-stack.cpp +++ b/src/wasm/wasm-stack.cpp @@ -91,6 +91,8 @@ void BinaryInstWriter::visitBreak(Break* curr) { } }; + std::vector typesOnStack; + auto needHandling = brIfsNeedingHandling.count(curr); if (needHandling) { // TODO make it a set // Tuples always need scratch locals. Uncastable types do as well (see @@ -105,13 +107,13 @@ void BinaryInstWriter::visitBreak(Break* curr) { // enough for each, in a contiguous range, so we just increment as we go. // // Work on the types on the stack: the value + condition. - std::vector typesOnStack; if (type.isTuple()) { typesOnStack = type.getTuple(); } else { typesOnStack.push_back(type); } typesOnStack.push_back(Type::i32); + stashStack(typesOnStack); restoreStack(typesOnStack); // The stack is now as if we didn't change anything, but we @@ -134,7 +136,9 @@ void BinaryInstWriter::visitBreak(Break* curr) { } else { // We need locals. Earlier we stashed the stack, so we just need to // restore the value from there (note we don't restore the condition). - restoreStack(curr->type); + assert(typesOnStack.back() == Type::i32); + typesOnStack.pop_back(); + restoreStack(typesOnStack); } } } From 4080b41fe151287a60b00f772334413fb2a779d0 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Thu, 16 Oct 2025 11:37:19 -0700 Subject: [PATCH 15/33] fix --- test/lit/br_if_cont_uncastable.wast | 107 ++++++++++++++++++++++++++++ 1 file changed, 107 insertions(+) create mode 100644 test/lit/br_if_cont_uncastable.wast diff --git a/test/lit/br_if_cont_uncastable.wast b/test/lit/br_if_cont_uncastable.wast new file mode 100644 index 00000000000..fd51358fe80 --- /dev/null +++ b/test/lit/br_if_cont_uncastable.wast @@ -0,0 +1,107 @@ +;; NOTE: Assertions have been generated by update_lit_checks.py and should not be edited. + +;; Test that our hack for br_if output types works with continuation types, +;; which are not castable. + +;; RUN: wasm-opt %s -all --roundtrip -S -o - | filecheck %s + +(module + (rec + ;; CHECK: (rec + ;; CHECK-NEXT: (type $cont (cont $none)) + (type $cont (cont $none)) + ;; CHECK: (type $none (func)) + (type $none (func)) + ) + + ;; CHECK: (func $suspend (type $none) + ;; CHECK-NEXT: (nop) + ;; CHECK-NEXT: ) + (func $suspend (type $none) + (nop) + ) + + ;; CHECK: (func $br_if_gc (type $2) (param $ref (ref any)) (result anyref) + ;; CHECK-NEXT: (block $block (result anyref) + ;; CHECK-NEXT: (call $receive_any + ;; CHECK-NEXT: (ref.cast (ref any) + ;; CHECK-NEXT: (br_if $block + ;; CHECK-NEXT: (local.get $ref) + ;; CHECK-NEXT: (i32.const 0) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (unreachable) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + (func $br_if_gc (param $ref (ref any)) (result anyref) + ;; After the roundtrip here we should see a cast, and no added locals. 
+ (block $label (result anyref) + (call $receive_any ;; ensure the value flowing out is fully refined + (br_if $label + (local.get $ref) + (i32.const 0) + ) + ) + (unreachable) + ) + ) + + ;; CHECK: (func $br_if_cont (type $3) (result contref) + ;; CHECK-NEXT: (local $0 (ref (exact $cont))) + ;; CHECK-NEXT: (local $1 i32) + ;; CHECK-NEXT: (local $scratch (ref (exact $cont))) + ;; CHECK-NEXT: (block $block (result contref) + ;; CHECK-NEXT: (local.set $0 + ;; CHECK-NEXT: (block (result (ref (exact $cont))) + ;; CHECK-NEXT: (local.set $scratch + ;; CHECK-NEXT: (cont.new $cont + ;; CHECK-NEXT: (ref.func $suspend) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.set $1 + ;; CHECK-NEXT: (i32.const 0) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.get $scratch) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (drop + ;; CHECK-NEXT: (br_if $block + ;; CHECK-NEXT: (local.get $0) + ;; CHECK-NEXT: (local.get $1) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (call $receive_cont + ;; CHECK-NEXT: (local.get $0) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (unreachable) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + (func $br_if_cont (result contref) + ;; After the roundtrip here we should see stashing of the continuation into a + ;; local, rather than a cast. + (block $label (result contref) + (call $receive_cont ;; ensure the value flowing out is fully refined + (br_if $label + (cont.new $cont + (ref.func $suspend) + ) + (i32.const 0) + ) + ) + (unreachable) + ) + ) + + ;; CHECK: (func $receive_any (type $4) (param $0 (ref any)) + ;; CHECK-NEXT: ) + (func $receive_any (param (ref any)) + ) + + ;; CHECK: (func $receive_cont (type $5) (param $0 (ref $cont)) + ;; CHECK-NEXT: ) + (func $receive_cont (param (ref $cont)) + ) +) + +;; TODO tuples From ece64fa317c005bc4f9a088e3bbdee718f04b44e Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Thu, 16 Oct 2025 11:41:18 -0700 Subject: [PATCH 16/33] fix --- test/lit/br_if_cont_uncastable.wast | 76 ++++++++++++++++++++++++++++- 1 file changed, 75 insertions(+), 1 deletion(-) diff --git a/test/lit/br_if_cont_uncastable.wast b/test/lit/br_if_cont_uncastable.wast index fd51358fe80..a2319d030da 100644 --- a/test/lit/br_if_cont_uncastable.wast +++ b/test/lit/br_if_cont_uncastable.wast @@ -81,7 +81,7 @@ ;; After the roundtrip here we should see stashing of the continuation into a ;; local, rather than a cast. (block $label (result contref) - (call $receive_cont ;; ensure the value flowing out is fully refined + (call $receive_cont (br_if $label (cont.new $cont (ref.func $suspend) @@ -102,6 +102,80 @@ ;; CHECK-NEXT: ) (func $receive_cont (param (ref $cont)) ) + + ;; As above, but with tuples. + + ;; CHECK: (func $br_if_gc (type $2) (param $ref (ref any)) (result anyref) + ;; CHECK-NEXT: (block $block (result anyref) + ;; CHECK-NEXT: (call $receive_any + ;; CHECK-NEXT: (ref.cast (ref any) + ;; CHECK-NEXT: (br_if $block + ;; CHECK-NEXT: (local.get $ref) + ;; CHECK-NEXT: (i32.const 0) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (unreachable) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + (func $br_if_gc_tuple (param $ref (ref any)) (result anyref i32) + ;; We use locals here, avoiding casts - since we are using locals anyhow, we + ;; don't need any casts (we stash the refined values). 
+ (block $label (result anyref i32) + (br_if $label + (tuple.make 2 + (local.get $ref) + (i32.const 1) + ) + (i32.const 0) + ) + ) + ) + + ;; CHECK: (func $br_if_cont (type $3) (result contref) + ;; CHECK-NEXT: (local $0 (ref (exact $cont))) + ;; CHECK-NEXT: (local $1 i32) + ;; CHECK-NEXT: (local $scratch (ref (exact $cont))) + ;; CHECK-NEXT: (block $block (result contref) + ;; CHECK-NEXT: (local.set $0 + ;; CHECK-NEXT: (block (result (ref (exact $cont))) + ;; CHECK-NEXT: (local.set $scratch + ;; CHECK-NEXT: (cont.new $cont + ;; CHECK-NEXT: (ref.func $suspend) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.set $1 + ;; CHECK-NEXT: (i32.const 0) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.get $scratch) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (drop + ;; CHECK-NEXT: (br_if $block + ;; CHECK-NEXT: (local.get $0) + ;; CHECK-NEXT: (local.get $1) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (call $receive_cont + ;; CHECK-NEXT: (local.get $0) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (unreachable) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + (func $br_if_cont_tuple (result contref i32) + ;; As above, locals are used (and here, casts would also be invalid). + (block $label (result contref i32) + (br_if $label + (tuple.make 2 + (cont.new $cont + (ref.func $suspend) + ) + (i32.const 1) + ) + (i32.const 0) + ) + ) + ) ) ;; TODO tuples From 34c3ab89ac944740ff723a9c27e31d1d8dcc10fe Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Thu, 16 Oct 2025 12:12:59 -0700 Subject: [PATCH 17/33] fix --- src/wasm/wasm-stack.cpp | 6 +- test/lit/br_if_cont_uncastable.wast | 123 ++++++++++++++++++++++------ 2 files changed, 102 insertions(+), 27 deletions(-) diff --git a/src/wasm/wasm-stack.cpp b/src/wasm/wasm-stack.cpp index 63eb2658b60..35571550778 100644 --- a/src/wasm/wasm-stack.cpp +++ b/src/wasm/wasm-stack.cpp @@ -135,7 +135,11 @@ void BinaryInstWriter::visitBreak(Break* curr) { visitRefCast(&cast); } else { // We need locals. Earlier we stashed the stack, so we just need to - // restore the value from there (note we don't restore the condition). + // restore the value from there (note we don't restore the condition), + // after dropping the br_if's unrefined values. + for (auto _ : type) { + o << int8_t(BinaryConsts::Drop); + } assert(typesOnStack.back() == Type::i32); typesOnStack.pop_back(); restoreStack(typesOnStack); diff --git a/test/lit/br_if_cont_uncastable.wast b/test/lit/br_if_cont_uncastable.wast index a2319d030da..7b323426126 100644 --- a/test/lit/br_if_cont_uncastable.wast +++ b/test/lit/br_if_cont_uncastable.wast @@ -21,7 +21,7 @@ (nop) ) - ;; CHECK: (func $br_if_gc (type $2) (param $ref (ref any)) (result anyref) + ;; CHECK: (func $br_if_gc (type $4) (param $ref (ref any)) (result anyref) ;; CHECK-NEXT: (block $block (result anyref) ;; CHECK-NEXT: (call $receive_any ;; CHECK-NEXT: (ref.cast (ref any) @@ -47,7 +47,7 @@ ) ) - ;; CHECK: (func $br_if_cont (type $3) (result contref) + ;; CHECK: (func $br_if_cont (type $5) (result contref) ;; CHECK-NEXT: (local $0 (ref (exact $cont))) ;; CHECK-NEXT: (local $1 i32) ;; CHECK-NEXT: (local $scratch (ref (exact $cont))) @@ -79,7 +79,7 @@ ;; CHECK-NEXT: ) (func $br_if_cont (result contref) ;; After the roundtrip here we should see stashing of the continuation into a - ;; local, rather than a cast. + ;; local, rather than a cast (as continuations cannot be cast). 
(block $label (result contref) (call $receive_cont (br_if $label @@ -93,29 +93,73 @@ ) ) - ;; CHECK: (func $receive_any (type $4) (param $0 (ref any)) + ;; CHECK: (func $receive_any (type $6) (param $0 (ref any)) ;; CHECK-NEXT: ) (func $receive_any (param (ref any)) ) - ;; CHECK: (func $receive_cont (type $5) (param $0 (ref $cont)) + ;; CHECK: (func $receive_cont (type $7) (param $0 (ref $cont)) ;; CHECK-NEXT: ) (func $receive_cont (param (ref $cont)) ) ;; As above, but with tuples. - ;; CHECK: (func $br_if_gc (type $2) (param $ref (ref any)) (result anyref) - ;; CHECK-NEXT: (block $block (result anyref) - ;; CHECK-NEXT: (call $receive_any - ;; CHECK-NEXT: (ref.cast (ref any) - ;; CHECK-NEXT: (br_if $block + ;; CHECK: (func $br_if_gc_tuple (type $8) (param $ref (ref any)) (result anyref i32) + ;; CHECK-NEXT: (local $1 (ref any)) + ;; CHECK-NEXT: (local $2 i32) + ;; CHECK-NEXT: (local $3 i32) + ;; CHECK-NEXT: (local $scratch i32) + ;; CHECK-NEXT: (local $scratch_5 (ref any)) + ;; CHECK-NEXT: (local $scratch_6 (tuple (ref any) i32)) + ;; CHECK-NEXT: (local $scratch_7 (ref any)) + ;; CHECK-NEXT: (block $block (type $3) (result anyref i32) + ;; CHECK-NEXT: (local.set $1 + ;; CHECK-NEXT: (block (result (ref any)) + ;; CHECK-NEXT: (local.set $scratch_5 ;; CHECK-NEXT: (local.get $ref) - ;; CHECK-NEXT: (i32.const 0) ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.set $3 + ;; CHECK-NEXT: (block (result i32) + ;; CHECK-NEXT: (local.set $scratch + ;; CHECK-NEXT: (i32.const 1) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.set $2 + ;; CHECK-NEXT: (i32.const 0) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.get $scratch) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.get $scratch_5) ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) - ;; CHECK-NEXT: (unreachable) + ;; CHECK-NEXT: (drop + ;; CHECK-NEXT: (block (result (ref any)) + ;; CHECK-NEXT: (local.set $scratch_7 + ;; CHECK-NEXT: (tuple.extract 2 0 + ;; CHECK-NEXT: (local.tee $scratch_6 + ;; CHECK-NEXT: (br_if $block + ;; CHECK-NEXT: (tuple.make 2 + ;; CHECK-NEXT: (local.get $1) + ;; CHECK-NEXT: (local.get $3) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.get $2) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (drop + ;; CHECK-NEXT: (tuple.extract 2 1 + ;; CHECK-NEXT: (local.get $scratch_6) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.get $scratch_7) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (tuple.make 2 + ;; CHECK-NEXT: (local.get $1) + ;; CHECK-NEXT: (local.get $3) + ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) (func $br_if_gc_tuple (param $ref (ref any)) (result anyref i32) @@ -132,34 +176,63 @@ ) ) - ;; CHECK: (func $br_if_cont (type $3) (result contref) + ;; CHECK: (func $br_if_cont_tuple (type $2) (result contref i32) ;; CHECK-NEXT: (local $0 (ref (exact $cont))) ;; CHECK-NEXT: (local $1 i32) - ;; CHECK-NEXT: (local $scratch (ref (exact $cont))) - ;; CHECK-NEXT: (block $block (result contref) + ;; CHECK-NEXT: (local $2 i32) + ;; CHECK-NEXT: (local $scratch i32) + ;; CHECK-NEXT: (local $scratch_4 (ref (exact $cont))) + ;; CHECK-NEXT: (local $scratch_5 (tuple (ref (exact $cont)) i32)) + ;; CHECK-NEXT: (local $scratch_6 (ref (exact $cont))) + ;; CHECK-NEXT: (block $block (type $2) (result contref i32) ;; CHECK-NEXT: (local.set $0 ;; CHECK-NEXT: (block (result (ref (exact $cont))) - ;; CHECK-NEXT: (local.set $scratch + ;; CHECK-NEXT: (local.set $scratch_4 ;; CHECK-NEXT: (cont.new $cont ;; CHECK-NEXT: (ref.func $suspend) ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) - ;; CHECK-NEXT: (local.set $1 - 
;; CHECK-NEXT: (i32.const 0) + ;; CHECK-NEXT: (local.set $2 + ;; CHECK-NEXT: (block (result i32) + ;; CHECK-NEXT: (local.set $scratch + ;; CHECK-NEXT: (i32.const 1) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.set $1 + ;; CHECK-NEXT: (i32.const 0) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.get $scratch) + ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) - ;; CHECK-NEXT: (local.get $scratch) + ;; CHECK-NEXT: (local.get $scratch_4) ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) ;; CHECK-NEXT: (drop - ;; CHECK-NEXT: (br_if $block - ;; CHECK-NEXT: (local.get $0) - ;; CHECK-NEXT: (local.get $1) + ;; CHECK-NEXT: (block (result (ref (exact $cont))) + ;; CHECK-NEXT: (local.set $scratch_6 + ;; CHECK-NEXT: (tuple.extract 2 0 + ;; CHECK-NEXT: (local.tee $scratch_5 + ;; CHECK-NEXT: (br_if $block + ;; CHECK-NEXT: (tuple.make 2 + ;; CHECK-NEXT: (local.get $0) + ;; CHECK-NEXT: (local.get $2) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.get $1) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (drop + ;; CHECK-NEXT: (tuple.extract 2 1 + ;; CHECK-NEXT: (local.get $scratch_5) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.get $scratch_6) ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) - ;; CHECK-NEXT: (call $receive_cont + ;; CHECK-NEXT: (tuple.make 2 ;; CHECK-NEXT: (local.get $0) + ;; CHECK-NEXT: (local.get $2) ;; CHECK-NEXT: ) - ;; CHECK-NEXT: (unreachable) ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) (func $br_if_cont_tuple (result contref i32) @@ -177,5 +250,3 @@ ) ) ) - -;; TODO tuples From c11460f5a83ec19135f3e3543d94032c9de978b5 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Thu, 16 Oct 2025 12:18:36 -0700 Subject: [PATCH 18/33] fix --- test/lit/cast-and-recast-tuple.wast | 151 ++++++++++++++++++---------- 1 file changed, 100 insertions(+), 51 deletions(-) diff --git a/test/lit/cast-and-recast-tuple.wast b/test/lit/cast-and-recast-tuple.wast index 6ceefe7d605..8be560e8838 100644 --- a/test/lit/cast-and-recast-tuple.wast +++ b/test/lit/cast-and-recast-tuple.wast @@ -167,47 +167,67 @@ ;; CHECK: (func $test-local-tuple-4-bad (type $3) (param $B (ref $B)) (param $x i32) (result anyref i32) ;; CHECK-NEXT: (local $temp (ref $B)) - ;; CHECK-NEXT: (local $3 (ref $A)) + ;; CHECK-NEXT: (local $3 (ref $B)) ;; CHECK-NEXT: (local $4 i32) ;; CHECK-NEXT: (local $5 i32) - ;; CHECK-NEXT: (local $scratch (tuple (ref $B) i32)) - ;; CHECK-NEXT: (local $scratch_7 (ref $B)) + ;; CHECK-NEXT: (local $6 i32) + ;; CHECK-NEXT: (local $scratch i32) ;; CHECK-NEXT: (local $scratch_8 (ref $B)) + ;; CHECK-NEXT: (local $scratch_9 (tuple (ref $B) i32)) + ;; CHECK-NEXT: (local $scratch_10 (ref $B)) + ;; CHECK-NEXT: (local $scratch_11 (ref $B)) ;; CHECK-NEXT: (block $block (type $2) (result (ref $A) i32) ;; CHECK-NEXT: (local.set $3 ;; CHECK-NEXT: (block (result (ref $B)) - ;; CHECK-NEXT: (local.set $scratch_7 + ;; CHECK-NEXT: (local.set $scratch_8 + ;; CHECK-NEXT: (local.get $B) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.set $6 + ;; CHECK-NEXT: (block (result i32) + ;; CHECK-NEXT: (local.set $scratch + ;; CHECK-NEXT: (i32.const 3) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.set $5 + ;; CHECK-NEXT: (local.get $x) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.get $scratch) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.get $scratch_8) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (drop + ;; CHECK-NEXT: (block (result (ref $B)) + ;; CHECK-NEXT: (local.set $scratch_10 ;; CHECK-NEXT: (tuple.extract 2 0 - ;; CHECK-NEXT: (local.tee $scratch + ;; CHECK-NEXT: (local.tee $scratch_9 ;; CHECK-NEXT: (br_if $block ;; 
CHECK-NEXT: (tuple.make 2 - ;; CHECK-NEXT: (local.get $B) - ;; CHECK-NEXT: (i32.const 3) + ;; CHECK-NEXT: (local.get $3) + ;; CHECK-NEXT: (local.get $6) ;; CHECK-NEXT: ) - ;; CHECK-NEXT: (local.get $x) + ;; CHECK-NEXT: (local.get $5) ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) - ;; CHECK-NEXT: (local.set $5 + ;; CHECK-NEXT: (drop ;; CHECK-NEXT: (tuple.extract 2 1 - ;; CHECK-NEXT: (local.get $scratch) + ;; CHECK-NEXT: (local.get $scratch_9) ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) - ;; CHECK-NEXT: (local.get $scratch_7) + ;; CHECK-NEXT: (local.get $scratch_10) ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) ;; CHECK-NEXT: (local.set $temp ;; CHECK-NEXT: (block (result (ref $B)) - ;; CHECK-NEXT: (local.set $scratch_8 - ;; CHECK-NEXT: (ref.cast (ref $B) - ;; CHECK-NEXT: (local.get $3) - ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.set $scratch_11 + ;; CHECK-NEXT: (local.get $3) ;; CHECK-NEXT: ) ;; CHECK-NEXT: (local.set $4 - ;; CHECK-NEXT: (local.get $5) + ;; CHECK-NEXT: (local.get $6) ;; CHECK-NEXT: ) - ;; CHECK-NEXT: (local.get $scratch_8) + ;; CHECK-NEXT: (local.get $scratch_11) ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) ;; CHECK-NEXT: (unreachable) @@ -218,10 +238,10 @@ ;; As above, but none of the mitigating circumstances happens: we have a ;; tuple with a reference that is refined compared to the break target. As a ;; result we must fix this up, which we do by adding locals, saving the - ;; br_if's output to them, and then loading from those locals and casting. + ;; br_if's refined input to them, reloading those values for the br_if, then + ;; reloading them again afterwards. ;; - ;; Comparing to $test-local-tuple-4, we end up with 3 more locals, and also - ;; there is now a ref.cast. + ;; Comparing to $test-local-tuple-4, we end up with 6 more locals. (block $out (result (ref $A) i32) (local.set $temp (br_if $out @@ -239,85 +259,114 @@ ;; CHECK: (func $test-local-tuple-4-bad-dupes (type $8) (param $B (ref $B)) (param $x i32) (result i32 anyref i32) ;; CHECK-NEXT: (local $temp (ref $B)) ;; CHECK-NEXT: (local $3 (ref $B)) - ;; CHECK-NEXT: (local $4 (ref $A)) + ;; CHECK-NEXT: (local $4 (ref $B)) ;; CHECK-NEXT: (local $5 i32) ;; CHECK-NEXT: (local $scratch i32) ;; CHECK-NEXT: (local $7 i32) ;; CHECK-NEXT: (local $8 i32) ;; CHECK-NEXT: (local $9 i32) - ;; CHECK-NEXT: (local $scratch_10 (tuple i32 (ref $B) i32)) - ;; CHECK-NEXT: (local $scratch_11 (ref $B)) - ;; CHECK-NEXT: (local $scratch_12 i32) - ;; CHECK-NEXT: (local $scratch_13 (ref $B)) - ;; CHECK-NEXT: (local $scratch_14 i32) + ;; CHECK-NEXT: (local $10 i32) + ;; CHECK-NEXT: (local $scratch_11 i32) + ;; CHECK-NEXT: (local $scratch_12 (ref $B)) + ;; CHECK-NEXT: (local $scratch_13 i32) + ;; CHECK-NEXT: (local $scratch_14 (tuple i32 (ref $B) i32)) ;; CHECK-NEXT: (local $scratch_15 (ref $B)) + ;; CHECK-NEXT: (local $scratch_16 i32) + ;; CHECK-NEXT: (local $scratch_17 (ref $B)) + ;; CHECK-NEXT: (local $scratch_18 i32) + ;; CHECK-NEXT: (local $scratch_19 (ref $B)) ;; CHECK-NEXT: (block $block (type $6) (result i32 (ref $A) i32) - ;; CHECK-NEXT: (local.set $9 + ;; CHECK-NEXT: (local.set $10 + ;; CHECK-NEXT: (block (result i32) + ;; CHECK-NEXT: (local.set $scratch_13 + ;; CHECK-NEXT: (i32.const -3) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.set $4 + ;; CHECK-NEXT: (block (result (ref $B)) + ;; CHECK-NEXT: (local.set $scratch_12 + ;; CHECK-NEXT: (local.get $B) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.set $9 + ;; CHECK-NEXT: (block (result i32) + ;; CHECK-NEXT: (local.set $scratch_11 + ;; CHECK-NEXT: (i32.const 3) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: 
(local.set $8 + ;; CHECK-NEXT: (local.get $x) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.get $scratch_11) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.get $scratch_12) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.get $scratch_13) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (drop ;; CHECK-NEXT: (block (result i32) - ;; CHECK-NEXT: (local.set $scratch_12 + ;; CHECK-NEXT: (local.set $scratch_16 ;; CHECK-NEXT: (tuple.extract 3 0 - ;; CHECK-NEXT: (local.tee $scratch_10 + ;; CHECK-NEXT: (local.tee $scratch_14 ;; CHECK-NEXT: (br_if $block ;; CHECK-NEXT: (tuple.make 3 - ;; CHECK-NEXT: (i32.const -3) - ;; CHECK-NEXT: (local.get $B) - ;; CHECK-NEXT: (i32.const 3) + ;; CHECK-NEXT: (local.get $10) + ;; CHECK-NEXT: (local.get $4) + ;; CHECK-NEXT: (local.get $9) ;; CHECK-NEXT: ) - ;; CHECK-NEXT: (local.get $x) + ;; CHECK-NEXT: (local.get $8) ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) - ;; CHECK-NEXT: (local.set $4 + ;; CHECK-NEXT: (drop ;; CHECK-NEXT: (block (result (ref $B)) - ;; CHECK-NEXT: (local.set $scratch_11 + ;; CHECK-NEXT: (local.set $scratch_15 ;; CHECK-NEXT: (tuple.extract 3 1 - ;; CHECK-NEXT: (local.get $scratch_10) + ;; CHECK-NEXT: (local.get $scratch_14) ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) - ;; CHECK-NEXT: (local.set $8 + ;; CHECK-NEXT: (drop ;; CHECK-NEXT: (tuple.extract 3 2 - ;; CHECK-NEXT: (local.get $scratch_10) + ;; CHECK-NEXT: (local.get $scratch_14) ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) - ;; CHECK-NEXT: (local.get $scratch_11) + ;; CHECK-NEXT: (local.get $scratch_15) ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) - ;; CHECK-NEXT: (local.get $scratch_12) + ;; CHECK-NEXT: (local.get $scratch_16) ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) ;; CHECK-NEXT: (drop ;; CHECK-NEXT: (local.tee $scratch ;; CHECK-NEXT: (block (result i32) - ;; CHECK-NEXT: (local.set $scratch_14 - ;; CHECK-NEXT: (local.get $9) + ;; CHECK-NEXT: (local.set $scratch_18 + ;; CHECK-NEXT: (local.get $10) ;; CHECK-NEXT: ) ;; CHECK-NEXT: (local.set $3 ;; CHECK-NEXT: (block (result (ref $B)) - ;; CHECK-NEXT: (local.set $scratch_13 - ;; CHECK-NEXT: (ref.cast (ref $B) - ;; CHECK-NEXT: (local.get $4) - ;; CHECK-NEXT: ) + ;; CHECK-NEXT: (local.set $scratch_17 + ;; CHECK-NEXT: (local.get $4) ;; CHECK-NEXT: ) ;; CHECK-NEXT: (local.set $7 - ;; CHECK-NEXT: (local.get $8) + ;; CHECK-NEXT: (local.get $9) ;; CHECK-NEXT: ) - ;; CHECK-NEXT: (local.get $scratch_13) + ;; CHECK-NEXT: (local.get $scratch_17) ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) - ;; CHECK-NEXT: (local.get $scratch_14) + ;; CHECK-NEXT: (local.get $scratch_18) ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) ;; CHECK-NEXT: (local.set $temp ;; CHECK-NEXT: (block (result (ref $B)) - ;; CHECK-NEXT: (local.set $scratch_15 + ;; CHECK-NEXT: (local.set $scratch_19 ;; CHECK-NEXT: (local.get $3) ;; CHECK-NEXT: ) ;; CHECK-NEXT: (local.set $5 ;; CHECK-NEXT: (local.get $7) ;; CHECK-NEXT: ) - ;; CHECK-NEXT: (local.get $scratch_15) + ;; CHECK-NEXT: (local.get $scratch_19) ;; CHECK-NEXT: ) ;; CHECK-NEXT: ) ;; CHECK-NEXT: (unreachable) From 3c2e6fe75e510a388e86fc19f002ab0524879627 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Thu, 16 Oct 2025 12:24:43 -0700 Subject: [PATCH 19/33] more --- test/lit/cast-and-recast-tuple.wast | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/test/lit/cast-and-recast-tuple.wast b/test/lit/cast-and-recast-tuple.wast index 8be560e8838..5bc4b19ccfe 100644 --- a/test/lit/cast-and-recast-tuple.wast +++ b/test/lit/cast-and-recast-tuple.wast @@ -379,28 +379,36 @@ ;; that the 
tuple.extracts use different locals for the first and last i32. ;; For easier reading, here is the wami output of the binary: ;; - ;; (func $func4 (param $var0 (ref $type1)) (param $var1 i32) (result i32) (result anyref) (result i32) + ;; (func $func0 (param $var0 (ref $type1)) (param $var1 i32) (result i32) (result anyref) (result i32) ;; (local $var2 (ref $type1)) ;; (local $var3 (ref $type1)) - ;; (local $var4 (ref $type0)) + ;; (local $var4 (ref $type1)) ;; (local $var5 i32) ;; (local $var6 i32) ;; (local $var7 i32) ;; (local $var8 i32) ;; (local $var9 i32) + ;; (local $var10 i32) ;; block $label0 (result i32) (result (ref $type0)) (result i32) ;; i32.const -3 ;; local.get $var0 ;; i32.const 3 ;; local.get $var1 + ;; local.set $var8 ;; saves condition + ;; local.set $var9 ;; saves 3 + ;; local.set $var4 ;; saves ref + ;; local.set $var10 ;; saves -3 + ;; local.get $var10 ;; gets -3 + ;; local.get $var4 ;; gets ref + ;; local.get $var9 ;; gets 3 + ;; local.get $var8 ;; gets condition ;; br_if $label0 - ;; local.set $var8 ;; saves 3 - ;; local.set $var4 ;; saves the ref - ;; local.set $var9 ;; saves -3 - ;; local.get $var9 ;; gets -3 - ;; local.get $var4 ;; gets the ref - ;; ref.cast $type1 ;; casts the ref - ;; local.get $var8 ;; gets 3 + ;; drop + ;; drop + ;; drop + ;; local.get $var10 ;; gets -3 + ;; local.get $var4 ;; gets ref + ;; local.get $var9 ;; gets 3 ;; local.set $var7 ;; local.set $var3 ;; local.tee $var6 From de17bf192e3f290ea7036741001e3514e4222752 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Thu, 16 Oct 2025 12:29:26 -0700 Subject: [PATCH 20/33] format --- src/tools/fuzzing/fuzzing.cpp | 14 ++++---------- src/wasm/wasm-stack.cpp | 4 ++-- src/wasm/wasm-type.cpp | 4 +--- src/wasm/wasm-validator.cpp | 30 ++++++++++++------------------ 4 files changed, 19 insertions(+), 33 deletions(-) diff --git a/src/tools/fuzzing/fuzzing.cpp b/src/tools/fuzzing/fuzzing.cpp index e3ad3e60a59..e90b26ecd0f 100644 --- a/src/tools/fuzzing/fuzzing.cpp +++ b/src/tools/fuzzing/fuzzing.cpp @@ -2124,17 +2124,11 @@ void TranslateToFuzzReader::fixAfterChanges(Function* func) { } // Fix up casts: Our changes may have put an uncastable type in a cast. - void visitRefCast(RefCast* curr) { - fixCast(curr, curr->ref); - } - void visitRefTest(RefTest* curr) { - fixCast(curr, curr->ref); - } + void visitRefCast(RefCast* curr) { fixCast(curr, curr->ref); } + void visitRefTest(RefTest* curr) { fixCast(curr, curr->ref); } void visitBrOn(BrOn* curr) { - if (curr->op == BrOnCast || - curr->op == BrOnCastFail || - curr->op == BrOnCastDesc || - curr->op == BrOnCastDescFail) { + if (curr->op == BrOnCast || curr->op == BrOnCastFail || + curr->op == BrOnCastDesc || curr->op == BrOnCastDescFail) { fixCast(curr, curr->ref); } else { visitExpression(curr); diff --git a/src/wasm/wasm-stack.cpp b/src/wasm/wasm-stack.cpp index 35571550778..f15af5b8151 100644 --- a/src/wasm/wasm-stack.cpp +++ b/src/wasm/wasm-stack.cpp @@ -88,12 +88,12 @@ void BinaryInstWriter::visitBreak(Break* curr) { auto t = types[i]; auto localIndex = scratchLocals[t] + --currScratchTypeUses[t]; o << int8_t(BinaryConsts::LocalGet) << U32LEB(localIndex); - } + } }; std::vector typesOnStack; - auto needHandling = brIfsNeedingHandling.count(curr); + auto needHandling = brIfsNeedingHandling.count(curr); if (needHandling) { // TODO make it a set // Tuples always need scratch locals. Uncastable types do as well (see // below). 
diff --git a/src/wasm/wasm-type.cpp b/src/wasm/wasm-type.cpp index 55351e85ba8..097f3b2284f 100644 --- a/src/wasm/wasm-type.cpp +++ b/src/wasm/wasm-type.cpp @@ -623,9 +623,7 @@ bool Type::isDefaultable() const { return isConcrete() && !isNonNullable(); } -bool Type::isCastable() { - return isRef() && getHeapType().isCastable(); -} +bool Type::isCastable() { return isRef() && getHeapType().isCastable(); } unsigned Type::getByteSize() const { // TODO: alignment? diff --git a/src/wasm/wasm-validator.cpp b/src/wasm/wasm-validator.cpp index e995a3d1b38..fb554664d0f 100644 --- a/src/wasm/wasm-validator.cpp +++ b/src/wasm/wasm-validator.cpp @@ -2911,9 +2911,8 @@ void FunctionValidator::visitRefTest(RefTest* curr) { shouldBeTrue( getModule()->features.hasGC(), curr, "ref.test requires gc [--enable-gc]"); - shouldBeTrue(curr->castType.isCastable(), - curr, - "ref.test cannot cast to invalid type"); + shouldBeTrue( + curr->castType.isCastable(), curr, "ref.test cannot cast to invalid type"); if (curr->ref->type == Type::unreachable) { return; @@ -2945,9 +2944,8 @@ void FunctionValidator::visitRefTest(RefTest* curr) { "[--enable-custom-descriptors]"); } - shouldBeTrue(curr->ref->type.isCastable(), - curr, - "ref.test cannot cast invalid type"); + shouldBeTrue( + curr->ref->type.isCastable(), curr, "ref.test cannot cast invalid type"); } void FunctionValidator::visitRefCast(RefCast* curr) { @@ -3008,12 +3006,10 @@ void FunctionValidator::visitRefCast(RefCast* curr) { "[--enable-custom-descriptors]"); } - shouldBeTrue(curr->ref->type.isCastable(), - curr, - "ref.cast cannot cast invalid type"); - shouldBeTrue(curr->type.isCastable(), - curr, - "ref.cast cannot cast to invalid type"); + shouldBeTrue( + curr->ref->type.isCastable(), curr, "ref.cast cannot cast invalid type"); + shouldBeTrue( + curr->type.isCastable(), curr, "ref.cast cannot cast to invalid type"); if (!curr->desc) { return; @@ -3138,12 +3134,10 @@ void FunctionValidator::visitBrOn(BrOn* curr) { "br_on_cast* to exact type requires custom descriptors " "[--enable-custom-descriptors]"); } - shouldBeTrue(curr->ref->type.isCastable(), - curr, - "br_on cannot cast invalid type"); - shouldBeTrue(curr->castType.isCastable(), - curr, - "br_on cannot cast to invalid type"); + shouldBeTrue( + curr->ref->type.isCastable(), curr, "br_on cannot cast invalid type"); + shouldBeTrue( + curr->castType.isCastable(), curr, "br_on cannot cast to invalid type"); break; } } From a060d63b176982c9abf6a8121f5d8004909176e6 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Thu, 16 Oct 2025 12:40:13 -0700 Subject: [PATCH 21/33] more --- src/wasm-stack.h | 13 ++++------ src/wasm/wasm-stack.cpp | 53 +++++++++++++++++++++++------------------ 2 files changed, 34 insertions(+), 32 deletions(-) diff --git a/src/wasm-stack.h b/src/wasm-stack.h index 4d5f04a552c..41118d1391e 100644 --- a/src/wasm-stack.h +++ b/src/wasm-stack.h @@ -167,15 +167,10 @@ class BinaryInstWriter : public OverriddenVisitor { // when they have a value that is more refined than the wasm type system // allows atm (and they are not dropped, in which case the type would not // matter). See https://github.com/WebAssembly/binaryen/pull/6390 for more on - // the difference. As a result of the difference, we will insert extra casts - // to ensure validation in the wasm spec. The wasm spec will hopefully improve - // to use the more refined type as well, which would remove the need for this - // hack. - // - // Each br_if present as a key here is mapped to the unrefined type for it. 
- // That is, the br_if has a type in Binaryen IR that is too refined, and the
- // map contains the unrefined one (which we need to know the local types, as
- // we'll stash the unrefined values and then cast them). XXX
+ // the difference. As a result of the difference, we must fix things up for
+ // the spec. (The wasm spec might - hopefully - improve to use the more
+ // refined type as well, which would remove the need for this hack, and
+ // improve code size in general.)
 std::unordered_set brIfsNeedingHandling;
 };
diff --git a/src/wasm/wasm-stack.cpp b/src/wasm/wasm-stack.cpp
index f15af5b8151..f74bf6ba8c0 100644
--- a/src/wasm/wasm-stack.cpp
+++ b/src/wasm/wasm-stack.cpp
@@ -66,13 +66,18 @@ void BinaryInstWriter::visitBreak(Break* curr) {
 auto type = curr->type;
 // See comment on |brIfsNeedingHandling| for the extra handling we need to
- // emit here for certain br_ifs.
+ // emit here for certain br_ifs. If we need that handling, we either use a
+ // cast in simple cases, or scratch locals otherwise. We use the scratch
+ // locals to stash the stack before the br_if (which contains the refined
+ // types), then restore it later from those locals).
 bool needScratchLocals = false;
- // If we need this handling,
- // We must track how many scratch locals we've used from each type as we
- // go, as a type might appear multiple times in the tuple. We allocated
- // enough for each, in a contiguous range, so we just increment as we go.
+ // If we need locals, we must track how many we've used from each type as we
+ // go, as a type might appear multiple times in the tuple. We know we have
+ // enough of a range allocated for them, so we just increment as we go.
 std::unordered_map scratchTypeUses;
+ // Logic to stash and restore the stack, given a vector of types we are
+ // stashing/restoring. We will first stash the entire stack, including the i32
+ // condition, and after the br_if, restore the value (without the condition).
 auto stashStack = [&](const std::vector& types) {
 for (Index i = 0; i < types.size(); i++) {
 auto t = types[types.size() - i - 1];
@@ -91,22 +96,20 @@ void BinaryInstWriter::visitBreak(Break* curr) {
 }
 };
+ // The types on the stack before the br_if. We need this if we use locals to
+ // stash the stack.
 std::vector typesOnStack;
 auto needHandling = brIfsNeedingHandling.count(curr);
- if (needHandling) { // TODO make it a set
- // Tuples always need scratch locals. Uncastable types do as well (see
- // below).
+ if (needHandling) {
+ // Tuples always need scratch locals. Uncastable types do as well, as we
+ // can't fix them up below with a simple cast.
 needScratchLocals = type.isTuple() || !type.isCastable();
 if (needScratchLocals) {
- // Stash all the values on the stack to those locals, then reload them.
- // After this, we can reload the refined values after the br_if.
- //
- // We must track how many scratch locals we've used from each type as we
- // go, as a type might appear multiple times in the tuple. We allocated
- // enough for each, in a contiguous range, so we just increment as we go.
- //
- // Work on the types on the stack: the value + condition.
+ // Stash all the values on the stack to those locals, then reload them for
+ // the br_if to consume. Later, we can reload the refined values after the
+ // br_if, for its parent to consume.
+ if (type.isTuple()) { typesOnStack = type.getTuple(); } else { @@ -116,17 +119,20 @@ void BinaryInstWriter::visitBreak(Break* curr) { stashStack(typesOnStack); restoreStack(typesOnStack); - // The stack is now as if we didn't change anything, but we + // The stack is now in the same state as before, but we have copies in + // locals for later. } } o << int8_t(curr->condition ? BinaryConsts::BrIf : BinaryConsts::Br) << U32LEB(getBreakIndex(curr->name)); - if (needHandling) { // TODO make it a set + if (needHandling) { if (!needScratchLocals) { - // We can just cast here, avoiding scratch locals. - // + // We can just cast here, avoiding scratch locals. (Casting adds overhead, + // but this is very rare, and it avoids adding locals, which would keep + // growing the wasm with each roundtrip.) + // Shim a tiny bit of IR, just enough to get visitRefCast to see what we // are casting, and to emit the proper thing. RefCast cast; @@ -3157,14 +3163,15 @@ InsertOrderedMap BinaryInstWriter::countScratchLocals() { // types we need scratch tuple locals for (if relevant). writer.brIfsNeedingHandling.insert(curr); + // Simple cases can be handled by a cast. However, tuples and uncastable + // types require us to use locals too. if (type.isTuple() || !type.isCastable()) { // We must allocate enough scratch locals for this tuple, plus the i32 // of the condition, as we will stash it all so that we can restore the // fully refined value after the br_if. // - // Note that we - // may need more than one per type in the tuple, if a type appears more - // than once, so we count their appearances. + // Note that we may need more than one per type in the tuple, if a type + // appears more than once, so we count their appearances. InsertOrderedMap scratchTypeUses; for (auto t : type) { scratchTypeUses[t]++; From 11540f371d91324b6bcb9239b7562e3bd8a49510 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Thu, 16 Oct 2025 12:40:23 -0700 Subject: [PATCH 22/33] format --- src/wasm/wasm-stack.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/wasm/wasm-stack.cpp b/src/wasm/wasm-stack.cpp index f74bf6ba8c0..d24dd23402f 100644 --- a/src/wasm/wasm-stack.cpp +++ b/src/wasm/wasm-stack.cpp @@ -3164,7 +3164,7 @@ InsertOrderedMap BinaryInstWriter::countScratchLocals() { writer.brIfsNeedingHandling.insert(curr); // Simple cases can be handled by a cast. However, tuples and uncastable - // types require us to use locals too. + // types require us to use locals too. if (type.isTuple() || !type.isCastable()) { // We must allocate enough scratch locals for this tuple, plus the i32 // of the condition, as we will stash it all so that we can restore the From 5167d3b3f184862da059f4157dcbe01be648c686 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Thu, 16 Oct 2025 13:52:44 -0700 Subject: [PATCH 23/33] fix --- src/passes/GUFA.cpp | 4 +-- test/lit/passes/gufa-cast-all.wast | 40 ++++++++++++++++++++++++++++++ 2 files changed, 42 insertions(+), 2 deletions(-) diff --git a/src/passes/GUFA.cpp b/src/passes/GUFA.cpp index 21f9aa8521d..c5d0bbbe3a7 100644 --- a/src/passes/GUFA.cpp +++ b/src/passes/GUFA.cpp @@ -371,8 +371,8 @@ struct GUFAOptimizer bool optimized = false; void visitExpression(Expression* curr) { - if (!curr->type.isRef()) { - // Ignore anything we cannot infer a type for. + // Ignore anything we cannot emit a cast for. 
+ if (!curr->type.isCastable()) { return; } diff --git a/test/lit/passes/gufa-cast-all.wast b/test/lit/passes/gufa-cast-all.wast index d8f01441d2b..38a1de8863b 100644 --- a/test/lit/passes/gufa-cast-all.wast +++ b/test/lit/passes/gufa-cast-all.wast @@ -354,3 +354,43 @@ ) ) +;; Do not refine uncastable types. +(module + (rec + ;; CHECK: (rec + ;; CHECK-NEXT: (type $cont (cont $none)) + (type $cont (cont $none)) + ;; CHECK: (type $none (func)) + (type $none (func)) + ) + + ;; CHECK: (type $2 (func (result contref))) + + ;; CHECK: (type $3 (func (param (ref $cont)))) + + ;; CHECK: (elem declare func $suspend) + + ;; CHECK: (func $suspend (type $none) + ;; CHECK-NEXT: (nop) + ;; CHECK-NEXT: ) + (func $suspend (type $none) + (nop) + ) + + ;; CHECK: (func $unrefine (type $2) (result contref) + ;; CHECK-NEXT: (block $label (result contref) + ;; CHECK-NEXT: (cont.new $cont + ;; CHECK-NEXT: (ref.func $suspend) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + ;; CHECK-NEXT: ) + (func $unrefine (result contref) + ;; No cast should be added here. + (block $label (result contref) + (cont.new $cont + (ref.func $suspend) + ) + ) + ) +) + From 235ca576d80d3c24bc28a0124f58bb9ee4629155 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Mon, 20 Oct 2025 14:49:47 -0700 Subject: [PATCH 24/33] fin --- src/tools/fuzzing/fuzzing.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/tools/fuzzing/fuzzing.cpp b/src/tools/fuzzing/fuzzing.cpp index e90b26ecd0f..48df3052d12 100644 --- a/src/tools/fuzzing/fuzzing.cpp +++ b/src/tools/fuzzing/fuzzing.cpp @@ -1763,7 +1763,7 @@ static bool replaceChildWith(Expression* expr, Expression* with) { // Casts can get broken if we replace with something not castable. if (!with->type.isCastable()) { if (expr->is() || expr->is() || expr->is()) { - return false; // TODO verify + return false; } } for (auto*& child : ChildIterator(expr)) { @@ -2136,7 +2136,7 @@ void TranslateToFuzzReader::fixAfterChanges(Function* func) { } void fixCast(Expression* curr, Expression* ref) { if (!ref->type.isCastable()) { - replaceCurrent(parent.makeTrivial(curr->type)); // TODO verify + replaceCurrent(parent.makeTrivial(curr->type)); } else { visitExpression(curr); } From f8cfc04d4bbf81dfd0dc6f8b956b5a7d6f45c19f Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Tue, 21 Oct 2025 10:28:45 -0700 Subject: [PATCH 25/33] finish --- scripts/fuzz_opt.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/fuzz_opt.py b/scripts/fuzz_opt.py index eb2085b5153..a8886d362fb 100755 --- a/scripts/fuzz_opt.py +++ b/scripts/fuzz_opt.py @@ -154,7 +154,7 @@ def randomize_feature_opts(): # Same with strings. Relaxed SIMD's nondeterminism disables much but not # all of our V8 fuzzing, so avoid it too. Stack Switching, as well, is # not yet ready in V8. - if random.random() < 0.45: # XXX fuzz continuations more, in this PR + if random.random() < 0.90: FEATURE_OPTS.append('--disable-shared-everything') FEATURE_OPTS.append('--disable-strings') FEATURE_OPTS.append('--disable-relaxed-simd') From 48cab8b4b6a1ac7fc2079889d53ed011cf1f6dc6 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Tue, 21 Oct 2025 10:31:00 -0700 Subject: [PATCH 26/33] finish --- scripts/fuzz_opt.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/fuzz_opt.py b/scripts/fuzz_opt.py index a8886d362fb..e0950b58c9c 100755 --- a/scripts/fuzz_opt.py +++ b/scripts/fuzz_opt.py @@ -154,7 +154,7 @@ def randomize_feature_opts(): # Same with strings. 
Relaxed SIMD's nondeterminism disables much but not # all of our V8 fuzzing, so avoid it too. Stack Switching, as well, is # not yet ready in V8. - if random.random() < 0.90: + if random.random() < 0.9: FEATURE_OPTS.append('--disable-shared-everything') FEATURE_OPTS.append('--disable-strings') FEATURE_OPTS.append('--disable-relaxed-simd') From 238325bf186322e66e9aaaa445b17830fb011791 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Tue, 21 Oct 2025 10:42:37 -0700 Subject: [PATCH 27/33] typo --- src/wasm/wasm-stack.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/wasm/wasm-stack.cpp b/src/wasm/wasm-stack.cpp index d24dd23402f..ad77c18e958 100644 --- a/src/wasm/wasm-stack.cpp +++ b/src/wasm/wasm-stack.cpp @@ -69,7 +69,7 @@ void BinaryInstWriter::visitBreak(Break* curr) { // emit here for certain br_ifs. If we need that handling, we either use a // cast in simple cases, or scratch locals otherwise. We use the scratch // locals to stash the stack before the br_if (which contains the refined - // types), then restore it later from those locals). + // types), then restore it later from those locals. bool needScratchLocals = false; // If we need locals, we must track how many we've used from each type as we // go, as a type might appear multiple times in the tuple. We know we have From d532f4c98e4f2e117cd5acd47faa9ecbdc8a7782 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Tue, 21 Oct 2025 10:52:47 -0700 Subject: [PATCH 28/33] fix unreasonable compiler warning --- src/wasm/wasm-stack.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/wasm/wasm-stack.cpp b/src/wasm/wasm-stack.cpp index ad77c18e958..09936ef86c0 100644 --- a/src/wasm/wasm-stack.cpp +++ b/src/wasm/wasm-stack.cpp @@ -143,7 +143,7 @@ void BinaryInstWriter::visitBreak(Break* curr) { // We need locals. Earlier we stashed the stack, so we just need to // restore the value from there (note we don't restore the condition), // after dropping the br_if's unrefined values. 
- for (auto _ : type) { + for (Index i = 0; i < type.size(); ++i) { o << int8_t(BinaryConsts::Drop); } assert(typesOnStack.back() == Type::i32); From f0ea20e8f3e4bdc1211093f2a45c3ea2ca14bb08 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Tue, 21 Oct 2025 11:40:24 -0700 Subject: [PATCH 29/33] update test --- test/lit/passes/gufa-cast-all.wast | 2 -- 1 file changed, 2 deletions(-) diff --git a/test/lit/passes/gufa-cast-all.wast b/test/lit/passes/gufa-cast-all.wast index 38a1de8863b..812ba58f7f2 100644 --- a/test/lit/passes/gufa-cast-all.wast +++ b/test/lit/passes/gufa-cast-all.wast @@ -366,8 +366,6 @@ ;; CHECK: (type $2 (func (result contref))) - ;; CHECK: (type $3 (func (param (ref $cont)))) - ;; CHECK: (elem declare func $suspend) ;; CHECK: (func $suspend (type $none) From 7600f60f22b94207f297aa334ceec5eb9eca98ec Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Tue, 21 Oct 2025 11:45:05 -0700 Subject: [PATCH 30/33] feedback: use any --- src/tools/fuzzing/fuzzing.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/fuzzing/fuzzing.cpp b/src/tools/fuzzing/fuzzing.cpp index c6565b63d26..a80f2d3576c 100644 --- a/src/tools/fuzzing/fuzzing.cpp +++ b/src/tools/fuzzing/fuzzing.cpp @@ -5674,7 +5674,7 @@ Type TranslateToFuzzReader::getCastableReferenceType() { if (oneIn(4)) { type = getSubType(Type(HeapType::func, Nullable)); } else { - type = getEqReferenceType(); + type = getSubType(Type(HeapType::any, Nullable)); } assert(type.isCastable()); } From aefdf88b11ed5a842e52e78a687172d58803ae8c Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Tue, 21 Oct 2025 11:46:03 -0700 Subject: [PATCH 31/33] Update src/wasm/wasm-stack.cpp Co-authored-by: Thomas Lively --- src/wasm/wasm-stack.cpp | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/wasm/wasm-stack.cpp b/src/wasm/wasm-stack.cpp index 09936ef86c0..33c3d7e3973 100644 --- a/src/wasm/wasm-stack.cpp +++ b/src/wasm/wasm-stack.cpp @@ -110,11 +110,7 @@ void BinaryInstWriter::visitBreak(Break* curr) { // the br_if to consume. Later, we can reload the refined values after the // br_if, for its parent to consume. - if (type.isTuple()) { - typesOnStack = type.getTuple(); - } else { - typesOnStack.push_back(type); - } + typesOnStack = std::vector(type.begin(), type.end()); typesOnStack.push_back(Type::i32); stashStack(typesOnStack); From f6633aa80c63c2a4bd3a2ff6e9d9a56ffd1565cf Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Tue, 21 Oct 2025 12:43:32 -0700 Subject: [PATCH 32/33] Use more accurate sampling --- src/tools/fuzzing/fuzzing.cpp | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/src/tools/fuzzing/fuzzing.cpp b/src/tools/fuzzing/fuzzing.cpp index a80f2d3576c..c3e6ddb8ef8 100644 --- a/src/tools/fuzzing/fuzzing.cpp +++ b/src/tools/fuzzing/fuzzing.cpp @@ -5667,17 +5667,21 @@ Type TranslateToFuzzReader::getReferenceType() { } Type TranslateToFuzzReader::getCastableReferenceType() { - auto type = getReferenceType(); - if (!type.isCastable()) { - // Avoid continuations in a simple way (this is rare, so being precise is - // not crucial). - if (oneIn(4)) { - type = getSubType(Type(HeapType::func, Nullable)); - } else { - type = getSubType(Type(HeapType::any, Nullable)); + int tries = fuzzParams->TRIES; + while (tries-- > 0) { + auto type = getReferenceType(); + if (type.isCastable()) { + return type; } - assert(type.isCastable()); } + // We failed to find a type using fair sampling. Do something simple that must + // work. 
+ if (oneIn(4)) { + type = getSubType(Type(HeapType::func, Nullable)); + } else { + type = getSubType(Type(HeapType::any, Nullable)); + } + assert(type.isCastable()); return type; } From c441627aab7c124050e3e5ff9905eed493216891 Mon Sep 17 00:00:00 2001 From: Alon Zakai Date: Tue, 21 Oct 2025 14:06:46 -0700 Subject: [PATCH 33/33] simplify+fix --- src/tools/fuzzing/fuzzing.cpp | 26 +------------------------- 1 file changed, 1 insertion(+), 25 deletions(-) diff --git a/src/tools/fuzzing/fuzzing.cpp b/src/tools/fuzzing/fuzzing.cpp index c3e6ddb8ef8..93939b8c667 100644 --- a/src/tools/fuzzing/fuzzing.cpp +++ b/src/tools/fuzzing/fuzzing.cpp @@ -1826,12 +1826,6 @@ void TranslateToFuzzReader::recombine(Function* func) { // be placed as either of the children of the i32.add. This tramples the // existing content there. Returns true if we found a place. static bool replaceChildWith(Expression* expr, Expression* with) { - // Casts can get broken if we replace with something not castable. - if (!with->type.isCastable()) { - if (expr->is() || expr->is() || expr->is()) { - return false; - } - } for (auto*& child : ChildIterator(expr)) { // To replace, we must have an appropriate type, and we cannot replace a // Pop under any circumstances. @@ -2188,25 +2182,6 @@ void TranslateToFuzzReader::fixAfterChanges(Function* func) { i--; } } - - // Fix up casts: Our changes may have put an uncastable type in a cast. - void visitRefCast(RefCast* curr) { fixCast(curr, curr->ref); } - void visitRefTest(RefTest* curr) { fixCast(curr, curr->ref); } - void visitBrOn(BrOn* curr) { - if (curr->op == BrOnCast || curr->op == BrOnCastFail || - curr->op == BrOnCastDesc || curr->op == BrOnCastDescFail) { - fixCast(curr, curr->ref); - } else { - visitExpression(curr); - } - } - void fixCast(Expression* curr, Expression* ref) { - if (!ref->type.isCastable()) { - replaceCurrent(parent.makeTrivial(curr->type)); - } else { - visitExpression(curr); - } - } }; Fixer fixer(wasm, *this); fixer.walk(func->body); @@ -5676,6 +5651,7 @@ Type TranslateToFuzzReader::getCastableReferenceType() { } // We failed to find a type using fair sampling. Do something simple that must // work. + Type type; if (oneIn(4)) { type = getSubType(Type(HeapType::func, Nullable)); } else {
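
The final two patches land on a common sampling shape for getCastableReferenceType: try fair sampling a bounded number of times, then fall back to a construction that cannot fail. A minimal standalone sketch of that shape follows; it is not the fuzzer itself, and randomType, isCastable, and kTries are stand-ins for getReferenceType, Type::isCastable, and fuzzParams->TRIES.

// Standalone sketch, not the fuzzer: bounded rejection sampling with a
// fallback that cannot fail, the shape getCastableReferenceType ends up with.
#include <cstdlib>
#include <iostream>
#include <string>
#include <vector>

// Stand-ins for getReferenceType(), Type::isCastable(), fuzzParams->TRIES.
static const std::vector<std::string> kTypes = {"funcref", "anyref", "eqref",
                                                "(ref $cont)"};
static std::string randomType() { return kTypes[std::rand() % kTypes.size()]; }
static bool isCastable(const std::string& t) { return t != "(ref $cont)"; }
static const int kTries = 10; // arbitrary bound for the sketch

static std::string castableType() {
  // Fair sampling first, so the common case keeps randomType()'s distribution.
  for (int tries = kTries; tries > 0; tries--) {
    auto type = randomType();
    if (isCastable(type)) {
      return type;
    }
  }
  // We failed to find one fairly; fall back to something that must work.
  return std::rand() % 2 ? "funcref" : "anyref";
}

int main() {
  for (int i = 0; i < 5; i++) {
    std::cout << castableType() << '\n';
  }
}

Bounding the retries keeps the common path close to the fair distribution while still guaranteeing termination; the fallback only skews the result when castable types are rare in the pool, which matches the "this is rare, so being precise is not crucial" reasoning in the earlier version of the function.
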