From 70dc7b74fb79cb7f4b9352c0f3c7195eecf47c09 Mon Sep 17 00:00:00 2001
From: Susikrishna
Date: Fri, 7 Nov 2025 18:55:07 +0530
Subject: [PATCH 1/2] [LLVM][InstCombine] Simplify zext(sub(0, trunc(x))) -> and(sub(0, x), (bitwidth-1))

---
 .../InstCombine/InstCombineCasts.cpp     | 16 ++++++
 .../InstCombine/rotate-trunc-zext.ll     | 55 +++++++++++++++++++
 2 files changed, 71 insertions(+)
 create mode 100644 llvm/test/Transforms/InstCombine/rotate-trunc-zext.ll

diff --git a/llvm/lib/Transforms/InstCombine/InstCombineCasts.cpp b/llvm/lib/Transforms/InstCombine/InstCombineCasts.cpp
index 614c6ebd63be6..01368273a47c0 100644
--- a/llvm/lib/Transforms/InstCombine/InstCombineCasts.cpp
+++ b/llvm/lib/Transforms/InstCombine/InstCombineCasts.cpp
@@ -1217,6 +1217,22 @@ static bool canEvaluateZExtd(Value *V, Type *Ty, unsigned &BitsToClear,
 }
 
 Instruction *InstCombinerImpl::visitZExt(ZExtInst &Zext) {
+  {
+    Value *TruncSrc = nullptr;
+    if (match(&Zext, m_ZExt(m_Sub(m_Zero(), m_Trunc(m_Value(TruncSrc)))))) {
+      IRBuilder<> Builder(&Zext);
+      Type *Ty = TruncSrc->getType();
+      unsigned BitWidth = Ty->getScalarSizeInBits();
+      unsigned MaskVal = BitWidth - 1;
+
+      Value *Zero = ConstantInt::get(Ty, 0);
+      Value *Neg = Builder.CreateSub(Zero, TruncSrc);
+      Value *Mask = ConstantInt::get(Ty, MaskVal);
+      Value *Masked = Builder.CreateAnd(Neg, Mask);
+      return replaceInstUsesWith(Zext, Masked);
+    }
+  }
+
   // If this zero extend is only used by a truncate, let the truncate be
   // eliminated before we try to optimize this zext.
   if (Zext.hasOneUse() && isa<TruncInst>(Zext.user_back()) &&
diff --git a/llvm/test/Transforms/InstCombine/rotate-trunc-zext.ll b/llvm/test/Transforms/InstCombine/rotate-trunc-zext.ll
new file mode 100644
index 0000000000000..31c7ba4a26796
--- /dev/null
+++ b/llvm/test/Transforms/InstCombine/rotate-trunc-zext.ll
@@ -0,0 +1,55 @@
+; RUN: opt -passes=instcombine -S %s | FileCheck %s
+
+; ================================================================
+; Test: Simplify zext(sub(0, trunc(x))) -> and(sub(0, x), (bitwidth-1))
+; Purpose: Check that InstCombine detects and simplifies the pattern
+;          seen in rotate idioms, enabling backend rotate lowering.
+; ================================================================
+
+; === Scalar Case (i64) =========================================
+define i64 @neg_trunc_zext(i64 %a) {
+; CHECK-LABEL: @neg_trunc_zext(
+; CHECK-NEXT: %[[NEG:[0-9]+]] = sub i64 0, %a
+; CHECK-NEXT: %[[MASKED:[0-9A-Za-z_]+]] = and i64 %[[NEG]], 63
+; CHECK-NEXT: ret i64 %[[MASKED]]
+  %t = trunc i64 %a to i6
+  %n = sub i6 0, %t
+  %z = zext i6 %n to i64
+  ret i64 %z
+}
+
+; === Vector Case 1: <2 x i64> ==================================
+define <2 x i64> @foo(<2 x i64> %x, <2 x i64> %n) {
+; CHECK-LABEL: @foo(
+; CHECK: %[[NEG:[0-9A-Za-z_]+]] = sub <2 x i64> zeroinitializer, %n
+; CHECK: %[[MASK:[0-9A-Za-z_]+]] = and <2 x i64> %[[NEG]], splat (i64 63)
+; CHECK: ret <2 x i64> %[[MASK]]
+  %t = trunc <2 x i64> %n to <2 x i6>
+  %neg = sub <2 x i6> zeroinitializer, %t
+  %z = zext <2 x i6> %neg to <2 x i64>
+  ret <2 x i64> %z
+}
+
+; === Vector Case 2: <4 x i64> ==================================
+define <4 x i64> @bar(<4 x i64> %x, <4 x i64> %n) {
+; CHECK-LABEL: @bar(
+; CHECK: %[[NEG:[0-9A-Za-z_]+]] = sub <4 x i64> zeroinitializer, %n
+; CHECK: %[[MASK:[0-9A-Za-z_]+]] = and <4 x i64> %[[NEG]], splat (i64 63)
+; CHECK: ret <4 x i64> %[[MASK]]
+  %t = trunc <4 x i64> %n to <4 x i6>
+  %neg = sub <4 x i6> zeroinitializer, %t
+  %z = zext <4 x i6> %neg to <4 x i64>
+  ret <4 x i64> %z
+}
+
+; === Vector Case 3: <8 x i64> ==================================
+define <8 x i64> @baz(<8 x i64> %x, <8 x i64> %n) {
+; CHECK-LABEL: @baz(
+; CHECK: %[[NEG:[0-9A-Za-z_]+]] = sub <8 x i64> zeroinitializer, %n
+; CHECK: %[[MASK:[0-9A-Za-z_]+]] = and <8 x i64> %[[NEG]], splat (i64 63)
+; CHECK: ret <8 x i64> %[[MASK]]
+  %t = trunc <8 x i64> %n to <8 x i6>
+  %neg = sub <8 x i6> zeroinitializer, %t
+  %z = zext <8 x i6> %neg to <8 x i64>
+  ret <8 x i64> %z
+}

From 27cf5b5427eb8be9563de04b3387d836e89c9b31 Mon Sep 17 00:00:00 2001
From: Susikrishna
Date: Sat, 8 Nov 2025 12:11:08 +0530
Subject: [PATCH 2/2] [NFC][InstCombine] Move zext(sub 0, trunc X) combine to end of function

---
 .../InstCombine/InstCombineCasts.cpp | 32 +++++++++----------
 1 file changed, 16 insertions(+), 16 deletions(-)

diff --git a/llvm/lib/Transforms/InstCombine/InstCombineCasts.cpp b/llvm/lib/Transforms/InstCombine/InstCombineCasts.cpp
index 01368273a47c0..ebe1b747e6be4 100644
--- a/llvm/lib/Transforms/InstCombine/InstCombineCasts.cpp
+++ b/llvm/lib/Transforms/InstCombine/InstCombineCasts.cpp
@@ -1217,22 +1217,6 @@ static bool canEvaluateZExtd(Value *V, Type *Ty, unsigned &BitsToClear,
 }
 
 Instruction *InstCombinerImpl::visitZExt(ZExtInst &Zext) {
-  {
-    Value *TruncSrc = nullptr;
-    if (match(&Zext, m_ZExt(m_Sub(m_Zero(), m_Trunc(m_Value(TruncSrc)))))) {
-      IRBuilder<> Builder(&Zext);
-      Type *Ty = TruncSrc->getType();
-      unsigned BitWidth = Ty->getScalarSizeInBits();
-      unsigned MaskVal = BitWidth - 1;
-
-      Value *Zero = ConstantInt::get(Ty, 0);
-      Value *Neg = Builder.CreateSub(Zero, TruncSrc);
-      Value *Mask = ConstantInt::get(Ty, MaskVal);
-      Value *Masked = Builder.CreateAnd(Neg, Mask);
-      return replaceInstUsesWith(Zext, Masked);
-    }
-  }
-
   // If this zero extend is only used by a truncate, let the truncate be
   // eliminated before we try to optimize this zext.
   if (Zext.hasOneUse() && isa<TruncInst>(Zext.user_back()) &&
@@ -1382,6 +1366,22 @@ Instruction *InstCombinerImpl::visitZExt(ZExtInst &Zext) {
     }
   }
 
+  {
+    Value *TruncSrc = nullptr;
+    if (match(&Zext, m_ZExt(m_Sub(m_Zero(), m_Trunc(m_Value(TruncSrc)))))) {
+      IRBuilder<> Builder(&Zext);
+      Type *Ty = TruncSrc->getType();
+      unsigned BitWidth = Ty->getScalarSizeInBits();
+      unsigned MaskVal = BitWidth - 1;
+
+      Value *Zero = ConstantInt::get(Ty, 0);
+      Value *Neg = Builder.CreateSub(Zero, TruncSrc);
+      Value *Mask = ConstantInt::get(Ty, MaskVal);
+      Value *Masked = Builder.CreateAnd(Neg, Mask);
+      return replaceInstUsesWith(Zext, Masked);
+    }
+  }
+
   return nullptr;
 }
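
For context, a minimal LLVM IR sketch of the kind of rotate idiom the new test's header comment refers to; the function name and exact IR shape here are illustrative assumptions, not taken from the patches. The right-shift amount reaches the IR as zext(sub(0, trunc(%n))), which the fold added above rewrites to and(sub(0, %n), 63), the form the test comment says enables backend rotate lowering.

; Illustrative only: variable rotate-left of an i64, written with an i6
; shift-amount type (2^6 == 64, so an i6 amount is already reduced mod 64).
define i64 @rotl_idiom(i64 %x, i64 %n) {
  %amt = trunc i64 %n to i6        ; n mod 64
  %amt.ext = zext i6 %amt to i64
  %shl = shl i64 %x, %amt.ext      ; x << (n & 63)
  %neg = sub i6 0, %amt            ; (-n) mod 64
  %neg.ext = zext i6 %neg to i64   ; the zext(sub(0, trunc(n))) pattern
  %shr = lshr i64 %x, %neg.ext     ; x >> ((-n) & 63)
  %rot = or i64 %shl, %shr
  ret i64 %rot
}

Running opt -passes=instcombine over this sketch should rewrite %neg.ext into the and(sub(0, %n), 63) form checked by rotate-trunc-zext.ll.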