Skip to content

Commit de622a4

Browse files
committed
Revert "Split the approach that transforms the reverse to vp.reverse for loaded result and stored value"
This reverts commit 2ba8601.
1 parent ba7d6fd commit de622a4

File tree

2 files changed

+53
-56
lines changed

2 files changed

+53
-56
lines changed

llvm/lib/Transforms/Vectorize/VPlan.h

Lines changed: 0 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -3479,15 +3479,6 @@ struct VPWidenStoreEVLRecipe final : public VPWidenMemoryRecipe {
34793479
setMask(Mask);
34803480
}
34813481

3482-
VPWidenStoreEVLRecipe(VPWidenStoreRecipe &S, VPValue *Addr,
3483-
VPValue *StoredVal, VPValue &EVL, VPValue *Mask)
3484-
: VPWidenMemoryRecipe(VPDef::VPWidenStoreEVLSC, S.getIngredient(),
3485-
{Addr, StoredVal, &EVL}, S.isConsecutive(),
3486-
S.isReverse(), S, S.getDebugLoc()) {
3487-
assert(isReverse() && "Only reverse access need to set new stored value");
3488-
setMask(Mask);
3489-
}
3490-
34913482
VP_CLASSOF_IMPL(VPDef::VPWidenStoreEVLSC)
34923483

34933484
/// Return the address accessed by this recipe.

llvm/lib/Transforms/Vectorize/VPlanTransforms.cpp

Lines changed: 53 additions & 47 deletions
Original file line numberDiff line numberDiff line change
@@ -2664,6 +2664,22 @@ static inline RemoveMask_match<Op0_t, Op1_t> m_RemoveMask(const Op0_t &In,
26642664
return RemoveMask_match<Op0_t, Op1_t>(In, Out);
26652665
}
26662666

2667+
/// If \p R is a VPInstruction::Reverse, return a VPWidenIntrinsicRecipe
2668+
/// for the vp.reverse intrinsic using \p EVL. Returns nullptr otherwise.
2669+
static VPWidenIntrinsicRecipe *
2670+
getEVLReverse(VPRecipeBase &R, VPTypeAnalysis &TypeInfo, VPValue &EVL) {
2671+
VPValue *ReversedVal;
2672+
if (!match(&R,
2673+
m_VPInstruction<VPInstruction::Reverse>(m_VPValue(ReversedVal))))
2674+
return nullptr;
2675+
2676+
auto *Reverse = cast<VPInstruction>(&R);
2677+
VPlan *Plan = Reverse->getParent()->getPlan();
2678+
return new VPWidenIntrinsicRecipe(
2679+
Intrinsic::experimental_vp_reverse, {ReversedVal, Plan->getTrue(), &EVL},
2680+
TypeInfo.inferScalarType(Reverse), {}, {}, Reverse->getDebugLoc());
2681+
}
2682+
26672683
/// Try to optimize a \p CurRecipe masked by \p HeaderMask to a corresponding
26682684
/// EVL-based recipe without the header mask. Returns nullptr if no EVL-based
26692685
/// recipe could be created.
@@ -2705,31 +2721,12 @@ static VPRecipeBase *optimizeMaskToEVL(VPValue *HeaderMask,
27052721
return new VPWidenStoreEVLRecipe(cast<VPWidenStoreRecipe>(CurRecipe), Addr,
27062722
EVL, Mask);
27072723

2708-
VPValue *StoredVal;
2709-
if (match(&CurRecipe, m_MaskedStore(m_VPValue(EndPtr), m_VPValue(StoredVal),
2724+
if (match(&CurRecipe, m_MaskedStore(m_VPValue(EndPtr), m_VPValue(),
27102725
m_RemoveMask(HeaderMask, Mask))) &&
27112726
match(EndPtr, m_VecEndPtr(m_VPValue(Addr), m_Specific(&Plan->getVF()))) &&
2712-
cast<VPWidenStoreRecipe>(CurRecipe).isReverse()) {
2713-
auto *StoreR = cast<VPWidenStoreRecipe>(&CurRecipe);
2714-
// Convert general reverse operations on stored value into vp.reverse.
2715-
// Skip if the stored value is not defined in the loop region.
2716-
if (!StoredVal->isDefinedOutsideLoopRegions()) {
2717-
VPValue *ReversedVal;
2718-
bool IsReverse = match(StoredVal, m_VPInstruction<VPInstruction::Reverse>(
2719-
m_VPValue(ReversedVal)));
2720-
assert(IsReverse && "The stored value of reverse store must be defined "
2721-
"by a reverse operation");
2722-
auto *Reverse = cast<VPInstruction>(StoredVal);
2723-
auto *NewReverse = new VPWidenIntrinsicRecipe(
2724-
Intrinsic::experimental_vp_reverse,
2725-
{ReversedVal, Plan->getTrue(), &EVL},
2726-
TypeInfo.inferScalarType(Reverse), {}, {}, Reverse->getDebugLoc());
2727-
NewReverse->insertBefore(Reverse);
2728-
return new VPWidenStoreEVLRecipe(*StoreR, AdjustEndPtr(EndPtr),
2729-
NewReverse, EVL, Mask);
2730-
}
2731-
return new VPWidenStoreEVLRecipe(*StoreR, AdjustEndPtr(EndPtr), EVL, Mask);
2732-
}
2727+
cast<VPWidenStoreRecipe>(CurRecipe).isReverse())
2728+
return new VPWidenStoreEVLRecipe(cast<VPWidenStoreRecipe>(CurRecipe),
2729+
AdjustEndPtr(EndPtr), EVL, Mask);
27332730

27342731
if (auto *Rdx = dyn_cast<VPReductionRecipe>(&CurRecipe))
27352732
if (Rdx->isConditional() &&
@@ -2872,32 +2869,41 @@ static void transformRecipestoEVLRecipes(VPlan &Plan, VPValue &EVL) {
28722869
}
28732870
ToErase.push_back(CurRecipe);
28742871

2875-
// Convert general reverse operations on loaded results into vp.reverse,
2876-
// when the VPVectorEndPointerRecipe adjusting the access address uses EVL
2877-
// instead of VF.
2878-
if (auto *LoadR = dyn_cast<VPWidenLoadEVLRecipe>(EVLRecipe)) {
2879-
if (!match(LoadR->getAddr(), m_VecEndPtr(m_VPValue(), m_Specific(&EVL))))
2872+
// Convert general reverse operations on loaded values and stored values
2873+
// into vp.reverse, when the VPVectorEndPointerRecipe adjusting the access
2874+
// address uses EVL instead of VF.
2875+
// TODO: Extend conversion along the def-use/use-def chain, as reverse
2876+
// operations may be eliminated or moved in the future.
2877+
if (auto *MemR = dyn_cast<VPWidenMemoryRecipe>(EVLRecipe)) {
2878+
if (!match(MemR->getAddr(), m_VecEndPtr(m_VPValue(), m_Specific(&EVL))))
28802879
continue;
2881-
assert(LoadR->isReverse() &&
2880+
assert(MemR->isReverse() &&
28822881
"Only reverse access uses VPVectorEndPointerRecipe as address");
2883-
// TODO: Extend conversion along the use-def chain, as reverse operations
2884-
// may be eliminated or sunk in the future.
2885-
assert(LoadR->getNumUsers() == 1 &&
2886-
"Unexpected user number of reverse load");
2887-
auto *UserR = cast<VPRecipeBase>(*LoadR->user_begin());
2888-
VPValue *ReversedVal;
2889-
bool IsReverse = match(UserR, m_VPInstruction<VPInstruction::Reverse>(
2890-
m_VPValue(ReversedVal)));
2891-
assert(IsReverse && "The defined value of reverse load must be used by a "
2892-
"reverse operation");
2893-
auto *Reverse = cast<VPInstruction>(UserR);
2894-
auto *NewReverse = new VPWidenIntrinsicRecipe(
2895-
Intrinsic::experimental_vp_reverse,
2896-
{ReversedVal, Plan.getTrue(), &EVL},
2897-
TypeInfo.inferScalarType(Reverse), {}, {}, Reverse->getDebugLoc());
2898-
NewReverse->insertBefore(Reverse);
2899-
Reverse->replaceAllUsesWith(NewReverse);
2900-
ToErase.push_back(Reverse);
2882+
2883+
VPRecipeBase *Candidate = nullptr;
2884+
if (auto *LoadR = dyn_cast<VPWidenLoadEVLRecipe>(MemR)) {
2885+
assert(LoadR->getNumUsers() == 1 &&
2886+
"Unexpected user number of reverse load");
2887+
Candidate = cast<VPRecipeBase>(*LoadR->user_begin());
2888+
} else if (auto *StoreR = dyn_cast<VPWidenStoreEVLRecipe>(MemR)) {
2889+
VPValue *StoredVal = StoreR->getStoredValue();
2890+
// Skip if the stored value is not defined in the loop region.
2891+
if (StoredVal->isDefinedOutsideLoopRegions())
2892+
continue;
2893+
Candidate = StoredVal->getDefiningRecipe();
2894+
}
2895+
assert(Candidate && "Must have one reverse operation for reverse access");
2896+
2897+
if (match(Candidate, m_Intrinsic<Intrinsic::experimental_vp_reverse>()))
2898+
continue;
2899+
2900+
VPWidenIntrinsicRecipe *NewReverse =
2901+
getEVLReverse(*Candidate, TypeInfo, EVL);
2902+
assert(NewReverse &&
2903+
"Unable to get an EVL reverse when tail folding by EVL");
2904+
NewReverse->insertBefore(Candidate);
2905+
cast<VPInstruction>(Candidate)->replaceAllUsesWith(NewReverse);
2906+
ToErase.push_back(Candidate);
29012907
}
29022908
}
29032909
// Remove dead EVL mask.

0 commit comments

Comments
 (0)