ARM64: Recognize more STP patterns #102126

Closed · wants to merge 4 commits
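For context, a minimal sketch (not taken from the PR; the type and function names are illustrative) of the kind of source pattern whose stores this change tries to pair on arm64. The existing coalescing path only merges constant data ("For now, only constants are supported for data"); for non-constant data the goal here is to leave the two stores back-to-back so the backend can emit a single stp instead of two str instructions.

#include <cstdint>

struct Pair
{
    int64_t a;
    int64_t b;
};

// Both adjacent 8-byte fields receive the same non-constant value. If the two
// 64-bit stores end up next to each other in the generated code, an AArch64
// backend can emit them as one "stp x1, x1, [x0]" rather than two "str"
// instructions (illustrative codegen, not taken from the PR).
void StoreSame(Pair* p, int64_t v)
{
    p->a = v;
    p->b = v;
}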
7 changes: 0 additions & 7 deletions src/coreclr/jit/assertionprop.cpp
@@ -2377,13 +2377,6 @@ void Compiler::optAssertionGen(GenTree* tree)
             break;
 
         case GT_IND:
-            // Dynamic block copy sources could be zero-sized and so should not generate assertions.
-            if (tree->TypeIs(TYP_STRUCT))
-            {
-                break;
-            }
-            FALLTHROUGH;
-
         case GT_XAND:
         case GT_XORR:
         case GT_XADD:
36 changes: 28 additions & 8 deletions src/coreclr/jit/lower.cpp
@@ -8673,16 +8673,29 @@ void Lowering::LowerStoreIndirCoalescing(GenTreeIndir* ind)
     assert(prevData.IsStore());
     assert(currData.IsStore());
 
-    // For now, only constants are supported for data.
-    if (!prevData.value->OperIsConst() || !currData.value->OperIsConst())
+    // Otherwise, the difference between two offsets has to match the size of the type.
+    // We don't support overlapping stores.
+    if (abs(prevData.offset - currData.offset) != (int)genTypeSize(prevData.targetType))
     {
         return;
     }
 
-    // Otherwise, the difference between two offsets has to match the size of the type.
-    // We don't support overlapping stores.
-    if (abs(prevData.offset - currData.offset) != (int)genTypeSize(prevData.targetType))
+    // At this point we know that the 2nd (current) STOREIND has the same side-effects as the previous STOREIND
+    // We can move the address part before the previous STOREIND to make STP friendly
+    auto makeStpFriendly = [&]() {
+#ifdef TARGET_ARM64
+        if (!GenTree::Compare(prevData.value, currData.value) || currData.value->IsVectorZero())
+        {
+            LIR::Range Range = BlockRange().Remove(currData.rangeStart, ind->gtPrev);
+            BlockRange().InsertBefore(prevInd, std::move(Range));
+        }
+#endif
+    };
+
+    // For now, only constants are supported for data.
+    if (!prevData.value->OperIsConst() || !currData.value->OperIsConst())
     {
+        makeStpFriendly();
         return;
     }
 
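A rough illustration of what the makeStpFriendly rearrangement is after (my reading of the diff, not text from the PR): when the two stores cannot be merged into one wider store, the nodes computing the second store's address are spliced out and re-inserted before the first store, so the two STOREINDs end up adjacent and the arm64 backend can pair them.

#include <iostream>
#include <iterator>
#include <list>
#include <string>

// Toy model of the LIR reordering (hypothetical names, not the JIT's real data
// structures): move the current store's address computation in front of the
// previous store so the two stores become neighbours.
int main()
{
    std::list<std::string> lir = {"addr1", "STOREIND #1", "addr2", "STOREIND #2"};

    auto prevStore = std::next(lir.begin()); // "STOREIND #1"
    auto addr2     = std::next(prevStore);   // "addr2"
    lir.splice(prevStore, lir, addr2);       // move addr2 before STOREIND #1

    for (const auto& node : lir)
        std::cout << node << '\n';
    // Prints: addr1, addr2, STOREIND #1, STOREIND #2 -- the stores are now adjacent.
}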
@@ -8715,6 +8728,7 @@ void Lowering::LowerStoreIndirCoalescing(GenTreeIndir* ind)
     // Base address being TYP_REF gives us a hint that data is pointer-aligned.
     if (!currData.baseAddr->TypeIs(TYP_REF))
     {
+        makeStpFriendly();
         return;
     }

@@ -8807,9 +8821,15 @@ void Lowering::LowerStoreIndirCoalescing(GenTreeIndir* ind)
                 break;
             }
             return;
-#endif // TARGET_AMD64
-#endif // FEATURE_HW_INTRINSICS
-#endif // TARGET_64BIT
+#elif defined(TARGET_ARM64) // TARGET_AMD64
+        case TYP_SIMD16:
+            // There is no TYP_SIMD32 to coalesce these two stores.
+            // At least, we can make it STP-friendly:
+            makeStpFriendly();
+            return;
+#endif // TARGET_ARM64
+#endif // FEATURE_HW_INTRINSICS
+#endif // TARGET_64BIT
 
         // TYP_FLOAT and TYP_DOUBLE aren't needed here - they're expected to
         // be converted to TYP_INT/TYP_LONG for constant value.
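To illustrate the TYP_SIMD16 case above (a hedged example, not taken from the PR): two adjacent 16-byte stores have no 32-byte store type to coalesce into on arm64, but if they stay adjacent they can still come out as a single stp of two q registers.

#include <arm_neon.h>

// Two adjacent 128-bit stores. There is no single 256-bit store to merge them
// into, but an AArch64 backend that sees them back-to-back can emit
// "stp q0, q1, [x0]" instead of two separate "str q" instructions
// (illustrative codegen; names and signature are mine, not the PR's).
void StoreTwoVectors(uint8x16_t* dst, uint8x16_t v0, uint8x16_t v1)
{
    dst[0] = v0;
    dst[1] = v1;
}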