diff --git a/src/coreclr/jit/gentree.cpp b/src/coreclr/jit/gentree.cpp
index 911fa5073c36de..c9bb395088b7c0 100644
--- a/src/coreclr/jit/gentree.cpp
+++ b/src/coreclr/jit/gentree.cpp
@@ -20830,13 +20830,6 @@ GenTree* Compiler::gtNewSimdBinOpNode(
         {
             GenTree** broadcastOp = nullptr;
 
-#if defined(TARGET_ARM64)
-            if (varTypeIsLong(simdBaseType))
-            {
-                break;
-            }
-#endif // TARGET_ARM64
-
             if (varTypeIsArithmetic(op1))
             {
                 broadcastOp = &op1;
@@ -20844,7 +20837,7 @@ GenTree* Compiler::gtNewSimdBinOpNode(
 #if defined(TARGET_ARM64)
                 if (!varTypeIsByte(simdBaseType))
                 {
-                    // MultiplyByScalar requires the scalar op to be op2fGetHWIntrinsicIdForBinOp
+                    // MultiplyByScalar requires the scalar op to be op2 for GetHWIntrinsicIdForBinOp
                     needsReverseOps = true;
                 }
 #endif // TARGET_ARM64
@@ -20857,7 +20850,12 @@ GenTree* Compiler::gtNewSimdBinOpNode(
             if (broadcastOp != nullptr)
             {
 #if defined(TARGET_ARM64)
-                if (!varTypeIsByte(simdBaseType))
+                if (varTypeIsLong(simdBaseType))
+                {
+                    // This is handled via emulation and the scalar is consumed directly
+                    break;
+                }
+                else if (!varTypeIsByte(simdBaseType))
                 {
                     op2ForLookup = *broadcastOp;
                     *broadcastOp = gtNewSimdCreateScalarUnsafeNode(TYP_SIMD8, *broadcastOp, simdBaseJitType, 8);
@@ -21261,16 +21259,13 @@ GenTree* Compiler::gtNewSimdBinOpNode(
 #elif defined(TARGET_ARM64)
             if (varTypeIsLong(simdBaseType))
             {
-                GenTree** op1ToDup = &op1;
-                GenTree** op2ToDup = &op2;
+                GenTree** op2ToDup = nullptr;
 
-                if (!varTypeIsArithmetic(op1))
-                {
-                    op1 = gtNewSimdToScalarNode(TYP_LONG, op1, simdBaseJitType, simdSize);
-                    op1ToDup = &op1->AsHWIntrinsic()->Op(1);
-                }
+                assert(varTypeIsSIMD(op1));
+                op1 = gtNewSimdToScalarNode(TYP_LONG, op1, simdBaseJitType, simdSize);
+                GenTree** op1ToDup = &op1->AsHWIntrinsic()->Op(1);
 
-                if (!varTypeIsArithmetic(op2))
+                if (varTypeIsSIMD(op2))
                 {
                     op2 = gtNewSimdToScalarNode(TYP_LONG, op2, simdBaseJitType, simdSize);
                     op2ToDup = &op2->AsHWIntrinsic()->Op(1);
@@ -21278,7 +21273,12 @@ GenTree* Compiler::gtNewSimdBinOpNode(
 
                 // lower = op1.GetElement(0) * op2.GetElement(0)
                 GenTree* lower = gtNewOperNode(GT_MUL, TYP_LONG, op1, op2);
-                lower = gtNewSimdCreateScalarUnsafeNode(type, lower, simdBaseJitType, simdSize);
+
+                if (op2ToDup == nullptr)
+                {
+                    op2ToDup = &lower->AsOp()->gtOp2;
+                }
+                lower = gtNewSimdCreateScalarUnsafeNode(type, lower, simdBaseJitType, simdSize);
 
                 if (simdSize == 8)
                 {
@@ -21290,10 +21290,8 @@ GenTree* Compiler::gtNewSimdBinOpNode(
                 GenTree* op1Dup = fgMakeMultiUse(op1ToDup);
                 GenTree* op2Dup = fgMakeMultiUse(op2ToDup);
 
-                if (!varTypeIsArithmetic(op1Dup))
-                {
-                    op1Dup = gtNewSimdGetElementNode(TYP_LONG, op1Dup, gtNewIconNode(1), simdBaseJitType, simdSize);
-                }
+                assert(!varTypeIsArithmetic(op1Dup));
+                op1Dup = gtNewSimdGetElementNode(TYP_LONG, op1Dup, gtNewIconNode(1), simdBaseJitType, simdSize);
 
                 if (!varTypeIsArithmetic(op2Dup))
                 {
diff --git a/src/tests/JIT/Regression/JitBlue/Runtime_106838/Runtime_106838.cs b/src/tests/JIT/Regression/JitBlue/Runtime_106838/Runtime_106838.cs
new file mode 100644
index 00000000000000..9c3f024398d8e3
--- /dev/null
+++ b/src/tests/JIT/Regression/JitBlue/Runtime_106838/Runtime_106838.cs
@@ -0,0 +1,20 @@
+// Licensed to the .NET Foundation under one or more agreements.
+// The .NET Foundation licenses this file to you under the MIT license.
+
+using System;
+using System.Runtime.CompilerServices;
+using System.Runtime.Intrinsics;
+using Xunit;
+
+public class Runtime_106838
+{
+    [MethodImpl(MethodImplOptions.NoInlining)]
+    private static Vector128<ulong> Problem(Vector128<ulong> vector) => vector * 5UL;
+
+    [Fact]
+    public static void TestEntryPoint()
+    {
+        Vector128<ulong> result = Problem(Vector128.Create<ulong>(5));
+        Assert.Equal(Vector128.Create<ulong>(25), result);
+    }
+}
diff --git a/src/tests/JIT/Regression/JitBlue/Runtime_106838/Runtime_106838.csproj b/src/tests/JIT/Regression/JitBlue/Runtime_106838/Runtime_106838.csproj
new file mode 100644
index 00000000000000..de6d5e08882e86
--- /dev/null
+++ b/src/tests/JIT/Regression/JitBlue/Runtime_106838/Runtime_106838.csproj
@@ -0,0 +1,8 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <Optimize>True</Optimize>
+  </PropertyGroup>
+  <ItemGroup>
+    <Compile Include="$(MSBuildProjectName).cs" />
+  </ItemGroup>
+</Project>