diff --git a/src/coreclr/jit/lower.cpp b/src/coreclr/jit/lower.cpp
index b2d92cf6be5a8a..717d25cb5b31bb 100644
--- a/src/coreclr/jit/lower.cpp
+++ b/src/coreclr/jit/lower.cpp
@@ -6792,6 +6792,51 @@ void Lowering::LowerStoreIndirCommon(GenTreeStoreInd* ind)
     TryCreateAddrMode(ind->Addr(), true, ind->TypeGet());
     if (!comp->codeGen->gcInfo.gcIsWriteBarrierStoreIndNode(ind))
     {
+        if (varTypeIsFloating(ind) && ind->Data()->IsCnsFltOrDbl())
+        {
+            // Optimize *x = DCON to *x = ICON which can be slightly faster and/or smaller.
+            GenTree*  data   = ind->Data();
+            double    dblCns = data->AsDblCon()->gtDconVal;
+            ssize_t   intCns = 0;
+            var_types type   = TYP_UNKNOWN;
+            // XARCH: we can always contain the immediates.
+            // ARM64: zero can always be contained, other cases will use immediates from the data
+            //        section and it is not a clear win to switch them to inline integers.
+            // ARM:   FP constants are assembled from integral ones, so it is always profitable
+            //        to directly use the integers as it avoids the int -> float conversion.
+            CLANG_FORMAT_COMMENT_ANCHOR;
+
+#if defined(TARGET_XARCH) || defined(TARGET_ARM)
+            bool shouldSwitchToInteger = true;
+#else // TARGET_ARM64
+            bool shouldSwitchToInteger = !data->IsCnsNonZeroFltOrDbl();
+#endif
+
+            if (shouldSwitchToInteger)
+            {
+                if (ind->TypeIs(TYP_FLOAT))
+                {
+                    float fltCns = static_cast<float>(dblCns); // should be a safe round-trip
+                    intCns       = static_cast<ssize_t>(*reinterpret_cast<INT32*>(&fltCns));
+                    type         = TYP_INT;
+                }
+#ifdef TARGET_64BIT
+                else
+                {
+                    assert(ind->TypeIs(TYP_DOUBLE));
+                    intCns = static_cast<ssize_t>(*reinterpret_cast<INT64*>(&dblCns));
+                    type   = TYP_LONG;
+                }
+#endif
+            }
+
+            if (type != TYP_UNKNOWN)
+            {
+                data->BashToConst(intCns, type);
+                ind->ChangeType(type);
+            }
+        }
+
         LowerStoreIndir(ind);
     }
 }
@@ -6861,7 +6906,7 @@ void Lowering::TransformUnusedIndirection(GenTreeIndir* ind, Compiler* comp, Bas
 #ifdef TARGET_ARM64
     bool useNullCheck = true;
 #elif TARGET_ARM
-    bool useNullCheck = false;
+    bool useNullCheck = false;
 #else // TARGET_XARCH
     bool useNullCheck = !ind->Addr()->isContained();
 #endif // !TARGET_XARCH
diff --git a/src/coreclr/jit/lowerxarch.cpp b/src/coreclr/jit/lowerxarch.cpp
index 8f15e9a04fb8eb..9815b245b5cead 100644
--- a/src/coreclr/jit/lowerxarch.cpp
+++ b/src/coreclr/jit/lowerxarch.cpp
@@ -130,36 +130,6 @@ void Lowering::LowerStoreIndir(GenTreeStoreInd* node)
             return;
         }
     }
-    else if (node->Data()->IsCnsFltOrDbl())
-    {
-        // Optimize *x = DCON to *x = ICON which is slightly faster on xarch
-        GenTree*  data   = node->Data();
-        double    dblCns = data->AsDblCon()->gtDconVal;
-        ssize_t   intCns = 0;
-        var_types type   = TYP_UNKNOWN;
-
-        if (node->TypeIs(TYP_FLOAT))
-        {
-            float fltCns = static_cast<float>(dblCns); // should be a safe round-trip
-            intCns       = static_cast<ssize_t>(*reinterpret_cast<INT32*>(&fltCns));
-            type         = TYP_INT;
-        }
-#ifdef TARGET_AMD64
-        else
-        {
-            assert(node->TypeIs(TYP_DOUBLE));
-            intCns = static_cast<ssize_t>(*reinterpret_cast<INT64*>(&dblCns));
-            type   = TYP_LONG;
-        }
-#endif
-
-        if (type != TYP_UNKNOWN)
-        {
-            data->SetContained();
-            data->BashToConst(intCns, type);
-            node->ChangeType(type);
-        }
-    }

     // Optimization: do not unnecessarily zero-extend the result of setcc.
     if (varTypeIsByte(node) && (node->Data()->OperIsCompare() || node->Data()->OperIs(GT_SETCC)))
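
For context, here is a minimal standalone sketch (not JIT code; the helper names `FloatBitsToInt32` and `DoubleBitsToInt64` are invented for illustration) of the DCON -> ICON bit-pattern round-trip the new lowering performs. It uses `memcpy` so the punning is well-defined in portable C++, whereas the JIT code above does the same thing with `reinterpret_cast` on its own integer types:

```cpp
#include <cstdint>
#include <cstring>
#include <cstdio>

// Reinterpret a float's IEEE-754 bits as a 32-bit integer
// (memcpy avoids strict-aliasing UB in portable code).
static int32_t FloatBitsToInt32(float f)
{
    int32_t bits;
    std::memcpy(&bits, &f, sizeof(bits));
    return bits;
}

// Reinterpret a double's IEEE-754 bits as a 64-bit integer.
static int64_t DoubleBitsToInt64(double d)
{
    int64_t bits;
    std::memcpy(&bits, &d, sizeof(bits));
    return bits;
}

int main()
{
    // A 4-byte store "*x = 1.5f" can instead store the integer 0x3FC00000.
    printf("1.5f -> 0x%08X\n", (unsigned)FloatBitsToInt32(1.5f));
    // On 64-bit targets, "*x = 1.5" can store 0x3FF8000000000000 as a TYP_LONG.
    printf("1.5  -> 0x%016llX\n", (unsigned long long)DoubleBitsToInt64(1.5));
    return 0;
}
```

The TYP_FLOAT path mirrors the first helper: the constant is narrowed from double to float first (a safe round-trip, since the tree's type is TYP_FLOAT), then its 32 bits become a TYP_INT constant; the TYP_DOUBLE path needs the full 64 bits and is therefore only taken under TARGET_64BIT, where a TYP_LONG constant is available.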