From 33016f15ed650f469339239d0eb5242bd9390d6d Mon Sep 17 00:00:00 2001 From: SingleAccretion Date: Tue, 3 May 2022 00:19:12 +0300 Subject: [PATCH 1/6] Delete field sequences from GenTreeLclFld --- src/coreclr/jit/compiler.h | 6 - src/coreclr/jit/compiler.hpp | 2 +- src/coreclr/jit/decomposelongs.cpp | 2 - src/coreclr/jit/flowgraph.cpp | 7 - src/coreclr/jit/gcinfo.cpp | 19 --- src/coreclr/jit/gentree.cpp | 144 +---------------- src/coreclr/jit/gentree.h | 27 +--- src/coreclr/jit/importer.cpp | 13 +- src/coreclr/jit/lclmorph.cpp | 163 ++++---------------- src/coreclr/jit/lclvars.cpp | 4 - src/coreclr/jit/morph.cpp | 77 +--------- src/coreclr/jit/morphblock.cpp | 112 ++++---------- src/coreclr/jit/rationalize.cpp | 3 +- src/coreclr/jit/valuenum.cpp | 239 +++-------------------------- src/coreclr/jit/valuenumfuncs.h | 2 +- 15 files changed, 110 insertions(+), 710 deletions(-) diff --git a/src/coreclr/jit/compiler.h b/src/coreclr/jit/compiler.h index e7b26005af05e..aaa21130e3a1d 100644 --- a/src/coreclr/jit/compiler.h +++ b/src/coreclr/jit/compiler.h @@ -470,8 +470,6 @@ class LclVarDsc unsigned char lvInSsa : 1; // The variable is in SSA form (set by SsaBuilder) unsigned char lvIsCSE : 1; // Indicates if this LclVar is a CSE variable. unsigned char lvHasLdAddrOp : 1; // has ldloca or ldarga opcode on this local. - unsigned char lvStackByref : 1; // This is a compiler temporary of TYP_BYREF that is known to point into our local - // stack frame. unsigned char lvHasILStoreOp : 1; // there is at least one STLOC or STARG on this local unsigned char lvHasMultipleILStoreOp : 1; // there is more than one STLOC on this local @@ -4781,8 +4779,6 @@ class Compiler // Does value-numbering for a block assignment. void fgValueNumberBlockAssignment(GenTree* tree); - bool fgValueNumberBlockAssignmentTypeCheck(LclVarDsc* dstVarDsc, FieldSeqNode* dstFldSeq, GenTree* src); - // Does value-numbering for a cast tree. 
void fgValueNumberCastTree(GenTree* tree); @@ -5701,8 +5697,6 @@ class Compiler GenTree* fgMorphMultiOp(GenTreeMultiOp* multiOp); GenTree* fgMorphConst(GenTree* tree); - bool fgMorphCanUseLclFldForCopy(unsigned lclNum1, unsigned lclNum2); - GenTreeLclVar* fgMorphTryFoldObjAsLclVar(GenTreeObj* obj, bool destroyNodes = true); GenTreeOp* fgMorphCommutative(GenTreeOp* tree); GenTree* fgMorphCastedBitwiseOp(GenTreeOp* tree); diff --git a/src/coreclr/jit/compiler.hpp b/src/coreclr/jit/compiler.hpp index db0d382131ec7..edcc065630679 100644 --- a/src/coreclr/jit/compiler.hpp +++ b/src/coreclr/jit/compiler.hpp @@ -1348,7 +1348,7 @@ inline void GenTree::SetOper(genTreeOps oper, ValueNumberUpdate vnUpdate) #endif case GT_LCL_FLD: AsLclFld()->SetLclOffs(0); - AsLclFld()->SetFieldSeq(FieldSeqStore::NotAField()); + AsLclFld()->SetLayout(nullptr); break; case GT_CALL: diff --git a/src/coreclr/jit/decomposelongs.cpp b/src/coreclr/jit/decomposelongs.cpp index a4267ae1d685a..781afe9dfbf32 100644 --- a/src/coreclr/jit/decomposelongs.cpp +++ b/src/coreclr/jit/decomposelongs.cpp @@ -366,11 +366,9 @@ GenTree* DecomposeLongs::DecomposeLclVar(LIR::Use& use) m_compiler->lvaSetVarDoNotEnregister(varNum DEBUGARG(DoNotEnregisterReason::LocalField)); loResult->SetOper(GT_LCL_FLD); loResult->AsLclFld()->SetLclOffs(0); - loResult->AsLclFld()->SetFieldSeq(FieldSeqStore::NotAField()); hiResult->SetOper(GT_LCL_FLD); hiResult->AsLclFld()->SetLclOffs(4); - hiResult->AsLclFld()->SetFieldSeq(FieldSeqStore::NotAField()); } return FinalizeDecomposition(use, loResult, hiResult, hiResult); diff --git a/src/coreclr/jit/flowgraph.cpp b/src/coreclr/jit/flowgraph.cpp index 7782c35597ed7..e948abbd0dd8a 100644 --- a/src/coreclr/jit/flowgraph.cpp +++ b/src/coreclr/jit/flowgraph.cpp @@ -961,13 +961,6 @@ bool Compiler::fgAddrCouldBeNull(GenTree* addr) { return false; } - - LclVarDsc* varDsc = lvaGetDesc(varNum); - - if (varDsc->lvStackByref) - { - return false; - } } else if (addr->gtOper == GT_ADDR) { diff --git a/src/coreclr/jit/gcinfo.cpp b/src/coreclr/jit/gcinfo.cpp index 0e8ec3e22cdef..1d0229fbf26f5 100644 --- a/src/coreclr/jit/gcinfo.cpp +++ b/src/coreclr/jit/gcinfo.cpp @@ -346,25 +346,6 @@ GCInfo::WriteBarrierForm GCInfo::gcWriteBarrierFormFromTargetAddress(GenTree* tg return GCInfo::WBF_NoBarrier; } - if (tgtAddr->OperGet() == GT_LCL_VAR) - { - unsigned lclNum = tgtAddr->AsLclVar()->GetLclNum(); - LclVarDsc* varDsc = compiler->lvaGetDesc(lclNum); - - // Instead of marking LclVar with 'lvStackByref', - // Consider decomposing the Value Number given to this LclVar to see if it was - // created using a GT_ADDR(GT_LCLVAR) or a GT_ADD( GT_ADDR(GT_LCLVAR), Constant) - - // We may have an internal compiler temp created in fgMorphCopyBlock() that we know - // points at one of our stack local variables, it will have lvStackByref set to true. 
- // - if (varDsc->lvStackByref) - { - assert(varDsc->TypeGet() == TYP_BYREF); - return GCInfo::WBF_NoBarrier; - } - } - if (tgtAddr->TypeGet() == TYP_REF) { return GCInfo::WBF_BarrierUnchecked; diff --git a/src/coreclr/jit/gentree.cpp b/src/coreclr/jit/gentree.cpp index be00ec67fd77c..dd3a8a56d20e6 100644 --- a/src/coreclr/jit/gentree.cpp +++ b/src/coreclr/jit/gentree.cpp @@ -7109,20 +7109,12 @@ GenTreeLclVar* Compiler::gtNewLclVarAddrNode(unsigned lclNum, var_types type) GenTreeLclFld* Compiler::gtNewLclFldAddrNode(unsigned lclNum, unsigned lclOffs, FieldSeqNode* fieldSeq, var_types type) { GenTreeLclFld* node = new (this, GT_LCL_FLD_ADDR) GenTreeLclFld(GT_LCL_FLD_ADDR, type, lclNum, lclOffs); - node->SetFieldSeq(fieldSeq == nullptr ? FieldSeqStore::NotAField() : fieldSeq); return node; } GenTreeLclFld* Compiler::gtNewLclFldNode(unsigned lnum, var_types type, unsigned offset) { GenTreeLclFld* node = new (this, GT_LCL_FLD) GenTreeLclFld(GT_LCL_FLD, type, lnum, offset); - - /* Cannot have this assert because the inliner uses this function - * to add temporaries */ - - // assert(lnum < lvaCount); - - node->SetFieldSeq(FieldSeqStore::NotAField()); return node; } @@ -7913,8 +7905,7 @@ GenTree* Compiler::gtClone(GenTree* tree, bool complexOK) case GT_LCL_FLD_ADDR: copy = new (this, tree->OperGet()) GenTreeLclFld(tree->OperGet(), tree->TypeGet(), tree->AsLclFld()->GetLclNum(), - tree->AsLclFld()->GetLclOffs()); - copy->AsLclFld()->SetFieldSeq(tree->AsLclFld()->GetFieldSeq()); + tree->AsLclFld()->GetLclOffs(), tree->AsLclFld()->GetLayout()); goto FINISH_CLONING_LCL_NODE; FINISH_CLONING_LCL_NODE: @@ -8103,10 +8094,9 @@ GenTree* Compiler::gtCloneExpr( // Remember that the LclVar node has been cloned. The flag will // be set on 'copy' as well. tree->gtFlags |= GTF_VAR_CLONED; - copy = - new (this, GT_LCL_FLD) GenTreeLclFld(GT_LCL_FLD, tree->TypeGet(), tree->AsLclFld()->GetLclNum(), - tree->AsLclFld()->GetLclOffs()); - copy->AsLclFld()->SetFieldSeq(tree->AsLclFld()->GetFieldSeq()); + copy = new (this, GT_LCL_FLD) + GenTreeLclFld(GT_LCL_FLD, tree->TypeGet(), tree->AsLclFld()->GetLclNum(), + tree->AsLclFld()->GetLclOffs(), tree->AsLclFld()->GetLayout()); copy->AsLclFld()->SetSsaNum(tree->AsLclFld()->GetSsaNum()); } goto DONE; @@ -8150,7 +8140,6 @@ GenTree* Compiler::gtCloneExpr( case GT_LCL_FLD_ADDR: copy = new (this, oper) GenTreeLclFld(oper, tree->TypeGet(), tree->AsLclFld()->GetLclNum(), tree->AsLclFld()->GetLclOffs()); - copy->AsLclFld()->SetFieldSeq(tree->AsLclFld()->GetFieldSeq()); goto DONE; default: @@ -10381,6 +10370,10 @@ void Compiler::gtDispNode(GenTree* tree, IndentStack* indentStack, _In_ _In_opt_ layout = varDsc->GetLayout(); } } + else if (tree->OperIs(GT_LCL_FLD, GT_STORE_LCL_FLD)) + { + layout = tree->AsLclFld()->GetLayout(); + } else if (tree->OperIs(GT_INDEX)) { GenTreeIndex* asInd = tree->AsIndex(); @@ -11115,7 +11108,6 @@ void Compiler::gtDispLeaf(GenTree* tree, IndentStack* indentStack) if (isLclFld) { printf("[+%u]", tree->AsLclFld()->GetLclOffs()); - gtDispFieldSeq(tree->AsLclFld()->GetFieldSeq()); } if (varDsc->lvRegister) @@ -16206,39 +16198,6 @@ bool GenTree::DefinesLocalAddr( return false; } -//------------------------------------------------------------------------ -// IsLocalExpr: Determine if this is a LclVarCommon node and return some -// additional info about it in the two out parameters. -// -// Arguments: -// comp - The Compiler instance -// pLclVarTree - An "out" argument that returns the local tree as a -// LclVarCommon, if it is indeed local. 
-// pFldSeq - An "out" argument that returns the value numbering field -// sequence for the node, if any. -// -// Return Value: -// Returns true, and sets the out arguments accordingly, if this is -// a LclVarCommon node. - -bool GenTree::IsLocalExpr(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, FieldSeqNode** pFldSeq) -{ - if (IsLocal()) // Note that this covers "GT_LCL_FLD." - { - *pLclVarTree = AsLclVarCommon(); - if (OperGet() == GT_LCL_FLD) - { - // Otherwise, prepend this field to whatever we've already accumulated outside in. - *pFldSeq = comp->GetFieldSeqStore()->Append(AsLclFld()->GetFieldSeq(), *pFldSeq); - } - return true; - } - else - { - return false; - } -} - // If this tree evaluates some sum of a local address and some constants, // return the node for the local being addressed @@ -16267,93 +16226,6 @@ const GenTreeLclVarCommon* GenTree::IsLocalAddrExpr() const return nullptr; } -//------------------------------------------------------------------------ -// IsLocalAddrExpr: finds if "this" is an address of a local var/fld. -// -// Arguments: -// comp - a compiler instance; -// pLclVarTree - [out] sets to the node indicating the local variable if found; -// pFldSeq - [out] sets to the field sequence representing the field, else null; -// pOffset - [out](optional) sets to the sum offset of the lcl/fld if found, -// note it does not include pLclVarTree->GetLclOffs(). -// -// Returns: -// Returns true if "this" represents the address of a local, or a field of a local. -// -// Notes: -// It is mostly used for optimizations but assertion propagation depends on it for correctness. -// So if this function does not recognize a def of a LCL_VAR we can have an incorrect optimization. -// -bool GenTree::IsLocalAddrExpr(Compiler* comp, - GenTreeLclVarCommon** pLclVarTree, - FieldSeqNode** pFldSeq, - ssize_t* pOffset /* = nullptr */) -{ - if (OperGet() == GT_ADDR) - { - assert(!comp->compRationalIRForm); - GenTree* addrArg = AsOp()->gtOp1; - if (addrArg->IsLocal()) // Note that this covers "GT_LCL_FLD." - { - *pLclVarTree = addrArg->AsLclVarCommon(); - if (addrArg->OperGet() == GT_LCL_FLD) - { - // Otherwise, prepend this field to whatever we've already accumulated outside in. - *pFldSeq = comp->GetFieldSeqStore()->Append(addrArg->AsLclFld()->GetFieldSeq(), *pFldSeq); - } - return true; - } - else - { - return false; - } - } - else if (OperIsLocalAddr()) - { - *pLclVarTree = this->AsLclVarCommon(); - if (this->OperGet() == GT_LCL_FLD_ADDR) - { - *pFldSeq = comp->GetFieldSeqStore()->Append(this->AsLclFld()->GetFieldSeq(), *pFldSeq); - } - return true; - } - else if (OperGet() == GT_ADD) - { - if (AsOp()->gtOp1->OperGet() == GT_CNS_INT) - { - GenTreeIntCon* cnst = AsOp()->gtOp1->AsIntCon(); - if (cnst->gtFieldSeq == nullptr) - { - return false; - } - // Otherwise, prepend this field to whatever we've already accumulated outside in. - *pFldSeq = comp->GetFieldSeqStore()->Append(cnst->gtFieldSeq, *pFldSeq); - if (pOffset != nullptr) - { - *pOffset += cnst->IconValue(); - } - return AsOp()->gtOp2->IsLocalAddrExpr(comp, pLclVarTree, pFldSeq, pOffset); - } - else if (AsOp()->gtOp2->OperGet() == GT_CNS_INT) - { - GenTreeIntCon* cnst = AsOp()->gtOp2->AsIntCon(); - if (cnst->gtFieldSeq == nullptr) - { - return false; - } - // Otherwise, prepend this field to whatever we've already accumulated outside in. 
- *pFldSeq = comp->GetFieldSeqStore()->Append(cnst->gtFieldSeq, *pFldSeq); - if (pOffset != nullptr) - { - *pOffset += cnst->IconValue(); - } - return AsOp()->gtOp1->IsLocalAddrExpr(comp, pLclVarTree, pFldSeq, pOffset); - } - } - // Otherwise... - return false; -} - //------------------------------------------------------------------------ // IsImplicitByrefParameterValue: determine if this tree is the entire // value of a local implicit byref parameter diff --git a/src/coreclr/jit/gentree.h b/src/coreclr/jit/gentree.h index 89f182703fb48..dc99768afb027 100644 --- a/src/coreclr/jit/gentree.h +++ b/src/coreclr/jit/gentree.h @@ -1919,13 +1919,6 @@ struct GenTree bool* pIsEntire = nullptr, ssize_t* pOffset = nullptr); - bool IsLocalAddrExpr(Compiler* comp, - GenTreeLclVarCommon** pLclVarTree, - FieldSeqNode** pFldSeq, - ssize_t* pOffset = nullptr); - - // Simpler variant of the above which just returns the local node if this is an expression that - // yields an address into a local const GenTreeLclVarCommon* IsLocalAddrExpr() const; GenTreeLclVarCommon* IsLocalAddrExpr() { @@ -1936,10 +1929,6 @@ struct GenTree // and if so return the tree for the parameter. GenTreeLclVar* IsImplicitByrefParameterValue(Compiler* compiler); - // Determine if this is a LclVarCommon node and return some additional info about it in the - // two out parameters. - bool IsLocalExpr(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, FieldSeqNode** pFldSeq); - // Determine whether this is an assignment tree of the form X = X (op) Y, // where Y is an arbitrary tree, and X is a lclVar. unsigned IsLclVarUpdateTree(GenTree** otherTree, genTreeOps* updateOper); @@ -3504,12 +3493,12 @@ struct GenTreeLclVar : public GenTreeLclVarCommon struct GenTreeLclFld : public GenTreeLclVarCommon { private: - uint16_t m_lclOffs; // offset into the variable to access - FieldSeqNode* m_fieldSeq; // This LclFld node represents some sequences of accesses. + uint16_t m_lclOffs; // offset into the variable to access + ClassLayout* m_layout; // The struct layout for this local field. 
public: - GenTreeLclFld(genTreeOps oper, var_types type, unsigned lclNum, unsigned lclOffs) - : GenTreeLclVarCommon(oper, type, lclNum), m_lclOffs(static_cast(lclOffs)), m_fieldSeq(nullptr) + GenTreeLclFld(genTreeOps oper, var_types type, unsigned lclNum, unsigned lclOffs, ClassLayout* layout = nullptr) + : GenTreeLclVarCommon(oper, type, lclNum), m_lclOffs(static_cast(lclOffs)), m_layout(layout) { assert(lclOffs <= UINT16_MAX); } @@ -3525,14 +3514,14 @@ struct GenTreeLclFld : public GenTreeLclVarCommon m_lclOffs = static_cast(lclOffs); } - FieldSeqNode* GetFieldSeq() const + ClassLayout* GetLayout() const { - return m_fieldSeq; + return m_layout; } - void SetFieldSeq(FieldSeqNode* fieldSeq) + void SetLayout(ClassLayout* layout) { - m_fieldSeq = fieldSeq; + m_layout = layout; } unsigned GetSize() const; diff --git a/src/coreclr/jit/importer.cpp b/src/coreclr/jit/importer.cpp index e2360e7f7c80d..e477b7dcc231c 100644 --- a/src/coreclr/jit/importer.cpp +++ b/src/coreclr/jit/importer.cpp @@ -3678,16 +3678,11 @@ GenTree* Compiler::impCreateSpanIntrinsic(CORINFO_SIG_INFO* sig) unsigned spanTempNum = lvaGrabTemp(true DEBUGARG("ReadOnlySpan for CreateSpan")); lvaSetStruct(spanTempNum, spanHnd, false); - CORINFO_FIELD_HANDLE pointerFieldHnd = info.compCompHnd->getFieldInClass(spanHnd, 0); - CORINFO_FIELD_HANDLE lengthFieldHnd = info.compCompHnd->getFieldInClass(spanHnd, 1); + GenTreeLclFld* pointerField = gtNewLclFldNode(spanTempNum, TYP_BYREF, 0); + GenTree* pointerFieldAsg = gtNewAssignNode(pointerField, pointerValue); - GenTreeLclFld* pointerField = gtNewLclFldNode(spanTempNum, TYP_BYREF, 0); - pointerField->SetFieldSeq(GetFieldSeqStore()->CreateSingleton(pointerFieldHnd, 0)); - GenTree* pointerFieldAsg = gtNewAssignNode(pointerField, pointerValue); - - GenTreeLclFld* lengthField = gtNewLclFldNode(spanTempNum, TYP_INT, TARGET_POINTER_SIZE); - lengthField->SetFieldSeq(GetFieldSeqStore()->CreateSingleton(lengthFieldHnd, TARGET_POINTER_SIZE)); - GenTree* lengthFieldAsg = gtNewAssignNode(lengthField, lengthValue); + GenTreeLclFld* lengthField = gtNewLclFldNode(spanTempNum, TYP_INT, TARGET_POINTER_SIZE); + GenTree* lengthFieldAsg = gtNewAssignNode(lengthField, lengthValue); // Now append a few statements the initialize the span impAppendTree(lengthFieldAsg, (unsigned)CHECK_SPILL_NONE, impCurStmtDI); diff --git a/src/coreclr/jit/lclmorph.cpp b/src/coreclr/jit/lclmorph.cpp index dae83e51b9d47..4fd10a2cd7518 100644 --- a/src/coreclr/jit/lclmorph.cpp +++ b/src/coreclr/jit/lclmorph.cpp @@ -34,18 +34,16 @@ class LocalAddressVisitor final : public GenTreeVisitor // class Value { - GenTree* m_node; - FieldSeqNode* m_fieldSeq; - unsigned m_lclNum; - unsigned m_offset; - bool m_address; + GenTree* m_node; + unsigned m_lclNum; + unsigned m_offset; + bool m_address; INDEBUG(bool m_consumed;) public: // Produce an unknown value associated with the specified node. Value(GenTree* node) : m_node(node) - , m_fieldSeq(nullptr) , m_lclNum(BAD_VAR_NUM) , m_offset(0) , m_address(false) @@ -91,12 +89,6 @@ class LocalAddressVisitor final : public GenTreeVisitor return m_offset; } - // Get the location's field sequence. - FieldSeqNode* FieldSeq() const - { - return m_fieldSeq; - } - //------------------------------------------------------------------------ // Location: Produce a location value. 
// @@ -114,7 +106,6 @@ class LocalAddressVisitor final : public GenTreeVisitor m_lclNum = lclVar->GetLclNum(); assert(m_offset == 0); - assert(m_fieldSeq == nullptr); } //------------------------------------------------------------------------ @@ -135,7 +126,6 @@ class LocalAddressVisitor final : public GenTreeVisitor m_address = true; assert(m_offset == 0); - assert(m_fieldSeq == nullptr); } //------------------------------------------------------------------------ @@ -152,9 +142,8 @@ class LocalAddressVisitor final : public GenTreeVisitor assert(lclFld->OperIs(GT_LCL_FLD)); assert(!IsLocation() && !IsAddress()); - m_lclNum = lclFld->GetLclNum(); - m_offset = lclFld->GetLclOffs(); - m_fieldSeq = lclFld->GetFieldSeq(); + m_lclNum = lclFld->GetLclNum(); + m_offset = lclFld->GetLclOffs(); } //------------------------------------------------------------------------ @@ -171,10 +160,9 @@ class LocalAddressVisitor final : public GenTreeVisitor assert(lclFld->OperIs(GT_LCL_FLD_ADDR)); assert(!IsLocation() && !IsAddress()); - m_lclNum = lclFld->GetLclNum(); - m_offset = lclFld->GetLclOffs(); - m_fieldSeq = lclFld->GetFieldSeq(); - m_address = true; + m_lclNum = lclFld->GetLclNum(); + m_offset = lclFld->GetLclOffs(); + m_address = true; } //------------------------------------------------------------------------ @@ -195,10 +183,9 @@ class LocalAddressVisitor final : public GenTreeVisitor if (val.IsLocation()) { - m_address = true; - m_lclNum = val.m_lclNum; - m_offset = val.m_offset; - m_fieldSeq = val.m_fieldSeq; + m_address = true; + m_lclNum = val.m_lclNum; + m_offset = val.m_offset; } INDEBUG(val.Consume();) @@ -245,76 +232,6 @@ class LocalAddressVisitor final : public GenTreeVisitor m_lclNum = val.m_lclNum; m_offset = newOffset.Value(); - - bool haveCorrectFieldForVN; - if (field->gtFldMayOverlap) - { - haveCorrectFieldForVN = false; - } - else - { - LclVarDsc* varDsc = compiler->lvaGetDesc(m_lclNum); - if (!varTypeIsStruct(varDsc)) - { - haveCorrectFieldForVN = false; - } - else if (val.m_fieldSeq == nullptr) - { - - CORINFO_CLASS_HANDLE clsHnd = varDsc->GetStructHnd(); - // If the answer is no we are probably accessing a canon type with a non-canon fldHnd, - // currently it could happen in crossgen2 scenario where VM distinguishes class._field - // from class._field. 
- haveCorrectFieldForVN = - compiler->info.compCompHnd->doesFieldBelongToClass(field->gtFldHnd, clsHnd); - } - else - { - FieldSeqNode* lastSeqNode = val.m_fieldSeq->GetTail(); - assert(lastSeqNode != nullptr); - if (lastSeqNode == FieldSeqStore::NotAField()) - { - haveCorrectFieldForVN = false; - } - else - { - CORINFO_FIELD_HANDLE lastFieldBeforeTheCurrent = lastSeqNode->GetFieldHandle(); - - CORINFO_CLASS_HANDLE clsHnd; - CorInfoType fieldCorType = - compiler->info.compCompHnd->getFieldType(lastFieldBeforeTheCurrent, &clsHnd); - if (fieldCorType != CORINFO_TYPE_VALUECLASS) - { - // For example, System.IntPtr:ToInt64, when inlined, creates trees like - // * FIELD long _value - // \--* ADDR byref - // \--* FIELD long Information - // \--* ADDR byref - // \--* LCL_VAR struct V08 tmp7 - haveCorrectFieldForVN = false; - } - else - { - - haveCorrectFieldForVN = - compiler->info.compCompHnd->doesFieldBelongToClass(field->gtFldHnd, clsHnd); - noway_assert(haveCorrectFieldForVN); - } - } - } - } - - if (haveCorrectFieldForVN) - { - FieldSeqStore* fieldSeqStore = compiler->GetFieldSeqStore(); - FieldSeqNode* fieldSeqNode = fieldSeqStore->CreateSingleton(field->gtFldHnd, field->gtFldOffset); - m_fieldSeq = fieldSeqStore->Append(val.m_fieldSeq, fieldSeqNode); - } - else - { - m_fieldSeq = FieldSeqStore::NotAField(); - JITDUMP("Setting NotAField for [%06u],\n", compiler->dspTreeID(field)); - } } INDEBUG(val.Consume();) @@ -348,9 +265,8 @@ class LocalAddressVisitor final : public GenTreeVisitor if (val.IsAddress()) { - m_lclNum = val.m_lclNum; - m_offset = val.m_offset; - m_fieldSeq = val.m_fieldSeq; + m_lclNum = val.m_lclNum; + m_offset = val.m_offset; } INDEBUG(val.Consume();) @@ -939,14 +855,14 @@ class LocalAddressVisitor final : public GenTreeVisitor // use ADD(LCL_VAR_ADDR, offset) instead. addr->ChangeOper(GT_ADD); addr->AsOp()->gtOp1 = m_compiler->gtNewLclVarAddrNode(val.LclNum()); - addr->AsOp()->gtOp2 = m_compiler->gtNewIconNode(val.Offset(), val.FieldSeq()); + addr->AsOp()->gtOp2 = m_compiler->gtNewIconNode(val.Offset(), TYP_I_IMPL); } - else if ((val.Offset() != 0) || (val.FieldSeq() != nullptr)) + else if (val.Offset() != 0) { addr->ChangeOper(GT_LCL_FLD_ADDR); addr->AsLclFld()->SetLclNum(val.LclNum()); addr->AsLclFld()->SetLclOffs(val.Offset()); - addr->AsLclFld()->SetFieldSeq(val.FieldSeq()); + addr->AsLclFld()->SetLayout(nullptr); } else { @@ -980,33 +896,10 @@ class LocalAddressVisitor final : public GenTreeVisitor return; } - GenTree* indir = val.Node(); - FieldSeqNode* fieldSeq = val.FieldSeq(); - - if (fieldSeq == nullptr) - { - fieldSeq = FieldSeqStore::NotAField(); - } - - if (fieldSeq != FieldSeqStore::NotAField()) - { - if (!indir->OperIs(GT_FIELD)) - { - // TODO-PhysicalVN: with the physical VN scheme, we no longer need the below check. - if (indir->TypeGet() != m_compiler->eeGetFieldType(fieldSeq->GetTail()->GetFieldHandle())) - { - fieldSeq = FieldSeqStore::NotAField(); - } - } - else - { - // FIELDs are correctly typed by construction. 
- assert(indir->AsField()->gtFldHnd == fieldSeq->GetTail()->GetFieldHandle()); - } - } - + GenTree* indir = val.Node(); GenTreeLclVarCommon* lclNode = nullptr; GenTreeFlags lclNodeFlags = GTF_EMPTY; + switch (transform) { case IndirTransform::LclVar: @@ -1020,12 +913,12 @@ class LocalAddressVisitor final : public GenTreeVisitor indir->ChangeOper(GT_LCL_FLD); indir->AsLclFld()->SetLclNum(val.LclNum()); indir->AsLclFld()->SetLclOffs(val.Offset()); - indir->AsLclFld()->SetFieldSeq(fieldSeq); + indir->AsLclFld()->SetLayout(indirLayout); lclNode = indir->AsLclVarCommon(); break; - // TODO-ADDR: support TYP_STRUCT LCL_FLD and use it instead. + // TODO-ADDR: support TYP_STRUCT LCL_FLD for all users and use it instead. case IndirTransform::ObjAddrLclFld: { indir->SetOper(indirLayout->IsBlockLayout() ? GT_BLK : GT_OBJ); @@ -1044,7 +937,7 @@ class LocalAddressVisitor final : public GenTreeVisitor location->ChangeOper(GT_LCL_FLD); location->AsLclFld()->SetLclNum(val.LclNum()); location->AsLclFld()->SetLclOffs(val.Offset()); - location->AsLclFld()->SetFieldSeq(fieldSeq); + location->AsLclFld()->SetLayout(nullptr); lclNode = location->AsLclVarCommon(); lclNodeFlags |= GTF_DONT_CSE; @@ -1081,6 +974,18 @@ class LocalAddressVisitor final : public GenTreeVisitor INDEBUG(m_stmtModified = true); } + //------------------------------------------------------------------------ + // SelectLocalIndirTransform: Select the transformation appropriate for an + // indirect access of a local variable. + // + // Arguments: + // val - a value that represents the local indirection + // user - the indirection's user node + // pStructLayout - [out] parameter for layout of struct indirections + // + // Return Value: + // The transformation the caller should perform on this indirection. + // IndirTransform SelectLocalIndirTransform(const Value& val, GenTree* user, ClassLayout** pStructLayout) { GenTree* indir = val.Node(); diff --git a/src/coreclr/jit/lclvars.cpp b/src/coreclr/jit/lclvars.cpp index a52342f86c7eb..f379847b9efdb 100644 --- a/src/coreclr/jit/lclvars.cpp +++ b/src/coreclr/jit/lclvars.cpp @@ -7805,10 +7805,6 @@ void Compiler::lvaDumpEntry(unsigned lclNum, FrameLayoutState curState, size_t r { printf(" pinned"); } - if (varDsc->lvStackByref) - { - printf(" stack-byref"); - } if (varDsc->lvClassHnd != NO_CLASS_HANDLE) { printf(" class-hnd"); diff --git a/src/coreclr/jit/morph.cpp b/src/coreclr/jit/morph.cpp index 3449e3fd44799..1e1ced6a9b221 100644 --- a/src/coreclr/jit/morph.cpp +++ b/src/coreclr/jit/morph.cpp @@ -3420,11 +3420,7 @@ GenTreeCall* Compiler::fgMorphArgs(GenTreeCall* call) // (tmp.ptr=argx),(tmp.type=handle) GenTreeLclFld* destPtrSlot = gtNewLclFldNode(tmp, TYP_I_IMPL, OFFSETOF__CORINFO_TypedReference__dataPtr); GenTreeLclFld* destTypeSlot = gtNewLclFldNode(tmp, TYP_I_IMPL, OFFSETOF__CORINFO_TypedReference__type); - destPtrSlot->SetFieldSeq( - GetFieldSeqStore()->CreateSingleton(GetRefanyDataField(), OFFSETOF__CORINFO_TypedReference__dataPtr)); destPtrSlot->gtFlags |= GTF_VAR_DEF; - destTypeSlot->SetFieldSeq( - GetFieldSeqStore()->CreateSingleton(GetRefanyTypeField(), OFFSETOF__CORINFO_TypedReference__type)); destTypeSlot->gtFlags |= GTF_VAR_DEF; GenTree* asgPtrSlot = gtNewAssignNode(destPtrSlot, argx->AsOp()->gtOp1); @@ -9644,45 +9640,6 @@ GenTree* Compiler::fgMorphBlockOperand(GenTree* tree, var_types asgType, unsigne return tree; } -//------------------------------------------------------------------------ -// fgMorphCanUseLclFldForCopy: check if we can access LclVar2 using LclVar1's fields. 
-// -// Arguments: -// lclNum1 - a promoted lclVar that is used in fieldwise assignment; -// lclNum2 - the local variable on the other side of ASG, can be BAD_VAR_NUM. -// -// Return Value: -// True if the second local is valid and has the same struct handle as the first, -// false otherwise. -// -// Notes: -// TODO-PhysicalVN: with the physical VN scheme, this method is no longer needed. -// -bool Compiler::fgMorphCanUseLclFldForCopy(unsigned lclNum1, unsigned lclNum2) -{ - assert(lclNum1 != BAD_VAR_NUM); - if (lclNum2 == BAD_VAR_NUM) - { - return false; - } - const LclVarDsc* varDsc1 = lvaGetDesc(lclNum1); - const LclVarDsc* varDsc2 = lvaGetDesc(lclNum2); - assert(varTypeIsStruct(varDsc1)); - if (!varTypeIsStruct(varDsc2)) - { - return false; - } - CORINFO_CLASS_HANDLE struct1 = varDsc1->GetStructHnd(); - CORINFO_CLASS_HANDLE struct2 = varDsc2->GetStructHnd(); - assert(struct1 != NO_CLASS_HANDLE); - assert(struct2 != NO_CLASS_HANDLE); - if (struct1 != struct2) - { - return false; - } - return true; -} - // insert conversions and normalize to make tree amenable to register // FP architectures GenTree* Compiler::fgMorphForRegisterFP(GenTree* tree) @@ -11671,11 +11628,7 @@ GenTree* Compiler::fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac) if ((fieldSeq != nullptr) && (temp->OperGet() == GT_LCL_FLD)) { - // Append the field sequence, change the type. - temp->AsLclFld()->SetFieldSeq( - GetFieldSeqStore()->Append(temp->AsLclFld()->GetFieldSeq(), fieldSeq)); - temp->gtType = typ; - + temp->gtType = typ; foldAndReturnTemp = true; } } @@ -11730,13 +11683,6 @@ GenTree* Compiler::fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac) ival1 = op1->AsOp()->gtOp2->AsIntCon()->gtIconVal; fieldSeq = op1->AsOp()->gtOp2->AsIntCon()->gtFieldSeq; - // Does the address have an associated zero-offset field sequence? - FieldSeqNode* addrFieldSeq = nullptr; - if (GetZeroOffsetFieldMap()->Lookup(op1->AsOp()->gtOp1, &addrFieldSeq)) - { - fieldSeq = GetFieldSeqStore()->Append(addrFieldSeq, fieldSeq); - } - if (ival1 == 0 && typ == temp->TypeGet() && temp->TypeGet() != TYP_STRUCT) { noway_assert(!varTypeIsGC(temp->TypeGet())); @@ -11794,7 +11740,6 @@ GenTree* Compiler::fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac) { lclFld = temp->AsLclFld(); lclFld->SetLclOffs(lclFld->GetLclOffs() + static_cast(ival1)); - lclFld->SetFieldSeq(GetFieldSeqStore()->Append(lclFld->GetFieldSeq(), fieldSeq)); } else // We have a GT_LCL_VAR. { @@ -11802,12 +11747,6 @@ GenTree* Compiler::fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac) temp->ChangeOper(GT_LCL_FLD); // Note that this makes the gtFieldSeq "NotAField". lclFld = temp->AsLclFld(); lclFld->SetLclOffs(static_cast(ival1)); - - if (fieldSeq != nullptr) - { - // If it does represent a field, note that. 
- lclFld->SetFieldSeq(fieldSeq); - } } temp->gtType = tree->gtType; @@ -17249,18 +17188,6 @@ void Compiler::fgAddFieldSeqForZeroOffset(GenTree* addr, FieldSeqNode* fieldSeqZ fieldSeqRecorded = true; break; - case GT_ADDR: - if (addr->AsOp()->gtOp1->OperGet() == GT_LCL_FLD) - { - fieldSeqNode = addr->AsOp()->gtOp1; - - GenTreeLclFld* lclFld = addr->AsOp()->gtOp1->AsLclFld(); - fieldSeqUpdate = GetFieldSeqStore()->Append(lclFld->GetFieldSeq(), fieldSeqZero); - lclFld->SetFieldSeq(fieldSeqUpdate); - fieldSeqRecorded = true; - } - break; - case GT_ADD: if (addr->AsOp()->gtOp1->OperGet() == GT_CNS_INT) { @@ -17406,7 +17333,7 @@ bool Compiler::fgMorphCombineSIMDFieldAssignments(BasicBlock* block, Statement* { dstNode = originalLHS; dstNode->gtType = simdType; - dstNode->AsLclFld()->SetFieldSeq(FieldSeqStore::NotAField()); + dstNode->AsLclFld()->SetLayout(nullptr); // This may have changed a partial local field into full local field if (dstNode->IsPartialLclFld(this)) diff --git a/src/coreclr/jit/morphblock.cpp b/src/coreclr/jit/morphblock.cpp index 882a67c2304d8..2cd0741ea0f3b 100644 --- a/src/coreclr/jit/morphblock.cpp +++ b/src/coreclr/jit/morphblock.cpp @@ -39,7 +39,6 @@ class MorphInitBlockHelper unsigned m_dstLclNum = BAD_VAR_NUM; GenTreeLclVarCommon* m_dstLclNode = nullptr; LclVarDsc* m_dstVarDsc = nullptr; - FieldSeqNode* m_dstFldSeq = nullptr; unsigned m_dstLclOffset = 0; bool m_dstUseLclFld = false; bool m_dstSingleLclVarAsg = false; @@ -222,7 +221,6 @@ void MorphInitBlockHelper::PrepareDst() assert(m_dst->OperIs(GT_LCL_FLD) && !m_dst->TypeIs(TYP_STRUCT)); GenTreeLclFld* destFld = m_dst->AsLclFld(); m_blockSize = genTypeSize(destFld->TypeGet()); - m_dstFldSeq = destFld->GetFieldSeq(); } } else @@ -244,9 +242,10 @@ void MorphInitBlockHelper::PrepareDst() } noway_assert(dstAddr->TypeIs(TYP_BYREF, TYP_I_IMPL)); - if (dstAddr->IsLocalAddrExpr(m_comp, &m_dstLclNode, &m_dstFldSeq, &m_dstAddOff)) + if (dstAddr->DefinesLocalAddr(m_comp, /* width */ 0, &m_dstLclNode, /* pIsEntire */ nullptr, &m_dstAddOff)) { // Note that lclNode can be a field, like `BLK<4> struct(ADD(ADDR(LCL_FLD int), CNST_INT))`. + m_dstAddOff = m_dstAddOff - m_dstLclNode->GetLclOffs(); m_dstVarDsc = m_comp->lvaGetDesc(m_dstLclNode); } } @@ -529,7 +528,6 @@ class MorphCopyBlockHelper : public MorphInitBlockHelper unsigned m_srcLclNum = BAD_VAR_NUM; LclVarDsc* m_srcVarDsc = nullptr; GenTreeLclVarCommon* m_srcLclNode = nullptr; - FieldSeqNode* m_srcFldSeq = nullptr; bool m_srcUseLclFld = false; unsigned m_srcLclOffset = 0; bool m_srcSingleLclVarAsg = false; @@ -588,15 +586,12 @@ void MorphCopyBlockHelper::PrepareSrc() { m_srcLclNode = m_src->AsLclVarCommon(); m_srcLclNum = m_srcLclNode->GetLclNum(); - if (m_src->OperGet() == GT_LCL_FLD) - { - m_srcFldSeq = m_src->AsLclFld()->GetFieldSeq(); - } } else if (m_src->OperIsIndir()) { - if (m_src->AsOp()->gtOp1->IsLocalAddrExpr(m_comp, &m_srcLclNode, &m_srcFldSeq, &m_srcAddOff)) + if (m_src->AsOp()->gtOp1->DefinesLocalAddr(m_comp, /* width */ 0, &m_srcLclNode, /* pIsEntire */ nullptr, &m_srcAddOff)) { + m_srcAddOff = m_srcAddOff - m_srcLclNode->GetLclOffs(); m_srcLclNum = m_srcLclNode->GetLclNum(); } else @@ -726,8 +721,8 @@ void MorphCopyBlockHelper::MorphStructCases() // Check to see if we are doing a copy to/from the same local block. // If so, morph it to a nop. 
- if ((m_dstVarDsc != nullptr) && (m_srcVarDsc == m_dstVarDsc) && (m_dstFldSeq == m_srcFldSeq) && - m_dstFldSeq != FieldSeqStore::NotAField()) + if ((m_dstVarDsc != nullptr) && (m_srcVarDsc == m_dstVarDsc) && (m_dstLclOffset == m_srcLclOffset) && + (m_dstAddOff == m_srcAddOff)) { JITDUMP("Self-copy; replaced with a NOP.\n"); m_transformationDecision = BlockTransformation::Nop; @@ -1010,12 +1005,11 @@ GenTree* MorphCopyBlockHelper::CopyFieldByField() } else if (m_dstDoFldAsg) { - fieldCnt = m_dstVarDsc->lvFieldCnt; - m_src = m_comp->fgMorphBlockOperand(m_src, m_asg->TypeGet(), m_blockSize, false /*isBlkReqd*/); + fieldCnt = m_dstVarDsc->lvFieldCnt; + m_src = m_comp->fgMorphBlockOperand(m_src, m_asg->TypeGet(), m_blockSize, false /*isBlkReqd*/); + m_srcUseLclFld = m_srcVarDsc != nullptr; - m_srcUseLclFld = m_comp->fgMorphCanUseLclFldForCopy(m_dstLclNum, m_srcLclNum); - - if (!m_srcUseLclFld && m_srcAddr == nullptr) + if (!m_srcUseLclFld && (m_srcAddr == nullptr)) { m_srcAddr = m_comp->fgMorphGetStructAddr(&m_src, m_dstVarDsc->GetStructHnd(), true /* rValue */); } @@ -1037,8 +1031,7 @@ GenTree* MorphCopyBlockHelper::CopyFieldByField() // We will spill m_srcAddr (i.e. assign to a temp "BlockOp address local") // no need to clone a new copy as it is only used once // - addrSpill = m_srcAddr; // addrSpill represents the 'm_srcAddr' - addrSpillSrcLclNum = m_srcLclNum; + addrSpill = m_srcAddr; // addrSpill represents the 'm_srcAddr' } } } @@ -1046,25 +1039,22 @@ GenTree* MorphCopyBlockHelper::CopyFieldByField() else { assert(m_srcDoFldAsg); - fieldCnt = m_srcVarDsc->lvFieldCnt; - m_dst = m_comp->fgMorphBlockOperand(m_dst, m_dst->TypeGet(), m_blockSize, false /*isBlkReqd*/); + fieldCnt = m_srcVarDsc->lvFieldCnt; + m_dst = m_comp->fgMorphBlockOperand(m_dst, m_dst->TypeGet(), m_blockSize, false /*isBlkReqd*/); + m_dstUseLclFld = m_dstVarDsc != nullptr; + if (m_dst->OperIsBlk()) { m_dst->SetOper(GT_IND); m_dst->gtType = TYP_STRUCT; } - m_dstUseLclFld = m_comp->fgMorphCanUseLclFldForCopy(m_srcLclNum, m_dstLclNum); + if (!m_dstUseLclFld) { m_dstAddr = m_comp->gtNewOperNode(GT_ADDR, TYP_BYREF, m_dst); } - // If we're doing field-wise stores, to an address within a local, and we copy - // the address into "addrSpill", do *not* declare the original local var node in the - // field address as GTF_VAR_DEF and GTF_VAR_USEASG; we will declare each of the - // field-wise assignments as an "indirect" assignment to the local. - // ("m_dstLclNode" is a subtree of "m_dstAddr"; make sure we remove the flags before - // we clone it.) + // Clear the def flags, we'll reuse the node below and reset them. if (m_dstLclNode != nullptr) { m_dstLclNode->gtFlags &= ~(GTF_VAR_DEF | GTF_VAR_USEASG); @@ -1087,8 +1077,7 @@ GenTree* MorphCopyBlockHelper::CopyFieldByField() // We will spill m_dstAddr (i.e. assign to a temp "BlockOp address local") // no need to clone a new copy as it is only used once // - addrSpill = m_dstAddr; // addrSpill represents the 'm_dstAddr' - addrSpillSrcLclNum = m_dstLclNum; + addrSpill = m_dstAddr; // addrSpill represents the 'm_dstAddr' } } } @@ -1103,30 +1092,10 @@ GenTree* MorphCopyBlockHelper::CopyFieldByField() addrSpillTemp = m_comp->lvaGrabTemp(true DEBUGARG("BlockOp address local")); LclVarDsc* addrSpillDsc = m_comp->lvaGetDesc(addrSpillTemp); - - addrSpillDsc->lvType = TYP_BYREF; - - if (addrSpillSrcLclNum != BAD_VAR_NUM) - { - // addrSpill represents the address of LclVar[varNum] in our local stack frame. 
- addrSpillDsc->lvStackByref = true; - } + addrSpillDsc->lvType = TYP_BYREF; GenTreeLclVar* addrSpillNode = m_comp->gtNewLclvNode(addrSpillTemp, TYP_BYREF); addrSpillAsg = m_comp->gtNewAssignNode(addrSpillNode, addrSpill); - - // If we are assigning the address of a LclVar here liveness will not - // account for this kind of address taken use. Mark the local as - // address-exposed so that we don't do illegal optimizations with it. - // - // TODO-CQ: usage of "addrSpill" for local addresses is a workaround - // for cases where we fail to use LCL_FLD nodes instead. Fix them and - // delete this code. - // - if (addrSpillSrcLclNum != BAD_VAR_NUM) - { - m_comp->lvaSetVarAddrExposed(addrSpillSrcLclNum DEBUGARG(AddressExposedReason::COPY_FLD_BY_FLD)); - } } // We may have allocated a temp above, and that may have caused the lvaTable to be expanded. @@ -1197,19 +1166,6 @@ GenTree* MorphCopyBlockHelper::CopyFieldByField() // Morph the newly created tree dstAddrClone = m_comp->fgMorphTree(dstAddrClone); } - - // Is the address of a local? - GenTreeLclVarCommon* lclVarTree = nullptr; - bool isEntire = false; - bool* pIsEntire = &isEntire; - if (dstAddrClone->DefinesLocalAddr(m_comp, m_blockSize, &lclVarTree, pIsEntire)) - { - lclVarTree->gtFlags |= GTF_VAR_DEF; - if (!isEntire) - { - lclVarTree->gtFlags |= GTF_VAR_USEASG; - } - } } } @@ -1228,7 +1184,6 @@ GenTree* MorphCopyBlockHelper::CopyFieldByField() if (!m_dstUseLclFld) { - if (srcFieldOffset == 0) { m_comp->fgAddFieldSeqForZeroOffset(dstAddrClone, curFieldSeq); @@ -1245,13 +1200,11 @@ GenTree* MorphCopyBlockHelper::CopyFieldByField() { assert(m_dstAddOff == 0); assert(dstAddrClone == nullptr); - assert((m_dstLclOffset == 0) || (m_dstFldSeq != nullptr)); + // If the dst was a struct type field "B" in a struct "A" then we add // add offset of ("B" in "A") + current offset in "B". - unsigned summOffset = m_dstLclOffset + srcFieldOffset; - dstFld = m_comp->gtNewLclFldNode(m_dstLclNum, srcType, summOffset); - FieldSeqNode* dstFldFldSeq = m_comp->GetFieldSeqStore()->Append(m_dstFldSeq, curFieldSeq); - dstFld->AsLclFld()->SetFieldSeq(dstFldFldSeq); + unsigned totalOffset = m_dstLclOffset + srcFieldOffset; + dstFld = m_comp->gtNewLclFldNode(m_dstLclNum, srcType, totalOffset); // TODO-1stClassStructs: remove this and implement storing to a field in a struct in a reg. m_comp->lvaSetVarDoNotEnregister(m_dstLclNum DEBUGARG(DoNotEnregisterReason::LocalField)); @@ -1344,7 +1297,6 @@ GenTree* MorphCopyBlockHelper::CopyFieldByField() m_srcLclNode->gtFlags |= GTF_VAR_CAST; m_srcLclNode->ChangeOper(GT_LCL_FLD); m_srcLclNode->gtType = destType; - m_srcLclNode->AsLclFld()->SetFieldSeq(curFieldSeq); m_comp->lvaSetVarDoNotEnregister(m_srcLclNum DEBUGARG(DoNotEnregisterReason::LocalField)); srcFld = m_srcLclNode; done = true; @@ -1369,16 +1321,14 @@ GenTree* MorphCopyBlockHelper::CopyFieldByField() } else { - assert((m_srcLclOffset == 0) || (m_srcFldSeq != 0)); assert(m_srcAddOff == 0); + // If the src was a struct type field "B" in a struct "A" then we add // add offset of ("B" in "A") + current offset in "B". - unsigned summOffset = m_srcLclOffset + fldOffset; - srcFld = m_comp->gtNewLclFldNode(m_srcLclNum, destType, summOffset); - FieldSeqNode* srcFldFldSeq = m_comp->GetFieldSeqStore()->Append(m_srcFldSeq, curFieldSeq); - srcFld->AsLclFld()->SetFieldSeq(srcFldFldSeq); - // TODO-1stClassStructs: remove this and implement reading a field from a struct in a - // reg. 
+ unsigned totalOffset = m_srcLclOffset + fldOffset; + srcFld = m_comp->gtNewLclFldNode(m_srcLclNum, destType, totalOffset); + + // TODO-1stClassStructs: remove this and implement reading a field from a struct in a reg. m_comp->lvaSetVarDoNotEnregister(m_srcLclNum DEBUGARG(DoNotEnregisterReason::LocalField)); } } @@ -1389,14 +1339,6 @@ GenTree* MorphCopyBlockHelper::CopyFieldByField() GenTreeOp* asgOneFld = m_comp->gtNewAssignNode(dstFld, srcFld); - // If we spilled the address, and we didn't do individual field assignments to promoted fields, - // and it was of a local, ensure that the destination local variable has been marked as address - // exposed. Neither liveness nor SSA are able to track this kind of indirect assignments. - if (addrSpill && !m_dstDoFldAsg && m_dstLclNum != BAD_VAR_NUM) - { - noway_assert(m_comp->lvaGetDesc(m_dstLclNum)->IsAddressExposed()); - } - if (m_comp->optLocalAssertionProp) { m_comp->optAssertionGen(asgOneFld); diff --git a/src/coreclr/jit/rationalize.cpp b/src/coreclr/jit/rationalize.cpp index e2e43e75ed575..193b36d314a21 100644 --- a/src/coreclr/jit/rationalize.cpp +++ b/src/coreclr/jit/rationalize.cpp @@ -146,7 +146,6 @@ void Rationalizer::RewriteSIMDIndir(LIR::Use& use) { addr->SetOper(GT_LCL_FLD); addr->AsLclFld()->SetLclOffs(0); - addr->AsLclFld()->SetFieldSeq(FieldSeqStore::NotAField()); if (((addr->gtFlags & GTF_VAR_DEF) != 0) && (genTypeSize(simdType) < genTypeSize(lclType))) { @@ -362,7 +361,7 @@ static void RewriteAssignmentIntoStoreLclCore(GenTreeOp* assignment, if (locationOp == GT_LCL_FLD) { store->AsLclFld()->SetLclOffs(var->AsLclFld()->GetLclOffs()); - store->AsLclFld()->SetFieldSeq(var->AsLclFld()->GetFieldSeq()); + store->AsLclFld()->SetLayout(var->AsLclFld()->GetLayout()); } copyFlags(store, var, (GTF_LIVENESS_MASK | GTF_VAR_MULTIREG)); diff --git a/src/coreclr/jit/valuenum.cpp b/src/coreclr/jit/valuenum.cpp index 7f4377c53ba94..5c7bb5705555f 100644 --- a/src/coreclr/jit/valuenum.cpp +++ b/src/coreclr/jit/valuenum.cpp @@ -4562,16 +4562,7 @@ ValueNum ValueNumStore::ExtendPtrVN(GenTree* opA, FieldSeqNode* fldSeq, ssize_t return res; } - if (funcApp.m_func == VNF_PtrToLoc) - { -#ifdef DEBUG - // For PtrToLoc, lib == cons. - VNFuncApp consFuncApp; - assert(GetVNFunc(VNConservativeNormalValue(opA->gtVNPair), &consFuncApp) && consFuncApp.Equals(funcApp)); -#endif - res = VNForFunc(TYP_BYREF, VNF_PtrToLoc, funcApp.m_args[0], FieldSeqVNAppend(funcApp.m_args[1], fldSeq)); - } - else if (funcApp.m_func == VNF_PtrToStatic) + if (funcApp.m_func == VNF_PtrToStatic) { res = VNForFunc(TYP_BYREF, VNF_PtrToStatic, funcApp.m_args[0], FieldSeqVNAppend(funcApp.m_args[1], fldSeq), VNForIntPtrCon(ConstantValue(funcApp.m_args[2]) + offset)); @@ -8127,22 +8118,8 @@ void Compiler::fgValueNumberAssignment(GenTreeOp* tree) case GT_LCL_FLD: { - GenTreeLclFld* lclFld = lhs->AsLclFld(); - unsigned storeOffset = lclFld->GetLclOffs(); - unsigned storeSize = lclFld->GetSize(); - - // TODO-PhysicalVN: with the physical VN scheme, we no longer need the field sequence checks. - if ((lclFld->GetFieldSeq() == FieldSeqStore::NotAField()) && ((lclFld->gtFlags & GTF_VAR_USEASG) != 0)) - { - // Invalidate the whole local. - rhsVNPair.SetBoth(vnStore->VNForExpr(compCurBB, lvaGetDesc(lclFld)->TypeGet())); - storeOffset = 0; - storeSize = lvaLclExactSize(lclFld->GetLclNum()); - } - - // TODO-PhysicalVN: remove this quirk, it was added to avoid diffs. 
- bool normalize = ((lclFld->gtFlags & GTF_VAR_USEASG) != 0); - fgValueNumberLocalStore(tree, lclFld, storeOffset, storeSize, rhsVNPair, normalize); + GenTreeLclFld* lclFld = lhs->AsLclFld(); + fgValueNumberLocalStore(tree, lclFld, lclFld->GetLclOffs(), lclFld->GetSize(), rhsVNPair); } break; @@ -8193,24 +8170,6 @@ void Compiler::fgValueNumberAssignment(GenTreeOp* tree) } else if (arg->DefinesLocalAddr(this, storeSize, &lclVarTree, &isEntire, &offset)) { - unsigned lclNum = lclVarTree->GetLclNum(); - fldSeq = FieldSeqStore::NotAField(); - - if (argIsVNFunc && (funcApp.m_func == VNF_PtrToLoc)) - { - assert(arg->gtVNPair.BothEqual()); // If it's a PtrToLoc, lib/cons shouldn't differ. - assert(lclNum == vnStore->ConstantValue(funcApp.m_args[0])); - fldSeq = vnStore->FieldSeqVNToFieldSeq(funcApp.m_args[1]); - } - - // TODO-PhysicalVN: with the physical VN scheme, we no longer need the field sequence checks. - if ((fldSeq == FieldSeqStore::NotAField()) || ((fldSeq == nullptr) && !isEntire)) - { - rhsVNPair.SetBoth(vnStore->VNForExpr(compCurBB, lclVarTree->TypeGet())); - offset = 0; - storeSize = lvaLclExactSize(lclVarTree->GetLclNum()); - } - fgValueNumberLocalStore(tree, lclVarTree, offset, storeSize, rhsVNPair); } else @@ -8275,29 +8234,18 @@ void Compiler::fgValueNumberBlockAssignment(GenTree* tree) if (lclDefSsaNum != SsaConfig::RESERVED_SSA_NUM) { unsigned lhsLclSize = lvaLclExactSize(lhsLclNum); - - unsigned storeSize; - FieldSeqNode* lhsFldSeq; + unsigned storeSize; if (lhs->OperIs(GT_LCL_VAR)) { storeSize = lhsLclSize; - lhsFldSeq = nullptr; } else if (lhs->OperIs(GT_LCL_FLD)) { storeSize = lhs->AsLclFld()->GetSize(); - lhsFldSeq = lhs->AsLclFld()->GetFieldSeq(); } else { storeSize = lhs->AsIndir()->Size(); - - ValueNumPair lhsAddrVNP = lhs->AsIndir()->Addr()->gtVNPair; - VNFuncApp lhsAddrFunc; - bool lhsAddrIsVNFunc = vnStore->GetVNFunc(lhsAddrVNP.GetLiberal(), &lhsAddrFunc); - assert(lhsAddrIsVNFunc && (lhsAddrFunc.m_func == VNF_PtrToLoc) && lhsAddrVNP.BothEqual()); - - lhsFldSeq = vnStore->FieldSeqVNToFieldSeq(lhsAddrFunc.m_args[1]); } ValueNumPair rhsVNPair = ValueNumPair(); @@ -8315,11 +8263,6 @@ void Compiler::fgValueNumberBlockAssignment(GenTree* tree) { // Non-zero block init is very rare so we'll use a simple, unique VN here. initObjVN = vnStore->VNForExpr(compCurBB, lhsVarDsc->TypeGet()); - - // TODO-PhysicalVN: remove this pessimization, it was added to avoid diffs. - // There is no need to invalidate the whole local. - offset = 0; - storeSize = lhsLclSize; } rhsVNPair.SetBoth(initObjVN); @@ -8327,22 +8270,7 @@ void Compiler::fgValueNumberBlockAssignment(GenTree* tree) else { assert(tree->OperIsCopyBlkOp()); - rhsVNPair = vnStore->VNPNormalPair(rhs->gtVNPair); - - // TODO-PhysicalVN: with the physical VN scheme, we no longer need this check. - if (!fgValueNumberBlockAssignmentTypeCheck(lhsVarDsc, lhsFldSeq, rhs)) - { - // Invalidate the whole local. - rhsVNPair.SetBoth(vnStore->VNForExpr(compCurBB, lhsVarDsc->TypeGet())); - offset = 0; - storeSize = lhsLclSize; - } - // TODO-PhysicalVN: remove this quirk, it was added to avoid diffs. 
- else if (lhs->TypeIs(TYP_STRUCT) && (vnStore->TypeOfVN(rhsVNPair.GetLiberal()) != TYP_STRUCT)) - { - rhsVNPair.SetBoth(vnStore->VNForExpr(compCurBB, lhsVarDsc->TypeGet())); - } } fgValueNumberLocalStore(tree, lclVarTree, offset, storeSize, rhsVNPair); @@ -8388,81 +8316,6 @@ void Compiler::fgValueNumberBlockAssignment(GenTree* tree) tree->gtVNPair = vnStore->VNPWithExc(vnStore->VNPForVoid(), vnpExcSet); } -//------------------------------------------------------------------------ -// fgValueNumberBlockAssignmentTypeCheck: Checks if there is a struct reinterpretation that prevent VN propagation. -// -// Arguments: -// dstVarDsc - the descriptor for the local being assigned to -// dstFldSeq - the sequence of fields used for the assignment -// src - the source of the assignment, i. e. the RHS -// -// Return Value: -// Whether "src"'s exact type matches that of the destination location. -// -// Notes: -// TODO-PhysicalVN: with the physical VN scheme, this method is no longer needed. -// -bool Compiler::fgValueNumberBlockAssignmentTypeCheck(LclVarDsc* dstVarDsc, FieldSeqNode* dstFldSeq, GenTree* src) -{ - if (dstFldSeq == FieldSeqStore::NotAField()) - { - JITDUMP(" *** Missing field sequence info for Dst/LHS of COPYBLK\n"); - return false; - } - - var_types dstLocationType = TYP_UNDEF; - CORINFO_CLASS_HANDLE dstLocationStructHnd = NO_CLASS_HANDLE; - if (dstFldSeq == nullptr) - { - dstLocationType = dstVarDsc->TypeGet(); - if (dstLocationType == TYP_STRUCT) - { - dstLocationStructHnd = dstVarDsc->GetStructHnd(); - } - } - else - { - // Have to normalize as "eeGetFieldType" will return TYP_STRUCT for TYP_SIMD. - dstLocationType = eeGetFieldType(dstFldSeq->GetTail()->GetFieldHandle(), &dstLocationStructHnd); - if (dstLocationType == TYP_STRUCT) - { - dstLocationType = impNormStructType(dstLocationStructHnd); - } - } - - // This method is meant to handle TYP_STRUCT mismatches, bail early for anything else. - if (dstLocationType != src->TypeGet()) - { - JITDUMP(" *** Different types for Dst/Src of COPYBLK: %s != %s\n", varTypeName(dstLocationType), - varTypeName(src->TypeGet())); - return false; - } - - // They're equal, and they're primitives. Allow. - if (dstLocationType != TYP_STRUCT) - { - return true; - } - - CORINFO_CLASS_HANDLE srcValueStructHnd = gtGetStructHandleIfPresent(src); - if (srcValueStructHnd == NO_CLASS_HANDLE) - { - JITDUMP(" *** Missing struct handle for Src of COPYBLK\n"); - return false; - } - - assert((dstLocationStructHnd != NO_CLASS_HANDLE) && (srcValueStructHnd != NO_CLASS_HANDLE)); - - if (dstLocationStructHnd != srcValueStructHnd) - { - JITDUMP(" *** Different struct handles for Dst/Src of COPYBLK: %s != %s\n", - eeGetClassName(dstLocationStructHnd), eeGetClassName(srcValueStructHnd)); - return false; - } - - return true; -} - void Compiler::fgValueNumberTree(GenTree* tree) { genTreeOps oper = tree->OperGet(); @@ -8529,7 +8382,7 @@ void Compiler::fgValueNumberTree(GenTree* tree) { // Address-exposed locals are part of ByrefExposed. ValueNum addrVN = vnStore->VNForFunc(TYP_BYREF, VNF_PtrToLoc, vnStore->VNForIntCon(lclNum), - vnStore->VNForFieldSeq(nullptr)); + vnStore->VNForIntPtrCon(lcl->GetLclOffs())); ValueNum loadVN = fgValueNumberByrefExposedLoad(lcl->TypeGet(), addrVN); lcl->gtVNPair.SetBoth(loadVN); @@ -8570,10 +8423,8 @@ void Compiler::fgValueNumberTree(GenTree* tree) { unsigned lclNum = lclFld->GetLclNum(); - // TODO-ADDR: delete the "GetSize" check below once TYP_STRUCT LCL_FLD is supported. 
- // TODO-PhysicalVN: with the physical VN scheme, we no longer need the field sequence checks. - if ((lclFld->GetFieldSeq() == FieldSeqStore::NotAField()) || !lvaInSsa(lclFld->GetLclNum()) || - !lclFld->HasSsaName() || (lclFld->GetSize() == 0)) + // TODO-ADDR: delete the "GetSize" check once location nodes are no more. + if (!lvaInSsa(lclFld->GetLclNum()) || !lclFld->HasSsaName() || (lclFld->GetSize() == 0)) { lclFld->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, lclFld->TypeGet())); } @@ -8651,42 +8502,16 @@ void Compiler::fgValueNumberTree(GenTree* tree) else if (oper == GT_ADDR) { // We have special representations for byrefs to lvalues. - GenTree* arg = tree->AsOp()->gtOp1; - if (arg->OperIsLocal()) + GenTree* location = tree->AsUnOp()->gtGetOp1(); + if (location->OperIs(GT_LCL_VAR, GT_LCL_FLD)) { - FieldSeqNode* fieldSeq = nullptr; - ValueNum newVN = ValueNumStore::NoVN; - if (!lvaInSsa(arg->AsLclVarCommon()->GetLclNum()) || !arg->AsLclVarCommon()->HasSsaName()) - { - newVN = vnStore->VNForExpr(compCurBB, TYP_BYREF); - } - else if (arg->OperGet() == GT_LCL_FLD) - { - fieldSeq = arg->AsLclFld()->GetFieldSeq(); - if (fieldSeq == nullptr) - { - // Local field with unknown field seq -- not a precise pointer. - newVN = vnStore->VNForExpr(compCurBB, TYP_BYREF); - } - } - - if (newVN == ValueNumStore::NoVN) - { - // We may have a zero-offset field sequence on this ADDR. - FieldSeqNode* zeroOffsetFieldSeq = nullptr; - if (GetZeroOffsetFieldMap()->Lookup(tree, &zeroOffsetFieldSeq)) - { - fieldSeq = GetFieldSeqStore()->Append(fieldSeq, zeroOffsetFieldSeq); - } - - newVN = vnStore->VNForFunc(TYP_BYREF, VNF_PtrToLoc, - vnStore->VNForIntCon(arg->AsLclVarCommon()->GetLclNum()), - vnStore->VNForFieldSeq(fieldSeq)); - } - - tree->gtVNPair.SetBoth(newVN); + GenTreeLclVarCommon* lclNode = location->AsLclVarCommon(); + ValueNum addrVN = + vnStore->VNForFunc(TYP_BYREF, VNF_PtrToLoc, vnStore->VNForIntCon(lclNode->GetLclNum()), + vnStore->VNForIntPtrCon(lclNode->GetLclOffs())); + tree->gtVNPair.SetBoth(addrVN); // No exceptions for local addresses. } - else if ((arg->gtOper == GT_IND) || arg->OperIsBlk()) + else if ((location->gtOper == GT_IND) || location->OperIsBlk()) { // Usually the ADDR and IND just cancel out... // except when this GT_ADDR has a valid zero-offset field sequence @@ -8696,7 +8521,7 @@ void Compiler::fgValueNumberTree(GenTree* tree) FieldSeqNode* zeroOffsetFieldSeq = nullptr; if (GetZeroOffsetFieldMap()->Lookup(tree, &zeroOffsetFieldSeq)) { - ValueNum addrExtended = vnStore->ExtendPtrVN(arg->AsIndir()->Addr(), zeroOffsetFieldSeq, 0); + ValueNum addrExtended = vnStore->ExtendPtrVN(location->AsIndir()->Addr(), zeroOffsetFieldSeq, 0); if (addrExtended != ValueNumStore::NoVN) { // We don't care about lib/cons differences for addresses. @@ -8712,7 +8537,7 @@ void Compiler::fgValueNumberTree(GenTree* tree) { // They just cancel, so fetch the ValueNumber from the op1 of the GT_IND node. // - GenTree* addr = arg->AsIndir()->Addr(); + GenTree* addr = location->AsIndir()->Addr(); addrVNP = addr->gtVNPair; // For the CSE phase mark the address as GTF_DONT_CSE @@ -8720,12 +8545,12 @@ void Compiler::fgValueNumberTree(GenTree* tree) addr->gtFlags |= GTF_DONT_CSE; } - tree->gtVNPair = vnStore->VNPWithExc(addrVNP, vnStore->VNPExceptionSet(arg->gtVNPair)); + tree->gtVNPair = vnStore->VNPWithExc(addrVNP, vnStore->VNPExceptionSet(location->gtVNPair)); } else { // May be more cases to do here! But we'll punt for now. 
- tree->gtVNPair = vnStore->VNPUniqueWithExc(TYP_BYREF, vnStore->VNPExceptionSet(arg->gtVNPair)); + tree->gtVNPair = vnStore->VNPUniqueWithExc(TYP_BYREF, vnStore->VNPExceptionSet(location->gtVNPair)); } } else if ((oper == GT_IND) || GenTree::OperIsBlk(oper)) @@ -8819,35 +8644,20 @@ void Compiler::fgValueNumberTree(GenTree* tree) var_types loadType = tree->TypeGet(); ssize_t offset = 0; unsigned loadSize = tree->AsIndir()->Size(); - - FieldSeqNode* localFldSeq = nullptr; - VNFuncApp funcApp{VNF_COUNT}; + VNFuncApp funcApp{VNF_COUNT}; // TODO-1stClassStructs: delete layout-less "IND(struct)" nodes and the "loadSize == 0" condition. if (loadSize == 0) { tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, loadType)); } - else if (addr->IsLocalAddrExpr(this, &lclVarTree, &localFldSeq, &offset) && + else if (addr->DefinesLocalAddr(this, /* width */ 0, &lclVarTree, /* pIsEntire */ nullptr, &offset) && lvaInSsa(lclVarTree->GetLclNum()) && lclVarTree->HasSsaName()) { - unsigned ssaNum = lclVarTree->GetSsaNum(); - LclVarDsc* varDsc = lvaGetDesc(lclVarTree); + ValueNumPair lclVNPair = lvaGetDesc(lclVarTree)->GetPerSsaData(lclVarTree->GetSsaNum())->m_vnPair; + unsigned lclSize = lvaLclExactSize(lclVarTree->GetLclNum()); - // TODO-PhysicalVN: with the physical VN scheme, we no longer need the field sequence checks. - // TODO-PhysicalVN: use "DefinesLocalAddr" here for consistency with the store code. - if ((localFldSeq == FieldSeqStore::NotAField()) || (localFldSeq == nullptr)) - { - tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, tree->TypeGet())); - } - else - { - ValueNumPair lclVNPair = varDsc->GetPerSsaData(ssaNum)->m_vnPair; - unsigned lclSize = lvaLclExactSize(lclVarTree->GetLclNum()); - offset = offset + lclVarTree->GetLclOffs(); - - tree->gtVNPair = vnStore->VNPairForLoad(lclVNPair, lclSize, tree->TypeGet(), offset, loadSize); - } + tree->gtVNPair = vnStore->VNPairForLoad(lclVNPair, lclSize, tree->TypeGet(), offset, loadSize); } else if (vnStore->GetVNFunc(addrNvnp.GetLiberal(), &funcApp) && (funcApp.m_func == VNF_PtrToStatic)) { @@ -11023,7 +10833,6 @@ void Compiler::fgDebugCheckValueNumberedTree(GenTree* tree) FieldSeqNode* fullFldSeq; switch (vnFunc.m_func) { - case VNF_PtrToLoc: case VNF_PtrToStatic: fullFldSeq = vnStore->FieldSeqVNToFieldSeq(vnFunc.m_args[1]); break; diff --git a/src/coreclr/jit/valuenumfuncs.h b/src/coreclr/jit/valuenumfuncs.h index b5e23ca851372..340eabe4fc1e0 100644 --- a/src/coreclr/jit/valuenumfuncs.h +++ b/src/coreclr/jit/valuenumfuncs.h @@ -16,7 +16,7 @@ ValueNumFuncDef(PhiDef, 3, false, false, false) // Args: 0: local va ValueNumFuncDef(PhiMemoryDef, 2, false, false, false) // Args: 0: VN for basic block pointer, 1: VN of definition ValueNumFuncDef(Phi, 2, false, false, false) // A phi function. Only occurs as arg of PhiDef or PhiMemoryDef. Arguments are SSA numbers of var being defined. -ValueNumFuncDef(PtrToLoc, 2, false, true, false) // Pointer (byref) to a local variable. Args: VN's of: 0: var num, 1: FieldSeq. +ValueNumFuncDef(PtrToLoc, 2, false, true, false) // Pointer (byref) to a local variable. Args: VN's of: 0: local's number, 1: offset. ValueNumFuncDef(PtrToArrElem, 4, false, false, false) // Pointer (byref) to an array element. Args: 0: array elem type eq class var_types value, VN's of: 1: array, 2: index, 3: "ArrElemOffset" VN. 
ValueNumFuncDef(ArrElemOffset, 2, false, false, false) // Args: 0: (VN of) the field sequence, 1: (VN of) the offset ValueNumFuncDef(PtrToStatic, 3, false, true, false) // Pointer (byref) to a static variable (or possibly a field thereof, if the static variable is a struct). From c7c19460d3fa3496c0f40ac98f1c1de6633131cd Mon Sep 17 00:00:00 2001 From: SingleAccretion Date: Tue, 3 May 2022 00:35:17 +0300 Subject: [PATCH 2/6] Delete field sequences from VNF_PtrToArrElem --- src/coreclr/jit/valuenum.cpp | 114 ++++++++------------------------ src/coreclr/jit/valuenumfuncs.h | 3 +- 2 files changed, 30 insertions(+), 87 deletions(-) diff --git a/src/coreclr/jit/valuenum.cpp b/src/coreclr/jit/valuenum.cpp index 5c7bb5705555f..5f7c02485b160 100644 --- a/src/coreclr/jit/valuenum.cpp +++ b/src/coreclr/jit/valuenum.cpp @@ -4569,13 +4569,8 @@ ValueNum ValueNumStore::ExtendPtrVN(GenTree* opA, FieldSeqNode* fldSeq, ssize_t } else if (funcApp.m_func == VNF_PtrToArrElem) { - VNFuncApp offsFunc; - GetVNFunc(funcApp.m_args[3], &offsFunc); - ValueNum fieldSeqVN = FieldSeqVNAppend(offsFunc.m_args[0], fldSeq); - ValueNum offsetVN = VNForIntPtrCon(ConstantValue(offsFunc.m_args[1]) + offset); - res = VNForFunc(TYP_BYREF, VNF_PtrToArrElem, funcApp.m_args[0], funcApp.m_args[1], funcApp.m_args[2], - VNForFunc(TYP_I_IMPL, VNF_ArrElemOffset, fieldSeqVN, offsetVN)); + VNForIntPtrCon(ConstantValue(funcApp.m_args[3]) + offset)); } if (res != NoVN) { @@ -4678,17 +4673,7 @@ void Compiler::fgValueNumberArrayElemLoad(GenTree* loadTree, VNFuncApp* addrFunc CORINFO_CLASS_HANDLE elemTypeEq = CORINFO_CLASS_HANDLE(vnStore->ConstantValue(addrFunc->m_args[0])); ValueNum arrVN = addrFunc->m_args[1]; ValueNum inxVN = addrFunc->m_args[2]; - VNFuncApp offsFunc; - vnStore->GetVNFunc(addrFunc->m_args[3], &offsFunc); - FieldSeqNode* fieldSeq = vnStore->FieldSeqVNToFieldSeq(offsFunc.m_args[0]); - ssize_t offset = vnStore->ConstantValue(offsFunc.m_args[1]); - - // TODO-PhysicalVN: with the physical VN scheme, we no longer need the field sequence checks. - if (fieldSeq == FieldSeqStore::NotAField()) - { - loadTree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, loadTree->TypeGet())); - return; - } + ssize_t offset = vnStore->ConstantValue(addrFunc->m_args[3]); // The VN inputs are required to be non-exceptional values. assert(arrVN == vnStore->VNNormalValue(arrVN)); @@ -4735,10 +4720,7 @@ void Compiler::fgValueNumberArrayElemStore(GenTree* storeNode, VNFuncApp* addrFu CORINFO_CLASS_HANDLE elemTypeEq = CORINFO_CLASS_HANDLE(vnStore->ConstantValue(addrFunc->m_args[0])); ValueNum arrVN = addrFunc->m_args[1]; ValueNum inxVN = addrFunc->m_args[2]; - VNFuncApp offsFunc; - vnStore->GetVNFunc(addrFunc->m_args[3], &offsFunc); - FieldSeqNode* fldSeq = vnStore->FieldSeqVNToFieldSeq(offsFunc.m_args[0]); - ssize_t offset = vnStore->ConstantValue(offsFunc.m_args[1]); + ssize_t offset = vnStore->ConstantValue(addrFunc->m_args[3]); bool invalidateArray = false; var_types elemType = DecodeElemType(elemTypeEq); @@ -4752,66 +4734,42 @@ void Compiler::fgValueNumberArrayElemStore(GenTree* storeNode, VNFuncApp* addrFu ValueNum hAtArrTypeAtArr = vnStore->VNForMapSelect(VNK_Liberal, TYP_MEM, hAtArrType, arrVN); JITDUMP(" GcHeap[elemTypeEq][array: " FMT_VN "] is " FMT_VN "\n", arrVN, hAtArrTypeAtArr); - ValueNum newWholeElem = ValueNumStore::NoVN; - ValueNum newValAtArr = ValueNumStore::NoVN; - ValueNum newValAtArrType = ValueNumStore::NoVN; + unsigned elemSize = (elemType == TYP_STRUCT) ? 
info.compCompHnd->getClassSize(elemTypeEq) : genTypeSize(elemType); - // TODO-PhysicalVN: with the physical VN scheme, we no longer need the field sequence checks. - if (fldSeq == FieldSeqStore::NotAField()) - { - // This doesn't represent a proper array access - JITDUMP(" *** NotAField sequence encountered in fgValueNumberArrayElemStore\n"); + // This is the value that should be stored at "arr[inx]". + ValueNum newWholeElem = ValueNumStore::NoVN; - // Store a new unique value for newValAtArrType - newValAtArrType = vnStore->VNForExpr(compCurBB, TYP_MEM); - invalidateArray = true; + if (vnStore->LoadStoreIsEntire(elemSize, offset, storeSize)) + { + // For memory locations (as opposed to locals), we do not normalize types. + newWholeElem = value; } else { - unsigned elemSize = - (elemType == TYP_STRUCT) ? info.compCompHnd->getClassSize(elemTypeEq) : genTypeSize(elemType); - - // This is the value that should be stored at "arr[inx]". - if (vnStore->LoadStoreIsEntire(elemSize, offset, storeSize)) - { - // For memory locations (as opposed to locals), we do not normalize types. - newWholeElem = value; - } - else - { - ValueNum oldWholeElem = vnStore->VNForMapSelect(VNK_Liberal, elemType, hAtArrTypeAtArr, inxVN); - JITDUMP(" GcHeap[elemTypeEq][array][index: " FMT_VN "] is " FMT_VN "\n", inxVN, oldWholeElem); + ValueNum oldWholeElem = vnStore->VNForMapSelect(VNK_Liberal, elemType, hAtArrTypeAtArr, inxVN); + JITDUMP(" GcHeap[elemTypeEq][array][index: " FMT_VN "] is " FMT_VN "\n", inxVN, oldWholeElem); - newWholeElem = vnStore->VNForStore(oldWholeElem, elemSize, offset, storeSize, value); - } - - // TODO-PhysicalVN: with the physical VN scheme, we no longer need the type check below. - var_types arrElemFldType = (fldSeq != nullptr) ? eeGetFieldType(fldSeq->GetTail()->GetFieldHandle()) : elemType; - - if ((storeNode->gtGetOp1()->TypeGet() != arrElemFldType) || (newWholeElem == ValueNumStore::NoVN)) - { - // Mismatched types: Store between different types (indType into array of arrElemFldType) - JITDUMP(" *** Mismatched types in fgValueNumberArrayElemStore\n"); - - // Store a new unique value for newValAtArrType - newValAtArrType = vnStore->VNForExpr(compCurBB, TYP_MEM); - invalidateArray = true; - } + newWholeElem = vnStore->VNForStore(oldWholeElem, elemSize, offset, storeSize, value); } - if (!invalidateArray) + if (newWholeElem != ValueNumStore::NoVN) { JITDUMP(" GcHeap[elemTypeEq][array][index: " FMT_VN "] = " FMT_VN ":\n", inxVN, newWholeElem); - newValAtArr = vnStore->VNForMapStore(hAtArrTypeAtArr, inxVN, newWholeElem); + ValueNum newValAtArr = vnStore->VNForMapStore(hAtArrTypeAtArr, inxVN, newWholeElem); JITDUMP(" GcHeap[elemTypeEq][array: " FMT_VN "] = " FMT_VN ":\n", arrVN, newValAtArr); - newValAtArrType = vnStore->VNForMapStore(hAtArrType, arrVN, newValAtArr); - } + ValueNum newValAtArrType = vnStore->VNForMapStore(hAtArrType, arrVN, newValAtArr); - JITDUMP(" GcHeap[elemTypeEq: " FMT_VN "] = " FMT_VN ":\n", elemTypeEqVN, newValAtArrType); - ValueNum newHeapVN = vnStore->VNForMapStore(fgCurMemoryVN[GcHeap], elemTypeEqVN, newValAtArrType); + JITDUMP(" GcHeap[elemTypeEq: " FMT_VN "] = " FMT_VN ":\n", elemTypeEqVN, newValAtArrType); + ValueNum newHeapVN = vnStore->VNForMapStore(fgCurMemoryVN[GcHeap], elemTypeEqVN, newValAtArrType); - recordGcHeapStore(storeNode, newHeapVN DEBUGARG("array element store")); + recordGcHeapStore(storeNode, newHeapVN DEBUGARG("array element store")); + } + else + { + // An out-of-bounds store: invalidate the whole heap, for simplicity. 
+ fgMutateGcHeap(storeNode DEBUGARG("out-of-bounds array element store")); + } } //------------------------------------------------------------------------ @@ -9124,19 +9082,11 @@ void Compiler::fgValueNumberArrIndexAddr(GenTreeArrAddr* arrAddr) JITDUMP(" VNForHandle(arrElemType: %s) is " FMT_VN "\n", (elemType == TYP_STRUCT) ? eeGetClassName(elemStructType) : varTypeName(elemType), elemTypeEqVN); - ValueNum arrVN = vnStore->VNNormalValue(arr->GetVN(VNK_Liberal)); - inxVN = vnStore->VNNormalValue(inxVN); - - // Look up if we have any zero-offset sequences... - assert(!GetZeroOffsetFieldMap()->Lookup(arrAddr->Addr())); + ValueNum arrVN = vnStore->VNNormalValue(arr->GetVN(VNK_Liberal)); + inxVN = vnStore->VNNormalValue(inxVN); + ValueNum offsetVN = vnStore->VNForIntPtrCon(0); - FieldSeqNode* fldSeq = nullptr; - GetZeroOffsetFieldMap()->Lookup(arrAddr, &fldSeq); - ValueNum fldSeqVN = vnStore->VNForFieldSeq(fldSeq); - ValueNum offsetVN = vnStore->VNForIntPtrCon(0); - ValueNum elemOffsVN = vnStore->VNForFunc(TYP_I_IMPL, VNF_ArrElemOffset, fldSeqVN, offsetVN); - - ValueNum arrAddrVN = vnStore->VNForFunc(TYP_BYREF, VNF_PtrToArrElem, elemTypeEqVN, arrVN, inxVN, elemOffsVN); + ValueNum arrAddrVN = vnStore->VNForFunc(TYP_BYREF, VNF_PtrToArrElem, elemTypeEqVN, arrVN, inxVN, offsetVN); ValueNumPair arrAddrVNP = ValueNumPair(arrAddrVN, arrAddrVN); arrAddr->gtVNPair = vnStore->VNPWithExc(arrAddrVNP, vnStore->VNPExceptionSet(arrAddr->Addr()->gtVNPair)); } @@ -10837,12 +10787,6 @@ void Compiler::fgDebugCheckValueNumberedTree(GenTree* tree) fullFldSeq = vnStore->FieldSeqVNToFieldSeq(vnFunc.m_args[1]); break; - case VNF_PtrToArrElem: - VNFuncApp offsFunc; - vnStore->GetVNFunc(vnFunc.m_args[3], &offsFunc); - fullFldSeq = vnStore->FieldSeqVNToFieldSeq(offsFunc.m_args[0]); - break; - default: continue; } diff --git a/src/coreclr/jit/valuenumfuncs.h b/src/coreclr/jit/valuenumfuncs.h index 340eabe4fc1e0..2bc7332afa4f1 100644 --- a/src/coreclr/jit/valuenumfuncs.h +++ b/src/coreclr/jit/valuenumfuncs.h @@ -17,8 +17,7 @@ ValueNumFuncDef(PhiMemoryDef, 2, false, false, false) // Args: 0: VN for b ValueNumFuncDef(Phi, 2, false, false, false) // A phi function. Only occurs as arg of PhiDef or PhiMemoryDef. Arguments are SSA numbers of var being defined. ValueNumFuncDef(PtrToLoc, 2, false, true, false) // Pointer (byref) to a local variable. Args: VN's of: 0: local's number, 1: offset. -ValueNumFuncDef(PtrToArrElem, 4, false, false, false) // Pointer (byref) to an array element. Args: 0: array elem type eq class var_types value, VN's of: 1: array, 2: index, 3: "ArrElemOffset" VN. -ValueNumFuncDef(ArrElemOffset, 2, false, false, false) // Args: 0: (VN of) the field sequence, 1: (VN of) the offset +ValueNumFuncDef(PtrToArrElem, 4, false, false, false) // Pointer (byref) to an array element. Args: 0: array elem type eq class var_types value, VN's of: 1: array, 2: index, 3: offset. ValueNumFuncDef(PtrToStatic, 3, false, true, false) // Pointer (byref) to a static variable (or possibly a field thereof, if the static variable is a struct). // Args: 0: (VN of) the box's address if the static is "boxed", // 1: (VN of) the field sequence, of which the first element is the static itself. From 1e924acf06f5e12f4899b2b991a25af714221208 Mon Sep 17 00:00:00 2001 From: SingleAccretion Date: Fri, 6 May 2022 15:45:24 +0300 Subject: [PATCH 3/6] Clean up "DefinesLocalAddr" Move the responsibility of determining "entireness" to its only caller that needed to do that: "DefinesLocal". Add function headers. No diffs. 
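To make the reworked contract concrete, here is a minimal usage sketch (editorial illustration only, not part of the change; "ExampleStoreToLocal" is a hypothetical caller, and the JIT types are assumed from the existing headers):

    // After this change, "DefinesLocalAddr" only reports the defined local and the
    // accumulated offset; "DefinesLocal" itself derives "entireness" from the store
    // size and the local's (normalize-on-store adjusted) size.
    void ExampleStoreToLocal(Compiler* comp, GenTree* store)
    {
        GenTreeLclVarCommon* lclVarTree = nullptr;
        bool                 isEntire   = false;
        ssize_t              offset     = 0; // must be pre-initialized to zero

        if (store->DefinesLocal(comp, &lclVarTree, &isEntire, &offset))
        {
            // "lclVarTree" is the local being stored to, "offset" is relative to the
            // start of that local, and "isEntire" is true only when the store
            // overwrites the whole local.
        }
    }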
---
 src/coreclr/jit/gentree.cpp | 187 ++++++++++++++++++---------------
 src/coreclr/jit/gentree.h | 15 +--
 src/coreclr/jit/liveness.cpp | 11 +-
 src/coreclr/jit/morphblock.cpp | 4 +-
 src/coreclr/jit/valuenum.cpp | 12 +--
 5 files changed, 119 insertions(+), 110 deletions(-)

diff --git a/src/coreclr/jit/gentree.cpp b/src/coreclr/jit/gentree.cpp
index dd3a8a56d20e6..b69afc4542e2f 100644
--- a/src/coreclr/jit/gentree.cpp
+++ b/src/coreclr/jit/gentree.cpp
@@ -16002,48 +16002,70 @@ bool GenTree::IsPhiDefn()
 // Return Value:
 //    Returns "true" iff 'this' is a GT_LCL_FLD or GT_STORE_LCL_FLD on which the type
 //    is not the same size as the type of the GT_LCL_VAR
-
+//
 bool GenTree::IsPartialLclFld(Compiler* comp)
 {
-    return ((gtOper == GT_LCL_FLD) &&
-            (comp->lvaTable[this->AsLclVarCommon()->GetLclNum()].lvExactSize != genTypeSize(gtType)));
+    return OperIs(GT_LCL_FLD, GT_STORE_LCL_FLD) && (comp->lvaGetDesc(AsLclFld())->lvExactSize != AsLclFld()->GetSize());
 }
+//------------------------------------------------------------------------
+// DefinesLocal: Does "this" define a local?
+//
+// Recognizes "ASG" stores. Also recognizes "STORE_OBJ/BLK".
+//
+// Arguments:
+//    comp        - the compiler instance
+//    pLclVarTree - [out] parameter for the local representing the definition
+//    pIsEntire   - optional [out] parameter for whether the store represents
+//                  a "full" definition (overwrites the entire variable)
+//    pOffset     - optional [out] parameter for the offset, relative to the
+//                  local, at which the store is performed
+//
+// Return Value:
+//    Whether "this" represents a store to a local variable.
+//
+// Notes:
+//    This function is contractually bound to recognize a superset of stores
+//    that "LocalAddressVisitor" recognizes, as it is used to detect which
+//    trees can define tracked locals.
+//
 bool GenTree::DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire, ssize_t* pOffset)
 {
-    GenTreeBlk* blkNode = nullptr;
+    assert((pOffset == nullptr) || (*pOffset == 0));
+
+    GenTreeLclVarCommon* lclVarTree = nullptr;
+    unsigned storeSize = 0;
+    ssize_t offset = 0;
+
     if (OperIs(GT_ASG))
     {
-        if (AsOp()->gtOp1->IsLocal())
+        GenTree* lhs = AsOp()->gtGetOp1();
+
+        // Return early for the common case.
+ // + if (lhs->OperIs(GT_LCL_VAR, GT_LCL_FLD)) { - GenTreeLclVarCommon* lclVarTree = AsOp()->gtOp1->AsLclVarCommon(); - *pLclVarTree = lclVarTree; + *pLclVarTree = lhs->AsLclVarCommon(); if (pIsEntire != nullptr) { - if (lclVarTree->IsPartialLclFld(comp)) - { - *pIsEntire = false; - } - else - { - *pIsEntire = true; - } + *pIsEntire = !lhs->IsPartialLclFld(comp); } + if (pOffset != nullptr) { - *pOffset = AsOp()->gtOp1->AsLclVarCommon()->GetLclOffs(); + *pOffset = lhs->AsLclVarCommon()->GetLclOffs(); } + return true; } - else if (AsOp()->gtOp1->OperGet() == GT_IND) + + if (lhs->OperIsIndir() && lhs->AsIndir()->Addr()->DefinesLocalAddr(&lclVarTree, &offset)) { - GenTree* indArg = AsOp()->gtOp1->AsOp()->gtOp1; - return indArg->DefinesLocalAddr(comp, genTypeSize(AsOp()->gtOp1->TypeGet()), pLclVarTree, pIsEntire, - pOffset); + storeSize = lhs->AsIndir()->Size(); } - else if (AsOp()->gtOp1->OperIsBlk()) + else { - blkNode = AsOp()->gtOp1->AsBlk(); + return false; } } else if (OperIs(GT_CALL)) @@ -16054,41 +16076,71 @@ bool GenTree::DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bo return false; } - unsigned size = comp->typGetObjLayout(AsCall()->gtRetClsHnd)->GetSize(); - return retBufArg->DefinesLocalAddr(comp, size, pLclVarTree, pIsEntire, pOffset); + if (!retBufArg->DefinesLocalAddr(&lclVarTree, &offset)) + { + return false; + } + + storeSize = comp->typGetObjLayout(AsCall()->gtRetClsHnd)->GetSize(); + } + else if (OperIs(GT_STORE_BLK, GT_STORE_OBJ) && AsBlk()->Addr()->DefinesLocalAddr(&lclVarTree, &offset)) + { + storeSize = AsBlk()->Size(); } - else if (OperIsBlk()) + else { - blkNode = this->AsBlk(); + return false; } - if (blkNode != nullptr) + + assert(lclVarTree != nullptr); + *pLclVarTree = lclVarTree; + + if (pIsEntire != nullptr) { - GenTree* destAddr = blkNode->Addr(); - unsigned width = blkNode->Size(); - // Do we care about whether this assigns the entire variable? - if (pIsEntire != nullptr && blkNode->OperIs(GT_STORE_DYN_BLK)) + if (offset == 0) { - GenTree* blockWidth = blkNode->AsStoreDynBlk()->gtDynamicSize; - if (blockWidth->IsCnsIntOrI()) + unsigned lclSize = comp->lvaLclExactSize(lclVarTree->GetLclNum()); + if (comp->lvaGetDesc(lclVarTree)->lvNormalizeOnStore()) { - assert(blockWidth->AsIntConCommon()->FitsInI32()); - width = static_cast(blockWidth->AsIntConCommon()->IconValue()); - - if (width == 0) - { - return false; - } + // It's normalize on store, so use the full storage width -- writing to low bytes won't + // necessarily yield a normalized value. + lclSize = genTypeSize(TYP_INT); } + + *pIsEntire = storeSize == lclSize; } + else + { + *pIsEntire = false; + } + } - return destAddr->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire, pOffset); + if (pOffset != nullptr) + { + *pOffset = offset; } - // Otherwise... - return false; + + return true; } -bool GenTree::DefinesLocalAddr( - Compiler* comp, unsigned width, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire, ssize_t* pOffset) +//------------------------------------------------------------------------ +// DefinesLocalAddr: Does "this" represent a local address tree? +// +// Arguments: +// pLclVarTree - [out] parameter for the local node +// pOffset - optional [out] parameter for the offset (relative to the +// local itself) that this tree computes. The caller must +// initialize this to zero. +// +// Return Value: +// Whether "this" is a LCL_VAR|FLD_ADDR-equivalent tree. 
+// +// Notes: +// This function is contractually bound to recognize a superset of trees +// that "LocalAddressVisitor" recognizes, as it is used by "DefinesLocal" +// to detect stores to tracked locals. +// +bool GenTree::DefinesLocalAddr(GenTreeLclVarCommon** pLclVarTree, ssize_t* pOffset) { if (OperIs(GT_ADDR, GT_LCL_VAR_ADDR, GT_LCL_FLD_ADDR)) { @@ -16102,32 +16154,10 @@ bool GenTree::DefinesLocalAddr( { GenTreeLclVarCommon* addrArgLcl = addrArg->AsLclVarCommon(); *pLclVarTree = addrArgLcl; - unsigned lclOffset = addrArgLcl->GetLclOffs(); - - if (pIsEntire != nullptr) - { - if (lclOffset != 0) - { - // We aren't updating the bytes at [0..lclOffset-1] so *pIsEntire should be set to false - *pIsEntire = false; - } - else - { - unsigned lclNum = addrArgLcl->GetLclNum(); - unsigned varWidth = comp->lvaLclExactSize(lclNum); - if (comp->lvaTable[lclNum].lvNormalizeOnStore()) - { - // It's normalize on store, so use the full storage width -- writing to low bytes won't - // necessarily yield a normalized value. - varWidth = genTypeStSz(var_types(comp->lvaTable[lclNum].lvType)) * sizeof(int); - } - *pIsEntire = (varWidth == width); - } - } if (pOffset != nullptr) { - *pOffset += lclOffset; + *pOffset += addrArgLcl->GetLclOffs(); } return true; @@ -16135,7 +16165,7 @@ bool GenTree::DefinesLocalAddr( else if (addrArg->OperGet() == GT_IND) { // A GT_ADDR of a GT_IND can both be optimized away, recurse using the child of the GT_IND - return addrArg->AsOp()->gtOp1->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire, pOffset); + return addrArg->AsIndir()->Addr()->DefinesLocalAddr(pLclVarTree, pOffset); } } else if (OperGet() == GT_ADD) @@ -16147,10 +16177,7 @@ bool GenTree::DefinesLocalAddr( *pOffset += AsOp()->gtOp1->AsIntCon()->IconValue(); } - // If we just adding a zero then we allow an IsEntire match against width - // otherwise we change width to zero to disallow an IsEntire Match - return AsOp()->gtOp2->DefinesLocalAddr(comp, AsOp()->gtOp1->IsIntegralConst(0) ? width : 0, pLclVarTree, - pIsEntire, pOffset); + return AsOp()->gtOp2->DefinesLocalAddr(pLclVarTree, pOffset); } else if (AsOp()->gtOp2->IsCnsIntOrI()) { @@ -16159,10 +16186,7 @@ bool GenTree::DefinesLocalAddr( *pOffset += AsOp()->gtOp2->AsIntCon()->IconValue(); } - // If we just adding a zero then we allow an IsEntire match against width - // otherwise we change width to zero to disallow an IsEntire Match - return AsOp()->gtOp1->DefinesLocalAddr(comp, AsOp()->gtOp2->IsIntegralConst(0) ? width : 0, pLclVarTree, - pIsEntire, pOffset); + return AsOp()->gtOp1->DefinesLocalAddr(pLclVarTree, pOffset); } } // Post rationalization we could have GT_IND(GT_LEA(..)) trees. @@ -16178,11 +16202,10 @@ bool GenTree::DefinesLocalAddr( GenTree* index = AsOp()->gtOp2; if (index != nullptr) { - assert(!index->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire, pOffset)); + assert(!index->DefinesLocalAddr(pLclVarTree, pOffset)); } #endif // DEBUG - // base GenTree* base = AsAddrMode()->Base(); if (base != nullptr) { @@ -16191,9 +16214,10 @@ bool GenTree::DefinesLocalAddr( *pOffset += AsAddrMode()->Offset(); } - return base->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire, pOffset); + return base->DefinesLocalAddr(pLclVarTree, pOffset); } } + // Otherwise... 
return false; } @@ -17757,9 +17781,8 @@ GenTree* Compiler::gtCallGetDefinedRetBufLclAddr(GenTreeCall* call) node = node->gtSkipReloadOrCopy(); #ifdef DEBUG - unsigned size = typGetObjLayout(call->gtRetClsHnd)->GetSize(); GenTreeLclVarCommon* lcl; - assert(node->DefinesLocalAddr(this, size, &lcl, nullptr) && lvaGetDesc(lcl)->lvHiddenBufferStructArg); + assert(node->DefinesLocalAddr(&lcl) && lvaGetDesc(lcl)->lvHiddenBufferStructArg); #endif return node; diff --git a/src/coreclr/jit/gentree.h b/src/coreclr/jit/gentree.h index dc99768afb027..d6911485d77be 100644 --- a/src/coreclr/jit/gentree.h +++ b/src/coreclr/jit/gentree.h @@ -1909,16 +1909,13 @@ struct GenTree // is not the same size as the type of the GT_LCL_VAR. bool IsPartialLclFld(Compiler* comp); - // Returns "true" iff "this" defines a local variable. Requires "comp" to be the - // current compilation. If returns "true", sets "*pLclVarTree" to the - // tree for the local that is defined, and, if "pIsEntire" is non-null, sets "*pIsEntire" to - // true or false, depending on whether the assignment writes to the entirety of the local - // variable, or just a portion of it. bool DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire = nullptr, ssize_t* pOffset = nullptr); + bool DefinesLocalAddr(GenTreeLclVarCommon** pLclVarTree, ssize_t* pOffset = nullptr); + const GenTreeLclVarCommon* IsLocalAddrExpr() const; GenTreeLclVarCommon* IsLocalAddrExpr() { @@ -1940,14 +1937,6 @@ struct GenTree bool IsArrayAddr(GenTreeArrAddr** pArrAddr); - // Assumes that "this" occurs in a context where it is being dereferenced as the LHS of an assignment-like - // statement (assignment, initblk, or copyblk). The "width" should be the number of bytes copied by the - // operation. Returns "true" if "this" is an address of (or within) - // a local variable; sets "*pLclVarTree" to that local variable instance; and, if "pIsEntire" is non-null, - // sets "*pIsEntire" to true if this assignment writes the full width of the local. - bool DefinesLocalAddr( - Compiler* comp, unsigned width, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire, ssize_t* pOffset = nullptr); - // These are only used for dumping. // The GetRegNum() is only valid in LIR, but the dumping methods are not easily // modified to check this. diff --git a/src/coreclr/jit/liveness.cpp b/src/coreclr/jit/liveness.cpp index 0c0667551b393..ea9567ad1baab 100644 --- a/src/coreclr/jit/liveness.cpp +++ b/src/coreclr/jit/liveness.cpp @@ -243,18 +243,17 @@ void Compiler::fgPerNodeLocalVarLiveness(GenTree* tree) // Otherwise, we treat it as a use here. 
if ((tree->gtFlags & GTF_IND_ASG_LHS) == 0) { - GenTreeLclVarCommon* dummyLclVarTree = nullptr; - bool dummyIsEntire = false; - GenTree* addrArg = tree->AsOp()->gtOp1->gtEffectiveVal(/*commaOnly*/ true); - if (!addrArg->DefinesLocalAddr(this, /*width doesn't matter*/ 0, &dummyLclVarTree, &dummyIsEntire)) + GenTreeLclVarCommon* lclVarTree = nullptr; + GenTree* addrArg = tree->AsOp()->gtOp1->gtEffectiveVal(/*commaOnly*/ true); + if (!addrArg->DefinesLocalAddr(&lclVarTree)) { fgCurMemoryUse |= memoryKindSet(GcHeap, ByrefExposed); } else { // Defines a local addr - assert(dummyLclVarTree != nullptr); - fgMarkUseDef(dummyLclVarTree->AsLclVarCommon()); + assert(lclVarTree != nullptr); + fgMarkUseDef(lclVarTree->AsLclVarCommon()); } } break; diff --git a/src/coreclr/jit/morphblock.cpp b/src/coreclr/jit/morphblock.cpp index 2cd0741ea0f3b..cd0aa4552231d 100644 --- a/src/coreclr/jit/morphblock.cpp +++ b/src/coreclr/jit/morphblock.cpp @@ -242,7 +242,7 @@ void MorphInitBlockHelper::PrepareDst() } noway_assert(dstAddr->TypeIs(TYP_BYREF, TYP_I_IMPL)); - if (dstAddr->DefinesLocalAddr(m_comp, /* width */ 0, &m_dstLclNode, /* pIsEntire */ nullptr, &m_dstAddOff)) + if (dstAddr->DefinesLocalAddr(&m_dstLclNode, &m_dstAddOff)) { // Note that lclNode can be a field, like `BLK<4> struct(ADD(ADDR(LCL_FLD int), CNST_INT))`. m_dstAddOff = m_dstAddOff - m_dstLclNode->GetLclOffs(); @@ -589,7 +589,7 @@ void MorphCopyBlockHelper::PrepareSrc() } else if (m_src->OperIsIndir()) { - if (m_src->AsOp()->gtOp1->DefinesLocalAddr(m_comp, /* width */ 0, &m_srcLclNode, /* pIsEntire */ nullptr, &m_srcAddOff)) + if (m_src->AsOp()->gtOp1->DefinesLocalAddr(&m_srcLclNode, &m_srcAddOff)) { m_srcAddOff = m_srcAddOff - m_srcLclNode->GetLclOffs(); m_srcLclNum = m_srcLclNode->GetLclNum(); diff --git a/src/coreclr/jit/valuenum.cpp b/src/coreclr/jit/valuenum.cpp index 5f7c02485b160..334f2cd931447 100644 --- a/src/coreclr/jit/valuenum.cpp +++ b/src/coreclr/jit/valuenum.cpp @@ -8103,7 +8103,6 @@ void Compiler::fgValueNumberAssignment(GenTreeOp* tree) bool argIsVNFunc = vnStore->GetVNFunc(vnStore->VNNormalValue(argVN), &funcApp); GenTreeLclVarCommon* lclVarTree = nullptr; - bool isEntire = false; ssize_t offset = 0; unsigned storeSize = lhs->AsIndir()->Size(); GenTree* baseAddr = nullptr; @@ -8126,7 +8125,7 @@ void Compiler::fgValueNumberAssignment(GenTreeOp* tree) assert(fldSeq != FieldSeqStore::NotAField()); fgValueNumberFieldStore(tree, baseAddr, fldSeq, offset, storeSize, rhsVNPair.GetLiberal()); } - else if (arg->DefinesLocalAddr(this, storeSize, &lclVarTree, &isEntire, &offset)) + else if (arg->DefinesLocalAddr(&lclVarTree, &offset)) { fgValueNumberLocalStore(tree, lclVarTree, offset, storeSize, rhsVNPair); } @@ -8609,8 +8608,8 @@ void Compiler::fgValueNumberTree(GenTree* tree) { tree->gtVNPair.SetBoth(vnStore->VNForExpr(compCurBB, loadType)); } - else if (addr->DefinesLocalAddr(this, /* width */ 0, &lclVarTree, /* pIsEntire */ nullptr, &offset) && - lvaInSsa(lclVarTree->GetLclNum()) && lclVarTree->HasSsaName()) + else if (addr->DefinesLocalAddr(&lclVarTree, &offset) && lvaInSsa(lclVarTree->GetLclNum()) && + lclVarTree->HasSsaName()) { ValueNumPair lclVNPair = lvaGetDesc(lclVarTree)->GetPerSsaData(lclVarTree->GetSsaNum())->m_vnPair; unsigned lclSize = lvaLclExactSize(lclVarTree->GetLclNum()); @@ -9733,11 +9732,10 @@ void Compiler::fgValueNumberCall(GenTreeCall* call) LclVarDsc* hiddenArgVarDsc = lvaGetDesc(hiddenArgLclNum); unsigned lclDefSsaNum = GetSsaNumForLocalVarDef(lclVarTree); + // TODO-Bug: call "fgValueNumberLocalStore" here, 
currently this code fails to update + // the heap state if the local was address-exposed. if (lclDefSsaNum != SsaConfig::RESERVED_SSA_NUM) { - // TODO-CQ: for now, we assign a simple "new, unique" VN to the whole local. We should - // instead look at the field sequence (if one is present) and be more precise if the - // store is to a field. ValueNumPair newHiddenArgLclVNPair = ValueNumPair(); newHiddenArgLclVNPair.SetBoth(vnStore->VNForExpr(compCurBB, hiddenArgVarDsc->TypeGet())); hiddenArgVarDsc->GetPerSsaData(lclDefSsaNum)->m_vnPair = newHiddenArgLclVNPair; From 1562d2a4d30bfa3f9bd1b2309e5d3317fb6c0aec Mon Sep 17 00:00:00 2001 From: SingleAccretion Date: Fri, 6 May 2022 16:32:53 +0300 Subject: [PATCH 4/6] Also clean up block morphing a little No diffs. --- src/coreclr/jit/morphblock.cpp | 61 +++++++++++++--------------------- 1 file changed, 24 insertions(+), 37 deletions(-) diff --git a/src/coreclr/jit/morphblock.cpp b/src/coreclr/jit/morphblock.cpp index cd0aa4552231d..0d7e06eb444f3 100644 --- a/src/coreclr/jit/morphblock.cpp +++ b/src/coreclr/jit/morphblock.cpp @@ -192,8 +192,9 @@ void MorphInitBlockHelper::PrepareDst() if (m_dst->IsLocal()) { - m_dstLclNode = m_dst->AsLclVarCommon(); - m_dstVarDsc = m_comp->lvaGetDesc(m_dstLclNode); + m_dstLclNode = m_dst->AsLclVarCommon(); + m_dstVarDsc = m_comp->lvaGetDesc(m_dstLclNode); + m_dstLclOffset = m_dstLclNode->GetLclOffs(); if (m_dst->OperIs(GT_LCL_VAR)) { @@ -219,41 +220,31 @@ void MorphInitBlockHelper::PrepareDst() else { assert(m_dst->OperIs(GT_LCL_FLD) && !m_dst->TypeIs(TYP_STRUCT)); - GenTreeLclFld* destFld = m_dst->AsLclFld(); - m_blockSize = genTypeSize(destFld->TypeGet()); + m_blockSize = m_dst->AsLclFld()->GetSize(); } } else { assert(m_dst == m_dst->gtEffectiveVal() && "the commas were skipped in MorphBlock"); - assert(m_dst->OperIs(GT_IND, GT_BLK, GT_OBJ)); + assert(m_dst->OperIs(GT_IND, GT_BLK, GT_OBJ) && (!m_dst->OperIs(GT_IND) || !m_dst->TypeIs(TYP_STRUCT))); GenTree* dstAddr = m_dst->AsIndir()->Addr(); - if (m_dst->OperGet() == GT_IND) - { - assert(m_dst->TypeGet() != TYP_STRUCT); - m_blockSize = genTypeSize(m_dst); - } - else - { - assert(m_dst->OperIsBlk()); - GenTreeBlk* blk = m_dst->AsBlk(); - m_blockSize = blk->Size(); - } - noway_assert(dstAddr->TypeIs(TYP_BYREF, TYP_I_IMPL)); - if (dstAddr->DefinesLocalAddr(&m_dstLclNode, &m_dstAddOff)) + + ssize_t dstLclOffset = 0; + if (dstAddr->DefinesLocalAddr(&m_dstLclNode, &dstLclOffset)) { // Note that lclNode can be a field, like `BLK<4> struct(ADD(ADDR(LCL_FLD int), CNST_INT))`. 
- m_dstAddOff = m_dstAddOff - m_dstLclNode->GetLclOffs(); - m_dstVarDsc = m_comp->lvaGetDesc(m_dstLclNode); + m_dstVarDsc = m_comp->lvaGetDesc(m_dstLclNode); + m_dstLclOffset = static_cast(dstLclOffset); } + + m_blockSize = m_dst->AsIndir()->Size(); } if (m_dstLclNode != nullptr) { - m_dstLclNum = m_dstLclNode->GetLclNum(); - m_dstLclOffset = m_dstLclNode->GetLclOffs(); + m_dstLclNum = m_dstLclNode->GetLclNum(); // Kill everything about m_dstLclNum (and its field locals) if (m_comp->optLocalAssertionProp && (m_comp->optAssertionCount > 0)) @@ -532,7 +523,6 @@ class MorphCopyBlockHelper : public MorphInitBlockHelper unsigned m_srcLclOffset = 0; bool m_srcSingleLclVarAsg = false; GenTree* m_srcAddr = nullptr; - ssize_t m_srcAddOff = 0; bool m_dstDoFldAsg = false; bool m_srcDoFldAsg = false; @@ -584,25 +574,26 @@ void MorphCopyBlockHelper::PrepareSrc() if (m_src->IsLocal()) { - m_srcLclNode = m_src->AsLclVarCommon(); - m_srcLclNum = m_srcLclNode->GetLclNum(); + m_srcLclNode = m_src->AsLclVarCommon(); + m_srcLclOffset = m_srcLclNode->GetLclOffs(); } else if (m_src->OperIsIndir()) { - if (m_src->AsOp()->gtOp1->DefinesLocalAddr(&m_srcLclNode, &m_srcAddOff)) + ssize_t srcLclOffset = 0; + if (m_src->AsIndir()->Addr()->DefinesLocalAddr(&m_srcLclNode, &srcLclOffset)) { - m_srcAddOff = m_srcAddOff - m_srcLclNode->GetLclOffs(); - m_srcLclNum = m_srcLclNode->GetLclNum(); + m_srcLclOffset = static_cast(srcLclOffset); } else { - m_srcAddr = m_src->AsOp()->gtOp1; + m_srcAddr = m_src->AsIndir()->Addr(); } } - if (m_srcLclNum != BAD_VAR_NUM) + + if (m_srcLclNode != nullptr) { - m_srcLclOffset = m_srcLclNode->GetLclOffs(); - m_srcVarDsc = m_comp->lvaGetDesc(m_srcLclNum); + m_srcLclNum = m_srcLclNode->GetLclNum(); + m_srcVarDsc = m_comp->lvaGetDesc(m_srcLclNum); } } @@ -721,8 +712,7 @@ void MorphCopyBlockHelper::MorphStructCases() // Check to see if we are doing a copy to/from the same local block. // If so, morph it to a nop. - if ((m_dstVarDsc != nullptr) && (m_srcVarDsc == m_dstVarDsc) && (m_dstLclOffset == m_srcLclOffset) && - (m_dstAddOff == m_srcAddOff)) + if ((m_dstVarDsc != nullptr) && (m_srcVarDsc == m_dstVarDsc) && (m_dstLclOffset == m_srcLclOffset)) { JITDUMP("Self-copy; replaced with a NOP.\n"); m_transformationDecision = BlockTransformation::Nop; @@ -1198,7 +1188,6 @@ GenTree* MorphCopyBlockHelper::CopyFieldByField() } else { - assert(m_dstAddOff == 0); assert(dstAddrClone == nullptr); // If the dst was a struct type field "B" in a struct "A" then we add @@ -1321,8 +1310,6 @@ GenTree* MorphCopyBlockHelper::CopyFieldByField() } else { - assert(m_srcAddOff == 0); - // If the src was a struct type field "B" in a struct "A" then we add // add offset of ("B" in "A") + current offset in "B". unsigned totalOffset = m_srcLclOffset + fldOffset; From 836eac06f0424d564fcc054ebdd3a995b5a6d0aa Mon Sep 17 00:00:00 2001 From: SingleAccretion Date: Fri, 6 May 2022 23:15:12 +0300 Subject: [PATCH 5/6] Fix a potential bug with return buffer numbering Also address the TODO-CQ. --- src/coreclr/jit/valuenum.cpp | 22 +++++++--------------- 1 file changed, 7 insertions(+), 15 deletions(-) diff --git a/src/coreclr/jit/valuenum.cpp b/src/coreclr/jit/valuenum.cpp index 334f2cd931447..dddf059f0b1c7 100644 --- a/src/coreclr/jit/valuenum.cpp +++ b/src/coreclr/jit/valuenum.cpp @@ -9723,23 +9723,15 @@ void Compiler::fgValueNumberCall(GenTreeCall* call) // If the call generates a definition, because it uses "return buffer", then VN the local // as well. 
-    GenTreeLclVarCommon* lclVarTree;
-    if (call->DefinesLocal(this, &lclVarTree))
+    GenTreeLclVarCommon* lclVarTree = nullptr;
+    ssize_t offset = 0;
+    if (call->DefinesLocal(this, &lclVarTree, /* pIsEntire */ nullptr, &offset))
     {
-        assert((lclVarTree->gtFlags & GTF_VAR_DEF) != 0);
-
-        unsigned hiddenArgLclNum = lclVarTree->GetLclNum();
-        LclVarDsc* hiddenArgVarDsc = lvaGetDesc(hiddenArgLclNum);
-        unsigned lclDefSsaNum = GetSsaNumForLocalVarDef(lclVarTree);
+        ValueNumPair storeValue;
+        storeValue.SetBoth(vnStore->VNForExpr(compCurBB, TYP_STRUCT));
+        unsigned storeSize = typGetObjLayout(call->gtRetClsHnd)->GetSize();

-        // TODO-Bug: call "fgValueNumberLocalStore" here, currently this code fails to update
-        // the heap state if the local was address-exposed.
-        if (lclDefSsaNum != SsaConfig::RESERVED_SSA_NUM)
-        {
-            ValueNumPair newHiddenArgLclVNPair = ValueNumPair();
-            newHiddenArgLclVNPair.SetBoth(vnStore->VNForExpr(compCurBB, hiddenArgVarDsc->TypeGet()));
-            hiddenArgVarDsc->GetPerSsaData(lclDefSsaNum)->m_vnPair = newHiddenArgLclVNPair;
-        }
+        fgValueNumberLocalStore(call, lclVarTree, offset, storeSize, storeValue);
     }
 }

From 9de70e9733a099bc7abc6053e4517b2981aad37e Mon Sep 17 00:00:00 2001
From: SingleAccretion
Date: Sat, 14 May 2022 23:08:11 +0300
Subject: [PATCH 6/6] Fix another "VN maintenance in morph" bug

Exposed by the more aggressive hoisting.
---
 src/coreclr/jit/morph.cpp | 1 +
 1 file changed, 1 insertion(+)

diff --git a/src/coreclr/jit/morph.cpp b/src/coreclr/jit/morph.cpp
index 1e1ced6a9b221..47d7e0852d1d2 100644
--- a/src/coreclr/jit/morph.cpp
+++ b/src/coreclr/jit/morph.cpp
@@ -11764,6 +11764,7 @@ GenTree* Compiler::fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac)
                 // 'temp' because a GT_ADDR always marks it for its operand.
                 temp->gtFlags &= ~GTF_DONT_CSE;
                 temp->gtFlags |= (tree->gtFlags & GTF_DONT_CSE);
+                temp->SetVNsFromNode(tree);

                 if (op1->OperGet() == GT_ADD)
                 {