diff --git a/src/jit/assertionprop.cpp b/src/jit/assertionprop.cpp index f75a78907d1d..5c9e71abab80 100644 --- a/src/jit/assertionprop.cpp +++ b/src/jit/assertionprop.cpp @@ -31,7 +31,7 @@ Compiler::fgWalkResult Compiler::optAddCopiesCallback(GenTree** pTree, fgWalkDat GenTree* op1 = tree->gtOp.gtOp1; Compiler* comp = data->compiler; - if ((op1->gtOper == GT_LCL_VAR) && (op1->gtLclVarCommon.gtLclNum == comp->optAddCopyLclNum)) + if ((op1->gtOper == GT_LCL_VAR) && (op1->gtLclVarCommon.GetLclNum() == comp->optAddCopyLclNum)) { comp->optAddCopyAsgnNode = tree; return WALK_ABORT; @@ -450,7 +450,7 @@ void Compiler::optAddCopies() GenTree* op1 = tree->gtOp.gtOp1; noway_assert(tree && op1 && tree->OperIs(GT_ASG) && (op1->gtOper == GT_LCL_VAR) && - (op1->gtLclVarCommon.gtLclNum == lclNum)); + (op1->gtLclVarCommon.GetLclNum() == lclNum)); /* Assign the old expression into the new temp */ @@ -886,7 +886,7 @@ AssertionIndex Compiler::optCreateAssertion(GenTree* op1, goto DONE_ASSERTION; // Don't make an assertion } - unsigned lclNum = op1->gtLclVarCommon.gtLclNum; + unsigned lclNum = op1->gtLclVarCommon.GetLclNum(); noway_assert(lclNum < lvaCount); LclVarDsc* lclVar = &lvaTable[lclNum]; @@ -965,7 +965,7 @@ AssertionIndex Compiler::optCreateAssertion(GenTree* op1, // else if (op1->gtOper == GT_LCL_VAR) { - unsigned lclNum = op1->gtLclVarCommon.gtLclNum; + unsigned lclNum = op1->gtLclVarCommon.GetLclNum(); noway_assert(lclNum < lvaCount); LclVarDsc* lclVar = &lvaTable[lclNum]; @@ -1138,7 +1138,7 @@ AssertionIndex Compiler::optCreateAssertion(GenTree* op1, goto DONE_ASSERTION; // Don't make an assertion } - unsigned lclNum2 = op2->gtLclVarCommon.gtLclNum; + unsigned lclNum2 = op2->gtLclVarCommon.GetLclNum(); noway_assert(lclNum2 < lvaCount); LclVarDsc* lclVar2 = &lvaTable[lclNum2]; @@ -1278,7 +1278,7 @@ AssertionIndex Compiler::optCreateAssertion(GenTree* op1, // if (op1->gtOper == GT_LCL_VAR) { - unsigned lclNum = op1->gtLclVarCommon.gtLclNum; + unsigned lclNum = op1->gtLclVarCommon.GetLclNum(); noway_assert(lclNum < lvaCount); // If the local variable is not in SSA then bail @@ -2593,7 +2593,7 @@ GenTree* Compiler::optConstantAssertionProp(AssertionDsc* curAssertion, GenTree* tree, Statement* stmt DEBUGARG(AssertionIndex index)) { - unsigned lclNum = tree->gtLclVarCommon.gtLclNum; + unsigned lclNum = tree->gtLclVarCommon.GetLclNum(); if (lclNumIsCSE(lclNum)) { @@ -3238,9 +3238,9 @@ GenTree* Compiler::optAssertionPropGlobal_RelOp(ASSERT_VALARG_TP assertions, Gen if (verbose) { printf("\nVN relop based copy assertion prop in " FMT_BB ":\n", compCurBB->bbNum); - printf("Assertion index=#%02u: V%02d.%02d %s V%02d.%02d\n", index, op1->gtLclVar.gtLclNum, + printf("Assertion index=#%02u: V%02d.%02d %s V%02d.%02d\n", index, op1->gtLclVar.GetLclNum(), op1->gtLclVar.GetSsaNum(), (curAssertion->assertionKind == OAK_EQUAL) ? "==" : "!=", - op2->gtLclVar.gtLclNum, op2->gtLclVar.GetSsaNum()); + op2->gtLclVar.GetLclNum(), op2->gtLclVar.GetSsaNum()); gtDispTree(tree, nullptr, nullptr, true); } #endif @@ -3326,7 +3326,7 @@ GenTree* Compiler::optAssertionPropLocal_RelOp(ASSERT_VALARG_TP assertions, GenT } // Find an equal or not equal assertion about op1 var. 
- unsigned lclNum = op1->gtLclVarCommon.gtLclNum; + unsigned lclNum = op1->gtLclVarCommon.GetLclNum(); noway_assert(lclNum < lvaCount); AssertionIndex index = optLocalAssertionIsEqualOrNotEqual(op1Kind, lclNum, op2Kind, cnsVal, assertions); @@ -3425,7 +3425,7 @@ GenTree* Compiler::optAssertionProp_Cast(ASSERT_VALARG_TP assertions, GenTree* t AssertionIndex index = optAssertionIsSubrange(lcl, fromType, toType, assertions); if (index != NO_ASSERTION_INDEX) { - LclVarDsc* varDsc = &lvaTable[lcl->gtLclVarCommon.gtLclNum]; + LclVarDsc* varDsc = &lvaTable[lcl->gtLclVarCommon.GetLclNum()]; if (varDsc->lvNormalizeOnLoad() || varTypeIsLong(varDsc->TypeGet())) { // For normalize on load variables it must be a narrowing cast to remove diff --git a/src/jit/codegen.h b/src/jit/codegen.h index e2bcdf75fb3c..4d916349e0d4 100644 --- a/src/jit/codegen.h +++ b/src/jit/codegen.h @@ -1253,7 +1253,7 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX { return false; } - const LclVarDsc* varDsc = &compiler->lvaTable[tree->gtLclVarCommon.gtLclNum]; + const LclVarDsc* varDsc = &compiler->lvaTable[tree->gtLclVarCommon.GetLclNum()]; return (varDsc->lvIsRegCandidate()); } diff --git a/src/jit/codegenarm.cpp b/src/jit/codegenarm.cpp index 3c58e360dcac..e7ce9c897df0 100644 --- a/src/jit/codegenarm.cpp +++ b/src/jit/codegenarm.cpp @@ -966,14 +966,14 @@ void CodeGen::genCodeForLclVar(GenTreeLclVar* tree) // lcl_vars are not defs assert((tree->gtFlags & GTF_VAR_DEF) == 0); - bool isRegCandidate = compiler->lvaTable[tree->gtLclNum].lvIsRegCandidate(); + bool isRegCandidate = compiler->lvaTable[tree->GetLclNum()].lvIsRegCandidate(); // If this is a register candidate that has been spilled, genConsumeReg() will // reload it at the point of use. Otherwise, if it's not in a register, we load it here. 
if (!isRegCandidate && !(tree->gtFlags & GTF_SPILLED)) { - GetEmitter()->emitIns_R_S(ins_Load(tree->TypeGet()), emitTypeSize(tree), tree->gtRegNum, tree->gtLclNum, 0); + GetEmitter()->emitIns_R_S(ins_Load(tree->TypeGet()), emitTypeSize(tree), tree->gtRegNum, tree->GetLclNum(), 0); genProduceReg(tree); } } @@ -998,7 +998,7 @@ void CodeGen::genCodeForStoreLclFld(GenTreeLclFld* tree) // We must have a stack store with GT_STORE_LCL_FLD noway_assert(targetReg == REG_NA); - unsigned varNum = tree->gtLclNum; + unsigned varNum = tree->GetLclNum(); assert(varNum < compiler->lvaCount); LclVarDsc* varDsc = &(compiler->lvaTable[varNum]); @@ -1030,7 +1030,7 @@ void CodeGen::genCodeForStoreLclVar(GenTreeLclVar* tree) regNumber targetReg = tree->gtRegNum; emitter* emit = GetEmitter(); - unsigned varNum = tree->gtLclNum; + unsigned varNum = tree->GetLclNum(); assert(varNum < compiler->lvaCount); LclVarDsc* varDsc = &(compiler->lvaTable[varNum]); diff --git a/src/jit/codegenarm64.cpp b/src/jit/codegenarm64.cpp index ce0ac7dcea4f..96aafb54b01e 100644 --- a/src/jit/codegenarm64.cpp +++ b/src/jit/codegenarm64.cpp @@ -1828,7 +1828,7 @@ void CodeGen::genCodeForLclVar(GenTreeLclVar* tree) var_types targetType = tree->TypeGet(); emitter* emit = GetEmitter(); - unsigned varNum = tree->gtLclNum; + unsigned varNum = tree->GetLclNum(); assert(varNum < compiler->lvaCount); LclVarDsc* varDsc = &(compiler->lvaTable[varNum]); bool isRegCandidate = varDsc->lvIsRegCandidate(); @@ -1880,7 +1880,7 @@ void CodeGen::genCodeForStoreLclFld(GenTreeLclFld* tree) // We must have a stack store with GT_STORE_LCL_FLD noway_assert(targetReg == REG_NA); - unsigned varNum = tree->gtLclNum; + unsigned varNum = tree->GetLclNum(); assert(varNum < compiler->lvaCount); LclVarDsc* varDsc = &(compiler->lvaTable[varNum]); @@ -1926,7 +1926,7 @@ void CodeGen::genCodeForStoreLclVar(GenTreeLclVar* tree) regNumber targetReg = tree->gtRegNum; emitter* emit = GetEmitter(); - unsigned varNum = tree->gtLclNum; + unsigned varNum = tree->GetLclNum(); assert(varNum < compiler->lvaCount); LclVarDsc* varDsc = &(compiler->lvaTable[varNum]); @@ -2031,12 +2031,12 @@ void CodeGen::genSimpleReturn(GenTree* treeNode) if (op1->OperGet() == GT_LCL_VAR) { GenTreeLclVarCommon* lcl = op1->AsLclVarCommon(); - bool isRegCandidate = compiler->lvaTable[lcl->gtLclNum].lvIsRegCandidate(); + bool isRegCandidate = compiler->lvaTable[lcl->GetLclNum()].lvIsRegCandidate(); if (isRegCandidate && ((op1->gtFlags & GTF_SPILLED) == 0)) { // We may need to generate a zero-extending mov instruction to load the value from this GT_LCL_VAR - unsigned lclNum = lcl->gtLclNum; + unsigned lclNum = lcl->GetLclNum(); LclVarDsc* varDsc = &(compiler->lvaTable[lclNum]); var_types op1Type = genActualType(op1->TypeGet()); var_types lclType = genActualType(varDsc->TypeGet()); @@ -2571,7 +2571,7 @@ void CodeGen::genCodeForLoadPairOffset(regNumber dst, regNumber dst2, GenTree* b if (base->gtOper == GT_LCL_FLD_ADDR) offset += base->gtLclFld.gtLclOffs; - emit->emitIns_R_R_S_S(INS_ldp, EA_8BYTE, EA_8BYTE, dst, dst2, base->gtLclVarCommon.gtLclNum, offset); + emit->emitIns_R_R_S_S(INS_ldp, EA_8BYTE, EA_8BYTE, dst, dst2, base->gtLclVarCommon.GetLclNum(), offset); } else { @@ -2591,7 +2591,7 @@ void CodeGen::genCodeForStorePairOffset(regNumber src, regNumber src2, GenTree* if (base->gtOper == GT_LCL_FLD_ADDR) offset += base->gtLclFld.gtLclOffs; - emit->emitIns_S_S_R_R(INS_stp, EA_8BYTE, EA_8BYTE, src, src2, base->gtLclVarCommon.gtLclNum, offset); + emit->emitIns_S_S_R_R(INS_stp, EA_8BYTE, EA_8BYTE, src, src2, 
base->gtLclVarCommon.GetLclNum(), offset); } else { @@ -3301,10 +3301,10 @@ void CodeGen::genCodeForSwap(GenTreeOp* tree) assert(genIsRegCandidateLocal(tree->gtOp1) && genIsRegCandidateLocal(tree->gtOp2)); GenTreeLclVarCommon* lcl1 = tree->gtOp1->AsLclVarCommon(); - LclVarDsc* varDsc1 = &(compiler->lvaTable[lcl1->gtLclNum]); + LclVarDsc* varDsc1 = &(compiler->lvaTable[lcl1->GetLclNum()]); var_types type1 = varDsc1->TypeGet(); GenTreeLclVarCommon* lcl2 = tree->gtOp2->AsLclVarCommon(); - LclVarDsc* varDsc2 = &(compiler->lvaTable[lcl2->gtLclNum]); + LclVarDsc* varDsc2 = &(compiler->lvaTable[lcl2->GetLclNum()]); var_types type2 = varDsc2->TypeGet(); // We must have both int or both fp regs @@ -4712,7 +4712,7 @@ void CodeGen::genSIMDIntrinsicGetItem(GenTreeSIMD* simdNode) if (op1->OperIsLocal()) { - unsigned varNum = op1->gtLclVarCommon.gtLclNum; + unsigned varNum = op1->gtLclVarCommon.GetLclNum(); GetEmitter()->emitIns_R_S(ins, emitActualTypeSize(baseType), targetReg, varNum, offset); } @@ -4771,7 +4771,7 @@ void CodeGen::genSIMDIntrinsicGetItem(GenTreeSIMD* simdNode) assert(!op1->isUsedFromReg()); if (op1->OperIsLocal()) { - unsigned varNum = op1->gtLclVarCommon.gtLclNum; + unsigned varNum = op1->gtLclVarCommon.GetLclNum(); baseReg = simdNode->ExtractTempReg(); @@ -4926,7 +4926,7 @@ void CodeGen::genSIMDIntrinsicUpperSave(GenTreeSIMD* simdNode) { // This is not a normal spill; we'll spill it to the lclVar location. // The localVar must have a stack home. - unsigned varNum = op1->AsLclVarCommon()->gtLclNum; + unsigned varNum = op1->AsLclVarCommon()->GetLclNum(); LclVarDsc* varDsc = compiler->lvaGetDesc(varNum); assert(varDsc->lvOnFrame); // We want to store this to the upper 8 bytes of this localVar's home. @@ -4967,7 +4967,7 @@ void CodeGen::genSIMDIntrinsicUpperRestore(GenTreeSIMD* simdNode) assert(emitTypeSize(op1->TypeGet()) == 16); regNumber srcReg = simdNode->gtRegNum; regNumber lclVarReg = genConsumeReg(op1); - unsigned varNum = op1->AsLclVarCommon()->gtLclNum; + unsigned varNum = op1->AsLclVarCommon()->GetLclNum(); assert(lclVarReg != REG_NA); assert(srcReg != REG_NA); if (simdNode->gtFlags & GTF_SPILLED) @@ -5083,7 +5083,7 @@ void CodeGen::genStoreLclTypeSIMD12(GenTree* treeNode) assert((treeNode->OperGet() == GT_STORE_LCL_FLD) || (treeNode->OperGet() == GT_STORE_LCL_VAR)); unsigned offs = 0; - unsigned varNum = treeNode->gtLclVarCommon.gtLclNum; + unsigned varNum = treeNode->gtLclVarCommon.GetLclNum(); assert(varNum < compiler->lvaCount); if (treeNode->OperGet() == GT_STORE_LCL_FLD) diff --git a/src/jit/codegenarmarch.cpp b/src/jit/codegenarmarch.cpp index d968c5089d05..800412fa64a8 100644 --- a/src/jit/codegenarmarch.cpp +++ b/src/jit/codegenarmarch.cpp @@ -1210,7 +1210,7 @@ void CodeGen::genPutArgSplit(GenTreePutArgSplit* treeNode) if (varNode != nullptr) { - srcVarNum = varNode->gtLclNum; + srcVarNum = varNode->GetLclNum(); assert(srcVarNum < compiler->lvaCount); // handle promote situation @@ -1340,7 +1340,7 @@ void CodeGen::genMultiRegCallStoreToLocal(GenTree* treeNode) // Assumption: current implementation requires that a multi-reg // var in 'var = call' is flagged as lvIsMultiRegRet to prevent it from // being promoted. 
- unsigned lclNum = treeNode->AsLclVarCommon()->gtLclNum; + unsigned lclNum = treeNode->AsLclVarCommon()->GetLclNum(); LclVarDsc* varDsc = &(compiler->lvaTable[lclNum]); noway_assert(varDsc->lvIsMultiRegRet); @@ -1750,7 +1750,7 @@ void CodeGen::genCodeForLclFld(GenTreeLclFld* tree) emitAttr size = emitTypeSize(targetType); unsigned offs = tree->gtLclOffs; - unsigned varNum = tree->gtLclNum; + unsigned varNum = tree->GetLclNum(); assert(varNum < compiler->lvaCount); emit->emitIns_R_S(ins_Load(targetType), emitActualTypeSize(targetType), targetReg, varNum, offs); @@ -2106,7 +2106,7 @@ void CodeGen::genCodeForLoadOffset(instruction ins, emitAttr size, regNumber dst { if (base->gtOper == GT_LCL_FLD_ADDR) offset += base->gtLclFld.gtLclOffs; - emit->emitIns_R_S(ins, size, dst, base->gtLclVarCommon.gtLclNum, offset); + emit->emitIns_R_S(ins, size, dst, base->gtLclVarCommon.GetLclNum(), offset); } else { @@ -2125,7 +2125,7 @@ void CodeGen::genCodeForStoreOffset(instruction ins, emitAttr size, regNumber sr { if (base->gtOper == GT_LCL_FLD_ADDR) offset += base->gtLclFld.gtLclOffs; - emit->emitIns_S_R(ins, size, src, base->gtLclVarCommon.gtLclNum, offset); + emit->emitIns_S_R(ins, size, src, base->gtLclVarCommon.GetLclNum(), offset); } else { @@ -2233,7 +2233,7 @@ void CodeGen::genRegCopy(GenTree* treeNode) if ((lcl->gtFlags & GTF_VAR_DEATH) == 0 && (treeNode->gtFlags & GTF_VAR_DEATH) == 0) { - LclVarDsc* varDsc = &compiler->lvaTable[lcl->gtLclNum]; + LclVarDsc* varDsc = &compiler->lvaTable[lcl->GetLclNum()]; // If we didn't just spill it (in genConsumeReg, above), then update the register info if (varDsc->lvRegNum != REG_STK) @@ -2247,11 +2247,11 @@ void CodeGen::genRegCopy(GenTree* treeNode) #ifdef USING_VARIABLE_LIVE_RANGE // Report the home change for this variable - varLiveKeeper->siUpdateVariableLiveRange(varDsc, lcl->gtLclNum); + varLiveKeeper->siUpdateVariableLiveRange(varDsc, lcl->GetLclNum()); #endif // USING_VARIABLE_LIVE_RANGE - // The new location is going live - genUpdateRegLife(varDsc, /*isBorn*/ true, /*isDying*/ false DEBUGARG(treeNode)); + // The new location is going live + genUpdateRegLife(varDsc, /*isBorn*/ true, /*isDying*/ false DEBUGARG(treeNode)); } } } @@ -3602,7 +3602,7 @@ void CodeGen::genStructReturn(GenTree* treeNode) if (op1->OperGet() == GT_LCL_VAR) { GenTreeLclVarCommon* lclVar = op1->AsLclVarCommon(); - LclVarDsc* varDsc = &(compiler->lvaTable[lclVar->gtLclNum]); + LclVarDsc* varDsc = &(compiler->lvaTable[lclVar->GetLclNum()]); var_types lclType = genActualType(varDsc->TypeGet()); assert(varTypeIsStruct(lclType)); @@ -3627,7 +3627,7 @@ void CodeGen::genStructReturn(GenTree* treeNode) { var_types type = retTypeDesc.GetReturnRegType(i); regNumber reg = retTypeDesc.GetABIReturnReg(i); - GetEmitter()->emitIns_R_S(ins_Load(type), emitTypeSize(type), reg, lclVar->gtLclNum, offset); + GetEmitter()->emitIns_R_S(ins_Load(type), emitTypeSize(type), reg, lclVar->GetLclNum(), offset); offset += genTypeSize(type); } } diff --git a/src/jit/codegencommon.cpp b/src/jit/codegencommon.cpp index 64142215edd7..fad76523ffad 100644 --- a/src/jit/codegencommon.cpp +++ b/src/jit/codegencommon.cpp @@ -457,7 +457,7 @@ regMaskTP CodeGenInterface::genGetRegMask(GenTree* tree) assert(tree->gtOper == GT_LCL_VAR); regMaskTP regMask = RBM_NONE; - const LclVarDsc* varDsc = compiler->lvaTable + tree->gtLclVarCommon.gtLclNum; + const LclVarDsc* varDsc = compiler->lvaTable + tree->gtLclVarCommon.GetLclNum(); if (varDsc->lvPromoted) { for (unsigned i = varDsc->lvFieldLclStart; i < varDsc->lvFieldLclStart 
+ varDsc->lvFieldCnt; ++i) @@ -1142,7 +1142,7 @@ unsigned CodeGenInterface::InferStructOpSizeAlign(GenTree* op, unsigned* alignme } else if (op->gtOper == GT_LCL_VAR) { - unsigned varNum = op->gtLclVarCommon.gtLclNum; + unsigned varNum = op->gtLclVarCommon.GetLclNum(); LclVarDsc* varDsc = compiler->lvaTable + varNum; assert(varDsc->lvType == TYP_STRUCT); opSize = varDsc->lvSize(); diff --git a/src/jit/codegenlinear.cpp b/src/jit/codegenlinear.cpp index 63d9cb89c216..852844c59eac 100644 --- a/src/jit/codegenlinear.cpp +++ b/src/jit/codegenlinear.cpp @@ -807,7 +807,7 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX void CodeGen::genSpillVar(GenTree* tree) { - unsigned varNum = tree->gtLclVarCommon.gtLclNum; + unsigned varNum = tree->gtLclVarCommon.GetLclNum(); LclVarDsc* varDsc = &(compiler->lvaTable[varNum]); assert(varDsc->lvIsRegCandidate()); @@ -947,7 +947,7 @@ void CodeGen::genUnspillRegIfNeeded(GenTree* tree) unspillTree->gtFlags &= ~GTF_SPILLED; GenTreeLclVarCommon* lcl = unspillTree->AsLclVarCommon(); - LclVarDsc* varDsc = &compiler->lvaTable[lcl->gtLclNum]; + LclVarDsc* varDsc = &compiler->lvaTable[lcl->GetLclNum()]; // TODO-Cleanup: The following code could probably be further merged and cleaned up. #ifdef _TARGET_XARCH_ @@ -969,12 +969,13 @@ void CodeGen::genUnspillRegIfNeeded(GenTree* tree) assert(!varTypeIsGC(varDsc)); var_types spillType = genActualType(varDsc->lvType); unspillTree->gtType = spillType; - inst_RV_TT(ins_Load(spillType, compiler->isSIMDTypeLocalAligned(lcl->gtLclNum)), dstReg, unspillTree); + inst_RV_TT(ins_Load(spillType, compiler->isSIMDTypeLocalAligned(lcl->GetLclNum())), dstReg, + unspillTree); unspillTree->gtType = treeType; } else { - inst_RV_TT(ins_Load(treeType, compiler->isSIMDTypeLocalAligned(lcl->gtLclNum)), dstReg, unspillTree); + inst_RV_TT(ins_Load(treeType, compiler->isSIMDTypeLocalAligned(lcl->GetLclNum())), dstReg, unspillTree); } #elif defined(_TARGET_ARM64_) var_types targetType = unspillTree->gtType; @@ -983,7 +984,7 @@ void CodeGen::genUnspillRegIfNeeded(GenTree* tree) assert(!varTypeIsGC(varDsc)); targetType = genActualType(varDsc->lvType); } - instruction ins = ins_Load(targetType, compiler->isSIMDTypeLocalAligned(lcl->gtLclNum)); + instruction ins = ins_Load(targetType, compiler->isSIMDTypeLocalAligned(lcl->GetLclNum())); emitAttr attr = emitActualTypeSize(targetType); emitter* emit = GetEmitter(); @@ -991,7 +992,7 @@ void CodeGen::genUnspillRegIfNeeded(GenTree* tree) inst_RV_TT(ins, dstReg, unspillTree, 0, attr); #elif defined(_TARGET_ARM_) var_types targetType = unspillTree->gtType; - instruction ins = ins_Load(targetType, compiler->isSIMDTypeLocalAligned(lcl->gtLclNum)); + instruction ins = ins_Load(targetType, compiler->isSIMDTypeLocalAligned(lcl->GetLclNum())); emitAttr attr = emitTypeSize(targetType); // Load local variable from its home location. 
@@ -1025,14 +1026,14 @@ void CodeGen::genUnspillRegIfNeeded(GenTree* tree) if ((unspillTree->gtFlags & GTF_VAR_DEATH) == 0) { // Report the home change for this variable - varLiveKeeper->siUpdateVariableLiveRange(varDsc, lcl->gtLclNum); + varLiveKeeper->siUpdateVariableLiveRange(varDsc, lcl->GetLclNum()); } #endif // USING_VARIABLE_LIVE_RANGE #ifdef DEBUG if (VarSetOps::IsMember(compiler, gcInfo.gcVarPtrSetCur, varDsc->lvVarIndex)) { - JITDUMP("\t\t\t\t\t\t\tRemoving V%02u from gcVarPtrSetCur\n", lcl->gtLclNum); + JITDUMP("\t\t\t\t\t\t\tRemoving V%02u from gcVarPtrSetCur\n", lcl->GetLclNum()); } #endif // DEBUG VarSetOps::RemoveElemD(compiler, gcInfo.gcVarPtrSetCur, varDsc->lvVarIndex); @@ -1040,7 +1041,7 @@ void CodeGen::genUnspillRegIfNeeded(GenTree* tree) #ifdef DEBUG if (compiler->verbose) { - printf("\t\t\t\t\t\t\tV%02u in reg ", lcl->gtLclNum); + printf("\t\t\t\t\t\t\tV%02u in reg ", lcl->GetLclNum()); varDsc->PrintVarReg(); printf(" is becoming live "); compiler->printTreeID(unspillTree); @@ -1589,7 +1590,7 @@ void CodeGen::genConsumePutStructArgStk(GenTreePutArgStk* putArgNode, { offset = srcAddr->AsLclFld()->gtLclOffs; } - GetEmitter()->emitIns_R_S(INS_lea, EA_PTRSIZE, srcReg, lclNode->gtLclNum, offset); + GetEmitter()->emitIns_R_S(INS_lea, EA_PTRSIZE, srcReg, lclNode->GetLclNum(), offset); } else { @@ -1854,7 +1855,7 @@ void CodeGen::genProduceReg(GenTree* tree) { // Store local variable to its home location. // Ensure that lclVar stores are typed correctly. - unsigned varNum = tree->gtLclVarCommon.gtLclNum; + unsigned varNum = tree->gtLclVarCommon.GetLclNum(); assert(!compiler->lvaTable[varNum].lvNormalizeOnStore() || (tree->TypeGet() == genActualType(compiler->lvaTable[varNum].TypeGet()))); inst_TT_RV(ins_Store(tree->gtType, compiler->isSIMDTypeLocalAligned(varNum)), tree, tree->gtRegNum); @@ -2262,7 +2263,7 @@ void CodeGen::genStoreLongLclVar(GenTree* treeNode) emitter* emit = GetEmitter(); GenTreeLclVarCommon* lclNode = treeNode->AsLclVarCommon(); - unsigned lclNum = lclNode->gtLclNum; + unsigned lclNum = lclNode->GetLclNum(); LclVarDsc* varDsc = &(compiler->lvaTable[lclNum]); assert(varDsc->TypeGet() == TYP_LONG); assert(!varDsc->lvPromoted); diff --git a/src/jit/codegenxarch.cpp b/src/jit/codegenxarch.cpp index 2ab3d652eecd..1f0835c20c28 100644 --- a/src/jit/codegenxarch.cpp +++ b/src/jit/codegenxarch.cpp @@ -1180,7 +1180,7 @@ void CodeGen::genStructReturn(GenTree* treeNode) if (op1->OperGet() == GT_LCL_VAR) { GenTreeLclVarCommon* lclVar = op1->AsLclVarCommon(); - LclVarDsc* varDsc = &(compiler->lvaTable[lclVar->gtLclNum]); + LclVarDsc* varDsc = &(compiler->lvaTable[lclVar->GetLclNum()]); assert(varDsc->lvIsMultiRegRet); ReturnTypeDesc retTypeDesc; @@ -1238,7 +1238,7 @@ void CodeGen::genStructReturn(GenTree* treeNode) { var_types type = retTypeDesc.GetReturnRegType(i); regNumber reg = retTypeDesc.GetABIReturnReg(i); - GetEmitter()->emitIns_R_S(ins_Load(type), emitTypeSize(type), reg, lclVar->gtLclNum, offset); + GetEmitter()->emitIns_R_S(ins_Load(type), emitTypeSize(type), reg, lclVar->GetLclNum(), offset); offset += genTypeSize(type); } } @@ -1360,16 +1360,16 @@ void CodeGen::genFloatReturn(GenTree* treeNode) GenTree* op1 = treeNode->gtGetOp1(); // Spill the return value register from an XMM register to the stack, then load it on the x87 stack. // If it already has a home location, use that. Otherwise, we need a temp. 
- if (genIsRegCandidateLocal(op1) && compiler->lvaTable[op1->gtLclVarCommon.gtLclNum].lvOnFrame) + if (genIsRegCandidateLocal(op1) && compiler->lvaTable[op1->gtLclVarCommon.GetLclNum()].lvOnFrame) { - if (compiler->lvaTable[op1->gtLclVarCommon.gtLclNum].lvRegNum != REG_STK) + if (compiler->lvaTable[op1->gtLclVarCommon.GetLclNum()].lvRegNum != REG_STK) { op1->gtFlags |= GTF_SPILL; - inst_TT_RV(ins_Store(op1->gtType, compiler->isSIMDTypeLocalAligned(op1->gtLclVarCommon.gtLclNum)), op1, + inst_TT_RV(ins_Store(op1->gtType, compiler->isSIMDTypeLocalAligned(op1->gtLclVarCommon.GetLclNum())), op1, op1->gtRegNum); } // Now, load it to the fp stack. - GetEmitter()->emitIns_S(INS_fld, emitTypeSize(op1), op1->AsLclVarCommon()->gtLclNum, 0); + GetEmitter()->emitIns_S(INS_fld, emitTypeSize(op1), op1->AsLclVarCommon()->GetLclNum(), 0); } else { @@ -2060,7 +2060,7 @@ void CodeGen::genMultiRegCallStoreToLocal(GenTree* treeNode) // Assumption: current x64 Unix implementation requires that a multi-reg struct // var in 'var = call' is flagged as lvIsMultiRegRet to prevent it from // being struct promoted. - unsigned lclNum = treeNode->AsLclVarCommon()->gtLclNum; + unsigned lclNum = treeNode->AsLclVarCommon()->GetLclNum(); LclVarDsc* varDsc = &(compiler->lvaTable[lclNum]); noway_assert(varDsc->lvIsMultiRegRet); @@ -2171,7 +2171,7 @@ void CodeGen::genMultiRegCallStoreToLocal(GenTree* treeNode) // Assumption: current x86 implementation requires that a multi-reg long // var in 'var = call' is flagged as lvIsMultiRegRet to prevent it from // being promoted. - unsigned lclNum = treeNode->AsLclVarCommon()->gtLclNum; + unsigned lclNum = treeNode->AsLclVarCommon()->GetLclNum(); LclVarDsc* varDsc = &(compiler->lvaTable[lclNum]); noway_assert(varDsc->lvIsMultiRegRet); @@ -3138,7 +3138,7 @@ void CodeGen::genCodeForLoadOffset(instruction ins, emitAttr size, regNumber dst { offset += baseNode->gtLclFld.gtLclOffs; } - emit->emitIns_R_S(ins, size, dst, baseNode->gtLclVarCommon.gtLclNum, offset); + emit->emitIns_R_S(ins, size, dst, baseNode->gtLclVarCommon.GetLclNum(), offset); } else { @@ -3680,12 +3680,12 @@ void CodeGen::genCodeForCpObj(GenTreeObj* cpObjNode) bool isDstAddrLiveOut = false; if (genIsRegCandidateLocal(actualSrcAddr)) { - srcLclVarNum = actualSrcAddr->AsLclVarCommon()->gtLclNum; + srcLclVarNum = actualSrcAddr->AsLclVarCommon()->GetLclNum(); isSrcAddrLiveOut = ((actualSrcAddr->gtFlags & (GTF_VAR_DEATH | GTF_SPILL)) == 0); } if (genIsRegCandidateLocal(actualDstAddr)) { - dstLclVarNum = actualDstAddr->AsLclVarCommon()->gtLclNum; + dstLclVarNum = actualDstAddr->AsLclVarCommon()->GetLclNum(); isDstAddrLiveOut = ((actualDstAddr->gtFlags & (GTF_VAR_DEATH | GTF_SPILL)) == 0); } assert((actualSrcAddr->gtRegNum != REG_RSI) || !isSrcAddrLiveOut || @@ -4569,7 +4569,7 @@ void CodeGen::genCodeForLclFld(GenTreeLclFld* tree) emitAttr size = emitTypeSize(targetType); unsigned offs = tree->gtLclOffs; - unsigned varNum = tree->gtLclNum; + unsigned varNum = tree->GetLclNum(); assert(varNum < compiler->lvaCount); GetEmitter()->emitIns_R_S(ins_Load(targetType), size, targetReg, varNum, offs); @@ -4590,7 +4590,7 @@ void CodeGen::genCodeForLclVar(GenTreeLclVar* tree) // lcl_vars are not defs assert((tree->gtFlags & GTF_VAR_DEF) == 0); - bool isRegCandidate = compiler->lvaTable[tree->gtLclNum].lvIsRegCandidate(); + bool isRegCandidate = compiler->lvaTable[tree->GetLclNum()].lvIsRegCandidate(); // If this is a register candidate that has been spilled, genConsumeReg() will // reload it at the point of use. 
Otherwise, if it's not in a register, we load it here. @@ -4606,8 +4606,8 @@ void CodeGen::genCodeForLclVar(GenTreeLclVar* tree) } #endif // defined(FEATURE_SIMD) && defined(_TARGET_X86_) - GetEmitter()->emitIns_R_S(ins_Load(tree->TypeGet(), compiler->isSIMDTypeLocalAligned(tree->gtLclNum)), - emitTypeSize(tree), tree->gtRegNum, tree->gtLclNum, 0); + GetEmitter()->emitIns_R_S(ins_Load(tree->TypeGet(), compiler->isSIMDTypeLocalAligned(tree->GetLclNum())), + emitTypeSize(tree), tree->gtRegNum, tree->GetLclNum(), 0); genProduceReg(tree); } } @@ -4670,7 +4670,7 @@ void CodeGen::genCodeForStoreLclVar(GenTreeLclVar* tree) noway_assert(targetType != TYP_STRUCT); assert(!varTypeIsFloating(targetType) || (targetType == op1->TypeGet())); - unsigned lclNum = tree->gtLclNum; + unsigned lclNum = tree->GetLclNum(); LclVarDsc* varDsc = &(compiler->lvaTable[lclNum]); // Ensure that lclVar nodes are typed correctly. @@ -4980,7 +4980,7 @@ void CodeGen::genRegCopy(GenTree* treeNode) if ((lcl->gtFlags & GTF_VAR_DEATH) == 0 && (treeNode->gtFlags & GTF_VAR_DEATH) == 0) { - LclVarDsc* varDsc = &compiler->lvaTable[lcl->gtLclNum]; + LclVarDsc* varDsc = &compiler->lvaTable[lcl->GetLclNum()]; // If we didn't just spill it (in genConsumeReg, above), then update the register info if (varDsc->lvRegNum != REG_STK) @@ -4994,7 +4994,7 @@ void CodeGen::genRegCopy(GenTree* treeNode) #ifdef USING_VARIABLE_LIVE_RANGE // Report the home change for this variable - varLiveKeeper->siUpdateVariableLiveRange(varDsc, lcl->gtLclNum); + varLiveKeeper->siUpdateVariableLiveRange(varDsc, lcl->GetLclNum()); #endif // USING_VARIABLE_LIVE_RANGE // The new location is going live @@ -5177,10 +5177,10 @@ void CodeGen::genCodeForSwap(GenTreeOp* tree) assert(genIsRegCandidateLocal(tree->gtOp1) && genIsRegCandidateLocal(tree->gtOp2)); GenTreeLclVarCommon* lcl1 = tree->gtOp1->AsLclVarCommon(); - LclVarDsc* varDsc1 = &(compiler->lvaTable[lcl1->gtLclNum]); + LclVarDsc* varDsc1 = &(compiler->lvaTable[lcl1->GetLclNum()]); var_types type1 = varDsc1->TypeGet(); GenTreeLclVarCommon* lcl2 = tree->gtOp2->AsLclVarCommon(); - LclVarDsc* varDsc2 = &(compiler->lvaTable[lcl2->gtLclNum]); + LclVarDsc* varDsc2 = &(compiler->lvaTable[lcl2->GetLclNum()]); var_types type2 = varDsc2->TypeGet(); // We must have both int or both fp regs @@ -7368,7 +7368,7 @@ void CodeGen::genSSE41RoundOp(GenTreeOp* treeNode) case GT_LCL_VAR: { assert(srcNode->IsRegOptional() || - !compiler->lvaTable[srcNode->gtLclVar.gtLclNum].lvIsRegCandidate()); + !compiler->lvaTable[srcNode->gtLclVar.GetLclNum()].lvIsRegCandidate()); varNum = srcNode->AsLclVar()->GetLclNum(); offset = 0; @@ -8233,7 +8233,7 @@ void CodeGen::genPutStructArgStk(GenTreePutArgStk* putArgStk) { assert(srcAddr->OperIsLocalAddr()); - srcLclNum = srcAddr->AsLclVarCommon()->gtLclNum; + srcLclNum = srcAddr->AsLclVarCommon()->GetLclNum(); if (srcAddr->OperGet() == GT_LCL_FLD_ADDR) { srcLclOffset = srcAddr->AsLclFld()->gtLclOffs; diff --git a/src/jit/compiler.cpp b/src/jit/compiler.cpp index f1faed4fcc34..da628466faf1 100644 --- a/src/jit/compiler.cpp +++ b/src/jit/compiler.cpp @@ -9552,7 +9552,7 @@ int cLeafIR(Compiler* comp, GenTree* tree) case GT_LCL_VAR: case GT_LCL_VAR_ADDR: case GT_STORE_LCL_VAR: - lclNum = tree->gtLclVarCommon.gtLclNum; + lclNum = tree->gtLclVarCommon.GetLclNum(); comp->gtGetLclVarNameInfo(lclNum, &ilKind, &ilName, &ilNum); if (ilName != nullptr) { @@ -9616,7 +9616,7 @@ int cLeafIR(Compiler* comp, GenTree* tree) case GT_LCL_FLD_ADDR: case GT_STORE_LCL_FLD: - lclNum = tree->gtLclVarCommon.gtLclNum; + 
lclNum = tree->gtLclVarCommon.GetLclNum(); comp->gtGetLclVarNameInfo(lclNum, &ilKind, &ilName, &ilNum); if (ilName != nullptr) { diff --git a/src/jit/compiler.h b/src/jit/compiler.h index 6464df5ee1d0..b4ab2db0c404 100644 --- a/src/jit/compiler.h +++ b/src/jit/compiler.h @@ -7751,7 +7751,7 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX // type of an arg node is TYP_BYREF and a local node is TYP_SIMD or TYP_STRUCT. bool isSIMDTypeLocal(GenTree* tree) { - return tree->OperIsLocal() && lvaTable[tree->AsLclVarCommon()->gtLclNum].lvSIMDType; + return tree->OperIsLocal() && lvaTable[tree->AsLclVarCommon()->GetLclNum()].lvSIMDType; } // Returns true if the lclVar is an opaque SIMD type. @@ -7775,7 +7775,7 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX return varTypeIsSIMD(tree->gtGetOp1()); case GT_LCL_VAR_ADDR: - return lvaTable[tree->AsLclVarCommon()->gtLclNum].lvSIMDType; + return lvaTable[tree->AsLclVarCommon()->GetLclNum()].lvSIMDType; default: return isSIMDTypeLocal(tree); @@ -7798,7 +7798,7 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX { if (isSIMDTypeLocal(tree)) { - return lvaTable[tree->AsLclVarCommon()->gtLclNum].lvBaseType; + return lvaTable[tree->AsLclVarCommon()->GetLclNum()].lvBaseType; } return TYP_UNKNOWN; @@ -8148,7 +8148,7 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX // Is this Local node a SIMD local? bool lclVarIsSIMDType(GenTreeLclVarCommon* lclVarTree) { - return lclVarIsSIMDType(lclVarTree->gtLclNum); + return lclVarIsSIMDType(lclVarTree->GetLclNum()); } // Returns true if the TYP_SIMD locals on stack are aligned at their diff --git a/src/jit/compiler.hpp b/src/jit/compiler.hpp index 75e960cc3a56..74b834f0a48a 100644 --- a/src/jit/compiler.hpp +++ b/src/jit/compiler.hpp @@ -1186,7 +1186,7 @@ inline GenTree* Compiler::gtNewFieldRef(var_types typ, CORINFO_FIELD_HANDLE fldH if (obj != nullptr && obj->OperGet() == GT_ADDR && varTypeIsStruct(obj->gtOp.gtOp1) && obj->gtOp.gtOp1->OperGet() == GT_LCL_VAR) { - unsigned lclNum = obj->gtOp.gtOp1->gtLclVarCommon.gtLclNum; + unsigned lclNum = obj->gtOp.gtOp1->gtLclVarCommon.GetLclNum(); lvaTable[lclNum].lvFieldAccessed = 1; #if defined(_TARGET_AMD64_) || defined(_TARGET_ARM64_) // These structs are passed by reference; we should probably be able to treat these @@ -1852,7 +1852,7 @@ inline VARSET_VALRET_TP Compiler::lvaStmtLclMask(Statement* stmt) continue; } - varNum = tree->gtLclVarCommon.gtLclNum; + varNum = tree->gtLclVarCommon.GetLclNum(); assert(varNum < lvaCount); varDsc = lvaTable + varNum; @@ -3351,7 +3351,7 @@ inline void Compiler::LoopDsc::VERIFY_lpIterTree() inline unsigned Compiler::LoopDsc::lpIterVar() { VERIFY_lpIterTree(); - return lpIterTree->gtOp.gtOp1->gtLclVarCommon.gtLclNum; + return lpIterTree->gtOp.gtOp1->gtLclVarCommon.GetLclNum(); } //----------------------------------------------------------------------------- @@ -3487,7 +3487,7 @@ inline unsigned Compiler::LoopDsc::lpVarLimit() GenTree* limit = lpLimit(); assert(limit->OperGet() == GT_LCL_VAR); - return limit->gtLclVarCommon.gtLclNum; + return limit->gtLclVarCommon.GetLclNum(); } //----------------------------------------------------------------------------- @@ -3503,7 +3503,7 @@ inline bool Compiler::LoopDsc::lpArrLenLimit(Compiler* comp, ArrIndex* index) // Check if we have a.length or a[i][j].length if (limit->gtArrLen.ArrRef()->gtOper == GT_LCL_VAR) { - index->arrLcl = 
limit->gtArrLen.ArrRef()->gtLclVarCommon.gtLclNum; + index->arrLcl = limit->gtArrLen.ArrRef()->gtLclVarCommon.GetLclNum(); index->rank = 0; return true; } @@ -3828,7 +3828,8 @@ inline bool Compiler::impIsThis(GenTree* obj) } else { - return ((obj != nullptr) && (obj->gtOper == GT_LCL_VAR) && lvaIsOriginalThisArg(obj->gtLclVarCommon.gtLclNum)); + return ((obj != nullptr) && (obj->gtOper == GT_LCL_VAR) && + lvaIsOriginalThisArg(obj->gtLclVarCommon.GetLclNum())); } } @@ -4158,7 +4159,7 @@ ValueNum Compiler::GetUseAsgDefVNOrTreeVN(GenTree* op) unsigned Compiler::GetSsaNumForLocalVarDef(GenTree* lcl) { // Address-taken variables don't have SSA numbers. - if (!lvaInSsa(lcl->AsLclVarCommon()->gtLclNum)) + if (!lvaInSsa(lcl->AsLclVarCommon()->GetLclNum())) { return SsaConfig::RESERVED_SSA_NUM; } diff --git a/src/jit/copyprop.cpp b/src/jit/copyprop.cpp index f053c55f3627..dc9dda228933 100644 --- a/src/jit/copyprop.cpp +++ b/src/jit/copyprop.cpp @@ -37,7 +37,7 @@ void Compiler::optBlockCopyPropPopStacks(BasicBlock* block, LclNumToGenTreePtrSt { continue; } - unsigned lclNum = tree->gtLclVarCommon.gtLclNum; + unsigned lclNum = tree->gtLclVarCommon.GetLclNum(); if (!lvaInSsa(lclNum)) { continue; @@ -63,7 +63,7 @@ void Compiler::optDumpCopyPropStack(LclNumToGenTreePtrStack* curSsaName) for (LclNumToGenTreePtrStack::KeyIterator iter = curSsaName->Begin(); !iter.Equal(curSsaName->End()); ++iter) { GenTree* node = iter.GetValue()->Top(); - JITDUMP("%d-[%06d]:V%02u ", iter.Get(), dspTreeID(node), node->AsLclVarCommon()->gtLclNum); + JITDUMP("%d-[%06d]:V%02u ", iter.Get(), dspTreeID(node), node->AsLclVarCommon()->GetLclNum()); } JITDUMP("}\n\n"); } @@ -354,7 +354,7 @@ void Compiler::optBlockCopyProp(BasicBlock* block, LclNumToGenTreePtrStack* curS // if (optIsSsaLocal(tree) && (tree->gtFlags & GTF_VAR_DEF)) { - VarSetOps::AddElemD(this, optCopyPropKillSet, lvaTable[tree->gtLclVarCommon.gtLclNum].lvVarIndex); + VarSetOps::AddElemD(this, optCopyPropKillSet, lvaTable[tree->gtLclVarCommon.GetLclNum()].lvVarIndex); } } @@ -366,7 +366,7 @@ void Compiler::optBlockCopyProp(BasicBlock* block, LclNumToGenTreePtrStack* curS continue; } - unsigned lclNum = tree->gtLclVarCommon.gtLclNum; + unsigned lclNum = tree->gtLclVarCommon.GetLclNum(); // As we encounter a definition add it to the stack as a live definition. if (tree->gtFlags & GTF_VAR_DEF) diff --git a/src/jit/decomposelongs.cpp b/src/jit/decomposelongs.cpp index 1bb540c8a45c..a5b6782b4054 100644 --- a/src/jit/decomposelongs.cpp +++ b/src/jit/decomposelongs.cpp @@ -130,7 +130,7 @@ GenTree* DecomposeLongs::DecomposeNode(GenTree* tree) // Handle the case where we are implicitly using the lower half of a long lclVar. 
if ((tree->TypeGet() == TYP_INT) && tree->OperIsLocal()) { - LclVarDsc* varDsc = m_compiler->lvaTable + tree->AsLclVarCommon()->gtLclNum; + LclVarDsc* varDsc = m_compiler->lvaTable + tree->AsLclVarCommon()->GetLclNum(); if (varTypeIsLong(varDsc) && varDsc->lvPromoted) { #ifdef DEBUG @@ -342,7 +342,7 @@ GenTree* DecomposeLongs::DecomposeLclVar(LIR::Use& use) assert(use.Def()->OperGet() == GT_LCL_VAR); GenTree* tree = use.Def(); - unsigned varNum = tree->AsLclVarCommon()->gtLclNum; + unsigned varNum = tree->AsLclVarCommon()->GetLclNum(); LclVarDsc* varDsc = m_compiler->lvaTable + varNum; GenTree* loResult = tree; loResult->gtType = TYP_INT; @@ -390,7 +390,7 @@ GenTree* DecomposeLongs::DecomposeLclFld(LIR::Use& use) GenTreeLclFld* loResult = tree->AsLclFld(); loResult->gtType = TYP_INT; - GenTree* hiResult = m_compiler->gtNewLclFldNode(loResult->gtLclNum, TYP_INT, loResult->gtLclOffs + 4); + GenTree* hiResult = m_compiler->gtNewLclFldNode(loResult->GetLclNum(), TYP_INT, loResult->gtLclOffs + 4); Range().InsertAfter(loResult, hiResult); return FinalizeDecomposition(use, loResult, hiResult, hiResult); @@ -423,7 +423,7 @@ GenTree* DecomposeLongs::DecomposeStoreLclVar(LIR::Use& use) noway_assert(rhs->OperGet() == GT_LONG); - unsigned varNum = tree->AsLclVarCommon()->gtLclNum; + unsigned varNum = tree->AsLclVarCommon()->GetLclNum(); LclVarDsc* varDsc = m_compiler->lvaTable + varNum; if (!varDsc->lvPromoted) { @@ -510,7 +510,7 @@ GenTree* DecomposeLongs::DecomposeStoreLclFld(LIR::Use& use) loStore->gtFlags |= GTF_VAR_USEASG; // Create the store for the upper half of the GT_LONG and insert it after the low store. - GenTreeLclFld* hiStore = m_compiler->gtNewLclFldNode(loStore->gtLclNum, TYP_INT, loStore->gtLclOffs + 4); + GenTreeLclFld* hiStore = m_compiler->gtNewLclFldNode(loStore->GetLclNum(), TYP_INT, loStore->gtLclOffs + 4); hiStore->SetOper(GT_STORE_LCL_FLD); hiStore->gtOp1 = value->gtOp2; hiStore->gtFlags |= (GTF_VAR_DEF | GTF_VAR_USEASG); @@ -1112,7 +1112,7 @@ GenTree* DecomposeLongs::DecomposeShift(LIR::Use& use) Range().Remove(gtLong); loOp1 = RepresentOpAsLocalVar(loOp1, gtLong, &gtLong->gtOp.gtOp1); - unsigned loOp1LclNum = loOp1->AsLclVarCommon()->gtLclNum; + unsigned loOp1LclNum = loOp1->AsLclVarCommon()->GetLclNum(); Range().Remove(loOp1); GenTree* shiftByHi = m_compiler->gtNewIconNode(count, TYP_INT); @@ -1195,7 +1195,7 @@ GenTree* DecomposeLongs::DecomposeShift(LIR::Use& use) // shr hi, shift hiOp1 = RepresentOpAsLocalVar(hiOp1, gtLong, &gtLong->gtOp.gtOp2); - unsigned hiOp1LclNum = hiOp1->AsLclVarCommon()->gtLclNum; + unsigned hiOp1LclNum = hiOp1->AsLclVarCommon()->GetLclNum(); GenTree* hiCopy = m_compiler->gtNewLclvNode(hiOp1LclNum, TYP_INT); GenTree* shiftByHi = m_compiler->gtNewIconNode(count, TYP_INT); @@ -1258,7 +1258,7 @@ GenTree* DecomposeLongs::DecomposeShift(LIR::Use& use) Range().Remove(gtLong); hiOp1 = RepresentOpAsLocalVar(hiOp1, gtLong, &gtLong->gtOp.gtOp2); - unsigned hiOp1LclNum = hiOp1->AsLclVarCommon()->gtLclNum; + unsigned hiOp1LclNum = hiOp1->AsLclVarCommon()->GetLclNum(); GenTree* hiCopy = m_compiler->gtNewLclvNode(hiOp1LclNum, TYP_INT); Range().Remove(hiOp1); @@ -1492,8 +1492,8 @@ GenTree* DecomposeLongs::DecomposeRotate(LIR::Use& use) hiOp1 = RepresentOpAsLocalVar(hiOp1, gtLong, &gtLong->gtOp.gtOp2); } - unsigned loOp1LclNum = loOp1->AsLclVarCommon()->gtLclNum; - unsigned hiOp1LclNum = hiOp1->AsLclVarCommon()->gtLclNum; + unsigned loOp1LclNum = loOp1->AsLclVarCommon()->GetLclNum(); + unsigned hiOp1LclNum = hiOp1->AsLclVarCommon()->GetLclNum(); Range().Remove(loOp1); 
Range().Remove(hiOp1); @@ -1717,7 +1717,7 @@ GenTree* DecomposeLongs::DecomposeSimdGetItem(LIR::Use& use) } GenTree* simdTmpVar = RepresentOpAsLocalVar(simdTree->gtOp.gtOp1, simdTree, &simdTree->gtOp.gtOp1); - unsigned simdTmpVarNum = simdTmpVar->AsLclVarCommon()->gtLclNum; + unsigned simdTmpVarNum = simdTmpVar->AsLclVarCommon()->GetLclNum(); JITDUMP("[DecomposeSimdGetItem]: Saving op1 tree to a temp var:\n"); DISPTREERANGE(Range(), simdTmpVar); Range().Remove(simdTmpVar); @@ -1727,7 +1727,7 @@ GenTree* DecomposeLongs::DecomposeSimdGetItem(LIR::Use& use) if (!indexIsConst) { indexTmpVar = RepresentOpAsLocalVar(simdTree->gtOp.gtOp2, simdTree, &simdTree->gtOp.gtOp2); - indexTmpVarNum = indexTmpVar->AsLclVarCommon()->gtLclNum; + indexTmpVarNum = indexTmpVar->AsLclVarCommon()->GetLclNum(); JITDUMP("[DecomposeSimdGetItem]: Saving op2 tree to a temp var:\n"); DISPTREERANGE(Range(), indexTmpVar); Range().Remove(indexTmpVar); @@ -1817,7 +1817,7 @@ GenTree* DecomposeLongs::StoreNodeToVar(LIR::Use& use) { // If parent is already a STORE_LCL_VAR, we can skip it if // it is already marked as lvIsMultiRegRet. - unsigned varNum = user->AsLclVarCommon()->gtLclNum; + unsigned varNum = user->AsLclVarCommon()->GetLclNum(); if (m_compiler->lvaTable[varNum].lvIsMultiRegRet) { return tree->gtNext; @@ -1884,7 +1884,7 @@ GenTree* DecomposeLongs::EnsureIntSized(GenTree* node, bool signExtend) return node; } - if (node->OperIs(GT_LCL_VAR) && !m_compiler->lvaTable[node->AsLclVarCommon()->gtLclNum].lvNormalizeOnLoad()) + if (node->OperIs(GT_LCL_VAR) && !m_compiler->lvaTable[node->AsLclVarCommon()->GetLclNum()].lvNormalizeOnLoad()) { node->gtType = TYP_INT; return node; diff --git a/src/jit/earlyprop.cpp b/src/jit/earlyprop.cpp index a0832f59aa22..b3752419ac6a 100644 --- a/src/jit/earlyprop.cpp +++ b/src/jit/earlyprop.cpp @@ -573,7 +573,7 @@ void Compiler::optFoldNullCheck(GenTree* tree) { GenTree* additionNode = defRHS->gtGetOp2(); if ((additionNode->gtGetOp1()->OperGet() == GT_LCL_VAR) && - (additionNode->gtGetOp1()->gtLclVarCommon.gtLclNum == nullCheckLclNum)) + (additionNode->gtGetOp1()->gtLclVarCommon.GetLclNum() == nullCheckLclNum)) { GenTree* offset = additionNode->gtGetOp2(); if (offset->IsCnsIntOrI()) diff --git a/src/jit/emitxarch.cpp b/src/jit/emitxarch.cpp index 313243748ce0..bb8916ba0c90 100644 --- a/src/jit/emitxarch.cpp +++ b/src/jit/emitxarch.cpp @@ -3369,7 +3369,8 @@ regNumber emitter::emitInsBinary(instruction ins, emitAttr attr, GenTree* dst, G case GT_LCL_VAR: { - assert(memOp->IsRegOptional() || !emitComp->lvaTable[memOp->gtLclVar.gtLclNum].lvIsRegCandidate()); + assert(memOp->IsRegOptional() || + !emitComp->lvaTable[memOp->gtLclVar.GetLclNum()].lvIsRegCandidate()); varNum = memOp->AsLclVar()->GetLclNum(); offset = 0; break; diff --git a/src/jit/flowgraph.cpp b/src/jit/flowgraph.cpp index 40f04c7e7579..9e98d101b19f 100644 --- a/src/jit/flowgraph.cpp +++ b/src/jit/flowgraph.cpp @@ -7639,7 +7639,7 @@ GenTree* Compiler::fgDoNormalizeOnStore(GenTree* tree) // Small-typed arguments and aliased locals are normalized on load. // Other small-typed locals are normalized on store. 
// If it is an assignment to one of the latter, insert the cast on RHS - unsigned varNum = op1->gtLclVarCommon.gtLclNum; + unsigned varNum = op1->gtLclVarCommon.GetLclNum(); LclVarDsc* varDsc = &lvaTable[varNum]; if (varDsc->lvNormalizeOnStore()) @@ -9845,7 +9845,7 @@ VARSET_VALRET_TP Compiler::fgGetVarBits(GenTree* tree) assert(tree->gtOper == GT_LCL_VAR || tree->gtOper == GT_LCL_FLD); - unsigned int lclNum = tree->gtLclVarCommon.gtLclNum; + unsigned int lclNum = tree->gtLclVarCommon.GetLclNum(); LclVarDsc* varDsc = lvaTable + lclNum; if (varDsc->lvTracked) { @@ -22270,7 +22270,7 @@ void Compiler::fgAttachStructInlineeToAsg(GenTree* tree, GenTree* child, CORINFO // If it is a multireg return on x64/ux, the local variable should be marked as lvIsMultiRegRet if (child->AsCall()->HasMultiRegRetVal()) { - unsigned lclNum = tree->gtOp.gtOp1->gtLclVarCommon.gtLclNum; + unsigned lclNum = tree->gtOp.gtOp1->gtLclVarCommon.GetLclNum(); lvaTable[lclNum].lvIsMultiRegRet = true; } return; @@ -22603,7 +22603,7 @@ Compiler::fgWalkResult Compiler::fgLateDevirtualization(GenTree** pTree, fgWalkD if ((lhs->OperGet() == GT_LCL_VAR) && (lhs->TypeGet() == TYP_REF)) { - const unsigned lclNum = lhs->gtLclVarCommon.gtLclNum; + const unsigned lclNum = lhs->gtLclVarCommon.GetLclNum(); LclVarDsc* lcl = comp->lvaGetDesc(lclNum); if (lcl->lvSingleDef) diff --git a/src/jit/gentree.cpp b/src/jit/gentree.cpp index d6c0fa78e29d..a5ff292f37c5 100644 --- a/src/jit/gentree.cpp +++ b/src/jit/gentree.cpp @@ -1194,7 +1194,7 @@ bool GenTree::Compare(GenTree* op1, GenTree* op2, bool swapOK) switch (oper) { case GT_LCL_VAR: - if (op1->gtLclVarCommon.gtLclNum != op2->gtLclVarCommon.gtLclNum) + if (op1->gtLclVarCommon.GetLclNum() != op2->gtLclVarCommon.GetLclNum()) { break; } @@ -1202,7 +1202,7 @@ bool GenTree::Compare(GenTree* op1, GenTree* op2, bool swapOK) return true; case GT_LCL_FLD: - if (op1->gtLclFld.gtLclNum != op2->gtLclFld.gtLclNum || + if (op1->gtLclFld.GetLclNum() != op2->gtLclFld.GetLclNum() || op1->gtLclFld.gtLclOffs != op2->gtLclFld.gtLclOffs) { break; @@ -1585,7 +1585,7 @@ bool Compiler::gtHasRef(GenTree* tree, ssize_t lclNum, bool defOnly) { if (oper == GT_LCL_VAR) { - if (tree->gtLclVarCommon.gtLclNum == (unsigned)lclNum) + if (tree->gtLclVarCommon.GetLclNum() == (unsigned)lclNum) { if (!defOnly) { @@ -1629,7 +1629,7 @@ bool Compiler::gtHasRef(GenTree* tree, ssize_t lclNum, bool defOnly) // 'tree' is the gtOp1 of an assignment node. So we can handle // the case where defOnly is either true or false. 
- if (tree->gtOper == GT_LCL_VAR && tree->gtLclVarCommon.gtLclNum == (unsigned)lclNum) + if (tree->gtOper == GT_LCL_VAR && tree->gtLclVarCommon.GetLclNum() == (unsigned)lclNum) { return true; } @@ -1826,7 +1826,7 @@ Compiler::fgWalkResult Compiler::gtHasLocalsWithAddrOpCB(GenTree** pTree, fgWalk if (tree->gtOper == GT_LCL_VAR) { - unsigned lclNum = tree->gtLclVarCommon.gtLclNum; + unsigned lclNum = tree->gtLclVarCommon.GetLclNum(); LclVarDsc* varDsc = &comp->lvaTable[lclNum]; if (varDsc->lvHasLdAddrOp || varDsc->lvAddrExposed) @@ -1910,10 +1910,10 @@ unsigned Compiler::gtHashValue(GenTree* tree) { UINT64 bits; case GT_LCL_VAR: - add = tree->gtLclVar.gtLclNum; + add = tree->gtLclVar.GetLclNum(); break; case GT_LCL_FLD: - hash = genTreeHashAdd(hash, tree->gtLclFld.gtLclNum); + hash = genTreeHashAdd(hash, tree->gtLclFld.GetLclNum()); add = tree->gtLclFld.gtLclOffs; break; @@ -2697,8 +2697,8 @@ bool Compiler::gtIsLikelyRegVar(GenTree* tree) return false; } - assert(tree->gtLclVar.gtLclNum < lvaTableCnt); - LclVarDsc* varDsc = lvaTable + tree->gtLclVar.gtLclNum; + assert(tree->gtLclVar.GetLclNum() < lvaTableCnt); + LclVarDsc* varDsc = lvaTable + tree->gtLclVar.GetLclNum(); if (varDsc->lvDoNotEnregister) { @@ -3321,7 +3321,7 @@ unsigned Compiler::gtSetEvalOrder(GenTree* tree) costEx = 1; costSz = 1; /* Sign-extend and zero-extend are more expensive to load */ - if (lvaTable[tree->gtLclVar.gtLclNum].lvNormalizeOnLoad()) + if (lvaTable[tree->gtLclVar.GetLclNum()].lvNormalizeOnLoad()) { costEx += 1; costSz += 1; @@ -6372,7 +6372,7 @@ GenTree* Compiler::gtNewObjNode(CORINFO_CLASS_HANDLE structHnd, GenTree* addr) if (lclNode != nullptr) { objNode->gtFlags |= GTF_IND_NONFAULTING; - if (!lvaIsImplicitByRefLocal(lclNode->gtLclNum)) + if (!lvaIsImplicitByRefLocal(lclNode->GetLclNum())) { objNode->gtFlags &= ~GTF_GLOB_REF; } @@ -6415,7 +6415,7 @@ GenTree* Compiler::gtNewStructVal(CORINFO_CLASS_HANDLE structHnd, GenTree* addr) GenTree* val = addr->gtGetOp1(); if (val->OperGet() == GT_LCL_VAR) { - unsigned lclNum = addr->gtGetOp1()->AsLclVarCommon()->gtLclNum; + unsigned lclNum = addr->gtGetOp1()->AsLclVarCommon()->GetLclNum(); LclVarDsc* varDsc = &(lvaTable[lclNum]); if (varTypeIsStruct(varDsc) && (varDsc->lvVerTypeInfo.GetClassHandle() == structHnd) && !lvaIsImplicitByRefLocal(lclNum)) @@ -6609,7 +6609,7 @@ void Compiler::gtBlockOpInit(GenTree* result, GenTree* dst, GenTree* srcOrFillVa } if (currSrc->OperGet() == GT_LCL_VAR && currDst->OperGet() == GT_LCL_VAR && - currSrc->gtLclVarCommon.gtLclNum == currDst->gtLclVarCommon.gtLclNum) + currSrc->gtLclVarCommon.GetLclNum() == currDst->gtLclVarCommon.GetLclNum()) { // Make this a NOP // TODO-Cleanup: probably doesn't matter, but could do this earlier and avoid creating a GT_ASG @@ -6885,7 +6885,7 @@ GenTree* Compiler::gtClone(GenTree* tree, bool complexOK) // Remember that the LclVar node has been cloned. The flag will be set // on 'copy' as well. tree->gtFlags |= GTF_VAR_CLONED; - copy = gtNewLclvNode(tree->gtLclVarCommon.gtLclNum, tree->gtType DEBUGARG(tree->gtLclVar.gtLclILoffs)); + copy = gtNewLclvNode(tree->gtLclVarCommon.GetLclNum(), tree->gtType DEBUGARG(tree->gtLclVar.gtLclILoffs)); break; case GT_LCL_FLD: @@ -6894,7 +6894,7 @@ GenTree* Compiler::gtClone(GenTree* tree, bool complexOK) // on 'copy' as well. 
tree->gtFlags |= GTF_VAR_CLONED; copy = new (this, tree->gtOper) - GenTreeLclFld(tree->gtOper, tree->TypeGet(), tree->gtLclFld.gtLclNum, tree->gtLclFld.gtLclOffs); + GenTreeLclFld(tree->gtOper, tree->TypeGet(), tree->gtLclFld.GetLclNum(), tree->gtLclFld.gtLclOffs); copy->gtLclFld.gtFieldSeq = tree->gtLclFld.gtFieldSeq; break; @@ -7056,7 +7056,7 @@ GenTree* Compiler::gtCloneExpr( case GT_LCL_VAR: - if (tree->gtLclVarCommon.gtLclNum == varNum) + if (tree->gtLclVarCommon.GetLclNum() == varNum) { copy = gtNewIconNode(varVal, tree->gtType); if (tree->gtFlags & GTF_VAR_ARR_INDEX) @@ -7069,14 +7069,14 @@ GenTree* Compiler::gtCloneExpr( // Remember that the LclVar node has been cloned. The flag will // be set on 'copy' as well. tree->gtFlags |= GTF_VAR_CLONED; - copy = gtNewLclvNode(tree->gtLclVar.gtLclNum, tree->gtType DEBUGARG(tree->gtLclVar.gtLclILoffs)); + copy = gtNewLclvNode(tree->gtLclVar.GetLclNum(), tree->gtType DEBUGARG(tree->gtLclVar.gtLclILoffs)); copy->AsLclVarCommon()->SetSsaNum(tree->AsLclVarCommon()->GetSsaNum()); } copy->gtFlags = tree->gtFlags; goto DONE; case GT_LCL_FLD: - if (tree->gtLclFld.gtLclNum == varNum) + if (tree->gtLclFld.GetLclNum() == varNum) { IMPL_LIMITATION("replacing GT_LCL_FLD with a constant"); } @@ -7086,7 +7086,7 @@ GenTree* Compiler::gtCloneExpr( // be set on 'copy' as well. tree->gtFlags |= GTF_VAR_CLONED; copy = new (this, GT_LCL_FLD) - GenTreeLclFld(tree->TypeGet(), tree->gtLclFld.gtLclNum, tree->gtLclFld.gtLclOffs); + GenTreeLclFld(tree->TypeGet(), tree->gtLclFld.GetLclNum(), tree->gtLclFld.gtLclOffs); copy->gtLclFld.gtFieldSeq = tree->gtLclFld.gtFieldSeq; copy->gtFlags = tree->gtFlags; } @@ -8023,7 +8023,7 @@ bool Compiler::gtCompareTree(GenTree* op1, GenTree* op2) break; case GT_LCL_VAR: - if (op1->gtLclVarCommon.gtLclNum == op2->gtLclVarCommon.gtLclNum) + if (op1->gtLclVarCommon.GetLclNum() == op2->gtLclVarCommon.GetLclNum()) { return true; } @@ -9847,7 +9847,7 @@ void Compiler::gtDispNode(GenTree* tree, IndentStack* indentStack, __in __in_z _ printf(" %-6s", varTypeName(tree->TypeGet())); if (tree->gtOper == GT_LCL_VAR || tree->gtOper == GT_STORE_LCL_VAR) { - LclVarDsc* varDsc = &lvaTable[tree->gtLclVarCommon.gtLclNum]; + LclVarDsc* varDsc = &lvaTable[tree->gtLclVarCommon.GetLclNum()]; if (varDsc->lvAddrExposed) { printf("(AX)"); // Variable has address exposed. 
@@ -10418,7 +10418,7 @@ void Compiler::gtDispLeaf(GenTree* tree, IndentStack* indentStack) case GT_LCL_VAR_ADDR: case GT_STORE_LCL_VAR: printf(" "); - varNum = tree->gtLclVarCommon.gtLclNum; + varNum = tree->gtLclVarCommon.GetLclNum(); varDsc = &lvaTable[varNum]; gtDispLclVar(varNum); if (tree->gtLclVarCommon.HasSsaName()) @@ -10483,7 +10483,7 @@ void Compiler::gtDispLeaf(GenTree* tree, IndentStack* indentStack) printf(" "); printIndent(indentStack); printf(" %-6s V%02u.%s (offs=0x%02x) -> ", varTypeName(fieldVarDsc->TypeGet()), - tree->gtLclVarCommon.gtLclNum, fieldName, fieldVarDsc->lvFldOffset); + tree->gtLclVarCommon.GetLclNum(), fieldName, fieldVarDsc->lvFldOffset); gtDispLclVar(i); if (fieldVarDsc->lvRegister) @@ -12842,7 +12842,7 @@ GenTree* Compiler::gtTryRemoveBoxUpstreamEffects(GenTree* op, BoxRemovalOptions } GenTree* copyDstAddrOp1 = copyDstAddr->gtOp.gtOp1; - if ((copyDstAddrOp1->OperGet() != GT_LCL_VAR) || (copyDstAddrOp1->gtLclVarCommon.gtLclNum != boxTempLcl)) + if ((copyDstAddrOp1->OperGet() != GT_LCL_VAR) || (copyDstAddrOp1->gtLclVarCommon.GetLclNum() != boxTempLcl)) { JITDUMP("Unexpected copy dest address 1st addend\n"); return nullptr; @@ -14601,7 +14601,7 @@ GenTree* Compiler::gtNewTempAssign( unsigned tmp, GenTree* val, Statement** pAfterStmt, IL_OFFSETX ilOffset, BasicBlock* block) { // Self-assignment is a nop. - if (val->OperGet() == GT_LCL_VAR && val->gtLclVarCommon.gtLclNum == tmp) + if (val->OperGet() == GT_LCL_VAR && val->gtLclVarCommon.GetLclNum() == tmp) { return gtNewNothingNode(); } @@ -14614,9 +14614,9 @@ GenTree* Compiler::gtNewTempAssign( } var_types valTyp = val->TypeGet(); - if (val->OperGet() == GT_LCL_VAR && lvaTable[val->gtLclVar.gtLclNum].lvNormalizeOnLoad()) + if (val->OperGet() == GT_LCL_VAR && lvaTable[val->gtLclVar.GetLclNum()].lvNormalizeOnLoad()) { - valTyp = lvaGetRealType(val->gtLclVar.gtLclNum); + valTyp = lvaGetRealType(val->gtLclVar.GetLclNum()); val->gtType = valTyp; } var_types dstTyp = varDsc->TypeGet(); @@ -15567,7 +15567,7 @@ bool GenTree::IsPhiDefn() bool GenTree::IsPartialLclFld(Compiler* comp) { return ((gtOper == GT_LCL_FLD) && - (comp->lvaTable[this->gtLclVarCommon.gtLclNum].lvExactSize != genTypeSize(gtType))); + (comp->lvaTable[this->gtLclVarCommon.GetLclNum()].lvExactSize != genTypeSize(gtType))); } bool GenTree::DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire) @@ -15891,10 +15891,10 @@ unsigned GenTree::IsLclVarUpdateTree(GenTree** pOtherTree, genTreeOps* pOper) GenTree* lhs = gtOp.gtOp1; if (lhs->OperGet() == GT_LCL_VAR) { - unsigned lhsLclNum = lhs->AsLclVarCommon()->gtLclNum; + unsigned lhsLclNum = lhs->AsLclVarCommon()->GetLclNum(); GenTree* rhs = gtOp.gtOp2; if (rhs->OperIsBinary() && (rhs->gtOp.gtOp1->gtOper == GT_LCL_VAR) && - (rhs->gtOp.gtOp1->AsLclVarCommon()->gtLclNum == lhsLclNum)) + (rhs->gtOp.gtOp1->AsLclVarCommon()->GetLclNum() == lhsLclNum)) { lclNum = lhsLclNum; *pOtherTree = rhs->gtOp.gtOp2; @@ -16665,7 +16665,7 @@ CORINFO_CLASS_HANDLE Compiler::gtGetStructHandleIfPresent(GenTree* tree) #endif break; case GT_LCL_VAR: - structHnd = lvaTable[tree->AsLclVarCommon()->gtLclNum].lvVerTypeInfo.GetClassHandle(); + structHnd = lvaTable[tree->AsLclVarCommon()->GetLclNum()].lvVerTypeInfo.GetClassHandle(); break; case GT_RETURN: structHnd = gtGetStructHandleIfPresent(tree->gtOp.gtOp1); diff --git a/src/jit/gentree.h b/src/jit/gentree.h index 76958f4b29f3..c5d88e6b219b 100644 --- a/src/jit/gentree.h +++ b/src/jit/gentree.h @@ -2853,7 +2853,6 @@ struct GenTreeLclVarCommon : public 
GenTreeUnOp { return _gtLclNum; } - __declspec(property(get = GetLclNum)) unsigned gtLclNum; void SetLclNum(unsigned lclNum) { diff --git a/src/jit/gschecks.cpp b/src/jit/gschecks.cpp index 652e3cc3a754..5e7ae07c5c9d 100644 --- a/src/jit/gschecks.cpp +++ b/src/jit/gschecks.cpp @@ -136,7 +136,7 @@ Compiler::fgWalkResult Compiler::gsMarkPtrsAndAssignGroups(GenTree** pTree, fgWa // local vars and param uses case GT_LCL_VAR: case GT_LCL_FLD: - lclNum = tree->gtLclVarCommon.gtLclNum; + lclNum = tree->gtLclVarCommon.GetLclNum(); if (pState->isUnderIndir) { @@ -257,7 +257,7 @@ Compiler::fgWalkResult Compiler::gsMarkPtrsAndAssignGroups(GenTree** pTree, fgWa if ((isLocVar || isLocFld) && tree->gtOp.gtOp2) { - lclNum = tree->gtOp.gtOp1->gtLclVarCommon.gtLclNum; + lclNum = tree->gtOp.gtOp1->gtLclVarCommon.GetLclNum(); newState.lvAssignDef = lclNum; newState.isAssignSrc = true; } diff --git a/src/jit/hwintrinsiccodegenxarch.cpp b/src/jit/hwintrinsiccodegenxarch.cpp index ee94fe0e91b6..ce9f29bc63a0 100644 --- a/src/jit/hwintrinsiccodegenxarch.cpp +++ b/src/jit/hwintrinsiccodegenxarch.cpp @@ -692,7 +692,7 @@ void CodeGen::genHWIntrinsic_R_R_RM( case GT_LCL_VAR: { - assert(op2->IsRegOptional() || !compiler->lvaTable[op2->gtLclVar.gtLclNum].lvIsRegCandidate()); + assert(op2->IsRegOptional() || !compiler->lvaTable[op2->gtLclVar.GetLclNum()].lvIsRegCandidate()); varNum = op2->AsLclVar()->GetLclNum(); offset = 0; break; @@ -857,7 +857,7 @@ void CodeGen::genHWIntrinsic_R_R_RM_I(GenTreeHWIntrinsic* node, instruction ins, case GT_LCL_VAR: { - assert(op2->IsRegOptional() || !compiler->lvaTable[op2->gtLclVar.gtLclNum].lvIsRegCandidate()); + assert(op2->IsRegOptional() || !compiler->lvaTable[op2->gtLclVar.GetLclNum()].lvIsRegCandidate()); varNum = op2->AsLclVar()->GetLclNum(); offset = 0; break; @@ -1021,7 +1021,7 @@ void CodeGen::genHWIntrinsic_R_R_RM_R(GenTreeHWIntrinsic* node, instruction ins) case GT_LCL_VAR: { - assert(op2->IsRegOptional() || !compiler->lvaTable[op2->gtLclVar.gtLclNum].lvIsRegCandidate()); + assert(op2->IsRegOptional() || !compiler->lvaTable[op2->gtLclVar.GetLclNum()].lvIsRegCandidate()); varNum = op2->AsLclVar()->GetLclNum(); offset = 0; break; @@ -1147,7 +1147,7 @@ void CodeGen::genHWIntrinsic_R_R_R_RM( case GT_LCL_VAR: { - assert(op3->IsRegOptional() || !compiler->lvaTable[op3->gtLclVar.gtLclNum].lvIsRegCandidate()); + assert(op3->IsRegOptional() || !compiler->lvaTable[op3->gtLclVar.GetLclNum()].lvIsRegCandidate()); varNum = op3->AsLclVar()->GetLclNum(); offset = 0; break; diff --git a/src/jit/importer.cpp b/src/jit/importer.cpp index 1db5063e4270..ba49a2d0bdf4 100644 --- a/src/jit/importer.cpp +++ b/src/jit/importer.cpp @@ -511,7 +511,7 @@ inline void Compiler::impAppendStmtCheck(Statement* stmt, unsigned chkLevel) if (tree->gtOp.gtOp1->gtOper == GT_LCL_VAR) { - unsigned lclNum = tree->gtOp.gtOp1->gtLclVarCommon.gtLclNum; + unsigned lclNum = tree->gtOp.gtOp1->gtLclVarCommon.GetLclNum(); for (unsigned level = 0; level < chkLevel; level++) { assert(!gtHasRef(verCurrentState.esStack[level].val, lclNum, false)); @@ -1122,7 +1122,7 @@ GenTree* Compiler::impAssignStruct(GenTree* dest, // Return a NOP if this is a self-assignment. 
if (dest->OperGet() == GT_LCL_VAR && src->OperGet() == GT_LCL_VAR && - src->gtLclVarCommon.gtLclNum == dest->gtLclVarCommon.gtLclNum) + src->gtLclVarCommon.GetLclNum() == dest->gtLclVarCommon.GetLclNum()) { return gtNewNothingNode(); } @@ -1240,13 +1240,13 @@ GenTree* Compiler::impAssignStructPtr(GenTree* destAddr, // TODO-1stClassStructs: Eliminate this pessimization when we can more generally // handle multireg returns. lcl->gtFlags |= GTF_DONT_CSE; - lvaTable[lcl->gtLclVarCommon.gtLclNum].lvIsMultiRegRet = true; + lvaTable[lcl->gtLclVarCommon.GetLclNum()].lvIsMultiRegRet = true; } else if (lcl->gtType != src->gtType) { // We change this to a GT_LCL_FLD (from a GT_ADDR of a GT_LCL_VAR) lcl->ChangeOper(GT_LCL_FLD); - fgLclFldAssign(lcl->gtLclVarCommon.gtLclNum); + fgLclFldAssign(lcl->gtLclVarCommon.GetLclNum()); lcl->gtType = src->gtType; asgType = src->gtType; } @@ -1256,7 +1256,7 @@ GenTree* Compiler::impAssignStructPtr(GenTree* destAddr, #if defined(_TARGET_ARM_) // TODO-Cleanup: This should have been taken care of in the above HasMultiRegRetVal() case, // but that method has not been updadted to include ARM. - impMarkLclDstNotPromotable(lcl->gtLclVarCommon.gtLclNum, src, structHnd); + impMarkLclDstNotPromotable(lcl->gtLclVarCommon.GetLclNum(), src, structHnd); lcl->gtFlags |= GTF_DONT_CSE; #elif defined(UNIX_AMD64_ABI) // Not allowed for FEATURE_CORCLR which is the only SKU available for System V OSs. @@ -1268,7 +1268,7 @@ GenTree* Compiler::impAssignStructPtr(GenTree* destAddr, // TODO-Cleanup: Why is this needed here? This seems that it will set this even for // non-multireg returns. lcl->gtFlags |= GTF_DONT_CSE; - lvaTable[lcl->gtLclVarCommon.gtLclNum].lvIsMultiRegRet = true; + lvaTable[lcl->gtLclVarCommon.GetLclNum()].lvIsMultiRegRet = true; #endif } else // we don't have a GT_ADDR of a GT_LCL_VAR @@ -1764,7 +1764,7 @@ GenTree* Compiler::impNormStructVal(GenTree* structVal, { // A OBJ on a ADDR(LCL_VAR) can never raise an exception // so we don't set GTF_EXCEPT here. 
- if (!lvaIsImplicitByRefLocal(structLcl->gtLclNum)) + if (!lvaIsImplicitByRefLocal(structLcl->GetLclNum())) { structVal->gtFlags &= ~GTF_GLOB_REF; } @@ -2160,7 +2160,7 @@ GenTree* Compiler::impRuntimeLookupToTree(CORINFO_RESOLVED_TOKEN* pResolvedToken unsigned tmp; if (handleCopy->IsLocal()) { - tmp = handleCopy->gtLclVarCommon.gtLclNum; + tmp = handleCopy->gtLclVarCommon.GetLclNum(); } else { @@ -2329,7 +2329,7 @@ void Compiler::impSpillStackEnsure(bool spillLeaves) // Temps introduced by the importer itself don't need to be spilled - bool isTempLcl = (tree->OperGet() == GT_LCL_VAR) && (tree->gtLclVarCommon.gtLclNum >= info.compLocalsCount); + bool isTempLcl = (tree->OperGet() == GT_LCL_VAR) && (tree->gtLclVarCommon.GetLclNum() >= info.compLocalsCount); if (isTempLcl) { @@ -2533,7 +2533,7 @@ BasicBlock* Compiler::impPushCatchArgOnStack(BasicBlock* hndBlk, CORINFO_CLASS_H if ((tree->gtOper == GT_ASG) && (tree->gtOp.gtOp1->gtOper == GT_LCL_VAR) && (tree->gtOp.gtOp2->gtOper == GT_CATCH_ARG)) { - tree = gtNewLclvNode(tree->gtOp.gtOp1->gtLclVarCommon.gtLclNum, TYP_REF); + tree = gtNewLclvNode(tree->gtOp.gtOp1->gtLclVarCommon.GetLclNum(), TYP_REF); impPushOnStack(tree, typeInfo(TI_REF, clsHnd)); @@ -2555,7 +2555,7 @@ BasicBlock* Compiler::impPushCatchArgOnStack(BasicBlock* hndBlk, CORINFO_CLASS_H #if defined(JIT32_GCENCODER) const bool forceInsertNewBlock = isSingleBlockFilter || compStressCompile(STRESS_CATCH_ARG, 5); #else - const bool forceInsertNewBlock = compStressCompile(STRESS_CATCH_ARG, 5); + const bool forceInsertNewBlock = compStressCompile(STRESS_CATCH_ARG, 5); #endif // defined(JIT32_GCENCODER) /* Spill GT_CATCH_ARG to a temp if there are jumps to the beginning of the handler */ @@ -3076,7 +3076,7 @@ GenTree* Compiler::impInitializeArrayIntrinsic(CORINFO_SIG_INFO* sig) GenTree* arrayAssignment = impLastStmt->gtStmtExpr; if ((arrayAssignment->gtOper != GT_ASG) || (arrayAssignment->gtOp.gtOp1->gtOper != GT_LCL_VAR) || (arrayLocalNode->gtOper != GT_LCL_VAR) || - (arrayAssignment->gtOp.gtOp1->gtLclVarCommon.gtLclNum != arrayLocalNode->gtLclVarCommon.gtLclNum)) + (arrayAssignment->gtOp.gtOp1->gtLclVarCommon.GetLclNum() != arrayLocalNode->gtLclVarCommon.GetLclNum())) { return nullptr; } @@ -6528,7 +6528,7 @@ GenTreeCall* Compiler::impImportIndirectCall(CORINFO_SIG_INFO* sig, IL_OFFSETX i if (fptr->OperGet() == GT_LCL_VAR) { - lvaTable[fptr->gtLclVarCommon.gtLclNum].lvKeepType = 1; + lvaTable[fptr->gtLclVarCommon.GetLclNum()].lvKeepType = 1; } #endif @@ -8321,7 +8321,7 @@ var_types Compiler::impImportCall(OPCODE opcode, { assert(newobjThis->gtOper == GT_ADDR && newobjThis->gtOp.gtOp1->gtOper == GT_LCL_VAR); - unsigned tmp = newobjThis->gtOp.gtOp1->gtLclVarCommon.gtLclNum; + unsigned tmp = newobjThis->gtOp.gtOp1->gtLclVarCommon.GetLclNum(); impPushOnStack(gtNewLclvNode(tmp, lvaGetRealType(tmp)), verMakeTypeInfo(clsHnd).NormaliseForStack()); } else @@ -8336,7 +8336,8 @@ var_types Compiler::impImportCall(OPCODE opcode, } assert(newobjThis->gtOper == GT_LCL_VAR); - impPushOnStack(gtNewLclvNode(newobjThis->gtLclVarCommon.gtLclNum, TYP_REF), typeInfo(TI_REF, clsHnd)); + impPushOnStack(gtNewLclvNode(newobjThis->gtLclVarCommon.GetLclNum(), TYP_REF), + typeInfo(TI_REF, clsHnd)); } } return callRetTyp; @@ -8962,7 +8963,7 @@ GenTree* Compiler::impFixupStructReturnType(GenTree* op, CORINFO_CLASS_HANDLE re if (op->gtOper == GT_LCL_VAR) { // Make sure that this struct stays in memory and doesn't get promoted. 
- unsigned lclNum = op->gtLclVarCommon.gtLclNum; + unsigned lclNum = op->gtLclVarCommon.GetLclNum(); lvaTable[lclNum].lvIsMultiRegRet = true; // TODO-1stClassStructs: Handle constant propagation and CSE-ing of multireg returns. @@ -8989,7 +8990,7 @@ GenTree* Compiler::impFixupStructReturnType(GenTree* op, CORINFO_CLASS_HANDLE re if (op->gtOper == GT_LCL_VAR) { // This LCL_VAR is an HFA return value, it stays as a TYP_STRUCT - unsigned lclNum = op->gtLclVarCommon.gtLclNum; + unsigned lclNum = op->gtLclVarCommon.GetLclNum(); // Make sure this struct type stays as struct so that we can return it as an HFA lvaTable[lclNum].lvIsMultiRegRet = true; @@ -9024,7 +9025,7 @@ GenTree* Compiler::impFixupStructReturnType(GenTree* op, CORINFO_CLASS_HANDLE re if (op->gtOper == GT_LCL_VAR) { // This LCL_VAR stays as a TYP_STRUCT - unsigned lclNum = op->gtLclVarCommon.gtLclNum; + unsigned lclNum = op->gtLclVarCommon.GetLclNum(); if (!lvaIsImplicitByRefLocal(lclNum)) { @@ -9064,7 +9065,7 @@ GenTree* Compiler::impFixupStructReturnType(GenTree* op, CORINFO_CLASS_HANDLE re { // It is possible that we now have a lclVar of scalar type. // If so, don't transform it to GT_LCL_FLD. - if (lvaTable[op->AsLclVar()->gtLclNum].lvType != info.compRetNativeType) + if (lvaTable[op->AsLclVar()->GetLclNum()].lvType != info.compRetNativeType) { op->ChangeOper(GT_LCL_FLD); } @@ -10821,7 +10822,7 @@ void Compiler::impImportBlockCode(BasicBlock* block) } if (lclVar != nullptr) { - impSpillLclRefs(lclVar->gtLclNum); + impSpillLclRefs(lclVar->GetLclNum()); } } @@ -10977,7 +10978,7 @@ void Compiler::impImportBlockCode(BasicBlock* block) { op1 = impInlineFetchArg(lclNum, impInlineInfo->inlArgInfo, impInlineInfo->lclVarInfo); noway_assert(op1->gtOper == GT_LCL_VAR); - lclNum = op1->AsLclVar()->gtLclNum; + lclNum = op1->AsLclVar()->GetLclNum(); goto VAR_ST_VALID; } @@ -11149,7 +11150,7 @@ void Compiler::impImportBlockCode(BasicBlock* block) /* Filter out simple assignments to itself */ - if (op1->gtOper == GT_LCL_VAR && lclNum == op1->gtLclVarCommon.gtLclNum) + if (op1->gtOper == GT_LCL_VAR && lclNum == op1->gtLclVarCommon.GetLclNum()) { if (opts.compDbgCode) { @@ -11820,8 +11821,8 @@ void Compiler::impImportBlockCode(BasicBlock* block) // This does not need CORINFO_HELP_ARRADDR_ST if (arrayNodeFrom->OperGet() == GT_INDEX && arrayNodeFrom->gtOp.gtOp1->gtOper == GT_LCL_VAR && arrayNodeTo->gtOper == GT_LCL_VAR && - arrayNodeTo->gtLclVarCommon.gtLclNum == arrayNodeFrom->gtOp.gtOp1->gtLclVarCommon.gtLclNum && - !lvaTable[arrayNodeTo->gtLclVarCommon.gtLclNum].lvAddrExposed) + arrayNodeTo->gtLclVarCommon.GetLclNum() == arrayNodeFrom->gtOp.gtOp1->gtLclVarCommon.GetLclNum() && + !lvaTable[arrayNodeTo->gtLclVarCommon.GetLclNum()].lvAddrExposed) { JITDUMP("\nstelem of ref from same array: skipping covariant store check\n"); lclTyp = TYP_REF; @@ -16438,7 +16439,7 @@ bool Compiler::impReturnInstruction(BasicBlock* block, int prefixFlags, OPCODE& // Some other block(s) have seen the CEE_RET first. // Better they spilled to the same temp. 
assert(impInlineInfo->retExpr->gtOper == GT_LCL_VAR); - assert(impInlineInfo->retExpr->gtLclVarCommon.gtLclNum == op2->gtLclVarCommon.gtLclNum); + assert(impInlineInfo->retExpr->gtLclVarCommon.GetLclNum() == op2->gtLclVarCommon.GetLclNum()); } #endif } @@ -16474,7 +16475,7 @@ bool Compiler::impReturnInstruction(BasicBlock* block, int prefixFlags, OPCODE& JITDUMP("\nSetting lvOverlappingFields to true on V%02u because of struct " "reinterpretation\n", - addrChild->AsLclVarCommon()->gtLclNum); + addrChild->AsLclVarCommon()->GetLclNum()); varDsc->lvOverlappingFields = true; } @@ -17690,7 +17691,7 @@ void Compiler::impRetypeEntryStateTemps(BasicBlock* blk) GenTree* tree = es->esStack[level].val; if ((tree->gtOper == GT_LCL_VAR) || (tree->gtOper == GT_LCL_FLD)) { - unsigned lclNum = tree->gtLclVarCommon.gtLclNum; + unsigned lclNum = tree->gtLclVarCommon.GetLclNum(); noway_assert(lclNum < lvaCount); LclVarDsc* varDsc = lvaTable + lclNum; es->esStack[level].val->gtType = varDsc->TypeGet(); @@ -18576,7 +18577,7 @@ void Compiler::impInlineRecordArgInfo(InlineInfo* pInlineInfo, { inlCurArgInfo->argIsByRefToStructLocal = true; #ifdef FEATURE_SIMD - if (lvaTable[lclVarTree->AsLclVarCommon()->gtLclNum].lvSIMDType) + if (lvaTable[lclVarTree->AsLclVarCommon()->GetLclNum()].lvSIMDType) { pInlineInfo->hasSIMDTypeArgLocalOrReturn = true; } @@ -18941,8 +18942,8 @@ void Compiler::impInlineInitVars(InlineInfo* pInlineInfo) /* Narrowing cast */ if (inlArgNode->gtOper == GT_LCL_VAR && - !lvaTable[inlArgNode->gtLclVarCommon.gtLclNum].lvNormalizeOnLoad() && - sigType == lvaGetRealType(inlArgNode->gtLclVarCommon.gtLclNum)) + !lvaTable[inlArgNode->gtLclVarCommon.GetLclNum()].lvNormalizeOnLoad() && + sigType == lvaGetRealType(inlArgNode->gtLclVarCommon.GetLclNum())) { /* We don't need to insert a cast here as the variable was assigned a normalized value of the right type */ @@ -19237,7 +19238,7 @@ GenTree* Compiler::impInlineFetchArg(unsigned lclNum, InlArgInfo* inlArgInfo, In // // Use the caller-supplied node if this is the first use. op1 = argInfo.argNode; - argInfo.argTmpNum = op1->gtLclVarCommon.gtLclNum; + argInfo.argTmpNum = op1->gtLclVarCommon.GetLclNum(); // Use an equivalent copy if this is the second or subsequent // use, or if we need to retype. 
@@ -19251,13 +19252,13 @@ GenTree* Compiler::impInlineFetchArg(unsigned lclNum, InlArgInfo* inlArgInfo, In var_types newTyp = lclTyp; - if (!lvaTable[op1->gtLclVarCommon.gtLclNum].lvNormalizeOnLoad()) + if (!lvaTable[op1->gtLclVarCommon.GetLclNum()].lvNormalizeOnLoad()) { newTyp = genActualType(lclTyp); } // Create a new lcl var node - remember the argument lclNum - op1 = gtNewLclvNode(op1->gtLclVarCommon.gtLclNum, newTyp DEBUGARG(op1->gtLclVar.gtLclILoffs)); + op1 = gtNewLclvNode(op1->gtLclVarCommon.GetLclNum(), newTyp DEBUGARG(op1->gtLclVar.gtLclILoffs)); } } else if (argInfo.argIsByRefToStructLocal && !argInfo.argHasStargOp) @@ -19404,7 +19405,7 @@ GenTree* Compiler::impInlineFetchArg(unsigned lclNum, InlArgInfo* inlArgInfo, In BOOL Compiler::impInlineIsThis(GenTree* tree, InlArgInfo* inlArgInfo) { assert(compIsForInlining()); - return (tree->gtOper == GT_LCL_VAR && tree->gtLclVarCommon.gtLclNum == inlArgInfo[0].argTmpNum); + return (tree->gtOper == GT_LCL_VAR && tree->gtLclVarCommon.GetLclNum() == inlArgInfo[0].argTmpNum); } //----------------------------------------------------------------------------- diff --git a/src/jit/instr.cpp b/src/jit/instr.cpp index b7be6480a61d..2f981f660f58 100644 --- a/src/jit/instr.cpp +++ b/src/jit/instr.cpp @@ -480,7 +480,7 @@ void CodeGen::inst_set_SV_var(GenTree* tree) { #ifdef DEBUG assert(tree && (tree->gtOper == GT_LCL_VAR || tree->gtOper == GT_LCL_VAR_ADDR || tree->gtOper == GT_STORE_LCL_VAR)); - assert(tree->gtLclVarCommon.gtLclNum < compiler->lvaCount); + assert(tree->gtLclVarCommon.GetLclNum() < compiler->lvaCount); GetEmitter()->emitVarRefOffs = tree->gtLclVar.gtLclILoffs; @@ -588,7 +588,7 @@ void CodeGen::inst_TT(instruction ins, GenTree* tree, unsigned offs, int shfv, e goto LCL; LCL: - varNum = tree->gtLclVarCommon.gtLclNum; + varNum = tree->gtLclVarCommon.GetLclNum(); assert(varNum < compiler->lvaCount); if (shfv) @@ -696,7 +696,7 @@ void CodeGen::inst_TT_RV(instruction ins, GenTree* tree, regNumber reg, unsigned LCL: - varNum = tree->gtLclVarCommon.gtLclNum; + varNum = tree->gtLclVarCommon.GetLclNum(); assert(varNum < compiler->lvaCount); #if CPU_LOAD_STORE_ARCH @@ -844,7 +844,7 @@ void CodeGen::inst_RV_TT(instruction ins, goto LCL; LCL: - varNum = tree->gtLclVarCommon.gtLclNum; + varNum = tree->gtLclVarCommon.GetLclNum(); assert(varNum < compiler->lvaCount); #ifdef _TARGET_ARM_ @@ -1139,7 +1139,8 @@ void CodeGen::inst_RV_TT_IV(instruction ins, emitAttr attr, regNumber reg1, GenT case GT_LCL_VAR: { - assert(rmOp->IsRegOptional() || !compiler->lvaGetDesc(rmOp->gtLclVar.gtLclNum)->lvIsRegCandidate()); + assert(rmOp->IsRegOptional() || + !compiler->lvaGetDesc(rmOp->gtLclVar.GetLclNum())->lvIsRegCandidate()); varNum = rmOp->AsLclVar()->GetLclNum(); offset = 0; break; diff --git a/src/jit/lclvars.cpp b/src/jit/lclvars.cpp index 3c171cb8ac0d..cbea5ab26c96 100644 --- a/src/jit/lclvars.cpp +++ b/src/jit/lclvars.cpp @@ -3596,7 +3596,7 @@ void Compiler::lvaMarkLclRefs(GenTree* tree, BasicBlock* block, Statement* stmt, NOT_BOOL: - lclNum = op1->gtLclVarCommon.gtLclNum; + lclNum = op1->gtLclVarCommon.GetLclNum(); noway_assert(lclNum < lvaCount); lvaTable[lclNum].lvIsBoolean = false; @@ -3615,7 +3615,7 @@ void Compiler::lvaMarkLclRefs(GenTree* tree, BasicBlock* block, Statement* stmt, /* This must be a local variable reference */ assert((tree->gtOper == GT_LCL_VAR) || (tree->gtOper == GT_LCL_FLD)); - unsigned lclNum = tree->gtLclVarCommon.gtLclNum; + unsigned lclNum = tree->gtLclVarCommon.GetLclNum(); noway_assert(lclNum < lvaCount); LclVarDsc* varDsc 
= lvaTable + lclNum; @@ -4064,7 +4064,7 @@ void Compiler::lvaComputeRefCounts(bool isRecompute, bool setSlotNumbers) case GT_STORE_LCL_VAR: case GT_STORE_LCL_FLD: { - const unsigned lclNum = node->AsLclVarCommon()->gtLclNum; + const unsigned lclNum = node->AsLclVarCommon()->GetLclNum(); lvaTable[lclNum].incRefCnts(weight, this); break; } @@ -7255,7 +7255,7 @@ Compiler::fgWalkResult Compiler::lvaStressLclFldCB(GenTree** pTree, fgWalkData* Compiler* pComp = ((lvaStressLclFldArgs*)data->pCallbackData)->m_pCompiler; bool bFirstPass = ((lvaStressLclFldArgs*)data->pCallbackData)->m_bFirstPass; noway_assert(lcl->gtOper == GT_LCL_VAR); - unsigned lclNum = lcl->gtLclVarCommon.gtLclNum; + unsigned lclNum = lcl->gtLclVarCommon.GetLclNum(); var_types type = lcl->TypeGet(); LclVarDsc* varDsc = &pComp->lvaTable[lclNum]; diff --git a/src/jit/liveness.cpp b/src/jit/liveness.cpp index ff998208a68f..50cc996c7ab0 100644 --- a/src/jit/liveness.cpp +++ b/src/jit/liveness.cpp @@ -25,7 +25,7 @@ void Compiler::fgMarkUseDef(GenTreeLclVarCommon* tree) { assert((tree->OperIsLocal() && (tree->OperGet() != GT_PHI_ARG)) || tree->OperIsLocalAddr()); - const unsigned lclNum = tree->gtLclNum; + const unsigned lclNum = tree->GetLclNum(); assert(lclNum < lvaCount); LclVarDsc* const varDsc = &lvaTable[lclNum]; @@ -382,7 +382,7 @@ void Compiler::fgPerNodeLocalVarLiveness(GenTree* tree) GenTreeLclVarCommon* dummyLclVarTree = nullptr; if (tree->DefinesLocal(this, &dummyLclVarTree)) { - if (lvaVarAddrExposed(dummyLclVarTree->gtLclNum)) + if (lvaVarAddrExposed(dummyLclVarTree->GetLclNum())) { fgCurMemoryDef |= memoryKindSet(ByrefExposed); @@ -1470,7 +1470,7 @@ VARSET_VALRET_TP Compiler::fgUpdateLiveSet(VARSET_VALARG_TP liveSet, GenTree* tr // fgGetHandlerLiveVars(compCurBB), but seems excessive // assert(VarSetOps::IsEmptyIntersection(this, newLiveSet, varBits) || opts.compDbgCode || - lvaTable[tree->gtLclVarCommon.gtLclNum].lvAddrExposed || + lvaTable[tree->gtLclVarCommon.GetLclNum()].lvAddrExposed || (compCurBB != nullptr && ehBlockHasExnFlowDsc(compCurBB))); VarSetOps::UnionD(this, newLiveSet, varBits); } @@ -1581,7 +1581,7 @@ void Compiler::fgComputeLifeTrackedLocalUse(VARSET_TP& life, LclVarDsc& varDsc, #ifdef DEBUG if (verbose && 0) { - printf("Ref V%02u,T%02u] at ", node->gtLclNum, varIndex); + printf("Ref V%02u,T%02u] at ", node->GetLclNum(), varIndex); printTreeID(node); printf(" life %s -> %s\n", VarSetOps::ToString(this, life), VarSetOps::ToString(this, VarSetOps::AddElem(this, life, varIndex))); @@ -1630,7 +1630,7 @@ bool Compiler::fgComputeLifeTrackedLocalDef(VARSET_TP& life, #ifdef DEBUG if (verbose && 0) { - printf("Def V%02u,T%02u at ", node->gtLclNum, varIndex); + printf("Def V%02u,T%02u at ", node->GetLclNum(), varIndex); printTreeID(node); printf(" life %s -> %s\n", VarSetOps::ToString(this, @@ -1756,7 +1756,7 @@ void Compiler::fgComputeLifeUntrackedLocal(VARSET_TP& life, // `true` if the local var node corresponds to a dead store; `false` otherwise. 
bool Compiler::fgComputeLifeLocal(VARSET_TP& life, VARSET_VALARG_TP keepAliveVars, GenTree* lclVarNode) { - unsigned lclNum = lclVarNode->gtLclVarCommon.gtLclNum; + unsigned lclNum = lclVarNode->gtLclVarCommon.GetLclNum(); assert(lclNum < lvaCount); LclVarDsc& varDsc = lvaTable[lclNum]; @@ -1817,7 +1817,7 @@ void Compiler::fgComputeLife(VARSET_TP& life, bool isDeadStore = fgComputeLifeLocal(life, keepAliveVars, tree); if (isDeadStore) { - LclVarDsc* varDsc = &lvaTable[tree->gtLclVarCommon.gtLclNum]; + LclVarDsc* varDsc = &lvaTable[tree->gtLclVarCommon.GetLclNum()]; bool doAgain = false; if (fgRemoveDeadStore(&tree, varDsc, life, &doAgain, pStmtInfoDirty DEBUGARG(treeModf))) @@ -1903,7 +1903,7 @@ void Compiler::fgComputeLifeLIR(VARSET_TP& life, BasicBlock* block, VARSET_VALAR case GT_LCL_FLD: { GenTreeLclVarCommon* const lclVarNode = node->AsLclVarCommon(); - LclVarDsc& varDsc = lvaTable[lclVarNode->gtLclNum]; + LclVarDsc& varDsc = lvaTable[lclVarNode->GetLclNum()]; if (node->IsUnusedValue()) { @@ -1934,7 +1934,7 @@ void Compiler::fgComputeLifeLIR(VARSET_TP& life, BasicBlock* block, VARSET_VALAR JITDUMP("Removing dead LclVar address:\n"); DISPNODE(node); - const bool isTracked = lvaTable[node->AsLclVarCommon()->gtLclNum].lvTracked; + const bool isTracked = lvaTable[node->AsLclVarCommon()->GetLclNum()].lvTracked; blockRange.Delete(this, block, node); if (isTracked && !opts.MinOpts()) { @@ -1974,7 +1974,7 @@ void Compiler::fgComputeLifeLIR(VARSET_TP& life, BasicBlock* block, VARSET_VALAR { GenTreeLclVarCommon* const lclVarNode = node->AsLclVarCommon(); - LclVarDsc& varDsc = lvaTable[lclVarNode->gtLclNum]; + LclVarDsc& varDsc = lvaTable[lclVarNode->GetLclNum()]; if (varDsc.lvTracked) { isDeadStore = fgComputeLifeTrackedLocalDef(life, keepAliveVars, varDsc, lclVarNode); diff --git a/src/jit/loopcloning.h b/src/jit/loopcloning.h index 8d78b8dd0c61..00a292b4b337 100644 --- a/src/jit/loopcloning.h +++ b/src/jit/loopcloning.h @@ -194,9 +194,9 @@ struct LcMdArrayOptInfo : public LcOptInfo index->rank = arrElem->gtArrRank; for (unsigned i = 0; i < dim; ++i) { - index->indLcls.Push(arrElem->gtArrInds[i]->gtLclVarCommon.gtLclNum); + index->indLcls.Push(arrElem->gtArrInds[i]->gtLclVarCommon.GetLclNum()); } - index->arrLcl = arrElem->gtArrObj->gtLclVarCommon.gtLclNum; + index->arrLcl = arrElem->gtArrObj->gtLclVarCommon.GetLclNum(); } return index; } diff --git a/src/jit/lower.cpp b/src/jit/lower.cpp index 65a2e56857d2..f89ad507f7b4 100644 --- a/src/jit/lower.cpp +++ b/src/jit/lower.cpp @@ -273,7 +273,7 @@ GenTree* Lowering::LowerNode(GenTree* node) case GT_LCL_FLD: { // We should only encounter this for lclVars that are lvDoNotEnregister. 
- verifyLclFldDoNotEnregister(node->AsLclVarCommon()->gtLclNum); + verifyLclFldDoNotEnregister(node->AsLclVarCommon()->GetLclNum()); break; } @@ -506,7 +506,7 @@ GenTree* Lowering::LowerSwitch(GenTree* node) assert(node->gtOper == GT_SWITCH); GenTree* temp = node->gtOp.gtOp1; assert(temp->gtOper == GT_LCL_VAR); - unsigned tempLclNum = temp->gtLclVarCommon.gtLclNum; + unsigned tempLclNum = temp->gtLclVarCommon.GetLclNum(); var_types tempLclType = temp->TypeGet(); BasicBlock* defaultBB = jumpTab[jumpCnt - 1]; @@ -2136,7 +2136,7 @@ void Lowering::RehomeArgForFastTailCall(unsigned int lclNum, LclVarDsc* callerArgDsc = comp->lvaGetDesc(lclNum); var_types tmpTyp = genActualType(callerArgDsc->TypeGet()); comp->lvaTable[tmpLclNum].lvType = tmpTyp; - comp->lvaTable[tmpLclNum].lvDoNotEnregister = comp->lvaTable[lcl->gtLclNum].lvDoNotEnregister; + comp->lvaTable[tmpLclNum].lvDoNotEnregister = comp->lvaTable[lcl->GetLclNum()].lvDoNotEnregister; GenTree* value = comp->gtNewLclvNode(lclNum, tmpTyp); // TODO-1stClassStructs: This can be simplified with 1st class structs work. @@ -4073,7 +4073,7 @@ GenTree* Lowering::LowerVirtualVtableCall(GenTreeCall* call) unsigned lclNum; if (thisPtr->IsLocal()) { - lclNum = thisPtr->gtLclVarCommon.gtLclNum; + lclNum = thisPtr->gtLclVarCommon.GetLclNum(); } else { @@ -5640,7 +5640,7 @@ bool Lowering::NodesAreEquivalentLeaves(GenTree* tree1, GenTree* tree2) tree1->IsIconHandle() == tree2->IsIconHandle(); case GT_LCL_VAR: case GT_LCL_VAR_ADDR: - return tree1->gtLclVarCommon.gtLclNum == tree2->gtLclVarCommon.gtLclNum; + return tree1->gtLclVarCommon.GetLclNum() == tree2->gtLclVarCommon.GetLclNum(); case GT_CLS_VAR_ADDR: return tree1->gtClsVar.gtClsVarHnd == tree2->gtClsVar.gtClsVarHnd; default: @@ -5836,7 +5836,7 @@ void Lowering::ContainCheckRet(GenTreeOp* ret) if (op1->OperGet() == GT_LCL_VAR) { GenTreeLclVarCommon* lclVarCommon = op1->AsLclVarCommon(); - LclVarDsc* varDsc = &(comp->lvaTable[lclVarCommon->gtLclNum]); + LclVarDsc* varDsc = &(comp->lvaTable[lclVarCommon->GetLclNum()]); // This must be a multi-reg return or an HFA of a single element. assert(varDsc->lvIsMultiRegRet || (varDsc->lvIsHfa() && varTypeIsValidHfaType(varDsc->lvType))); diff --git a/src/jit/lowerarmarch.cpp b/src/jit/lowerarmarch.cpp index 984be77eddb8..7ee670f3765f 100644 --- a/src/jit/lowerarmarch.cpp +++ b/src/jit/lowerarmarch.cpp @@ -157,7 +157,7 @@ void Lowering::LowerStoreLoc(GenTreeLclVarCommon* storeLoc) { GenTreeIntCon* con = op1->AsIntCon(); ssize_t ival = con->gtIconVal; - unsigned varNum = storeLoc->gtLclNum; + unsigned varNum = storeLoc->GetLclNum(); LclVarDsc* varDsc = comp->lvaTable + varNum; if (varDsc->lvIsSIMDType()) @@ -202,7 +202,7 @@ void Lowering::LowerStoreLoc(GenTreeLclVarCommon* storeLoc) if (storeLoc->OperIs(GT_STORE_LCL_FLD)) { // We should only encounter this for lclVars that are lvDoNotEnregister. 
- verifyLclFldDoNotEnregister(storeLoc->gtLclNum); + verifyLclFldDoNotEnregister(storeLoc->GetLclNum()); } ContainCheckStoreLoc(storeLoc); } diff --git a/src/jit/lowerxarch.cpp b/src/jit/lowerxarch.cpp index 3b305275516d..e87d582c9a8f 100644 --- a/src/jit/lowerxarch.cpp +++ b/src/jit/lowerxarch.cpp @@ -52,7 +52,7 @@ void Lowering::LowerStoreLoc(GenTreeLclVarCommon* storeLoc) GenTreeIntCon* con = storeLoc->gtOp1->AsIntCon(); ssize_t ival = con->gtIconVal; - unsigned varNum = storeLoc->gtLclNum; + unsigned varNum = storeLoc->GetLclNum(); LclVarDsc* varDsc = comp->lvaTable + varNum; if (varDsc->lvIsSIMDType()) @@ -97,7 +97,7 @@ void Lowering::LowerStoreLoc(GenTreeLclVarCommon* storeLoc) if (storeLoc->OperIs(GT_STORE_LCL_FLD)) { // We should only encounter this for lclVars that are lvDoNotEnregister. - verifyLclFldDoNotEnregister(storeLoc->gtLclNum); + verifyLclFldDoNotEnregister(storeLoc->GetLclNum()); } ContainCheckStoreLoc(storeLoc); } @@ -505,7 +505,7 @@ void Lowering::LowerPutArgStk(GenTreePutArgStk* putArgStk) { if (fieldNode->OperGet() == GT_LCL_VAR) { - LclVarDsc* varDsc = &(comp->lvaTable[fieldNode->AsLclVarCommon()->gtLclNum]); + LclVarDsc* varDsc = &(comp->lvaTable[fieldNode->AsLclVarCommon()->GetLclNum()]); if (!varDsc->lvDoNotEnregister) { fieldNode->SetRegOptional(); diff --git a/src/jit/lsra.cpp b/src/jit/lsra.cpp index 6f3916ebc41b..cca3714c0794 100644 --- a/src/jit/lsra.cpp +++ b/src/jit/lsra.cpp @@ -181,7 +181,7 @@ unsigned LinearScan::getWeight(RefPosition* refPos) // Tracked locals: use weighted ref cnt as the weight of the // ref position. GenTreeLclVarCommon* lclCommon = treeNode->AsLclVarCommon(); - LclVarDsc* varDsc = &(compiler->lvaTable[lclCommon->gtLclNum]); + LclVarDsc* varDsc = &(compiler->lvaTable[lclCommon->GetLclNum()]); weight = varDsc->lvRefCntWtd(); if (refPos->getInterval()->isSpilled) { @@ -7835,7 +7835,7 @@ void LinearScan::handleOutgoingCriticalEdges(BasicBlock* block) if (op1->IsLocal()) { GenTreeLclVarCommon* lcl = op1->AsLclVarCommon(); - jcmpLocalVarDsc = &compiler->lvaTable[lcl->gtLclNum]; + jcmpLocalVarDsc = &compiler->lvaTable[lcl->GetLclNum()]; } } } @@ -9169,7 +9169,7 @@ void LinearScan::lsraDispNode(GenTree* tree, LsraTupleDumpMode mode, bool hasDes unsigned varNum = UINT_MAX; if (tree->IsLocal()) { - varNum = tree->gtLclVarCommon.gtLclNum; + varNum = tree->gtLclVarCommon.GetLclNum(); varDsc = &(compiler->lvaTable[varNum]); if (varDsc->lvLRACandidate) { @@ -10650,8 +10650,8 @@ void LinearScan::verifyResolutionMove(GenTree* resolutionMove, LsraLocation curr GenTreeLclVarCommon* right = dst->gtGetOp2()->AsLclVarCommon(); regNumber leftRegNum = left->gtRegNum; regNumber rightRegNum = right->gtRegNum; - LclVarDsc* leftVarDsc = compiler->lvaTable + left->gtLclNum; - LclVarDsc* rightVarDsc = compiler->lvaTable + right->gtLclNum; + LclVarDsc* leftVarDsc = compiler->lvaTable + left->GetLclNum(); + LclVarDsc* rightVarDsc = compiler->lvaTable + right->GetLclNum(); Interval* leftInterval = getIntervalForLocalVar(leftVarDsc->lvVarIndex); Interval* rightInterval = getIntervalForLocalVar(rightVarDsc->lvVarIndex); assert(leftInterval->physReg == leftRegNum && rightInterval->physReg == rightRegNum); diff --git a/src/jit/lsra.h b/src/jit/lsra.h index b1b29f24499a..3ca30ab95052 100644 --- a/src/jit/lsra.h +++ b/src/jit/lsra.h @@ -1016,9 +1016,9 @@ class LinearScan : public LinearScanInterface { if (tree->IsLocal()) { - unsigned int lclNum = tree->gtLclVarCommon.gtLclNum; + unsigned int lclNum = tree->gtLclVarCommon.GetLclNum(); assert(lclNum < 
compiler->lvaCount); - LclVarDsc* varDsc = compiler->lvaTable + tree->gtLclVarCommon.gtLclNum; + LclVarDsc* varDsc = compiler->lvaTable + tree->gtLclVarCommon.GetLclNum(); return isCandidateVar(varDsc); } @@ -1114,7 +1114,7 @@ class LinearScan : public LinearScanInterface Interval* getIntervalForLocalVarNode(GenTreeLclVarCommon* tree) { - LclVarDsc* varDsc = &compiler->lvaTable[tree->gtLclNum]; + LclVarDsc* varDsc = &compiler->lvaTable[tree->GetLclNum()]; assert(varDsc->lvTracked); return getIntervalForLocalVar(varDsc->lvVarIndex); } @@ -1593,7 +1593,7 @@ class LinearScan : public LinearScanInterface } // If 'fromTree' was a lclVar, it must be contained and 'toTree' must match. if (!fromTree->isContained() || (toTree == nullptr) || !toTree->OperIs(GT_LCL_VAR) || - (toTree->AsLclVarCommon()->gtLclNum != toTree->AsLclVarCommon()->gtLclNum)) + (toTree->AsLclVarCommon()->GetLclNum() != toTree->AsLclVarCommon()->GetLclNum())) { assert(!"Unmatched RMW indirections"); return; diff --git a/src/jit/lsraarm.cpp b/src/jit/lsraarm.cpp index 85925e277c4c..98c50c62cd54 100644 --- a/src/jit/lsraarm.cpp +++ b/src/jit/lsraarm.cpp @@ -230,7 +230,7 @@ int LinearScan::BuildNode(GenTree* tree) // is processed, unless this is marked "isLocalDefUse" because it is a stack-based argument // to a call or an orphaned dead node. // - LclVarDsc* const varDsc = &compiler->lvaTable[tree->AsLclVarCommon()->gtLclNum]; + LclVarDsc* const varDsc = &compiler->lvaTable[tree->AsLclVarCommon()->GetLclNum()]; if (isCandidateVar(varDsc)) { return 0; diff --git a/src/jit/lsraarm64.cpp b/src/jit/lsraarm64.cpp index 7d649760bc78..6d5285b3f3e6 100644 --- a/src/jit/lsraarm64.cpp +++ b/src/jit/lsraarm64.cpp @@ -87,7 +87,7 @@ int LinearScan::BuildNode(GenTree* tree) // is processed, unless this is marked "isLocalDefUse" because it is a stack-based argument // to a call or an orphaned dead node. // - LclVarDsc* const varDsc = &compiler->lvaTable[tree->AsLclVarCommon()->gtLclNum]; + LclVarDsc* const varDsc = &compiler->lvaTable[tree->AsLclVarCommon()->GetLclNum()]; if (isCandidateVar(varDsc)) { return 0; diff --git a/src/jit/lsrabuild.cpp b/src/jit/lsrabuild.cpp index f5ff0f801e25..87b406aa5cdd 100644 --- a/src/jit/lsrabuild.cpp +++ b/src/jit/lsrabuild.cpp @@ -692,7 +692,7 @@ bool LinearScan::isContainableMemoryOp(GenTree* node) { return true; } - LclVarDsc* varDsc = &compiler->lvaTable[node->AsLclVar()->gtLclNum]; + LclVarDsc* varDsc = &compiler->lvaTable[node->AsLclVar()->GetLclNum()]; return varDsc->lvDoNotEnregister; } return false; @@ -1568,7 +1568,7 @@ void LinearScan::buildRefPositionsForNode(GenTree* tree, BasicBlock* block, Lsra // address computation. In this case we need to check whether it is a last use. if (tree->IsLocal() && ((tree->gtFlags & GTF_VAR_DEATH) != 0)) { - LclVarDsc* const varDsc = &compiler->lvaTable[tree->AsLclVarCommon()->gtLclNum]; + LclVarDsc* const varDsc = &compiler->lvaTable[tree->AsLclVarCommon()->GetLclNum()]; if (isCandidateVar(varDsc)) { assert(varDsc->lvTracked); @@ -2896,7 +2896,7 @@ int LinearScan::BuildStoreLoc(GenTreeLclVarCommon* storeLoc) GenTree* op1 = storeLoc->gtGetOp1(); int srcCount; RefPosition* singleUseRef = nullptr; - LclVarDsc* varDsc = &compiler->lvaTable[storeLoc->gtLclNum]; + LclVarDsc* varDsc = &compiler->lvaTable[storeLoc->GetLclNum()]; // First, define internal registers. 
#ifdef FEATURE_SIMD diff --git a/src/jit/lsraxarch.cpp b/src/jit/lsraxarch.cpp index 9946d5859aee..83521372abf8 100644 --- a/src/jit/lsraxarch.cpp +++ b/src/jit/lsraxarch.cpp @@ -92,8 +92,8 @@ int LinearScan::BuildNode(GenTree* tree) // use lvLRACandidate here instead. if (tree->IsRegOptional()) { - if (!compiler->lvaTable[tree->AsLclVarCommon()->gtLclNum].lvTracked || - compiler->lvaTable[tree->AsLclVarCommon()->gtLclNum].lvDoNotEnregister) + if (!compiler->lvaTable[tree->AsLclVarCommon()->GetLclNum()].lvTracked || + compiler->lvaTable[tree->AsLclVarCommon()->GetLclNum()].lvDoNotEnregister) { tree->ClearRegOptional(); tree->SetContained(); @@ -113,7 +113,7 @@ int LinearScan::BuildNode(GenTree* tree) // is processed, unless this is marked "isLocalDefUse" because it is a stack-based argument // to a call or an orphaned dead node. // - LclVarDsc* const varDsc = &compiler->lvaTable[tree->AsLclVarCommon()->gtLclNum]; + LclVarDsc* const varDsc = &compiler->lvaTable[tree->AsLclVarCommon()->GetLclNum()]; if (isCandidateVar(varDsc)) { return 0; diff --git a/src/jit/morph.cpp b/src/jit/morph.cpp index eda2cdb2d3d6..b318092ff00d 100644 --- a/src/jit/morph.cpp +++ b/src/jit/morph.cpp @@ -558,7 +558,7 @@ GenTree* Compiler::fgMorphCast(GenTree* tree) if (oper->OperGet() == GT_LCL_VAR && varTypeIsSmall(dstType)) { - unsigned varNum = oper->gtLclVarCommon.gtLclNum; + unsigned varNum = oper->gtLclVarCommon.GetLclNum(); LclVarDsc* varDsc = &lvaTable[varNum]; if (varDsc->TypeGet() == dstType && varDsc->lvNormalizeOnStore()) { @@ -3884,7 +3884,7 @@ GenTreeCall* Compiler::fgMorphArgs(GenTreeCall* call) assert(argEntry->structDesc.passedInRegisters); if (lclVar != nullptr) { - if (lvaGetPromotionType(lclVar->gtLclVarCommon.gtLclNum) == PROMOTION_TYPE_INDEPENDENT) + if (lvaGetPromotionType(lclVar->gtLclVarCommon.GetLclNum()) == PROMOTION_TYPE_INDEPENDENT) { copyBlkClass = objClass; } @@ -3917,7 +3917,7 @@ GenTreeCall* Compiler::fgMorphArgs(GenTreeCall* call) #ifdef _TARGET_ARM_ // TODO-1stClassStructs: Unify these conditions across targets. if (((lclVar != nullptr) && - (lvaGetPromotionType(lclVar->gtLclVarCommon.gtLclNum) == PROMOTION_TYPE_INDEPENDENT)) || + (lvaGetPromotionType(lclVar->gtLclVarCommon.GetLclNum()) == PROMOTION_TYPE_INDEPENDENT)) || ((argObj->OperIs(GT_OBJ)) && (passingSize != structSize))) { copyBlkClass = objClass; @@ -3973,7 +3973,7 @@ GenTreeCall* Compiler::fgMorphArgs(GenTreeCall* call) } if (argObj->gtOper == GT_LCL_VAR) { - unsigned lclNum = argObj->gtLclVarCommon.gtLclNum; + unsigned lclNum = argObj->gtLclVarCommon.GetLclNum(); LclVarDsc* varDsc = &lvaTable[lclNum]; if (varDsc->lvPromoted) @@ -4155,11 +4155,11 @@ GenTreeCall* Compiler::fgMorphArgs(GenTreeCall* call) { GenTree* lclNode = argx->OperIs(GT_LCL_VAR) ? argx : fgIsIndirOfAddrOfLocal(argx); if ((lclNode != nullptr) && - (lvaGetPromotionType(lclNode->AsLclVarCommon()->gtLclNum) == Compiler::PROMOTION_TYPE_INDEPENDENT)) + (lvaGetPromotionType(lclNode->AsLclVarCommon()->GetLclNum()) == Compiler::PROMOTION_TYPE_INDEPENDENT)) { // Make a GT_FIELD_LIST of the field lclVars. 
GenTreeLclVarCommon* lcl = lclNode->AsLclVarCommon(); - LclVarDsc* varDsc = &(lvaTable[lcl->gtLclNum]); + LclVarDsc* varDsc = &(lvaTable[lcl->GetLclNum()]); GenTreeFieldList* fieldList = nullptr; for (unsigned fieldLclNum = varDsc->lvFieldLclStart; fieldLclNum < varDsc->lvFieldLclStart + varDsc->lvFieldCnt; ++fieldLclNum) @@ -4349,7 +4349,7 @@ void Compiler::fgMorphMultiregStructArgs(GenTreeCall* call) else { assert(argx->OperIs(GT_LCL_VAR)); - structSize = lvaGetDesc(argx->AsLclVar()->gtLclNum)->lvExactSize; + structSize = lvaGetDesc(argx->AsLclVar()->GetLclNum())->lvExactSize; } assert(structSize > 0); if (structSize == genTypeSize(hfaType)) @@ -4450,7 +4450,7 @@ GenTree* Compiler::fgMorphMultiregStructArg(GenTree* arg, fgArgTabEntry* fgEntry } if (lcl != nullptr) { - if (lvaGetPromotionType(lcl->gtLclNum) == PROMOTION_TYPE_INDEPENDENT) + if (lvaGetPromotionType(lcl->GetLclNum()) == PROMOTION_TYPE_INDEPENDENT) { arg = fgMorphLclArgToFieldlist(lcl); } @@ -4461,10 +4461,10 @@ GenTree* Compiler::fgMorphMultiregStructArg(GenTree* arg, fgArgTabEntry* fgEntry { // Create an Obj of the temp to use it as a call argument. arg = gtNewOperNode(GT_ADDR, TYP_I_IMPL, arg); - arg = gtNewObjNode(lvaGetStruct(lcl->gtLclNum), arg); + arg = gtNewObjNode(lvaGetStruct(lcl->GetLclNum()), arg); } // Its fields will need to be accessed by address. - lvaSetVarDoNotEnregister(lcl->gtLclNum DEBUG_ARG(DNER_IsStructArg)); + lvaSetVarDoNotEnregister(lcl->GetLclNum() DEBUG_ARG(DNER_IsStructArg)); } } @@ -4507,7 +4507,7 @@ GenTree* Compiler::fgMorphMultiregStructArg(GenTree* arg, fgArgTabEntry* fgEntry else if (arg->OperGet() == GT_LCL_VAR) { GenTreeLclVarCommon* varNode = arg->AsLclVarCommon(); - unsigned varNum = varNode->gtLclNum; + unsigned varNum = varNode->GetLclNum(); assert(varNum < lvaCount); LclVarDsc* varDsc = &lvaTable[varNum]; @@ -4618,7 +4618,7 @@ GenTree* Compiler::fgMorphMultiregStructArg(GenTree* arg, fgArgTabEntry* fgEntry if (argValue->OperGet() == GT_LCL_VAR) { GenTreeLclVarCommon* varNode = argValue->AsLclVarCommon(); - unsigned varNum = varNode->gtLclNum; + unsigned varNum = varNode->GetLclNum(); assert(varNum < lvaCount); LclVarDsc* varDsc = &lvaTable[varNum]; @@ -4823,7 +4823,7 @@ GenTree* Compiler::fgMorphMultiregStructArg(GenTree* arg, fgArgTabEntry* fgEntry if ((argValue->OperGet() == GT_LCL_FLD) || (argValue->OperGet() == GT_LCL_VAR)) { GenTreeLclVarCommon* varNode = argValue->AsLclVarCommon(); - unsigned varNum = varNode->gtLclNum; + unsigned varNum = varNode->GetLclNum(); assert(varNum < lvaCount); LclVarDsc* varDsc = &lvaTable[varNum]; @@ -4897,7 +4897,7 @@ GenTree* Compiler::fgMorphMultiregStructArg(GenTree* arg, fgArgTabEntry* fgEntry if (addrTaken->IsLocal()) { GenTreeLclVarCommon* varNode = addrTaken->AsLclVarCommon(); - unsigned varNum = varNode->gtLclNum; + unsigned varNum = varNode->GetLclNum(); // We access non-struct type (for example, long) as a struct type. // Make sure lclVar lives on stack to make sure its fields are accessible by address. 
lvaSetVarDoNotEnregister(varNum DEBUGARG(DNER_LocalField)); @@ -4999,7 +4999,7 @@ GenTree* Compiler::fgMorphMultiregStructArg(GenTree* arg, fgArgTabEntry* fgEntry // GenTreeFieldList* Compiler::fgMorphLclArgToFieldlist(GenTreeLclVarCommon* lcl) { - LclVarDsc* varDsc = &(lvaTable[lcl->gtLclNum]); + LclVarDsc* varDsc = &(lvaTable[lcl->GetLclNum()]); assert(varDsc->lvPromoted == true); unsigned fieldCount = varDsc->lvFieldCnt; @@ -6000,7 +6000,7 @@ GenTree* Compiler::fgMorphLocalVar(GenTree* tree, bool forceRemorph) { assert(tree->gtOper == GT_LCL_VAR); - unsigned lclNum = tree->gtLclVarCommon.gtLclNum; + unsigned lclNum = tree->gtLclVarCommon.GetLclNum(); var_types varType = lvaGetRealType(lclNum); LclVarDsc* varDsc = &lvaTable[lclNum]; @@ -6134,7 +6134,7 @@ GenTree* Compiler::fgMorphField(GenTree* tree, MorphAddrContext* mac) GenTreeLclVarCommon* lcl = objRef->IsLocalAddrExpr(); if (lcl != nullptr) { - lvaSetVarDoNotEnregister(lcl->gtLclNum DEBUGARG(DNER_LocalField)); + lvaSetVarDoNotEnregister(lcl->GetLclNum() DEBUGARG(DNER_LocalField)); } } #endif @@ -6260,7 +6260,7 @@ GenTree* Compiler::fgMorphField(GenTree* tree, MorphAddrContext* mac) bool addExplicitNullCheck = false; // Implicit byref locals are never null. - if (!((objRef->gtOper == GT_LCL_VAR) && lvaIsImplicitByRefLocal(objRef->gtLclVarCommon.gtLclNum))) + if (!((objRef->gtOper == GT_LCL_VAR) && lvaIsImplicitByRefLocal(objRef->gtLclVarCommon.GetLclNum()))) { // If the objRef is a GT_ADDR node, it, itself, never requires null checking. The expression // whose address is being taken is either a local or static variable, whose address is necessarily @@ -6321,7 +6321,7 @@ GenTree* Compiler::fgMorphField(GenTree* tree, MorphAddrContext* mac) } else { - lclNum = objRef->gtLclVarCommon.gtLclNum; + lclNum = objRef->gtLclVarCommon.GetLclNum(); } // Create the "nullchk" node. @@ -7193,7 +7193,7 @@ GenTree* Compiler::fgMorphPotentialTailCall(GenTreeCall* call) { noway_assert(call->TypeGet() == TYP_VOID); GenTree* retValBuf = call->gtCallArgs->GetNode(); - if (retValBuf->gtOper != GT_LCL_VAR || retValBuf->gtLclVarCommon.gtLclNum != info.compRetBuffArg) + if (retValBuf->gtOper != GT_LCL_VAR || retValBuf->gtLclVarCommon.GetLclNum() != info.compRetBuffArg) { failTailCall("Need to copy return buffer"); return nullptr; @@ -8238,7 +8238,7 @@ Statement* Compiler::fgAssignRecursiveCallArgToCallerParam(GenTree* arg, } else if (arg->OperGet() == GT_LCL_VAR) { - unsigned lclNum = arg->AsLclVar()->gtLclNum; + unsigned lclNum = arg->AsLclVar()->GetLclNum(); LclVarDsc* varDsc = &lvaTable[lclNum]; if (!varDsc->lvIsParam) { @@ -8458,7 +8458,7 @@ GenTree* Compiler::fgMorphCall(GenTreeCall* call) { structHnd = call->gtRetClsHnd; if (info.compCompHnd->isStructRequiringStackAllocRetBuf(structHnd) && - !(dest->OperGet() == GT_LCL_VAR && dest->gtLclVar.gtLclNum == info.compRetBuffArg)) + !(dest->OperGet() == GT_LCL_VAR && dest->gtLclVar.GetLclNum() == info.compRetBuffArg)) { // Force re-evaluating the argInfo as the return argument has changed. 
call->fgArgInfo = nullptr; @@ -8584,7 +8584,7 @@ GenTree* Compiler::fgMorphCall(GenTreeCall* call) { GenTree* var = origDest->gtOp.gtOp1; origDest = gtNewOperNode(GT_COMMA, var->TypeGet(), origDest, - gtNewLclvNode(var->gtLclVar.gtLclNum, var->TypeGet())); + gtNewLclvNode(var->gtLclVar.GetLclNum(), var->TypeGet())); } } GenTree* copyBlk = gtNewCpObjNode(origDest, retValVarAddr, structHnd, false); @@ -8702,7 +8702,7 @@ GenTree* Compiler::fgMorphLeaf(GenTree* tree) if (info.compIsVarArgs) { GenTree* newTree = - fgMorphStackArgForVarArgs(tree->gtLclFld.gtLclNum, tree->gtType, tree->gtLclFld.gtLclOffs); + fgMorphStackArgForVarArgs(tree->gtLclFld.GetLclNum(), tree->gtType, tree->gtLclFld.gtLclOffs); if (newTree != nullptr) { if (newTree->OperIsBlk() && ((tree->gtFlags & GTF_VAR_DEF) == 0)) @@ -8821,7 +8821,7 @@ GenTree* Compiler::fgMorphOneAsgBlockOp(GenTree* tree) size = lhsBlk->Size(); if (impIsAddressInLocal(lhsBlk->Addr(), &destLclVarTree)) { - destVarNum = destLclVarTree->AsLclVarCommon()->gtLclNum; + destVarNum = destLclVarTree->AsLclVarCommon()->GetLclNum(); destVarDsc = &(lvaTable[destVarNum]); } if (lhsBlk->OperGet() == GT_OBJ) @@ -8856,7 +8856,7 @@ GenTree* Compiler::fgMorphOneAsgBlockOp(GenTree* tree) } if (destLclVarTree != nullptr) { - destVarNum = destLclVarTree->AsLclVarCommon()->gtLclNum; + destVarNum = destLclVarTree->AsLclVarCommon()->GetLclNum(); destVarDsc = &(lvaTable[destVarNum]); if (asgType == TYP_STRUCT) { @@ -8933,11 +8933,11 @@ GenTree* Compiler::fgMorphOneAsgBlockOp(GenTree* tree) if (src->OperGet() == GT_LCL_VAR) { srcLclVarTree = src; - srcVarDsc = &(lvaTable[src->AsLclVarCommon()->gtLclNum]); + srcVarDsc = &(lvaTable[src->AsLclVarCommon()->GetLclNum()]); } else if (src->OperIsIndir() && impIsAddressInLocal(src->gtOp.gtOp1, &srcLclVarTree)) { - srcVarDsc = &(lvaTable[srcLclVarTree->AsLclVarCommon()->gtLclNum]); + srcVarDsc = &(lvaTable[srcLclVarTree->AsLclVarCommon()->GetLclNum()]); } if ((srcVarDsc != nullptr) && varTypeIsStruct(srcLclVarTree) && srcVarDsc->lvPromoted) { @@ -9083,7 +9083,7 @@ GenTree* Compiler::fgMorphOneAsgBlockOp(GenTree* tree) { // The source argument of the copyblk can potentially be accessed only through indir(addr(lclVar)) // or indir(lclVarAddr) so it must be on the stack. 
- unsigned lclVarNum = srcLclVarTree->gtLclVarCommon.gtLclNum; + unsigned lclVarNum = srcLclVarTree->gtLclVarCommon.GetLclNum(); lvaSetVarDoNotEnregister(lclVarNum DEBUGARG(DNER_BlockOp)); GenTree* srcAddr; if (src == srcLclVarTree) @@ -9710,7 +9710,7 @@ GenTree* Compiler::fgMorphBlkNode(GenTree* tree, bool isDest) GenTreeLclVarCommon* lclVarNode = blkNode->Addr()->gtGetOp1()->AsLclVarCommon(); if ((genTypeSize(blkNode) != genTypeSize(lclVarNode)) || (!isDest && !varTypeIsStruct(lclVarNode))) { - lvaSetVarDoNotEnregister(lclVarNode->gtLclNum DEBUG_ARG(DNER_VMNeedsStackAddr)); + lvaSetVarDoNotEnregister(lclVarNode->GetLclNum() DEBUG_ARG(DNER_VMNeedsStackAddr)); } } @@ -9785,12 +9785,12 @@ GenTree* Compiler::fgMorphBlockOperand(GenTree* tree, var_types asgType, unsigne } if (lclNode != nullptr) { - LclVarDsc* varDsc = &(lvaTable[lclNode->gtLclNum]); + LclVarDsc* varDsc = &(lvaTable[lclNode->GetLclNum()]); if (varTypeIsStruct(varDsc) && (varDsc->lvExactSize == blockWidth) && (varDsc->lvType == asgType)) { if (effectiveVal != lclNode) { - JITDUMP("Replacing block node [%06d] with lclVar V%02u\n", dspTreeID(tree), lclNode->gtLclNum); + JITDUMP("Replacing block node [%06d] with lclVar V%02u\n", dspTreeID(tree), lclNode->GetLclNum()); effectiveVal = lclNode; } needsIndirection = false; @@ -9974,7 +9974,7 @@ GenTree* Compiler::fgMorphCopyBlock(GenTree* tree) if (dest->gtOper == GT_LCL_VAR) { lclVarTree = dest->AsLclVarCommon(); - destLclNum = lclVarTree->gtLclNum; + destLclNum = lclVarTree->GetLclNum(); destLclVar = &lvaTable[destLclNum]; if (destLclVar->lvType == TYP_STRUCT) { @@ -10030,7 +10030,7 @@ GenTree* Compiler::fgMorphCopyBlock(GenTree* tree) if (destAddr->IsLocalAddrExpr(this, &lclVarTree, &destFldSeq)) { destOnStack = true; - destLclNum = lclVarTree->gtLclNum; + destLclNum = lclVarTree->GetLclNum(); destLclVar = &lvaTable[destLclNum]; } } @@ -10075,7 +10075,7 @@ GenTree* Compiler::fgMorphCopyBlock(GenTree* tree) if (rhs->IsLocal()) { srcLclVarTree = rhs->AsLclVarCommon(); - srcLclNum = srcLclVarTree->gtLclNum; + srcLclNum = srcLclVarTree->GetLclNum(); if (rhs->OperGet() == GT_LCL_FLD) { srcFldSeq = rhs->AsLclFld()->gtFieldSeq; @@ -10085,7 +10085,7 @@ GenTree* Compiler::fgMorphCopyBlock(GenTree* tree) { if (rhs->gtOp.gtOp1->IsLocalAddrExpr(this, &srcLclVarTree, &srcFldSeq)) { - srcLclNum = srcLclVarTree->gtLclNum; + srcLclNum = srcLclVarTree->GetLclNum(); } else { @@ -10445,7 +10445,7 @@ GenTree* Compiler::fgMorphCopyBlock(GenTree* tree) // We will *not* consider this to define the local, but rather have each individual field assign // be a definition. 
addrSpill->gtOp.gtOp1->gtFlags &= ~(GTF_LIVENESS_MASK); - assert(lvaGetPromotionType(addrSpill->gtOp.gtOp1->gtLclVarCommon.gtLclNum) != + assert(lvaGetPromotionType(addrSpill->gtOp.gtOp1->gtLclVarCommon.GetLclNum()) != PROMOTION_TYPE_INDEPENDENT); addrSpillIsStackDest = true; // addrSpill represents the address of LclVar[varNum] in our // local stack frame @@ -10484,7 +10484,7 @@ GenTree* Compiler::fgMorphCopyBlock(GenTree* tree) GenTree* addrOp = addrSpill->gtOp.gtOp1; if (addrOp->IsLocal()) { - unsigned lclVarNum = addrOp->gtLclVarCommon.gtLclNum; + unsigned lclVarNum = addrOp->gtLclVarCommon.GetLclNum(); lvaTable[lclVarNum].lvAddrExposed = true; lvaSetVarDoNotEnregister(lclVarNum DEBUGARG(DNER_AddrExposed)); } @@ -10911,7 +10911,7 @@ GenTree* Compiler::getSIMDStructFromField(GenTree* tree, if (isSIMDTypeLocal(obj)) { - unsigned lclNum = obj->gtLclVarCommon.gtLclNum; + unsigned lclNum = obj->gtLclVarCommon.GetLclNum(); LclVarDsc* varDsc = &lvaTable[lclNum]; if (varDsc->lvIsUsedInSIMDIntrinsic() || ignoreUsedInSIMDIntrinsic) { @@ -12141,7 +12141,7 @@ GenTree* Compiler::fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac) // When we introduce the CSE we remove the GT_IND and subsitute a GT_LCL_VAR in it place. else if (op2->OperIsCompare() && (op2->gtType == TYP_BYTE) && (op1->gtOper == GT_LCL_VAR)) { - unsigned varNum = op1->gtLclVarCommon.gtLclNum; + unsigned varNum = op1->gtLclVarCommon.GetLclNum(); LclVarDsc* varDsc = &lvaTable[varNum]; /* We again need to zero extend the setcc instruction */ @@ -12301,7 +12301,7 @@ GenTree* Compiler::fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac) goto SKIP; } - unsigned lclNum = lcl->gtLclVarCommon.gtLclNum; + unsigned lclNum = lcl->gtLclVarCommon.GetLclNum(); noway_assert(lclNum < lvaCount); /* If the LCL_VAR is not a temp then bail, a temp has a single def */ @@ -12326,7 +12326,7 @@ GenTree* Compiler::fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac) } /* Both of the LCL_VAR must match */ - if (asg->gtOp.gtOp1->gtLclVarCommon.gtLclNum != lclNum) + if (asg->gtOp.gtOp1->gtLclVarCommon.GetLclNum() != lclNum) { goto SKIP; } @@ -13061,7 +13061,7 @@ GenTree* Compiler::fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac) } else if (temp->OperIsLocal()) { - unsigned lclNum = temp->gtLclVarCommon.gtLclNum; + unsigned lclNum = temp->gtLclVarCommon.GetLclNum(); LclVarDsc* varDsc = &lvaTable[lclNum]; // We will try to optimize when we have a promoted struct promoted with a zero lvFldOffset @@ -13179,7 +13179,7 @@ GenTree* Compiler::fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac) } // The emitter can get confused by invalid offsets - if (ival1 >= Compiler::lvaLclSize(temp->gtLclVarCommon.gtLclNum)) + if (ival1 >= Compiler::lvaLclSize(temp->gtLclVarCommon.GetLclNum())) { break; } @@ -13215,7 +13215,7 @@ GenTree* Compiler::fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac) { assert(temp->OperIsLocal()); - const unsigned lclNum = temp->AsLclVarCommon()->gtLclNum; + const unsigned lclNum = temp->AsLclVarCommon()->GetLclNum(); LclVarDsc* const varDsc = &lvaTable[lclNum]; const var_types tempTyp = temp->TypeGet(); @@ -15042,7 +15042,7 @@ void Compiler::fgMorphTreeDone(GenTree* tree, // check what gets assigned only when we're at an assignment. 
if (tree->OperIs(GT_ASG) && tree->DefinesLocal(this, &lclVarTree)) { - unsigned lclNum = lclVarTree->gtLclNum; + unsigned lclNum = lclVarTree->GetLclNum(); noway_assert(lclNum < lvaCount); fgKillDependentAssertions(lclNum DEBUGARG(tree)); } @@ -16635,7 +16635,7 @@ void Compiler::fgExpandQmarkStmt(BasicBlock* block, Statement* stmt) if (dst != nullptr) { assert(dst->gtOper == GT_LCL_VAR); - lclNum = dst->gtLclVar.gtLclNum; + lclNum = dst->gtLclVar.GetLclNum(); } else { @@ -17058,7 +17058,7 @@ void Compiler::fgMorphStructField(GenTree* tree, GenTree* parent) if ((obj != nullptr) && (obj->gtOper == GT_LCL_VAR)) { - unsigned lclNum = obj->gtLclVarCommon.gtLclNum; + unsigned lclNum = obj->gtLclVarCommon.GetLclNum(); const LclVarDsc* varDsc = &lvaTable[lclNum]; if (varTypeIsStruct(obj)) @@ -17212,7 +17212,7 @@ void Compiler::fgMorphLocalField(GenTree* tree, GenTree* parent) { noway_assert(tree->OperGet() == GT_LCL_FLD); - unsigned lclNum = tree->gtLclFld.gtLclNum; + unsigned lclNum = tree->gtLclFld.GetLclNum(); LclVarDsc* varDsc = &lvaTable[lclNum]; if (varTypeIsStruct(varDsc) && (varDsc->lvPromoted)) @@ -17247,7 +17247,7 @@ void Compiler::fgMorphLocalField(GenTree* tree, GenTree* parent) (varTypeIsEnregisterable(fieldType) || varTypeIsSIMD(fieldType))); tree->ChangeOper(GT_LCL_VAR); - assert(tree->gtLclVarCommon.gtLclNum == fieldLclIndex); + assert(tree->gtLclVarCommon.GetLclNum() == fieldLclIndex); tree->gtType = fldVarDsc->TypeGet(); #ifdef DEBUG if (verbose) @@ -17627,7 +17627,7 @@ GenTree* Compiler::fgMorphImplicitByRefArgs(GenTree* tree, bool isAddr) assert(isAddr == (tree->gtOper == GT_ADDR)); GenTree* lclVarTree = isAddr ? tree->gtOp.gtOp1 : tree; - unsigned lclNum = lclVarTree->gtLclVarCommon.gtLclNum; + unsigned lclNum = lclVarTree->gtLclVarCommon.GetLclNum(); LclVarDsc* lclVarDsc = &lvaTable[lclNum]; CORINFO_FIELD_HANDLE fieldHnd; @@ -18747,7 +18747,7 @@ bool Compiler::fgMorphCombineSIMDFieldAssignments(BasicBlock* block, Statement* if (simdStructNode->TypeGet() == TYP_BYREF) { assert(simdStructNode->OperIsLocal()); - assert(lvaIsImplicitByRefLocal(simdStructNode->AsLclVarCommon()->gtLclNum)); + assert(lvaIsImplicitByRefLocal(simdStructNode->AsLclVarCommon()->GetLclNum())); simdStructNode = gtNewIndir(simdType, simdStructNode); } else @@ -18889,7 +18889,7 @@ bool Compiler::fgCheckStmtAfterTailCall() else { noway_assert(callExpr->gtGetOp1()->OperIsLocal()); - unsigned callResultLclNumber = callExpr->gtGetOp1()->AsLclVarCommon()->gtLclNum; + unsigned callResultLclNumber = callExpr->gtGetOp1()->AsLclVarCommon()->GetLclNum(); #if FEATURE_TAILCALL_OPT_SHARED_RETURN @@ -18901,9 +18901,9 @@ bool Compiler::fgCheckStmtAfterTailCall() GenTree* moveExpr = nextMorphStmt->gtStmtExpr; noway_assert(moveExpr->gtGetOp1()->OperIsLocal() && moveExpr->gtGetOp2()->OperIsLocal()); - unsigned srcLclNum = moveExpr->gtGetOp2()->AsLclVarCommon()->gtLclNum; + unsigned srcLclNum = moveExpr->gtGetOp2()->AsLclVarCommon()->GetLclNum(); noway_assert(srcLclNum == callResultLclNumber); - unsigned dstLclNum = moveExpr->gtGetOp1()->AsLclVarCommon()->gtLclNum; + unsigned dstLclNum = moveExpr->gtGetOp1()->AsLclVarCommon()->GetLclNum(); callResultLclNumber = dstLclNum; nextMorphStmt = moveStmt->GetNextStmt(); @@ -18922,7 +18922,7 @@ bool Compiler::fgCheckStmtAfterTailCall() treeWithLcl = treeWithLcl->gtGetOp1(); } - noway_assert(callResultLclNumber == treeWithLcl->AsLclVarCommon()->gtLclNum); + noway_assert(callResultLclNumber == treeWithLcl->AsLclVarCommon()->GetLclNum()); nextMorphStmt = retStmt->GetNextStmt(); } diff 
--git a/src/jit/objectalloc.cpp b/src/jit/objectalloc.cpp index b2981745a38c..ac726bedb9d2 100644 --- a/src/jit/objectalloc.cpp +++ b/src/jit/objectalloc.cpp @@ -867,7 +867,7 @@ void ObjectAllocator::RewriteUses() assert(tree != nullptr); assert(tree->IsLocal()); - const unsigned int lclNum = tree->AsLclVarCommon()->gtLclNum; + const unsigned int lclNum = tree->AsLclVarCommon()->GetLclNum(); unsigned int newLclNum = BAD_VAR_NUM; LclVarDsc* lclVarDsc = m_compiler->lvaTable + lclNum; diff --git a/src/jit/optimizer.cpp b/src/jit/optimizer.cpp index e801b1fc3dd9..7f3bb2b1a915 100644 --- a/src/jit/optimizer.cpp +++ b/src/jit/optimizer.cpp @@ -722,7 +722,7 @@ bool Compiler::optPopulateInitInfo(unsigned loopInd, GenTree* init, unsigned ite GenTree* lhs = init->gtOp.gtOp1; GenTree* rhs = init->gtOp.gtOp2; // LHS has to be local and should equal iterVar. - if (lhs->gtOper != GT_LCL_VAR || lhs->gtLclVarCommon.gtLclNum != iterVar) + if (lhs->gtOper != GT_LCL_VAR || lhs->gtLclVarCommon.GetLclNum() != iterVar) { return false; } @@ -737,7 +737,7 @@ bool Compiler::optPopulateInitInfo(unsigned loopInd, GenTree* init, unsigned ite else if (rhs->gtOper == GT_LCL_VAR) { optLoopTable[loopInd].lpFlags |= LPFLG_VAR_INIT; - optLoopTable[loopInd].lpVarInit = rhs->gtLclVarCommon.gtLclNum; + optLoopTable[loopInd].lpVarInit = rhs->gtLclVarCommon.GetLclNum(); } else { @@ -788,12 +788,12 @@ bool Compiler::optCheckIterInLoopTest( GenTree* limitOp; // Make sure op1 or op2 is the iterVar. - if (opr1->gtOper == GT_LCL_VAR && opr1->gtLclVarCommon.gtLclNum == iterVar) + if (opr1->gtOper == GT_LCL_VAR && opr1->gtLclVarCommon.GetLclNum() == iterVar) { iterOp = opr1; limitOp = opr2; } - else if (opr2->gtOper == GT_LCL_VAR && opr2->gtLclVarCommon.gtLclNum == iterVar) + else if (opr2->gtOper == GT_LCL_VAR && opr2->gtLclVarCommon.GetLclNum() == iterVar) { iterOp = opr2; limitOp = opr1; @@ -820,7 +820,7 @@ bool Compiler::optCheckIterInLoopTest( optLoopTable[loopInd].lpFlags |= LPFLG_SIMD_LIMIT; } } - else if (limitOp->gtOper == GT_LCL_VAR && !optIsVarAssigned(from, to, nullptr, limitOp->gtLclVarCommon.gtLclNum)) + else if (limitOp->gtOper == GT_LCL_VAR && !optIsVarAssigned(from, to, nullptr, limitOp->gtLclVarCommon.GetLclNum())) { optLoopTable[loopInd].lpFlags |= LPFLG_VAR_LIMIT; } @@ -3655,11 +3655,11 @@ void Compiler::optUnrollLoops() /* Make sure everything looks ok */ if ((init->gtOper != GT_ASG) || (init->gtOp.gtOp1->gtOper != GT_LCL_VAR) || - (init->gtOp.gtOp1->gtLclVarCommon.gtLclNum != lvar) || (init->gtOp.gtOp2->gtOper != GT_CNS_INT) || + (init->gtOp.gtOp1->gtLclVarCommon.GetLclNum() != lvar) || (init->gtOp.gtOp2->gtOper != GT_CNS_INT) || (init->gtOp.gtOp2->gtIntCon.gtIconVal != lbeg) || !((incr->gtOper == GT_ADD) || (incr->gtOper == GT_SUB)) || (incr->gtOp.gtOp1->gtOper != GT_LCL_VAR) || - (incr->gtOp.gtOp1->gtLclVarCommon.gtLclNum != lvar) || (incr->gtOp.gtOp2->gtOper != GT_CNS_INT) || + (incr->gtOp.gtOp1->gtLclVarCommon.GetLclNum() != lvar) || (incr->gtOp.gtOp2->gtOper != GT_CNS_INT) || (incr->gtOp.gtOp2->gtIntCon.gtIconVal != iterInc) || (testStmt->gtStmtExpr->gtOper != GT_JTRUE)) @@ -5946,7 +5946,7 @@ Compiler::fgWalkResult Compiler::optIsVarAssgCB(GenTree** pTree, fgWalkData* dat if (destOper == GT_LCL_VAR) { - unsigned tvar = dest->gtLclVarCommon.gtLclNum; + unsigned tvar = dest->gtLclVarCommon.GetLclNum(); if (tvar < lclMAX_ALLSET_TRACKED) { AllVarSetOps::AddElemD(data->compiler, desc->ivaMaskVal, tvar); @@ -5970,7 +5970,7 @@ Compiler::fgWalkResult Compiler::optIsVarAssgCB(GenTree** pTree, fgWalkData* dat may 
            access different parts of the var as different (but overlapping) fields. So just treat them as indirect accesses */
 
-           // unsigned lclNum = dest->gtLclFld.gtLclNum;
+           // unsigned lclNum = dest->gtLclFld.GetLclNum();
            // noway_assert(lvaTable[lclNum].lvAddrTaken);
 
            varRefKinds refs = varTypeIsGC(tree->TypeGet()) ? VR_IND_REF : VR_IND_SCL;
@@ -7745,7 +7745,7 @@ void Compiler::optComputeLoopSideEffectsOfBlock(BasicBlock* blk)
                        // For now, assume arbitrary side effects on GcHeap/ByrefExposed...
                        memoryHavoc |= memoryKindSet(GcHeap, ByrefExposed);
                    }
-                   else if (lvaVarAddrExposed(lclVarTree->gtLclNum))
+                   else if (lvaVarAddrExposed(lclVarTree->GetLclNum()))
                    {
                        memoryHavoc |= memoryKindSet(ByrefExposed);
                    }
@@ -7774,7 +7774,7 @@ void Compiler::optComputeLoopSideEffectsOfBlock(BasicBlock* blk)
                        }
                    }
                    // If the local is address-exposed, count this as ByrefExposed havoc
-                   if (lvaVarAddrExposed(lhsLcl->gtLclNum))
+                   if (lvaVarAddrExposed(lhsLcl->GetLclNum()))
                    {
                        memoryHavoc |= memoryKindSet(ByrefExposed);
                    }
@@ -8145,7 +8145,7 @@ bool Compiler::optIdentifyLoopOptInfo(unsigned loopNum, LoopCloneContext* contex
 
 #ifdef DEBUG
     GenTree* op1 = pLoop->lpIterator();
-    noway_assert((op1->gtOper == GT_LCL_VAR) && (op1->gtLclVarCommon.gtLclNum == ivLclNum));
+    noway_assert((op1->gtOper == GT_LCL_VAR) && (op1->gtLclVarCommon.GetLclNum() == ivLclNum));
 #endif
 
    JITDUMP("Checking blocks " FMT_BB ".." FMT_BB " for optimization candidates\n", beg->bbNum,
@@ -8235,13 +8235,13 @@ bool Compiler::optExtractArrIndex(GenTree* tree, ArrIndex* result, unsigned lhsN
    {
        return false;
    }
-   unsigned arrLcl = arrBndsChk->gtArrLen->gtGetOp1()->gtLclVarCommon.gtLclNum;
+   unsigned arrLcl = arrBndsChk->gtArrLen->gtGetOp1()->gtLclVarCommon.GetLclNum();
    if (lhsNum != BAD_VAR_NUM && arrLcl != lhsNum)
    {
        return false;
    }
 
-   unsigned indLcl = arrBndsChk->gtIndex->gtLclVarCommon.gtLclNum;
+   unsigned indLcl = arrBndsChk->gtIndex->gtLclVarCommon.GetLclNum();
 
    GenTree* after = tree->gtGetOp2();
@@ -8267,7 +8267,7 @@ bool Compiler::optExtractArrIndex(GenTree* tree, ArrIndex* result, unsigned lhsN
    }
    GenTree* base = sibo->gtGetOp1();
    GenTree* sio = sibo->gtGetOp2(); // sio == scale*index + offset
-   if (base->OperGet() != GT_LCL_VAR || base->gtLclVarCommon.gtLclNum != arrLcl)
+   if (base->OperGet() != GT_LCL_VAR || base->gtLclVarCommon.GetLclNum() != arrLcl)
    {
        return false;
    }
@@ -8300,7 +8300,7 @@ bool Compiler::optExtractArrIndex(GenTree* tree, ArrIndex* result, unsigned lhsN
 #else
    GenTree* indexVar = index;
 #endif
-   if (indexVar->gtOper != GT_LCL_VAR || indexVar->gtLclVarCommon.gtLclNum != indLcl)
+   if (indexVar->gtOper != GT_LCL_VAR || indexVar->gtLclVarCommon.GetLclNum() != indLcl)
    {
        return false;
    }
@@ -8377,7 +8377,7 @@ bool Compiler::optReconstructArrIndex(GenTree* tree, ArrIndex* result, unsigned
    {
        return false;
    }
-   unsigned lhsNum = lhs->gtLclVarCommon.gtLclNum;
+   unsigned lhsNum = lhs->gtLclVarCommon.GetLclNum();
    GenTree* after = tree->gtGetOp2();
    // Pass the "lhsNum", so we can verify if indeed it is used as the array base.
    return optExtractArrIndex(after, result, lhsNum);
@@ -8520,7 +8520,7 @@ Compiler::fgWalkResult Compiler::optValidRangeCheckIndex(GenTree** pTree, fgWalk
 
    if (tree->gtOper == GT_LCL_VAR)
    {
-       if (pData->pCompiler->lvaTable[tree->gtLclVarCommon.gtLclNum].lvAddrExposed)
+       if (pData->pCompiler->lvaTable[tree->gtLclVarCommon.GetLclNum()].lvAddrExposed)
        {
            pData->bValidIndex = false;
            return WALK_ABORT;
@@ -8565,9 +8565,9 @@ bool Compiler::optIsRangeCheckRemovable(GenTree* tree)
        else
        {
            noway_assert(pArray->gtType == TYP_REF);
-           noway_assert(pArray->gtLclVarCommon.gtLclNum < lvaCount);
+           noway_assert(pArray->gtLclVarCommon.GetLclNum() < lvaCount);
 
-           if (lvaTable[pArray->gtLclVarCommon.gtLclNum].lvAddrExposed)
+           if (lvaTable[pArray->gtLclVarCommon.GetLclNum()].lvAddrExposed)
            {
                // If the array address has been taken, don't do the optimization
                // (this restriction can be lowered a bit, but i don't think it's worth it)
@@ -8718,7 +8718,7 @@ GenTree* Compiler::optIsBoolCond(GenTree* condBranch, GenTree** compPtr, bool* b
    {
        /* is it a boolean local variable */
 
-       unsigned lclNum = opr1->gtLclVarCommon.gtLclNum;
+       unsigned lclNum = opr1->gtLclVarCommon.GetLclNum();
        noway_assert(lclNum < lvaCount);
 
        if (lvaTable[lclNum].lvIsBoolean)
diff --git a/src/jit/rangecheck.cpp b/src/jit/rangecheck.cpp
index 108fdc8fdaf8..88e2d470cb93 100644
--- a/src/jit/rangecheck.cpp
+++ b/src/jit/rangecheck.cpp
@@ -540,7 +540,7 @@ void RangeCheck::MergeEdgeAssertions(GenTreeLclVarCommon* lcl, ASSERT_VALARG_TP
    Limit limit(Limit::keUndef);
    genTreeOps cmpOper = GT_NONE;
 
-   LclSsaVarDsc* ssaData = m_pCompiler->lvaTable[lcl->gtLclNum].GetPerSsaData(lcl->GetSsaNum());
+   LclSsaVarDsc* ssaData = m_pCompiler->lvaTable[lcl->GetLclNum()].GetPerSsaData(lcl->GetSsaNum());
    ValueNum normalLclVN = m_pCompiler->vnStore->VNConservativeNormalValue(ssaData->m_vnPair);
 
    // Current assertion is of the form (i < len - cns) != 0
diff --git a/src/jit/rationalize.cpp b/src/jit/rationalize.cpp
index e8871388e89f..1494f8f9bfa2 100644
--- a/src/jit/rationalize.cpp
+++ b/src/jit/rationalize.cpp
@@ -294,7 +294,7 @@ static void RewriteAssignmentIntoStoreLclCore(GenTreeOp* assignment,
    GenTreeLclVarCommon* store = assignment->AsLclVarCommon();
 
    GenTreeLclVarCommon* var = location->AsLclVarCommon();
-   store->SetLclNum(var->gtLclNum);
+   store->SetLclNum(var->GetLclNum());
    store->SetSsaNum(var->GetSsaNum());
 
    if (locationOp == GT_LCL_FLD)
@@ -368,7 +368,7 @@ void Rationalizer::RewriteAssignment(LIR::Use& use)
                // We need to construct a block node for the location.
                // Modify lcl to be the address form.
                location->SetOper(addrForm(locationOp));
-               LclVarDsc* varDsc = &(comp->lvaTable[location->AsLclVarCommon()->gtLclNum]);
+               LclVarDsc* varDsc = &(comp->lvaTable[location->AsLclVarCommon()->GetLclNum()]);
                location->gtType = TYP_BYREF;
                GenTreeBlk* storeBlk = nullptr;
                unsigned int size = varDsc->lvExactSize;
diff --git a/src/jit/simd.cpp b/src/jit/simd.cpp
index b4e45a08416f..edc5f182e0c8 100644
--- a/src/jit/simd.cpp
+++ b/src/jit/simd.cpp
@@ -1630,7 +1630,7 @@ GenTree* Compiler::impSIMDAbs(CORINFO_CLASS_HANDLE typeHnd, var_types baseType,
 
    if (op1->OperGet() == GT_LCL_VAR)
    {
-       op1LclNum = op1->gtLclVarCommon.gtLclNum;
+       op1LclNum = op1->gtLclVarCommon.GetLclNum();
        op1Assign = nullptr;
    }
    else
@@ -2007,7 +2007,7 @@ GenTree* Compiler::getOp1ForConstructor(OPCODE opcode, GenTree* newobjThis, CORI
        assert(newobjThis->gtOper == GT_ADDR && newobjThis->gtOp.gtOp1->gtOper == GT_LCL_VAR);
 
        // push newobj result on type stack
-       unsigned tmp = op1->gtOp.gtOp1->gtLclVarCommon.gtLclNum;
+       unsigned tmp = op1->gtOp.gtOp1->gtLclVarCommon.GetLclNum();
        impPushOnStack(gtNewLclvNode(tmp, lvaGetRealType(tmp)), verMakeTypeInfo(clsHnd).NormaliseForStack());
    }
    else
diff --git a/src/jit/simdcodegenxarch.cpp b/src/jit/simdcodegenxarch.cpp
index d09d8f19e8af..7cea1e1dbcd3 100644
--- a/src/jit/simdcodegenxarch.cpp
+++ b/src/jit/simdcodegenxarch.cpp
@@ -850,7 +850,7 @@ void CodeGen::genSIMDIntrinsicInit(GenTreeSIMD* simdNode)
    else if (op1->OperIsLocalAddr())
    {
        unsigned offset = (op1->OperGet() == GT_LCL_FLD_ADDR) ? op1->gtLclFld.gtLclOffs : 0;
-       GetEmitter()->emitIns_R_S(ins, emitTypeSize(targetType), targetReg, op1->gtLclVarCommon.gtLclNum,
+       GetEmitter()->emitIns_R_S(ins, emitTypeSize(targetType), targetReg, op1->gtLclVarCommon.GetLclNum(),
                                  offset);
    }
    else
@@ -2454,7 +2454,7 @@ void CodeGen::genSIMDIntrinsicGetItem(GenTreeSIMD* simdNode)
        // There are three parts to the total offset here:
        // {offset of local} + {offset of SIMD Vector field (lclFld only)} + {offset of element within SIMD vector}.
        bool isEBPbased;
-       unsigned varNum = op1->gtLclVarCommon.gtLclNum;
+       unsigned varNum = op1->gtLclVarCommon.GetLclNum();
        offset += compiler->lvaFrameAddress(varNum, &isEBPbased);
        if (op1->OperGet() == GT_LCL_FLD)
        {
@@ -2911,7 +2911,7 @@ void CodeGen::genStoreLclTypeSIMD12(GenTree* treeNode)
    assert((treeNode->OperGet() == GT_STORE_LCL_FLD) || (treeNode->OperGet() == GT_STORE_LCL_VAR));
 
    unsigned offs = 0;
-   unsigned varNum = treeNode->gtLclVarCommon.gtLclNum;
+   unsigned varNum = treeNode->gtLclVarCommon.GetLclNum();
    assert(varNum < compiler->lvaCount);
 
    if (treeNode->OperGet() == GT_STORE_LCL_FLD)
@@ -2953,7 +2953,7 @@ void CodeGen::genLoadLclTypeSIMD12(GenTree* treeNode)
    regNumber targetReg = treeNode->gtRegNum;
 
    unsigned offs = 0;
-   unsigned varNum = treeNode->gtLclVarCommon.gtLclNum;
+   unsigned varNum = treeNode->gtLclVarCommon.GetLclNum();
    assert(varNum < compiler->lvaCount);
 
    if (treeNode->OperGet() == GT_LCL_FLD)
@@ -3071,7 +3071,7 @@ void CodeGen::genSIMDIntrinsicUpperSave(GenTreeSIMD* simdNode)
    else
    {
        // The localVar must have a stack home.
-       unsigned varNum = op1->AsLclVarCommon()->gtLclNum;
+       unsigned varNum = op1->AsLclVarCommon()->GetLclNum();
        LclVarDsc* varDsc = compiler->lvaGetDesc(varNum);
        assert(varDsc->lvOnFrame);
        // We want to store this to the upper 16 bytes of this localVar's home.
@@ -3112,7 +3112,7 @@ void CodeGen::genSIMDIntrinsicUpperRestore(GenTreeSIMD* simdNode)
    else
    {
        // The localVar must have a stack home.
-       unsigned varNum = op1->AsLclVarCommon()->gtLclNum;
+       unsigned varNum = op1->AsLclVarCommon()->GetLclNum();
        LclVarDsc* varDsc = compiler->lvaGetDesc(varNum);
        assert(varDsc->lvOnFrame);
        // We will load this from the upper 16 bytes of this localVar's home.
diff --git a/src/jit/ssabuilder.cpp b/src/jit/ssabuilder.cpp
index 291bad40d3cb..8167503ba672 100644
--- a/src/jit/ssabuilder.cpp
+++ b/src/jit/ssabuilder.cpp
@@ -665,7 +665,7 @@ static GenTree* GetPhiNode(BasicBlock* block, unsigned lclNum)
        GenTree* phiLhs = tree->gtOp.gtOp1;
        assert(phiLhs->OperGet() == GT_LCL_VAR);
-       if (phiLhs->gtLclVarCommon.gtLclNum == lclNum)
+       if (phiLhs->gtLclVarCommon.GetLclNum() == lclNum)
        {
            return tree->gtOp.gtOp2;
        }
@@ -927,7 +927,7 @@ void SsaBuilder::TreeRenameVariables(GenTree* tree, BasicBlock* block, SsaRename
    GenTreeLclVarCommon* lclVarNode;
    bool isLocal = tree->DefinesLocal(m_pCompiler, &lclVarNode);
-   bool isAddrExposedLocal = isLocal && m_pCompiler->lvaVarAddrExposed(lclVarNode->gtLclNum);
+   bool isAddrExposedLocal = isLocal && m_pCompiler->lvaVarAddrExposed(lclVarNode->GetLclNum());
    bool hasByrefHavoc = ((block->bbMemoryHavoc & memoryKindSet(ByrefExposed)) != 0);
    if (!isLocal || (isAddrExposedLocal && !hasByrefHavoc))
    {
@@ -983,7 +983,7 @@ void SsaBuilder::TreeRenameVariables(GenTree* tree, BasicBlock* block, SsaRename
            return;
        }
 
-       unsigned lclNum = tree->gtLclVarCommon.gtLclNum;
+       unsigned lclNum = tree->gtLclVarCommon.GetLclNum();
        // Is this a variable we exclude from SSA?
        if (!m_pCompiler->lvaInSsa(lclNum))
        {
@@ -1079,7 +1079,7 @@ void SsaBuilder::AddDefToHandlerPhis(BasicBlock* block, unsigned lclNum, unsigne
 
                assert(tree->IsPhiDefn());
 
-               if (tree->gtOp.gtOp1->gtLclVar.gtLclNum == lclNum)
+               if (tree->gtOp.gtOp1->gtLclVar.GetLclNum() == lclNum)
                {
                    // It's the definition for the right local. Add "ssaNum" to the RHS.
                    AddPhiArg(handler, stmt, tree->gtGetOp2()->AsPhi(), lclNum, ssaNum, block);
@@ -1305,7 +1305,7 @@ void SsaBuilder::AssignPhiNodeRhsVariables(BasicBlock* block, SsaRenameState* pR
            GenTree* tree = stmt->gtStmtExpr;
            GenTreePhi* phi = tree->gtGetOp2()->AsPhi();
 
-           unsigned lclNum = tree->gtOp.gtOp1->gtLclVar.gtLclNum;
+           unsigned lclNum = tree->gtOp.gtOp1->gtLclVar.GetLclNum();
            unsigned ssaNum = pRenameState->Top(lclNum);
            // Search the arglist for an existing definition for ssaNum.
            // (Can we assert that its the head of the list? This should only happen when we add
@@ -1437,7 +1437,7 @@ void SsaBuilder::AssignPhiNodeRhsVariables(BasicBlock* block, SsaRenameState* pR
 
                // Get the phi node from GT_ASG.
                GenTree* lclVar = tree->gtOp.gtOp1;
-               unsigned lclNum = lclVar->gtLclVar.gtLclNum;
+               unsigned lclNum = lclVar->gtLclVar.GetLclNum();
 
                // If the variable is live-out of "blk", and is therefore live on entry to the try-block-start
                // "succ", then we make sure the current SSA name for the
@@ -1937,7 +1937,8 @@ void Compiler::JitTestCheckSSA()
            {
                printf(" Node: ");
                printTreeID(lcl);
-               printf(", SSA name = <%d, %d> -- SSA name class %d.\n", lcl->gtLclNum, lcl->GetSsaNum(), tlAndN.m_num);
+               printf(", SSA name = <%d, %d> -- SSA name class %d.\n", lcl->GetLclNum(), lcl->GetSsaNum(),
+                      tlAndN.m_num);
            }
            SSAName ssaNm;
            if (labelToSSA->Lookup(tlAndN.m_num, &ssaNm))
@@ -1955,7 +1956,7 @@ void Compiler::JitTestCheckSSA()
                {
                    printf("Node: ");
                    printTreeID(lcl);
-                   printf(", SSA name = <%d, %d> was declared in SSA name class %d,\n", lcl->gtLclNum,
+                   printf(", SSA name = <%d, %d> was declared in SSA name class %d,\n", lcl->GetLclNum(),
                           lcl->GetSsaNum(), tlAndN.m_num);
                    printf(
                        "but this SSA name <%d,%d> has already been associated with a different SSA name class: %d.\n",
@@ -1963,11 +1964,11 @@ void Compiler::JitTestCheckSSA()
                    unreached();
                }
                // And the current node must be of the specified SSA family.
-               if (!(lcl->gtLclNum == ssaNm.m_lvNum && lcl->GetSsaNum() == ssaNm.m_ssaNum))
+               if (!(lcl->GetLclNum() == ssaNm.m_lvNum && lcl->GetSsaNum() == ssaNm.m_ssaNum))
                {
                    printf("Node: ");
                    printTreeID(lcl);
-                   printf(", SSA name = <%d, %d> was declared in SSA name class %d,\n", lcl->gtLclNum,
+                   printf(", SSA name = <%d, %d> was declared in SSA name class %d,\n", lcl->GetLclNum(),
                           lcl->GetSsaNum(), tlAndN.m_num);
                    printf("but that name class was previously bound to a different SSA name: <%d,%d>.\n",
                           ssaNm.m_lvNum, ssaNm.m_ssaNum);
@@ -1976,7 +1977,7 @@ void Compiler::JitTestCheckSSA()
            }
            else
            {
-               ssaNm.m_lvNum = lcl->gtLclNum;
+               ssaNm.m_lvNum = lcl->GetLclNum();
                ssaNm.m_ssaNum = lcl->GetSsaNum();
 
                ssize_t num;
                // The mapping(s) must be one-to-one: if the label has no mapping, then the ssaNm may not, either.
@@ -1984,7 +1985,7 @@ void Compiler::JitTestCheckSSA()
                {
                    printf("Node: ");
                    printTreeID(lcl);
-                   printf(", SSA name = <%d, %d> was declared in SSA name class %d,\n", lcl->gtLclNum,
+                   printf(", SSA name = <%d, %d> was declared in SSA name class %d,\n", lcl->GetLclNum(),
                           lcl->GetSsaNum(), tlAndN.m_num);
                    printf("but this SSA name has already been associated with a different name class: %d.\n", num);
                    unreached();
diff --git a/src/jit/treelifeupdater.cpp b/src/jit/treelifeupdater.cpp
index 3c2b62f96d61..29fa75d04a31 100644
--- a/src/jit/treelifeupdater.cpp
+++ b/src/jit/treelifeupdater.cpp
@@ -37,7 +37,7 @@ void TreeLifeUpdater::UpdateLifeVar(GenTree* tree)
    {
        lclVarTree = tree;
    }
-   unsigned int lclNum = lclVarTree->gtLclVarCommon.gtLclNum;
+   unsigned int lclNum = lclVarTree->gtLclVarCommon.GetLclNum();
    LclVarDsc* varDsc = compiler->lvaTable + lclNum;
 
 #ifdef DEBUG
diff --git a/src/jit/valuenum.cpp b/src/jit/valuenum.cpp
index 666611218acf..87d28a2887d6 100644
--- a/src/jit/valuenum.cpp
+++ b/src/jit/valuenum.cpp
@@ -6442,7 +6442,7 @@ void Compiler::fgValueNumberBlockAssignment(GenTree* tree)
            }
 #endif // DEBUG
        }
-       else if (lvaVarAddrExposed(lclVarTree->gtLclNum))
+       else if (lvaVarAddrExposed(lclVarTree->GetLclNum()))
        {
            fgMutateAddressExposedLocal(tree DEBUGARG("INITBLK - address-exposed local"));
        }
@@ -6484,7 +6484,7 @@ void Compiler::fgValueNumberBlockAssignment(GenTree* tree)
 
        if (lhs->IsLocalExpr(this, &lclVarTree, &lhsFldSeq))
        {
-           noway_assert(lclVarTree->gtLclNum == lhsLclNum);
+           noway_assert(lclVarTree->GetLclNum() == lhsLclNum);
        }
        else
        {
@@ -6743,7 +6743,7 @@ void Compiler::fgValueNumberTree(GenTree* tree)
        case GT_LCL_VAR:
        {
            GenTreeLclVarCommon* lcl = tree->AsLclVarCommon();
-           unsigned lclNum = lcl->gtLclNum;
+           unsigned lclNum = lcl->GetLclNum();
            LclVarDsc* varDsc = &lvaTable[lclNum];
 
            // Do we have a Use (read) of the LclVar?
@@ -7123,8 +7123,8 @@ void Compiler::fgValueNumberTree(GenTree* tree)
 
                        assert(rhsVNPair.GetLiberal() != ValueNumStore::NoVN);
 
-                       lhs->gtVNPair = rhsVNPair;
-                       lvaTable[lcl->gtLclNum].GetPerSsaData(lclDefSsaNum)->m_vnPair = rhsVNPair;
+                       lhs->gtVNPair = rhsVNPair;
+                       lvaTable[lcl->GetLclNum()].GetPerSsaData(lclDefSsaNum)->m_vnPair = rhsVNPair;
 
 #ifdef DEBUG
                        if (verbose)
@@ -7140,7 +7140,7 @@ void Compiler::fgValueNumberTree(GenTree* tree)
                        }
 #endif // DEBUG
                    }
-                   else if (lvaVarAddrExposed(lcl->gtLclNum))
+                   else if (lvaVarAddrExposed(lcl->GetLclNum()))
                    {
                        // We could use MapStore here and MapSelect on reads of address-exposed locals
                        // (using the local nums as selectors) to get e.g. propagation of values
@@ -7191,7 +7191,8 @@ void Compiler::fgValueNumberTree(GenTree* tree)
                        {
                            // We don't know what field this represents. Assign a new VN to the whole variable
                            // (since we may be writing to an unknown portion of it.)
-                           newLhsVNPair.SetBoth(vnStore->VNForExpr(compCurBB, lvaGetActualType(lclFld->gtLclNum)));
+                           newLhsVNPair.SetBoth(
+                               vnStore->VNForExpr(compCurBB, lvaGetActualType(lclFld->GetLclNum())));
                        }
                        else
                        {
@@ -7224,7 +7225,7 @@ void Compiler::fgValueNumberTree(GenTree* tree)
                        }
 #endif // DEBUG
                    }
-                   else if (lvaVarAddrExposed(lclFld->gtLclNum))
+                   else if (lvaVarAddrExposed(lclFld->GetLclNum()))
                    {
                        // This side-effects ByrefExposed. Just use a new opaque VN.
                        // As with GT_LCL_VAR, we could probably use MapStore here and MapSelect at corresponding
@@ -7534,7 +7535,7 @@ void Compiler::fgValueNumberTree(GenTree* tree)
 
            GenTreeLclVarCommon* lclVarTree = nullptr;
            bool isLocal = tree->DefinesLocal(this, &lclVarTree);
-           if (isLocal && lvaVarAddrExposed(lclVarTree->gtLclNum))
+           if (isLocal && lvaVarAddrExposed(lclVarTree->GetLclNum()))
            {
                // Store to address-exposed local; need to record the effect on ByrefExposed.
                // We could use MapStore here and MapSelect on reads of address-exposed locals