[release/5.0-rc2] Fix for folding of *(typ*)&lclVar for small types #41838

Merged
6 changes: 2 additions & 4 deletions src/coreclr/src/jit/compiler.h
@@ -476,8 +476,6 @@ class LclVarDsc
unsigned char lvContainsHoles : 1; // True when we have a promoted struct that contains holes
unsigned char lvCustomLayout : 1; // True when this struct has "CustomLayout"

-unsigned char lvForceLoadNormalize : 1; // True when this local had a cast on the LHS of an assignment

unsigned char lvIsMultiRegArg : 1; // true if this is a multireg LclVar struct used in an argument context
unsigned char lvIsMultiRegRet : 1; // true if this is a multireg LclVar struct assigned from a multireg call

@@ -886,14 +884,14 @@ class LclVarDsc
{
return varTypeIsSmall(TypeGet()) &&
// lvIsStructField is treated the same as the aliased local, see fgDoNormalizeOnStore.
-(lvIsParam || lvAddrExposed || lvIsStructField || lvForceLoadNormalize);
+(lvIsParam || lvAddrExposed || lvIsStructField);
}

bool lvNormalizeOnStore() const
{
return varTypeIsSmall(TypeGet()) &&
// lvIsStructField is treated the same as the aliased local, see fgDoNormalizeOnStore.
-!(lvIsParam || lvAddrExposed || lvIsStructField || lvForceLoadNormalize);
+!(lvIsParam || lvAddrExposed || lvIsStructField);
}

void incRefCnts(BasicBlock::weight_t weight,
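For readers outside the JIT, lvNormalizeOnLoad/lvNormalizeOnStore decide where a small-typed local (bool, byte, short, ushort) gets widened to a full register value: either every load sign/zero-extends it, or every store writes an already-widened value. A minimal C# sketch of the observable contract (the method is illustrative and not part of this PR):

static int TruncateToShort(int value)
{
    // The JIT widens 'value' either when it is stored into 's' (normalize on store)
    // or each time 's' is read back (normalize on load); which policy applies is an
    // internal detail, but the observable result must be the same either way.
    short s = (short)value;
    return s; // e.g. TruncateToShort(0x12345) returns 0x2345
}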
4 changes: 4 additions & 0 deletions src/coreclr/src/jit/flowgraph.cpp
@@ -21608,6 +21608,10 @@ void Compiler::fgDebugCheckFlags(GenTree* tree)
chkFlags |= GTF_GLOB_REF | GTF_ASG;
break;

case GT_LCL_VAR:
assert((tree->gtFlags & GTF_VAR_FOLDED_IND) == 0);
break;

default:
break;
}
3 changes: 3 additions & 0 deletions src/coreclr/src/jit/gentree.h
@@ -815,6 +815,9 @@ struct GenTree
#define GTF_VAR_ITERATOR 0x00800000 // GT_LCL_VAR -- this is a iterator reference in the loop condition
#define GTF_VAR_CLONED 0x00400000 // GT_LCL_VAR -- this node has been cloned or is a clone
#define GTF_VAR_CONTEXT 0x00200000 // GT_LCL_VAR -- this node is part of a runtime lookup
#define GTF_VAR_FOLDED_IND 0x00100000 // GT_LCL_VAR -- this node was folded from *(typ*)&lclVar expression tree in fgMorphSmpOp()
// where 'typ' is a small type and 'lclVar' corresponds to a normalized-on-store local variable.
// This flag identifies such nodes in order to make sure that fgDoNormalizeOnStore() is called on their parents in post-order morph.

// Relevant for inlining optimizations (see fgInlinePrependStatements)

Expand Down
54 changes: 28 additions & 26 deletions src/coreclr/src/jit/morph.cpp
@@ -12643,6 +12643,13 @@ GenTree* Compiler::fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac)
{
case GT_ASG:

+if (op1->OperIs(GT_LCL_VAR) && ((op1->gtFlags & GTF_VAR_FOLDED_IND) != 0))
+{
+op1->gtFlags &= ~GTF_VAR_FOLDED_IND;
+tree = fgDoNormalizeOnStore(tree);
+op2 = tree->gtGetOp2();
+}

lclVarTree = fgIsIndirOfAddrOfLocal(op1);
if (lclVarTree != nullptr)
{
@@ -13601,17 +13608,16 @@ GenTree* Compiler::fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac)
break;

case GT_IND:
-
+{
// Can not remove a GT_IND if it is currently a CSE candidate.
if (gtIsActiveCSE_Candidate(tree))
{
break;
}

-bool foldAndReturnTemp;
-foldAndReturnTemp = false;
-temp = nullptr;
-ival1 = 0;
+bool foldAndReturnTemp = false;
+temp = nullptr;
+ival1 = 0;

// Don't remove a volatile GT_IND, even if the address points to a local variable.
if ((tree->gtFlags & GTF_IND_VOLATILE) == 0)
@@ -13641,11 +13647,6 @@ GenTree* Compiler::fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac)
unsigned lclNum = temp->AsLclVarCommon()->GetLclNum();
LclVarDsc* varDsc = &lvaTable[lclNum];

-// Note that fgMorph uses GTF_DONT_CSE to mark the left side of an assignment
-// Thus stores have this flag and load do not have this flag
-//
-bool isLoad = (tree->gtFlags & GTF_DONT_CSE) == 0;

// We will try to optimize when we have a promoted struct promoted with a zero lvFldOffset
if (varDsc->lvPromoted && (varDsc->lvFldOffset == 0))
{
@@ -13675,25 +13676,25 @@ GenTree* Compiler::fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac)
// correctly normalized.
//
// The below transformation cannot be applied if the local var needs to be normalized on load.
-else if (varTypeIsSmall(typ) && (genTypeSize(lvaTable[lclNum].lvType) == genTypeSize(typ)) &&
+else if (varTypeIsSmall(typ) && (genTypeSize(varDsc) == genTypeSize(typ)) &&
!lvaTable[lclNum].lvNormalizeOnLoad())
{
-// For any stores of small types, we will force loads to be normalized
-// this is necessary as we need to zero/sign extend any load
-// after this kind of store.
-//
-if (!isLoad)
-{
-varDsc->lvForceLoadNormalize = true;
-}
-// otherwise we have a load operation
-//
-// And for loads signed/unsigned differences do matter.
-//
-else if (varTypeIsUnsigned(lvaTable[lclNum].lvType) == varTypeIsUnsigned(typ))
+const bool definitelyLoad = (tree->gtFlags & GTF_DONT_CSE) == 0;
+const bool possiblyStore = !definitelyLoad;
+
+if (possiblyStore || (varTypeIsUnsigned(varDsc) == varTypeIsUnsigned(typ)))
{
-tree->gtType = typ = temp->TypeGet();
-foldAndReturnTemp = true;
+typ = temp->TypeGet();
+tree->gtType = typ;
+foldAndReturnTemp = true;
+
+if (possiblyStore)
+{
+// This node can be on the left-hand-side of an assignment node.
+// Mark this node with GTF_VAR_FOLDED_IND to make sure that fgDoNormalizeOnStore()
+// is called on its parent in post-order morph.
+temp->gtFlags |= GTF_VAR_FOLDED_IND;
+}
}
}
// For matching types we can fold
@@ -13970,6 +13971,7 @@ GenTree* Compiler::fgMorphSmpOp(GenTree* tree, MorphAddrContext* mac)
}

break;
+}

case GT_ADDR:

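The retained signedness check applies to loads: folding a load such as *(sbyte*)&byteLocal into a plain read of the local would turn a sign extension into a zero extension. A small C# example of the case that condition guards (a hypothetical method, not part of this PR; it needs an unsafe context like the test below):

static unsafe int LoadWithDifferentSign(byte b)
{
    // Reading the byte back through a signed pointer must sign-extend:
    // for b == 0xFF the result is -1, not 255, so the indirection cannot simply
    // be replaced by a direct read of 'b' when the signedness differs.
    return *(sbyte*)&b;
}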
69 changes: 69 additions & 0 deletions src/tests/JIT/Regression/JitBlue/Runtime_40607/Runtime_40607.cs
@@ -0,0 +1,69 @@
using System;
using System.Runtime.CompilerServices;

namespace Runtime_40607
{
class Program
{
[MethodImpl(MethodImplOptions.AggressiveInlining)]
static bool WillBeInlined(out bool shouldBeFalse)
{
shouldBeFalse = false;
return true;
}

[MethodImpl(MethodImplOptions.NoInlining)]
[SkipLocalsInit]
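// With [SkipLocalsInit], 'shouldBeFalse' is not zero-initialized: it starts with whatever
// bytes PoisonStackWith left in this stack slot. The inlined WillBeInlined call must still
// be observed to set it to false, so the branch is never taken and retVal stays 1.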
static int DependsOnUnInitValue()
{
int retVal = 1;
bool shouldBeFalse;

while (WillBeInlined(out shouldBeFalse))
{
if (shouldBeFalse)
{
retVal = 0;
}
break;
}

return retVal;
}

[MethodImpl(MethodImplOptions.NoInlining)]
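// Writes a 4-byte pattern through the address of a 1-byte bool, deliberately leaving
// recognizable garbage on the stack for the structurally similar frame of
// DependsOnUnInitValue to pick up.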
static unsafe int PoisonStackWith(uint fillValue)
{
int retVal = 1;
bool shouldBeFalse;

*(uint*)&shouldBeFalse = fillValue;

while (WillBeInlined(out shouldBeFalse))
{
if (shouldBeFalse)
{
retVal = 0;
}
break;
}

return retVal;
}

static int Main(string[] args)
{
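// Poison the stack first, then check that DependsOnUnInitValue still observes
// 'shouldBeFalse' as false; 100 means pass, 0 means fail (the standard JIT test convention).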
PoisonStackWith(0xdeadbeef);

const int expected = 1;
int actual = DependsOnUnInitValue();

if (expected != actual)
{
return 0;
}

return 100;
}
}
}