Skip to content

Commit e9e8bc7

Browse files
Resolve call mdtokens when making tier 1 inline observations (#50675)
* Add some basic handling to allow resolving call tokens in fgFindJumpTargets when in Tier1 making inline observations * Don't count prefix opcodes as instructions and handle NI_IsSupported_True/False as constants in fgFindJumpTargets * Update src/coreclr/jit/fgbasic.cpp * Apply formatting patch
1 parent b3e3db9 commit e9e8bc7

File tree

3 files changed

+169
-29
lines changed

3 files changed

+169
-29
lines changed

src/coreclr/jit/compiler.h

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3844,6 +3844,26 @@ class Compiler
38443844
XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
38453845
*/
38463846

3847+
private:
3848+
// For prefixFlags
3849+
enum
3850+
{
3851+
PREFIX_TAILCALL_EXPLICIT = 0x00000001, // call has "tail" IL prefix
3852+
PREFIX_TAILCALL_IMPLICIT =
3853+
0x00000010, // call is treated as having "tail" prefix even though there is no "tail" IL prefix
3854+
PREFIX_TAILCALL_STRESS =
3855+
0x00000100, // call doesn't have "tail" IL prefix but is treated as explicit because of tail call stress
3856+
PREFIX_TAILCALL = (PREFIX_TAILCALL_EXPLICIT | PREFIX_TAILCALL_IMPLICIT | PREFIX_TAILCALL_STRESS),
3857+
PREFIX_VOLATILE = 0x00001000,
3858+
PREFIX_UNALIGNED = 0x00010000,
3859+
PREFIX_CONSTRAINED = 0x00100000,
3860+
PREFIX_READONLY = 0x01000000
3861+
};
3862+
3863+
static void impValidateMemoryAccessOpcode(const BYTE* codeAddr, const BYTE* codeEndp, bool volatilePrefix);
3864+
static OPCODE impGetNonPrefixOpcode(const BYTE* codeAddr, const BYTE* codeEndp);
3865+
static bool impOpcodeIsCallOpcode(OPCODE opcode);
3866+
38473867
public:
38483868
void impInit();
38493869
void impImport();

src/coreclr/jit/fgbasic.cpp

Lines changed: 146 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -852,7 +852,13 @@ void Compiler::fgFindJumpTargets(const BYTE* codeAddr, IL_OFFSET codeSize, Fixed
852852
const bool isForceInline = (info.compFlags & CORINFO_FLG_FORCEINLINE) != 0;
853853
const bool makeInlineObservations = (compInlineResult != nullptr);
854854
const bool isInlining = compIsForInlining();
855+
const bool isTier1 = opts.jitFlags->IsSet(JitFlags::JIT_FLAG_TIER1);
856+
const bool isPreJit = opts.jitFlags->IsSet(JitFlags::JIT_FLAG_PREJIT);
857+
const bool resolveTokens = makeInlineObservations && (isTier1 || isPreJit);
855858
unsigned retBlocks = 0;
859+
unsigned intrinsicCalls = 0;
860+
int prefixFlags = 0;
861+
int value = 0;
856862

857863
if (makeInlineObservations)
858864
{
@@ -886,12 +892,14 @@ void Compiler::fgFindJumpTargets(const BYTE* codeAddr, IL_OFFSET codeSize, Fixed
886892
compInlineResult->Note(InlineObservation::CALLEE_BEGIN_OPCODE_SCAN);
887893
}
888894

895+
CORINFO_RESOLVED_TOKEN resolvedToken;
896+
CORINFO_RESOLVED_TOKEN constrainedResolvedToken;
897+
CORINFO_CALL_INFO callInfo;
898+
889899
while (codeAddr < codeEndp)
890900
{
891901
OPCODE opcode = (OPCODE)getU1LittleEndian(codeAddr);
892902
codeAddr += sizeof(__int8);
893-
opts.instrCount++;
894-
typeIsNormed = false;
895903

896904
DECODE_OPCODE:
897905

@@ -942,19 +950,72 @@ void Compiler::fgFindJumpTargets(const BYTE* codeAddr, IL_OFFSET codeSize, Fixed
942950
// There has to be code after the call, otherwise the inlinee is unverifiable.
943951
if (isInlining)
944952
{
945-
946953
noway_assert(codeAddr < codeEndp - sz);
947954
}
948955

949-
// If the method has a call followed by a ret, assume that
950-
// it is a wrapper method.
951-
if (makeInlineObservations)
956+
if (!makeInlineObservations)
952957
{
953-
if ((OPCODE)getU1LittleEndian(codeAddr + sz) == CEE_RET)
958+
break;
959+
}
960+
961+
CORINFO_METHOD_HANDLE methodHnd = nullptr;
962+
unsigned methodFlags = 0;
963+
bool mustExpand = false;
964+
CorInfoIntrinsics intrinsicID = CORINFO_INTRINSIC_Illegal;
965+
NamedIntrinsic ni = NI_Illegal;
966+
967+
if (resolveTokens)
968+
{
969+
impResolveToken(codeAddr, &resolvedToken, CORINFO_TOKENKIND_Method);
970+
eeGetCallInfo(&resolvedToken,
971+
(prefixFlags & PREFIX_CONSTRAINED) ? &constrainedResolvedToken : nullptr,
972+
combine(CORINFO_CALLINFO_KINDONLY,
973+
(opcode == CEE_CALLVIRT) ? CORINFO_CALLINFO_CALLVIRT : CORINFO_CALLINFO_NONE),
974+
&callInfo);
975+
976+
methodHnd = callInfo.hMethod;
977+
methodFlags = callInfo.methodFlags;
978+
}
979+
980+
if ((methodFlags & (CORINFO_FLG_INTRINSIC | CORINFO_FLG_JIT_INTRINSIC)) != 0)
981+
{
982+
intrinsicCalls++;
983+
984+
if ((methodFlags & CORINFO_FLG_INTRINSIC) != 0)
985+
{
986+
intrinsicID = info.compCompHnd->getIntrinsicID(methodHnd, &mustExpand);
987+
}
988+
989+
if ((methodFlags & CORINFO_FLG_JIT_INTRINSIC) != 0)
954990
{
955-
compInlineResult->Note(InlineObservation::CALLEE_LOOKS_LIKE_WRAPPER);
991+
if (intrinsicID == CORINFO_INTRINSIC_Illegal)
992+
{
993+
ni = lookupNamedIntrinsic(methodHnd);
994+
995+
switch (ni)
996+
{
997+
case NI_IsSupported_True:
998+
case NI_IsSupported_False:
999+
{
1000+
pushedStack.PushConstant();
1001+
break;
1002+
}
1003+
1004+
default:
1005+
{
1006+
break;
1007+
}
1008+
}
1009+
}
9561010
}
9571011
}
1012+
1013+
if ((OPCODE)getU1LittleEndian(codeAddr + sz) == CEE_RET)
1014+
{
1015+
// If the method has a call followed by a ret, assume that
1016+
// it is a wrapper method.
1017+
compInlineResult->Note(InlineObservation::CALLEE_LOOKS_LIKE_WRAPPER);
1018+
}
9581019
}
9591020
break;
9601021

@@ -1085,17 +1146,79 @@ void Compiler::fgFindJumpTargets(const BYTE* codeAddr, IL_OFFSET codeSize, Fixed
10851146
break;
10861147

10871148
case CEE_UNALIGNED:
1149+
{
1150+
noway_assert(sz == sizeof(__int8));
1151+
prefixFlags |= PREFIX_UNALIGNED;
1152+
1153+
value = getU1LittleEndian(codeAddr);
1154+
codeAddr += sizeof(__int8);
1155+
1156+
impValidateMemoryAccessOpcode(codeAddr, codeEndp, false);
1157+
goto OBSERVE_OPCODE;
1158+
}
1159+
10881160
case CEE_CONSTRAINED:
1161+
{
1162+
noway_assert(sz == sizeof(unsigned));
1163+
prefixFlags |= PREFIX_CONSTRAINED;
1164+
1165+
if (resolveTokens)
1166+
{
1167+
impResolveToken(codeAddr, &constrainedResolvedToken, CORINFO_TOKENKIND_Constrained);
1168+
}
1169+
codeAddr += sizeof(unsigned);
1170+
1171+
{
1172+
OPCODE actualOpcode = impGetNonPrefixOpcode(codeAddr, codeEndp);
1173+
1174+
if (actualOpcode != CEE_CALLVIRT)
1175+
{
1176+
BADCODE("constrained. has to be followed by callvirt");
1177+
}
1178+
}
1179+
goto OBSERVE_OPCODE;
1180+
}
1181+
10891182
case CEE_READONLY:
1183+
{
1184+
noway_assert(sz == 0);
1185+
prefixFlags |= PREFIX_READONLY;
1186+
1187+
{
1188+
OPCODE actualOpcode = impGetNonPrefixOpcode(codeAddr, codeEndp);
1189+
1190+
if ((actualOpcode != CEE_LDELEMA) && !impOpcodeIsCallOpcode(actualOpcode))
1191+
{
1192+
BADCODE("readonly. has to be followed by ldelema or call");
1193+
}
1194+
}
1195+
goto OBSERVE_OPCODE;
1196+
}
1197+
10901198
case CEE_VOLATILE:
1199+
{
1200+
noway_assert(sz == 0);
1201+
prefixFlags |= PREFIX_VOLATILE;
1202+
1203+
impValidateMemoryAccessOpcode(codeAddr, codeEndp, true);
1204+
goto OBSERVE_OPCODE;
1205+
}
1206+
10911207
case CEE_TAILCALL:
10921208
{
1093-
if (codeAddr >= codeEndp)
1209+
noway_assert(sz == 0);
1210+
prefixFlags |= PREFIX_TAILCALL_EXPLICIT;
1211+
10941212
{
1095-
goto TOO_FAR;
1213+
OPCODE actualOpcode = impGetNonPrefixOpcode(codeAddr, codeEndp);
1214+
1215+
if (!impOpcodeIsCallOpcode(actualOpcode))
1216+
{
1217+
BADCODE("tailcall. has to be followed by call, callvirt or calli");
1218+
}
10961219
}
1220+
goto OBSERVE_OPCODE;
10971221
}
1098-
break;
10991222

11001223
case CEE_STARG:
11011224
case CEE_STARG_S:
@@ -1312,6 +1435,7 @@ void Compiler::fgFindJumpTargets(const BYTE* codeAddr, IL_OFFSET codeSize, Fixed
13121435
// the list of other opcodes (for all platforms).
13131436

13141437
FALLTHROUGH;
1438+
13151439
case CEE_MKREFANY:
13161440
case CEE_RETHROW:
13171441
if (makeInlineObservations)
@@ -1386,6 +1510,7 @@ void Compiler::fgFindJumpTargets(const BYTE* codeAddr, IL_OFFSET codeSize, Fixed
13861510
fgObserveInlineConstants(opcode, pushedStack, isInlining);
13871511
}
13881512
break;
1513+
13891514
case CEE_RET:
13901515
retBlocks++;
13911516
break;
@@ -1397,13 +1522,23 @@ void Compiler::fgFindJumpTargets(const BYTE* codeAddr, IL_OFFSET codeSize, Fixed
13971522
// Skip any remaining operands this opcode may have
13981523
codeAddr += sz;
13991524

1525+
// Clear any prefix flags that may have been set
1526+
prefixFlags = 0;
1527+
1528+
// Increment the number of observed instructions
1529+
opts.instrCount++;
1530+
1531+
OBSERVE_OPCODE:
1532+
14001533
// Note the opcode we just saw
14011534
if (makeInlineObservations)
14021535
{
14031536
InlineObservation obs =
14041537
typeIsNormed ? InlineObservation::CALLEE_OPCODE_NORMED : InlineObservation::CALLEE_OPCODE;
14051538
compInlineResult->NoteInt(obs, opcode);
14061539
}
1540+
1541+
typeIsNormed = false;
14071542
}
14081543

14091544
if (codeAddr != codeEndp)

src/coreclr/jit/importer.cpp

Lines changed: 3 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -3109,7 +3109,7 @@ unsigned Compiler::impInitBlockLineInfo()
31093109

31103110
/*****************************************************************************/
31113111

3112-
static inline bool impOpcodeIsCallOpcode(OPCODE opcode)
3112+
bool Compiler::impOpcodeIsCallOpcode(OPCODE opcode)
31133113
{
31143114
switch (opcode)
31153115
{
@@ -7849,21 +7849,6 @@ bool Compiler::impTailCallRetTypeCompatible(var_types callerRetTy
78497849
return false;
78507850
}
78517851

7852-
// For prefixFlags
7853-
enum
7854-
{
7855-
PREFIX_TAILCALL_EXPLICIT = 0x00000001, // call has "tail" IL prefix
7856-
PREFIX_TAILCALL_IMPLICIT =
7857-
0x00000010, // call is treated as having "tail" prefix even though there is no "tail" IL prefix
7858-
PREFIX_TAILCALL_STRESS =
7859-
0x00000100, // call doesn't have "tail" IL prefix but is treated as explicit because of tail call stress
7860-
PREFIX_TAILCALL = (PREFIX_TAILCALL_EXPLICIT | PREFIX_TAILCALL_IMPLICIT | PREFIX_TAILCALL_STRESS),
7861-
PREFIX_VOLATILE = 0x00001000,
7862-
PREFIX_UNALIGNED = 0x00010000,
7863-
PREFIX_CONSTRAINED = 0x00100000,
7864-
PREFIX_READONLY = 0x01000000
7865-
};
7866-
78677852
/********************************************************************************
78687853
*
78697854
* Returns true if the current opcode and the opcodes following it correspond
@@ -10643,7 +10628,7 @@ void Compiler::impResetLeaveBlock(BasicBlock* block, unsigned jmpAddr)
1064310628
// Get the first non-prefix opcode. Used for verification of valid combinations
1064410629
// of prefixes and actual opcodes.
1064510630

10646-
static OPCODE impGetNonPrefixOpcode(const BYTE* codeAddr, const BYTE* codeEndp)
10631+
OPCODE Compiler::impGetNonPrefixOpcode(const BYTE* codeAddr, const BYTE* codeEndp)
1064710632
{
1064810633
while (codeAddr < codeEndp)
1064910634
{
@@ -10681,7 +10666,7 @@ static OPCODE impGetNonPrefixOpcode(const BYTE* codeAddr, const BYTE* codeEndp)
1068110666
/*****************************************************************************/
1068210667
// Checks whether the opcode is a valid opcode for volatile. and unaligned. prefixes
1068310668

10684-
static void impValidateMemoryAccessOpcode(const BYTE* codeAddr, const BYTE* codeEndp, bool volatilePrefix)
10669+
void Compiler::impValidateMemoryAccessOpcode(const BYTE* codeAddr, const BYTE* codeEndp, bool volatilePrefix)
1068510670
{
1068610671
OPCODE opcode = impGetNonPrefixOpcode(codeAddr, codeEndp);
1068710672

0 commit comments

Comments (0)