Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/coreclr/jit/compiler.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -1608,7 +1608,7 @@ inline GenTreeCall* Compiler::gtNewHelperCallNode(
/*****************************************************************************/

//------------------------------------------------------------------------------
// gtNewHelperCallNode : Helper to create a call helper node.
// gtNewVirtualFunctionLookupHelperCallNode : Helper to create a virtual function lookup helper node.
//
//
// Arguments:
Expand Down
24 changes: 16 additions & 8 deletions src/coreclr/jit/importercalls.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -387,6 +387,16 @@ var_types Compiler::impImportCall(OPCODE opcode,
{
assert(!(mflags & CORINFO_FLG_STATIC)); // can't call a static method
assert(!(clsFlags & CORINFO_FLG_VALUECLASS));

const bool needsFatPointerHandling =
(sig->sigInst.methInstCount != 0) && IsTargetAbi(CORINFO_NATIVEAOT_ABI);
if (needsFatPointerHandling)
{
// NativeAOT generic virtual method: need to handle potential fat function pointers
// Spill any side-effecting arguments before we do the LDVIRTFTN
impSpillSideEffects(false, CHECK_SPILL_ALL DEBUGARG("fat pointer arg spill"));
}

// OK, We've been told to call via LDVIRTFTN, so just
// take the call now....
call = gtNewIndCallNode(nullptr, callRetTyp, di);
Expand Down Expand Up @@ -419,17 +429,14 @@ var_types Compiler::impImportCall(OPCODE opcode,
->gtArgs.PushFront(this, NewCallArg::Primitive(thisPtrCopy).WellKnown(WellKnownArg::ThisPointer));

// Now make an indirect call through the function pointer

unsigned lclNum = lvaGrabTemp(true DEBUGARG("VirtualCall through function pointer"));
impStoreToTemp(lclNum, fptr, CHECK_SPILL_ALL);
fptr = gtNewLclvNode(lclNum, TYP_I_IMPL);

call->AsCall()->gtCallAddr = fptr;
call->gtFlags |= GTF_EXCEPT | (fptr->gtFlags & GTF_GLOB_EFFECT);

if ((sig->sigInst.methInstCount != 0) && IsTargetAbi(CORINFO_NATIVEAOT_ABI))
if (needsFatPointerHandling)
{
// NativeAOT generic virtual method: need to handle potential fat function pointers
const unsigned fptrLclNum = lvaGrabTemp(true DEBUGARG("fat pointer temp"));
impStoreToTemp(fptrLclNum, fptr, CHECK_SPILL_ALL);
call->AsCall()->gtCallAddr = gtNewLclvNode(fptrLclNum, genActualType(fptr->TypeGet()));
addFatPointerCandidate(call->AsCall());
}
#ifdef FEATURE_READYTORUN
Expand Down Expand Up @@ -6980,6 +6987,7 @@ class SpillRetExprHelper
void Compiler::addFatPointerCandidate(GenTreeCall* call)
{
JITDUMP("Marking call [%06u] as fat pointer candidate\n", dspTreeID(call));

setMethodHasFatPointer();
call->SetFatPointerCandidate();
SpillRetExprHelper helper(this);
Expand Down Expand Up @@ -8787,7 +8795,7 @@ void Compiler::impDevirtualizeCall(GenTreeCall* call,

if (dvInfo.isInstantiatingStub)
{
// We should only end up with generic methods for array interface devirt.
// We should only end up with generic methods that need a method context (e.g. array interface).
//
assert(dvInfo.wasArrayInterfaceDevirt);

Expand Down
139 changes: 139 additions & 0 deletions src/coreclr/jit/lower.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -2833,6 +2833,14 @@ GenTree* Lowering::LowerCall(GenTree* node)
#endif

call->ClearOtherRegs();

#if HAS_FIXED_REGISTER_SET
if ((call->gtCallType == CT_INDIRECT) && comp->opts.Tier0OptimizationEnabled())
{
OptimizeCallIndirectTargetEvaluation(call);
}
#endif

LowerArgsForCall(call);

// note that everything generated from this point might run AFTER the outgoing args are placed
Expand Down Expand Up @@ -6286,6 +6294,137 @@ GenTree* Lowering::LowerDelegateInvoke(GenTreeCall* call)
return callTarget;
}

//------------------------------------------------------------------------
// OptimizeCallIndirectTargetEvaluation:
//   Try to optimize the evaluation of the indirect call target to happen
//   before arguments, if possible.
//
// Parameters:
//   call - Call node (must be CT_INDIRECT with a non-null gtCallAddr)
//
// Remarks:
//   Only applies when the call target is produced by one of the virtual /
//   generic function-pointer lookup helpers. Nodes that feed the target
//   computation (or that are outside the call's data flow entirely) are
//   hoisted ahead of the call's argument evaluation, so that — per the
//   comment below — LSRA can satisfy the ABI register constraints of the
//   arguments without interfering with the target computation.
//   NOTE(review): presumably this matters on targets with a fixed ABI
//   register set (the function name of the caller guard suggests
//   HAS_FIXED_REGISTER_SET) — confirm at the call site.
//
void Lowering::OptimizeCallIndirectTargetEvaluation(GenTreeCall* call)
{
    assert((call->gtCallType == CT_INDIRECT) && (call->gtCallAddr != nullptr));

    // Bail unless the call address is one of the recognized runtime lookup
    // helpers that resolve a (generic) virtual function pointer.
    if (!call->gtCallAddr->IsHelperCall(comp, CORINFO_HELP_VIRTUAL_FUNC_PTR) &&
        !call->gtCallAddr->IsHelperCall(comp, CORINFO_HELP_READYTORUN_VIRTUAL_FUNC_PTR) &&
        !call->gtCallAddr->IsHelperCall(comp, CORINFO_HELP_GVMLOOKUP_FOR_SLOT) &&
        !call->gtCallAddr->IsHelperCall(comp, CORINFO_HELP_READYTORUN_GENERIC_HANDLE))
    {
        return;
    }

    JITDUMP("Target is a GVM; seeing if we can move arguments ahead of resolution\n");

    // Accumulates the side effects of everything we have moved so far, so we
    // can test whether the next candidate node may legally be reordered past it.
    m_scratchSideEffects.Clear();

    // We start at the call and move backwards from it. When we see a node that
    // is part of the call's data flow we leave it in place. For nodes that are
    // not part of the data flow, or that are part of the target's data flow,
    // we move them before the call's data flow if legal. We stop when we run
    // out of the call's data flow.
    //
    // The end result is that all nodes outside the call's data flow or inside
    // the target's data flow are computed before call arguments, allowing LSRA
    // to resolve the ABI constraints without interfering with the target
    // computation.
    //
    // LIR::Flags::Mark tags nodes known to be in the call's data flow;
    // numMarked counts outstanding marked nodes so the loop knows when the
    // whole data flow has been visited.
    unsigned numMarked = 1;
    call->gtLIRFlags |= LIR::Flags::Mark;

    // The contiguous range of nodes currently being "moved" (conceptually the
    // target computation plus anything folded in ahead of it). Empty means we
    // either have not reached gtCallAddr yet, or we gave up moving.
    LIR::ReadOnlyRange movingRange;

    // Capture gtPrev before the body runs, since the body may unlink and
    // reinsert 'cur' elsewhere in the block range.
    GenTree* prev;
    for (GenTree* cur = call; numMarked > 0; cur = prev)
    {
        prev = cur->gtPrev;

        if ((cur->gtLIRFlags & LIR::Flags::Mark) == 0)
        {
            // Node is NOT in the call's data flow.
            // If we are still moving nodes then extend the range so that we
            // also move this node outside the data flow of the call.
            if (!movingRange.IsEmpty())
            {
                assert(cur->gtNext == movingRange.FirstNode());
                movingRange = LIR::ReadOnlyRange(cur, movingRange.LastNode());
                m_scratchSideEffects.AddNode(comp, cur);
            }

            continue;
        }

        // Node is in the call's data flow: consume its mark and mark its
        // operands (they are also part of the data flow).
        cur->gtLIRFlags &= ~LIR::Flags::Mark;
        numMarked--;

        if (cur == call->gtCallAddr)
        {
            // Start moving this range. Do not add its side effects as we will
            // check the NRE manually for precision.
            movingRange = LIR::ReadOnlyRange(cur, cur);
            continue;
        }

        cur->VisitOperands([&](GenTree* op) {
            assert((op->gtLIRFlags & LIR::Flags::Mark) == 0);
            op->gtLIRFlags |= LIR::Flags::Mark;
            numMarked++;
            return GenTree::VisitResult::Continue;
        });

        if (!movingRange.IsEmpty())
        {
            // This node is in the dataflow. See if we can move it ahead of the
            // range we are moving.
            bool interferes = false;
            if (m_scratchSideEffects.InterferesWith(comp, cur, /* strict */ true))
            {
                JITDUMP(" Stopping at [%06u]; it interferes with the current range we are moving\n",
                Compiler::dspTreeID(cur));
                interferes = true;
            }

            if (!interferes)
            {
                // No problem so far. However the side effect set does not
                // include the GVM call itself, which can throw NRE. Check the
                // NRE now for precision.
                GenTreeFlags flags = cur->OperEffects(comp);
                if ((flags & GTF_PERSISTENT_SIDE_EFFECTS) != 0)
                {
                    // Persistent side effects (e.g. stores) cannot be reordered
                    // before the potentially-NRE-throwing lookup helper.
                    JITDUMP(" Stopping at [%06u]; it has persistent side effects\n", Compiler::dspTreeID(cur));
                    interferes = true;
                }
                else if ((flags & GTF_EXCEPT) != 0)
                {
                    // Only a node whose sole possible exception is NRE may swap
                    // with the helper (which itself may throw NRE), keeping the
                    // observable exception unchanged.
                    ExceptionSetFlags preciseExceptions = cur->OperExceptions(comp);
                    if (preciseExceptions != ExceptionSetFlags::NullReferenceException)
                    {
                        JITDUMP(" Stopping at [%06u]; it throws an exception that is not NRE\n",
                        Compiler::dspTreeID(cur));
                        interferes = true;
                    }
                }
            }

            if (interferes)
            {
                // Stop moving the range, but keep going through the rest
                // of the nodes to unmark them
                movingRange = LIR::ReadOnlyRange();
            }
            else
            {
                // Move 'cur' ahead of 'movingRange'
                assert(cur->gtNext == movingRange.FirstNode());
                BlockRange().Remove(cur);
                BlockRange().InsertAfter(movingRange.LastNode(), cur);
            }
        }
    }

    JITDUMP("Result of moved target evaluation:\n");
    DISPTREERANGE(BlockRange(), call);
}

GenTree* Lowering::LowerIndirectNonvirtCall(GenTreeCall* call)
{
#ifdef TARGET_X86
Expand Down
1 change: 1 addition & 0 deletions src/coreclr/jit/lower.h
Original file line number Diff line number Diff line change
Expand Up @@ -196,6 +196,7 @@ class Lowering final : public Phase
GenTreeLclVar* SpillStructCallResult(GenTreeCall* call) const;
#endif // WINDOWS_AMD64_ABI
GenTree* LowerDelegateInvoke(GenTreeCall* call);
void OptimizeCallIndirectTargetEvaluation(GenTreeCall* call);
GenTree* LowerIndirectNonvirtCall(GenTreeCall* call);
GenTree* LowerDirectCall(GenTreeCall* call);
GenTree* LowerNonvirtPinvokeCall(GenTreeCall* call);
Expand Down
50 changes: 50 additions & 0 deletions src/tests/JIT/Regression/JitBlue/Runtime_121711/Runtime_121711.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

using System;
using System.Runtime.CompilerServices;
using Xunit;

// Regression test (tracked as Runtime_121711).
// Pins evaluation order for an indirect (generic virtual) call on a null
// receiver: the argument expression Bar() must execute — setting _exitCode
// to 100 — before the call dispatch throws NullReferenceException.
public class Runtime_121711
{
    [Fact]
    public static int TestEntryPoint()
    {
        try
        {
            // Test(null) is expected to throw NRE during the virtual call;
            // falling through to the return means no exception => fail (101).
            Test(null);
            return 101;
        }
        catch (NullReferenceException)
        {
            // Pass (100) only if Bar() ran before the NRE was raised;
            // otherwise _exitCode still holds its initial 102 => fail.
            return _exitCode;
        }
    }

    [MethodImpl(MethodImplOptions.NoInlining)]
    private static void Test(Base b)
    {
        // Generic virtual call on a null receiver. Per C# argument evaluation
        // order, Bar() is evaluated before the dispatch that throws NRE.
        // NoInlining keeps the call shape intact for the JIT path under test.
        b.Foo<string>(Bar());
    }

    [MethodImpl(MethodImplOptions.NoInlining)]
    private static int Bar()
    {
        // Observable side effect used to prove the argument was evaluated.
        _exitCode = 100;
        return 42;
    }

    // 102 = "argument was never evaluated"; flipped to 100 by Bar().
    private static int _exitCode = 102;
}

// Abstract base declaring a generic virtual method; calling Foo<T> through a
// Base reference exercises the runtime's generic-virtual-method dispatch path.
public abstract class Base
{
    public abstract void Foo<T>(int x);
}

// Sole concrete implementation of Base.Foo<T>. The body is intentionally
// empty — the test observes only argument evaluation order, not the callee.
public class Derived : Base
{
    public override void Foo<T>(int x)
    {
    }
}
1 change: 1 addition & 0 deletions src/tests/JIT/Regression/Regression_ro_1.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,7 @@
<Compile Include="JitBlue\Runtime_120270\Runtime_120270.cs" />
<Compile Include="JitBlue\Runtime_120414\Runtime_120414.cs" />
<Compile Include="JitBlue\Runtime_120522\Runtime_120522.cs" />
<Compile Include="JitBlue\Runtime_121711\Runtime_121711.cs" />
<Compile Include="JitBlue\Runtime_31615\Runtime_31615.cs" />
<Compile Include="JitBlue\Runtime_33884\Runtime_33884.cs" />
<Compile Include="JitBlue\Runtime_38920\Runtime_38920.cs" />
Expand Down
Loading