mirror of https://github.com/dotnet/runtime
Merge 3c9049f8ea into 02596ba8d9
Commit 9d3df56d47
@@ -53,9 +53,6 @@ namespace System.Runtime.CompilerServices
             return obj;
         }
 
-        [MethodImpl(MethodImplOptions.InternalCall)]
-        private static extern void WriteBarrier(ref object? dst, object? obj);
-
         // IsInstanceOf test used for unusual cases (naked type parameters, variant generic types)
         // Unlike the IsInstanceOfInterface and IsInstanceOfClass functions,
         // this test must deal with all kinds of type tests
@@ -454,7 +451,7 @@ namespace System.Runtime.CompilerServices
                 goto notExactMatch;
 
         doWrite:
-            WriteBarrier(ref element, obj);
+            RuntimeHelpers.WriteBarrier(ref element, obj);
             return;
 
         assigningNull:
@@ -475,7 +472,7 @@ namespace System.Runtime.CompilerServices
             CastResult result = CastCache.TryGet(s_table!, (nuint)RuntimeHelpers.GetMethodTable(obj), (nuint)elementType);
             if (result == CastResult.CanCast)
             {
-                WriteBarrier(ref element, obj);
+                RuntimeHelpers.WriteBarrier(ref element, obj);
                 return;
             }
 
@@ -493,7 +490,7 @@ namespace System.Runtime.CompilerServices
                 ThrowArrayMismatchException();
             }
 
-            WriteBarrier(ref element, obj2);
+            RuntimeHelpers.WriteBarrier(ref element, obj2);
         }
 
         [DebuggerHidden]
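The four hunks above replace CastHelpers' private extern `WriteBarrier` FCall with calls to `RuntimeHelpers.WriteBarrier`, a managed `[Intrinsic]` whose fallback body is a plain reference assignment. A minimal C# sketch of that pattern, using a hypothetical stand-in type (the real method lives on `RuntimeHelpers` and is expanded by the JIT):

```csharp
using System;

static class WriteBarrierSketch
{
    // Sketch only: the managed fallback body is a plain reference assignment,
    // which the JIT already compiles as a store with a GC write barrier. The
    // real RuntimeHelpers.WriteBarrier is additionally marked [Intrinsic] so
    // the JIT can expand calls to it into that barriered store directly.
    internal static void WriteBarrier(ref object? dst, object? obj) => dst = obj;
}

static class Demo
{
    private static readonly object?[] s_slots = new object?[1];

    static void Main()
    {
        // Writing a reference into a heap-resident slot is exactly the case
        // a GC write barrier has to track.
        WriteBarrierSketch.WriteBarrier(ref s_slots[0], new object());
        Console.WriteLine(s_slots[0] is not null); // True
    }
}
```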
@@ -5124,6 +5124,30 @@ private:
                                 GenTree*              dereferencedAddress,
                                 InlArgInfo*           inlArgInfo);
 
+    typedef JitHashTable<CORINFO_METHOD_HANDLE, JitPtrKeyFuncs<struct CORINFO_METHOD_STRUCT_>, CORINFO_METHOD_HANDLE> HelperToManagedMap;
+    HelperToManagedMap* m_helperToManagedMap = nullptr;
+
+public:
+    HelperToManagedMap* GetHelperToManagedMap()
+    {
+        if (m_helperToManagedMap == nullptr)
+        {
+            m_helperToManagedMap = new (getAllocator()) HelperToManagedMap(getAllocator());
+        }
+        return m_helperToManagedMap;
+    }
+    bool HelperToManagedMapLookup(CORINFO_METHOD_HANDLE helperCallHnd, CORINFO_METHOD_HANDLE* userCallHnd)
+    {
+        if (m_helperToManagedMap == nullptr)
+        {
+            return false;
+        }
+        bool found = m_helperToManagedMap->Lookup(helperCallHnd, userCallHnd);
+        return found;
+    }
+private:
+
+    void impConvertToUserCallAndMarkForInlining(GenTreeCall* call);
     void impMarkInlineCandidate(GenTree*               call,
                                 CORINFO_CONTEXT_HANDLE exactContextHnd,
                                 bool                   exactContextNeedsRuntimeLookup,
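The compiler.h hunk above gives the inline root a lazily allocated map from helper method handles to the managed methods that implement them. A rough C# analogy of that shape, with `Dictionary` standing in for `JitHashTable` and `nint` for `CORINFO_METHOD_HANDLE` (names illustrative, not the JIT's):

```csharp
using System.Collections.Generic;

sealed class HelperToManagedMapSketch
{
    private Dictionary<nint, nint>? _map;

    // Mirrors GetHelperToManagedMap(): allocate the table on first use.
    public Dictionary<nint, nint> GetMap() => _map ??= new Dictionary<nint, nint>();

    // Mirrors HelperToManagedMapLookup(): a map that was never created simply
    // has no entries, so report "not found" without allocating it.
    public bool TryLookup(nint helperHandle, out nint managedHandle)
    {
        managedHandle = 0;
        return _map is not null && _map.TryGetValue(helperHandle, out managedHandle);
    }
}
```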
@@ -2383,6 +2383,34 @@ bool GenTreeCall::IsHelperCall(Compiler* compiler, unsigned helper) const
     return IsHelperCall(compiler->eeFindHelper(helper));
 }
 
+//-------------------------------------------------------------------------
+// IsHelperCallOrUserEquivalent: Determine if this GT_CALL node is a specific helper call
+//    or its CT_USER equivalent.
+//
+// Arguments:
+//    compiler - the compiler instance so that we can call eeFindHelper
+//
+// Return Value:
+//    Returns true if this GT_CALL node is a call to the specified helper.
+//
+bool GenTreeCall::IsHelperCallOrUserEquivalent(Compiler* compiler, unsigned helper) const
+{
+    CORINFO_METHOD_HANDLE helperCallHnd = Compiler::eeFindHelper(helper);
+    if (IsHelperCall())
+    {
+        return helperCallHnd == gtCallMethHnd;
+    }
+
+    if (gtCallType == CT_USER_FUNC)
+    {
+        CORINFO_METHOD_HANDLE userCallHnd = NO_METHOD_HANDLE;
+        return compiler->impInlineRoot()->HelperToManagedMapLookup(helperCallHnd, &userCallHnd) &&
+               (userCallHnd == gtCallMethHnd);
+    }
+
+    return false;
+}
+
 //-------------------------------------------------------------------------
 // IsRuntimeLookupHelperCall: Determine if this GT_CALL node represents a runtime lookup helper call.
 //
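`IsHelperCallOrUserEquivalent` asks whether a call targets a given helper either directly or through the managed method recorded for it. Continuing the dictionary analogy from the previous sketch (hypothetical types, not the JIT's API):

```csharp
using System.Collections.Generic;

static class HelperOrUserEquivalentSketch
{
    // Analogy of the two-branch check: a helper call matches on the helper's
    // handle itself; a user call matches only if the recorded helper->managed
    // mapping points at the call's current target method.
    public static bool IsHelperCallOrUserEquivalent(
        bool isHelperCall, nint callTarget, nint helperHandle,
        IReadOnlyDictionary<nint, nint> helperToManagedMap)
    {
        if (isHelperCall)
        {
            return callTarget == helperHandle;
        }

        return helperToManagedMap.TryGetValue(helperHandle, out nint managed) &&
               managed == callTarget;
    }
}
```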
@@ -12908,6 +12936,9 @@ void Compiler::gtDispTree(GenTree* tree,
                     case NI_System_Runtime_CompilerServices_RuntimeHelpers_IsKnownConstant:
                         printf(" isKnownConst");
                         break;
+                    case NI_System_Runtime_CompilerServices_RuntimeHelpers_WriteBarrier:
+                        printf(" WriteBarrier");
+                        break;
 #if defined(FEATURE_SIMD)
                     case NI_SIMD_UpperRestore:
                         printf(" simdUpperRestore");
@@ -5768,6 +5768,8 @@ struct GenTreeCall final : public GenTree
 
     bool IsHelperCall(Compiler* compiler, unsigned helper) const;
 
+    bool IsHelperCallOrUserEquivalent(Compiler* compiler, unsigned helper) const;
+
    bool IsRuntimeLookupHelperCall(Compiler* compiler) const;
 
     bool IsSpecialIntrinsic(Compiler* compiler, NamedIntrinsic ni) const;
@@ -7362,7 +7362,12 @@ void Compiler::impImportBlockCode(BasicBlock* block)
                 // The array helper takes a native int for array length.
                 // So if we have an int, explicitly extend it to be a native int.
                 index = impImplicitIorI4Cast(index, TYP_I_IMPL);
-                op1 = gtNewHelperCallNode(CORINFO_HELP_ARRADDR_ST, TYP_VOID, array, index, value);
+
+                GenTreeCall* call = gtNewHelperCallNode(CORINFO_HELP_ARRADDR_ST, TYP_VOID, array, index, value);
+                INDEBUG(call->gtRawILOffset = opcodeOffs);
+                impConvertToUserCallAndMarkForInlining(call);
+                op1 = call;
+
                 goto SPILL_APPEND;
             }
 
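The importer still builds the `CORINFO_HELP_ARRADDR_ST` call for `stelem.ref`, but now hands it to `impConvertToUserCallAndMarkForInlining` so the managed implementation behind the helper can be inlined. For orientation, a simplified C# model of what that covariant array-store helper must guarantee (not the runtime's actual code, which goes through cast caches and ends in `RuntimeHelpers.WriteBarrier`):

```csharp
using System;

static class StelemRefSketch
{
    // Simplified model of IL `stelem.ref`: storing null always succeeds;
    // otherwise the value must be assignable to the array's element type,
    // or ArrayTypeMismatchException is thrown.
    public static void StoreRef(object?[] array, int index, object? value)
    {
        if (value is not null &&
            !array.GetType().GetElementType()!.IsInstanceOfType(value))
        {
            throw new ArrayTypeMismatchException();
        }

        array[index] = value; // the JIT emits a GC write barrier for this store
    }

    static void Main()
    {
        object?[] strings = new string?[2];   // covariant view of a string[]
        StoreRef(strings, 0, "ok");           // fine: string matches the element type
        try { StoreRef(strings, 1, 42); }     // boxed int does not: must throw
        catch (ArrayTypeMismatchException) { Console.WriteLine("mismatch caught"); }
    }
}
```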
@@ -3428,6 +3428,8 @@ GenTree* Compiler::impIntrinsic(CORINFO_CLASS_HANDLE clsHnd,
             // This one is just `return true/false`
             case NI_System_Runtime_CompilerServices_RuntimeHelpers_IsKnownConstant:
 
+            case NI_System_Runtime_CompilerServices_RuntimeHelpers_WriteBarrier:
+
             // Not expanding this can lead to noticeable allocations in T0
             case NI_System_Runtime_CompilerServices_RuntimeHelpers_CreateSpan:
@@ -3660,6 +3662,14 @@ GenTree* Compiler::impIntrinsic(CORINFO_CLASS_HANDLE clsHnd,
                 break;
             }
 
+            case NI_System_Runtime_CompilerServices_RuntimeHelpers_WriteBarrier:
+            {
+                GenTree* val = impPopStack().val;
+                GenTree* dst = impPopStack().val;
+                retNode = gtNewStoreIndNode(TYP_REF, dst, val, GTF_IND_TGT_HEAP);
+                break;
+            }
+
             case NI_System_Runtime_CompilerServices_RuntimeHelpers_IsKnownConstant:
             {
                 GenTree* op1 = impPopStack().val;
@@ -7742,6 +7752,53 @@ void Compiler::addGuardedDevirtualizationCandidate(GenTreeCall* call,
     call->AddGDVCandidateInfo(this, pInfo);
 }
 
+//------------------------------------------------------------------------
+// impConvertToUserCallAndMarkForInlining: convert a helper call to a user call
+//    and mark it for inlining. This is used for helper calls that are
+//    known to be backed by a user method that can be inlined.
+//
+// Arguments:
+//    call - the helper call to convert
+//
+void Compiler::impConvertToUserCallAndMarkForInlining(GenTreeCall* call)
+{
+    assert(call->IsHelperCall());
+
+    if (!opts.OptEnabled(CLFLG_INLINING))
+    {
+        return;
+    }
+
+    CORINFO_METHOD_HANDLE helperCallHnd     = call->gtCallMethHnd;
+    CORINFO_METHOD_HANDLE managedCallHnd    = NO_METHOD_HANDLE;
+    CORINFO_CONST_LOOKUP  pNativeEntrypoint = {};
+    info.compCompHnd->getHelperFtn(eeGetHelperNum(helperCallHnd), &pNativeEntrypoint, &managedCallHnd);
+
+    if (managedCallHnd != NO_METHOD_HANDLE)
+    {
+        call->gtCallMethHnd = managedCallHnd;
+        call->gtCallType    = CT_USER_FUNC;
+
+        CORINFO_CALL_INFO hCallInfo = {};
+        hCallInfo.hMethod           = managedCallHnd;
+        hCallInfo.methodFlags       = info.compCompHnd->getMethodAttribs(hCallInfo.hMethod);
+        impMarkInlineCandidate(call, nullptr, false, &hCallInfo, compInlineContext);
+
+#if DEBUG
+        CORINFO_METHOD_HANDLE existingValue = NO_METHOD_HANDLE;
+        if (impInlineRoot()->GetHelperToManagedMap()->Lookup(helperCallHnd, &existingValue))
+        {
+            // Let's make sure HelperToManagedMap::Overwrite behavior always overwrites the same value.
+            assert(existingValue == managedCallHnd);
+        }
+#endif
+
+        impInlineRoot()->GetHelperToManagedMap()->Set(helperCallHnd, managedCallHnd, HelperToManagedMap::Overwrite);
+        JITDUMP("Converting helperCall [%06u] to user call [%s] and marking for inlining\n", dspTreeID(call),
+                eeGetMethodFullName(managedCallHnd));
+    }
+}
+
 //------------------------------------------------------------------------
 // impMarkInlineCandidate: determine if this call can be subsequently inlined
 //
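`impConvertToUserCallAndMarkForInlining` asks the runtime whether the helper has a managed implementation, retargets the call at it, marks it as an inline candidate, and records the mapping so later phases can still recognize the call. A hypothetical C# analogy of that flow (stand-in types; the real code talks to the EE through `getHelperFtn` and `impMarkInlineCandidate`):

```csharp
using System.Collections.Generic;

// Stand-in for the EE query that may report a managed implementation
// for a helper (0 means "none known").
interface IHelperResolver
{
    nint GetManagedImplementation(nint helperHandle);
}

sealed class CallSite
{
    public bool IsHelperCall = true;
    public nint TargetMethod;        // helper handle while IsHelperCall is true
    public bool IsInlineCandidate;
}

static class ConvertToUserCallSketch
{
    public static void Convert(CallSite call, IHelperResolver resolver,
                               Dictionary<nint, nint> helperToManagedMap)
    {
        nint helperHandle = call.TargetMethod;
        nint managed = resolver.GetManagedImplementation(helperHandle);
        if (managed == 0)
        {
            return; // no managed equivalent; leave the helper call alone
        }

        call.TargetMethod = managed;       // retarget at the managed method
        call.IsHelperCall = false;         // now a "user" call
        call.IsInlineCandidate = true;     // let the inliner consider it
        helperToManagedMap[helperHandle] = managed; // remembered for later phases
    }
}
```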
@@ -10658,7 +10715,14 @@ NamedIntrinsic Compiler::lookupNamedIntrinsic(CORINFO_METHOD_HANDLE method)
         {
             namespaceName += 8;
 
-            if (strcmp(namespaceName, "CompilerServices") == 0)
+            if (strcmp(className, "TypeCast") == 0)
+            {
+                if (strcmp(methodName, "WriteBarrier") == 0)
+                {
+                    result = NI_System_Runtime_CompilerServices_RuntimeHelpers_WriteBarrier;
+                }
+            }
+            else if (strcmp(namespaceName, "CompilerServices") == 0)
             {
                 if (strcmp(className, "RuntimeHelpers") == 0)
                 {
@@ -10674,6 +10738,10 @@ NamedIntrinsic Compiler::lookupNamedIntrinsic(CORINFO_METHOD_HANDLE method)
                     {
                         result = NI_System_Runtime_CompilerServices_RuntimeHelpers_IsKnownConstant;
                     }
+                    else if (strcmp(methodName, "WriteBarrier") == 0)
+                    {
+                        result = NI_System_Runtime_CompilerServices_RuntimeHelpers_WriteBarrier;
+                    }
                     else if (strcmp(methodName, "IsReferenceOrContainsReferences") == 0)
                     {
                         result =
@@ -6503,13 +6503,22 @@ GenTree* Compiler::fgMorphCall(GenTreeCall* call)
 
     // Morph stelem.ref helper call to store a null value, into a store into an array without the helper.
     // This needs to be done after the arguments are morphed to ensure constant propagation has already taken place.
-    if (opts.OptimizationEnabled() && call->IsHelperCall(this, CORINFO_HELP_ARRADDR_ST))
+    if (opts.OptimizationEnabled() && call->IsHelperCallOrUserEquivalent(this, CORINFO_HELP_ARRADDR_ST))
     {
         assert(call->gtArgs.CountArgs() == 3);
         GenTree* arr   = call->gtArgs.GetArgByIndex(0)->GetNode();
         GenTree* index = call->gtArgs.GetArgByIndex(1)->GetNode();
         GenTree* value = call->gtArgs.GetArgByIndex(2)->GetNode();
 
+        if (!call->IsHelperCall())
+        {
+            // Convert back to helper call if it wasn't inlined.
+            // Currently, only helper calls are eligible to be direct calls if the target has reached
+            // its final tier. TODO: remove this workaround and convert this user call to direct as well.
+            call->gtCallMethHnd = eeFindHelper(CORINFO_HELP_ARRADDR_ST);
+            call->gtCallType    = CT_HELPER;
+        }
+
         if (gtCanSkipCovariantStoreCheck(value, arr))
         {
             // Either or both of the array and index arguments may have been spilled to temps by `fgMorphArgs`. Copy
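With the call now recognized in either form, morph flips an un-inlined user call back into the raw helper and then tries the existing optimization, which hinges on `gtCanSkipCovariantStoreCheck`. A rough C# illustration of two cases where the covariance check is provably unnecessary (assumed examples, not the JIT's exact rule set):

```csharp
using System;

static class CovariantStoreSketch
{
    // Illustrative only: two situations where a covariant array store cannot
    // fail, so the stelem.ref type check can be dropped and the store becomes
    // a plain (write-barriered) element store.
    public static bool CanSkipCheck(object? value, Array array)
    {
        if (value is null)
        {
            return true; // storing null never violates the element type
        }

        Type elementType = array.GetType().GetElementType()!;
        if (elementType.IsSealed && value.GetType() == elementType)
        {
            return true; // exact match with a sealed element type cannot mismatch
        }

        return false; // otherwise the runtime check (or the helper) is still needed
    }
}
```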
@@ -123,6 +123,7 @@ enum NamedIntrinsic : unsigned short
     NI_System_Runtime_CompilerServices_RuntimeHelpers_IsKnownConstant,
     NI_System_Runtime_CompilerServices_RuntimeHelpers_IsReferenceOrContainsReferences,
     NI_System_Runtime_CompilerServices_RuntimeHelpers_GetMethodTable,
+    NI_System_Runtime_CompilerServices_RuntimeHelpers_WriteBarrier,
 
     NI_System_Runtime_CompilerServices_AsyncHelpers_AsyncSuspend,
     NI_System_Runtime_CompilerServices_AsyncHelpers_Await,
@@ -148,10 +148,6 @@ namespace System.Runtime
         internal static extern unsafe object RhpNewFastMisalign(MethodTable * pEEType);
 #endif // FEATURE_64BIT_ALIGNMENT
 
-        [RuntimeImport(RuntimeLibrary, "RhpAssignRef")]
-        [MethodImpl(MethodImplOptions.InternalCall)]
-        internal static extern unsafe void RhpAssignRef(ref object? address, object? obj);
-
         [MethodImplAttribute(MethodImplOptions.InternalCall)]
         [RuntimeImport(RuntimeLibrary, "RhpGcSafeZeroMemory")]
         internal static extern unsafe ref byte RhpGcSafeZeroMemory(ref byte dmem, nuint size);
@@ -802,8 +802,8 @@ namespace System.Runtime
             if (elementType != obj.GetMethodTable())
                 goto notExactMatch;
 
-        doWrite:
-            InternalCalls.RhpAssignRef(ref element, obj);
+        doWrite:
+            WriteBarrier(ref element, obj);
             return;
 
         assigningNull:
@@ -826,7 +826,7 @@ namespace System.Runtime
             CastResult result = s_castCache.TryGet((nuint)obj.GetMethodTable() + (int)AssignmentVariation.BoxedSource, (nuint)elementType);
             if (result == CastResult.CanCast)
             {
-                InternalCalls.RhpAssignRef(ref element, obj);
+                WriteBarrier(ref element, obj);
                 return;
             }
 
@@ -843,7 +843,7 @@ namespace System.Runtime
                 throw elementType->GetClasslibException(ExceptionIDs.ArrayTypeMismatch);
             }
 
-            InternalCalls.RhpAssignRef(ref element, obj);
+            WriteBarrier(ref element, obj);
         }
 
         private static unsafe object IsInstanceOfArray(MethodTable* pTargetType, object obj)
@@ -1275,5 +1275,8 @@ namespace System.Runtime
 
             return obj;
         }
+
+        [Intrinsic]
+        internal static void WriteBarrier(ref object? dst, object? obj) => dst = obj;
     }
 }
@@ -265,11 +265,6 @@ Section segment para 'DATA'
 JIT_WriteBarrier_Loc:
         dq 0
 
-LEAF_ENTRY JIT_WriteBarrier_Callable, _TEXT
-        ; JIT_WriteBarrier(Object** dst, Object* src)
-        jmp     QWORD PTR [JIT_WriteBarrier_Loc]
-LEAF_END JIT_WriteBarrier_Callable, _TEXT
-
 ; There is an even more optimized version of these helpers possible which takes
 ; advantage of knowledge of which way the ephemeral heap is growing to only do 1/2
 ; that check (this is more significant in the JIT_WriteBarrier case).
@@ -224,14 +224,6 @@ LEAF_END_MARKED JIT_ByRefWriteBarrier, _TEXT
         .text
 #endif
 
-// ------------------------------------------------------------------
-// __declspec(naked) void F_CALL_CONV JIT_WriteBarrier_Callable(Object **dst, Object* val)
-.balign 16
-LEAF_ENTRY JIT_WriteBarrier_Callable, _TEXT
-        // JIT_WriteBarrier(Object** dst, Object* src)
-        jmp     [rip + C_FUNC(JIT_WriteBarrier_Loc)]
-LEAF_END JIT_WriteBarrier_Callable, _TEXT
-
 
 // The following helper will access ("probe") a word on each page of the stack
 // starting with the page right beneath rsp down to the one pointed to by r11.
@@ -675,21 +675,6 @@ g_rgWriteBarrierDescriptors:
 
 .global g_rgWriteBarrierDescriptors
 
-// ------------------------------------------------------------------
-// __declspec(naked) void F_CALL_CONV JIT_WriteBarrier_Callable(Object **dst, Object* val)
-LEAF_ENTRY JIT_WriteBarrier_Callable
-
-    // Branch to the write barrier
-#if defined(__clang__)
-    ldr     r2, =JIT_WriteBarrier_Loc-(1f+4) // or R3? See targetarm.h
-1:
-    add     r2, pc
-#else
-    ldr     r2, =JIT_WriteBarrier_Loc
-#endif
-    ldr     pc, [r2]
-
-LEAF_END JIT_WriteBarrier_Callable
-
 #ifdef FEATURE_READYTORUN
 
@@ -80,20 +80,6 @@ PATCH_LABEL ThePreStubPatchLabel
     ret     lr
 LEAF_END ThePreStubPatch, _TEXT
 
-// ------------------------------------------------------------------
-// __declspec(naked) void F_CALL_CONV JIT_WriteBarrier_Callable(Object **dst, Object* val)
-LEAF_ENTRY JIT_WriteBarrier_Callable, _TEXT
-
-    // Setup args for JIT_WriteBarrier. x14 = dst ; x15 = val
-    mov     x14, x0                     // x14 = dst
-    mov     x15, x1                     // x15 = val
-
-    // Branch to the write barrier
-    PREPARE_EXTERNAL_VAR JIT_WriteBarrier_Loc, x17
-    ldr     x17, [x17]
-    br      x17
-LEAF_END JIT_WriteBarrier_Callable, _TEXT
-
 //
 // x12 = UMEntryThunkData*
 //
@@ -883,21 +883,6 @@ CallHelper2
         br      x1
     LEAF_END
 
-; ------------------------------------------------------------------
-; __declspec(naked) void F_CALL_CONV JIT_WriteBarrier_Callable(Object **dst, Object* val)
-    LEAF_ENTRY JIT_WriteBarrier_Callable
-
-    ; Setup args for JIT_WriteBarrier. x14 = dst ; x15 = val
-    mov     x14, x0                     ; x14 = dst
-    mov     x15, x1                     ; x15 = val
-
-    ; Branch to the write barrier
-    adrp    x17, JIT_WriteBarrier_Loc
-    ldr     x17, [x17, JIT_WriteBarrier_Loc]
-    br      x17
-
-    LEAF_END
-
 #ifdef PROFILING_SUPPORTED
 
 ; ------------------------------------------------------------------
@@ -251,10 +251,6 @@ FCFuncStart(gThreadFuncs)
     FCFuncElement("get_OptimalMaxSpinWaitsPerSpinIteration", ThreadNative::GetOptimalMaxSpinWaitsPerSpinIteration)
 FCFuncEnd()
 
-FCFuncStart(gCastHelpers)
-    FCFuncElement("WriteBarrier", ::WriteBarrier_Helper)
-FCFuncEnd()
-
 FCFuncStart(gArrayFuncs)
     FCFuncElement("GetCorElementTypeOfElementType", ArrayNative::GetCorElementTypeOfElementType)
 FCFuncEnd()
@@ -381,7 +377,6 @@ FCFuncEnd()
 FCClassElement("Array", "System", gArrayFuncs)
 FCClassElement("AssemblyLoadContext", "System.Runtime.Loader", gAssemblyLoadContextFuncs)
 FCClassElement("Buffer", "System", gBufferFuncs)
-FCClassElement("CastHelpers", "System.Runtime.CompilerServices", gCastHelpers)
 FCClassElement("Delegate", "System", gDelegateFuncs)
 FCClassElement("DependentHandle", "System.Runtime", gDependentHandleFuncs)
 FCClassElement("Environment", "System", gEnvironmentFuncs)
@@ -385,23 +385,6 @@ C_FUNC(JIT_WriteBarrierEAX_Loc):
     .word 0
     .text
 
-LEAF_ENTRY JIT_WriteBarrier_Callable, _TEXT
-    mov     eax, edx
-    mov     edx, ecx
-    push    eax
-    call    1f
-1:
-    pop     eax
-2:
-    .att_syntax
-    addl    $_GLOBAL_OFFSET_TABLE_+(2b-1b), %eax
-    .intel_syntax noprefix
-    mov     eax, dword ptr [eax + C_FUNC(JIT_WriteBarrierEAX_Loc)@GOT]
-    mov     eax, [eax]
-    xchg    eax, dword ptr [esp]
-    ret
-LEAF_END JIT_WriteBarrier_Callable, _TEXT
-
 .macro UniversalWriteBarrierHelper name
 .align 4
 
@@ -427,14 +427,6 @@ _JIT_WriteBarrierGroup@0 PROC
         ret
 _JIT_WriteBarrierGroup@0 ENDP
 
-    ALIGN 4
-    PUBLIC @JIT_WriteBarrier_Callable@8
-@JIT_WriteBarrier_Callable@8 PROC
-        mov     eax, edx
-        mov     edx, ecx
-        jmp     DWORD PTR [_JIT_WriteBarrierEAX_Loc]
-
-@JIT_WriteBarrier_Callable@8 ENDP
-
 UniversalWriteBarrierHelper MACRO name
     ALIGN 4
@@ -207,12 +207,6 @@ extern "C" FCDECL2(VOID, RhpAssignRef, Object **dst, Object *ref);
 extern "C" FCDECL2(VOID, JIT_WriteBarrier, Object **dst, Object *ref);
 extern "C" FCDECL2(VOID, JIT_WriteBarrierEnsureNonHeapTarget, Object **dst, Object *ref);
 
-// ARM64 JIT_WriteBarrier uses special ABI and thus is not callable directly
-// Copied write barriers must be called at a different location
-extern "C" FCDECL2(VOID, JIT_WriteBarrier_Callable, Object **dst, Object *ref);
-
-#define WriteBarrier_Helper JIT_WriteBarrier_Callable
-
 EXTERN_C FCDECL2_VV(INT64, JIT_LMul, INT64 val1, INT64 val2);
 
 #ifndef HOST_64BIT
@@ -112,20 +112,6 @@ LOCAL_LABEL(EphemeralCheckEnabled):
 
 WRITE_BARRIER_END JIT_UpdateWriteBarrierState
 
-// ----------------------------------------------------------------------------------------
-// __declspec(naked) void F_CALL_CONV JIT_WriteBarrier_Callable(Object **dst, Object* val)
-LEAF_ENTRY JIT_WriteBarrier_Callable, _TEXT
-
-    // Setup args for JIT_WriteBarrier. $t0 = dst ; $t1 = val
-    ori     $t6, $a0, 0                 // $t6 = dst
-    ori     $t7, $a1, 0                 // $t7 = val
-
-    // Branch to the write barrier
-    la.local  $r21, JIT_WriteBarrier_Loc
-    ld.d      $r21, $r21, 0
-    jirl    $r0, $r21, 0
-LEAF_END JIT_WriteBarrier_Callable, _TEXT
-
 
 .balign 64  // Align to power of two at least as big as patchable literal pool so that it fits optimally in cache line
 // ------------------------------------------------------------------
@@ -98,18 +98,6 @@ LOCAL_LABEL(EphemeralCheckEnabled):
 
 WRITE_BARRIER_END JIT_UpdateWriteBarrierState
 
-// ----------------------------------------------------------------------------------------
-// __declspec(naked) void F_CALL_CONV JIT_WriteBarrier_Callable(Object **dst, Object* val)
-LEAF_ENTRY JIT_WriteBarrier_Callable, _TEXT
-    // Setup args for JIT_WriteBarrier. a0 = dst ; a1 = val
-    addi    t3, a0, 0                   // t3 = dst
-    addi    t4, a1, 0                   // t4 = val
-
-    // Branch to the write barrier
-    ld      t1, JIT_WriteBarrier_Loc
-    jr      t1
-LEAF_END JIT_WriteBarrier_Callable, _TEXT
-
 
 .balign 64  // Align to power of two at least as big as patchable literal pool so that it fits optimally in cache line
 // ------------------------------------------------------------------
@@ -172,6 +172,9 @@ namespace System.Runtime.CompilerServices
 
         /// <returns>true if the given type is a reference type or a value type that contains references or by-refs; otherwise, false.</returns>
         [Intrinsic]
-        public static bool IsReferenceOrContainsReferences<T>() where T: allows ref struct => IsReferenceOrContainsReferences<T>();
+        public static bool IsReferenceOrContainsReferences<T>() where T : allows ref struct => IsReferenceOrContainsReferences<T>();
+
+        [Intrinsic]
+        internal static void WriteBarrier(ref object? dst, object? obj) => dst = obj;
     }
 }