5 changes: 5 additions & 0 deletions eng/native/configurecompiler.cmake
@@ -562,6 +562,11 @@ if(CLR_CMAKE_HOST_UNIX_X86)
   add_compile_options(-msse2)
 endif()
 
+if(CLR_CMAKE_HOST_UNIX_ARM64)
+  # add_compile_options(-march=armv8-a+lse)
+  add_compile_options(-moutline-atomics)
+endif(CLR_CMAKE_HOST_UNIX_ARM64)
+
 if(CLR_CMAKE_HOST_UNIX)
   add_compile_options(${CLR_ADDITIONAL_COMPILER_OPTIONS})
 endif(CLR_CMAKE_HOST_UNIX)
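For context on the new flag: with `-moutline-atomics`, GCC and Clang compile atomic operations into calls to small out-of-line helpers (e.g. `__aarch64_ldadd4_acq_rel`) that check once at startup whether the CPU implements the ARMv8.1 LSE atomic instructions, then use `ldadd`/`cas`/`swp` directly or fall back to an `ldaxr`/`stlxr` loop. The commented-out `-march=armv8-a+lse` alternative would emit LSE instructions unconditionally, at the cost of dropping ARMv8.0 hardware. A minimal sketch for inspecting the effect (the file name and build line are illustrative, not part of this change):

```cpp
// outline_demo.cpp -- illustrative only; inspect the codegen with:
//   g++ -O2 -moutline-atomics -S outline_demo.cpp -o outline_demo.s
#include <atomic>

std::atomic<int> g_counter{0};

int bump()
{
    // With -moutline-atomics this becomes a call to the libgcc/compiler-rt
    // helper __aarch64_ldadd4_acq_rel, which dispatches at runtime: a single
    // 'ldaddal' on LSE-capable cores, an ldaxr/stlxr loop otherwise.
    // With -march=armv8-a+lse it would instead be an inline 'ldaddal'.
    return g_counter.fetch_add(1, std::memory_order_acq_rel);
}
```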
14 changes: 7 additions & 7 deletions src/coreclr/gc/env/gcenv.interlocked.inl
@@ -13,13 +13,13 @@
 #ifndef _MSC_VER
 __forceinline void Interlocked::ArmInterlockedOperationBarrier()
 {
-#ifdef HOST_ARM64
-    // See PAL_ArmInterlockedOperationBarrier() in the PAL
-    __sync_synchronize();
-#endif // HOST_ARM64
-#ifdef HOST_LOONGARCH64
-    __sync_synchronize();
-#endif //HOST_LOONGARCH64
+// #ifdef HOST_ARM64
+//     // See PAL_ArmInterlockedOperationBarrier() in the PAL
+//     __sync_synchronize();
+// #endif // HOST_ARM64
+// #ifdef HOST_LOONGARCH64
+//     __sync_synchronize();
+// #endif //HOST_LOONGARCH64
 }
 #endif // !_MSC_VER

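The body being commented out here was a workaround for the LL/SC expansion of the `__sync_*` builtins: the trailing `stlxr` has release semantics only, so a later load could be observed before the store completed, and the extra `__sync_synchronize()` (a `dmb ish`) restored full-fence behavior. Once atomics go through the outline helpers (or inline LSE), the operation is a single acquire-release instruction and the trailing fence becomes redundant. A rough sketch of the two expansions, with approximate assembly in the comments (illustrative, not exact codegen):

```cpp
#include <cstdint>

// Illustrative only: approximate arm64 codegen for an interlocked add.
int32_t interlocked_add(int32_t* addend, int32_t value)
{
    // ARMv8.0 LL/SC loop:              ARMv8.1 LSE:
    //   loop: ldaxr w8, [x0]             ldaddal w1, w8, [x0]
    //         add   w9, w8, w1           add     w0, w8, w1
    //         stlxr w10, w9, [x0]
    //         cbnz  w10, loop
    //   ; 'stlxr' is release-only:     ; 'ldaddal' is acquire+release in
    //   ; a later load may be          ; one instruction, so no trailing
    //   ; reordered before it, hence   ; 'dmb ish' is required
    //   ; the old trailing dmb ish
    return __sync_add_and_fetch(addend, value);
}
```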
34 changes: 17 additions & 17 deletions src/coreclr/pal/inc/pal.h
@@ -3447,23 +3447,23 @@ BitScanReverse64(

 FORCEINLINE void PAL_ArmInterlockedOperationBarrier()
 {
-#ifdef HOST_ARM64
-    // On arm64, most of the __sync* functions generate a code sequence like:
-    //     loop:
-    //         ldaxr (load acquire exclusive)
-    //         ...
-    //         stlxr (store release exclusive)
-    //         cbnz loop
-    //
-    // It is possible for a load following the code sequence above to be reordered to occur prior to the store above due to the
-    // release barrier, this is substantiated by https://github.com/dotnet/coreclr/pull/17508. Interlocked operations in the PAL
-    // require the load to occur after the store. This memory barrier should be used following a call to a __sync* function to
-    // prevent that reordering. Code generated for arm32 includes a 'dmb' after 'cbnz', so no issue there at the moment.
-    __sync_synchronize();
-#endif // HOST_ARM64
-#ifdef HOST_LOONGARCH64
-    __sync_synchronize();
-#endif
+// #ifdef HOST_ARM64
+//     // On arm64, most of the __sync* functions generate a code sequence like:
+//     //     loop:
+//     //         ldaxr (load acquire exclusive)
+//     //         ...
+//     //         stlxr (store release exclusive)
+//     //         cbnz loop
+//     //
+//     // It is possible for a load following the code sequence above to be reordered to occur prior to the store above due to the
+//     // release barrier, this is substantiated by https://github.com/dotnet/coreclr/pull/17508. Interlocked operations in the PAL
+//     // require the load to occur after the store. This memory barrier should be used following a call to a __sync* function to
+//     // prevent that reordering. Code generated for arm32 includes a 'dmb' after 'cbnz', so no issue there at the moment.
+//     __sync_synchronize();
+// #endif // HOST_ARM64
+// #ifdef HOST_LOONGARCH64
+//     __sync_synchronize();
+// #endif
 }
 
 /*++
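The comment removed above describes the hazard as a store-buffer reordering: with release-only ordering on the store half of the read-modify-write, two threads can each fail to observe the other's update. A hypothetical litmus-test sketch of the pattern the PAL's full-fence Interlocked contract must forbid (the names x, y, r1, r2 are illustrative, not from the PR):

```cpp
#include <atomic>

// Illustrative store-buffer litmus test.
std::atomic<int> x{0}, y{0};
int r1, r2;

void thread_a()
{
    x.exchange(1, std::memory_order_acq_rel);   // RMW on x
    r1 = y.load(std::memory_order_relaxed);     // later load of y
}

void thread_b()
{
    y.exchange(1, std::memory_order_acq_rel);   // RMW on y
    r2 = x.load(std::memory_order_relaxed);     // later load of x
}

// Under the C++ memory model, with acq_rel (not seq_cst) ordering the
// outcome r1 == 0 && r2 == 0 is permitted: each later load may be
// satisfied before the other thread's store becomes visible. The PAL's
// Interlocked contract requires full-fence semantics, which the removed
// 'dmb ish' supplied for the ldaxr/stlxr expansion and which this change
// relies on the LSE acquire+release instructions to provide instead.
```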