diff --git a/backend.native/tests/runtime/workers/atomic0.kt b/backend.native/tests/runtime/workers/atomic0.kt
index bce1a50eefd..f9a3a345216 100644
--- a/backend.native/tests/runtime/workers/atomic0.kt
+++ b/backend.native/tests/runtime/workers/atomic0.kt
@@ -78,6 +78,26 @@ fun test4() {
     }
 }

+fun test5() {
+    assertFailsWith<InvalidMutabilityException> {
+        AtomicReference<Data>().set(Data(2))
+    }
+    val ref = AtomicReference<Data>()
+    val value = Data(3).freeze()
+    assertEquals(null, ref.get())
+    ref.set(value)
+    assertEquals(3, ref.get()!!.value)
+}
+
+fun test6() {
+    val int = AtomicInt()
+    int.set(239)
+    assertEquals(239, int.get())
+    val long = AtomicLong()
+    long.set(239L)
+    assertEquals(239L, long.get())
+}
+
 @Test fun runTest() {
     val COUNT = 20
     val workers = Array(COUNT, { _ -> startWorker()})
@@ -86,6 +106,8 @@ fun test4() {
     test2(workers)
     test3(workers)
     test4()
+    test5()
+    test6()

     workers.forEach {
         it.requestTermination().consume { _ -> }
diff --git a/runtime/src/main/cpp/Atomic.cpp b/runtime/src/main/cpp/Atomic.cpp
index 4ec1f685f92..cee9b3c1094 100644
--- a/runtime/src/main/cpp/Atomic.cpp
+++ b/runtime/src/main/cpp/Atomic.cpp
@@ -27,6 +27,11 @@ struct AtomicReferenceLayout {
   KInt lock_;
 };
+template <typename T> void setImpl(KRef thiz, T value) {
+  volatile T* location = reinterpret_cast<volatile T*>(thiz + 1);
+  atomicSet(location, value);
+}
+
 template <typename T> T addAndGetImpl(KRef thiz, T delta) {
   volatile T* location = reinterpret_cast<volatile T*>(thiz + 1);
   return atomicAdd(location, delta);
 }
@@ -53,6 +58,10 @@ KInt Kotlin_AtomicInt_compareAndSwap(KRef thiz, KInt expectedValue, KInt newValu
   return compareAndSwapImpl(thiz, expectedValue, newValue);
 }

+void Kotlin_AtomicInt_set(KRef thiz, KInt newValue) {
+  setImpl(thiz, newValue);
+}
+
 KLong Kotlin_AtomicLong_addAndGet(KRef thiz, KLong delta) {
   return addAndGetImpl(thiz, delta);
 }
@@ -75,10 +84,28 @@ KLong Kotlin_AtomicLong_compareAndSwap(KRef thiz, KLong expectedValue, KLong new
 #endif
 }

+void Kotlin_AtomicLong_set(KRef thiz, KLong newValue) {
+#ifdef __mips
+  // Potentially huge performance penalty, but correct.
+  // TODO: reconsider, once target MIPS can do proper 64-bit atomic store.
+  static int lock = 0;
+  while (compareAndSwap(&lock, 0, 1) != 0);
+  KLong* address = reinterpret_cast<KLong*>(thiz + 1);
+  *address = newValue;
+  compareAndSwap(&lock, 1, 0);
+#else
+  setImpl(thiz, newValue);
+#endif
+}
+
 KNativePtr Kotlin_AtomicNativePtr_compareAndSwap(KRef thiz, KNativePtr expectedValue, KNativePtr newValue) {
   return compareAndSwapImpl(thiz, expectedValue, newValue);
 }
+void Kotlin_AtomicNativePtr_set(KRef thiz, KNativePtr newValue) {
+  setImpl(thiz, newValue);
+}
+
 void Kotlin_AtomicReference_checkIfFrozen(KRef value) {
   if (value != nullptr && !value->container()->permanentOrFrozen()) {
     ThrowInvalidMutabilityException(value);
   }
@@ -92,6 +119,12 @@ OBJ_GETTER(Kotlin_AtomicReference_compareAndSwap, KRef thiz, KRef expectedValue,
   RETURN_RESULT_OF(SwapRefLocked, &ref->value_, expectedValue, newValue, &ref->lock_);
 }

+void Kotlin_AtomicReference_set(KRef thiz, KRef newValue) {
+  Kotlin_AtomicReference_checkIfFrozen(newValue);
+  AtomicReferenceLayout* ref = asAtomicReference(thiz);
+  SetRefLocked(&ref->value_, newValue, &ref->lock_);
+}
+
 OBJ_GETTER(Kotlin_AtomicReference_get, KRef thiz) {
   // Here we must take a lock to prevent race when value, while taken here, is CASed and immediately
   // destroyed by an another thread. AtomicReference no longer holds such an object, so if we got
diff --git a/runtime/src/main/cpp/Atomic.h b/runtime/src/main/cpp/Atomic.h
index 8499a13fa28..f957b6bd4fa 100644
--- a/runtime/src/main/cpp/Atomic.h
+++ b/runtime/src/main/cpp/Atomic.h
@@ -25,5 +25,13 @@ ALWAYS_INLINE inline T compareAndSwap(volatile T* where, T expectedValue, T newV
 #endif
 }

+template <typename T>
+ALWAYS_INLINE inline void atomicSet(volatile T* where, T what) {
+#ifndef KONAN_NO_THREADS
+  __atomic_store(where, &what, __ATOMIC_SEQ_CST);
+#else
+  *where = what;
+#endif
+}

 #endif // RUNTIME_ATOMIC_H
\ No newline at end of file
diff --git a/runtime/src/main/cpp/Memory.cpp b/runtime/src/main/cpp/Memory.cpp
index e8c2e1bb0e1..629a8edd302 100644
--- a/runtime/src/main/cpp/Memory.cpp
+++ b/runtime/src/main/cpp/Memory.cpp
@@ -1743,6 +1743,16 @@ OBJ_GETTER(SwapRefLocked,
   return oldValue;
 }

+void SetRefLocked(ObjHeader** location, ObjHeader* newValue, int32_t* spinlock) {
+  lock(spinlock);
+  ObjHeader* oldValue = *location;
+  // We do not use UpdateRef() here to avoid having ReleaseRef() on the old value under the lock.
+  SetRef(location, newValue);
+  unlock(spinlock);
+  if (oldValue != nullptr)
+    ReleaseRef(oldValue);
+}
+
 OBJ_GETTER(ReadRefLocked, ObjHeader** location, int32_t* spinlock) {
   lock(spinlock);
   ObjHeader* value = *location;
diff --git a/runtime/src/main/cpp/Memory.h b/runtime/src/main/cpp/Memory.h
index 787a89d7323..a0e956d54ea 100644
--- a/runtime/src/main/cpp/Memory.h
+++ b/runtime/src/main/cpp/Memory.h
@@ -443,6 +443,8 @@ void UpdateReturnRef(ObjHeader** returnSlot, const ObjHeader* object) RUNTIME_NO
 // Compares and swaps reference with taken lock.
 OBJ_GETTER(SwapRefLocked,
     ObjHeader** location, ObjHeader* expectedValue, ObjHeader* newValue, int32_t* spinlock) RUNTIME_NOTHROW;
+// Sets reference with taken lock.
+void SetRefLocked(ObjHeader** location, ObjHeader* newValue, int32_t* spinlock) RUNTIME_NOTHROW;
 // Reads reference with taken lock.
 OBJ_GETTER(ReadRefLocked, ObjHeader** location, int32_t* spinlock) RUNTIME_NOTHROW;
 // Optimization: release all references in range.
diff --git a/runtime/src/main/kotlin/kotlin/native/worker/Atomics.kt b/runtime/src/main/kotlin/kotlin/native/worker/Atomics.kt
index fafaa086620..7b3d3c3c033 100644
--- a/runtime/src/main/kotlin/kotlin/native/worker/Atomics.kt
+++ b/runtime/src/main/kotlin/kotlin/native/worker/Atomics.kt
@@ -37,6 +37,12 @@ class AtomicInt(private var value: Int = 0) {
     @SymbolName("Kotlin_AtomicInt_compareAndSwap")
     external fun compareAndSwap(expected: Int, new: Int): Int

+    /**
+     * Atomically sets the value to [new].
+     */
+    @SymbolName("Kotlin_AtomicInt_set")
+    external fun set(new: Int): Unit
+
     /**
      * Increments value by one.
      */
@@ -79,6 +85,12 @@ class AtomicLong(private var value: Long = 0) {
     @SymbolName("Kotlin_AtomicLong_compareAndSwap")
     external fun compareAndSwap(expected: Long, new: Long): Long

+    /**
+     * Atomically sets the value to [new].
+     */
+    @SymbolName("Kotlin_AtomicLong_set")
+    external fun set(new: Long): Unit
+
     /**
      * Increments value by one.
      */
@@ -146,6 +158,14 @@ class AtomicReference<T>(private var value: T? = null) {
     @SymbolName("Kotlin_AtomicReference_compareAndSwap")
     external public fun compareAndSwap(expected: T?, new: T?): T?

+    /**
+     * Sets the value to [new].
+     * If [new] is not null, it must be a frozen or permanent object,
+     * otherwise an [InvalidMutabilityException] is thrown.
+     */
+    @SymbolName("Kotlin_AtomicReference_set")
+    external public fun set(new: T?): Unit
+
     /**
      * Returns the current value.
      */
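
Reviewer note (not part of the patch): a minimal usage sketch of the new `set` API, mirroring what `test5`/`test6` exercise. The `Data` class and `main` entry point are illustrative assumptions, and the wildcard import assumes the `kotlin.native.worker` package touched by this change.

```kotlin
import kotlin.native.worker.*

// Illustrative payload type; any class works, as long as instances are frozen
// before being published through AtomicReference.
data class Data(val value: Int)

fun main() {
    // Plain atomic stores: no CAS loop is needed just to overwrite the value.
    val counter = AtomicInt()
    counter.set(42)
    println(counter.get())        // 42

    val big = AtomicLong()
    big.set(1L shl 40)
    println(big.get())            // 1099511627776

    // References must be frozen (or permanent) before they can be set.
    val ref = AtomicReference<Data>()
    ref.set(Data(17).freeze())
    println(ref.get()!!.value)    // 17

    // ref.set(Data(1))           // would throw InvalidMutabilityException
}
```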
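The MIPS fallback in `Kotlin_AtomicLong_set` guards a plain 64-bit store with a tiny spinlock built from `compareAndSwap`, since that target cannot issue a native 64-bit atomic store. Purely as an illustration (this helper is not part of the change), the same pattern expressed against the Kotlin `AtomicInt` API looks roughly like this:

```kotlin
import kotlin.native.worker.AtomicInt

// Hypothetical helper sketching the spinlock pattern used by the MIPS fallback:
// spin on compareAndSwap until the lock word flips 0 -> 1, run the protected
// work, then release the lock with a plain atomic store.
class SpinLock {
    private val state = AtomicInt()   // 0 = unlocked, 1 = locked

    fun <R> withLock(block: () -> R): R {
        while (state.compareAndSwap(0, 1) != 0) { /* spin */ }
        try {
            return block()
        } finally {
            state.set(0)              // release, using the new set()
        }
    }
}
```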