1
1
package kotlinx.atomicfu.locks
2
2
3
3
import platform.posix.*
4
- import interop.*
5
- import kotlinx.cinterop.*
6
- import kotlin.native.internal.NativePtr
7
4
import kotlinx.atomicfu.locks.SynchronizedObject.Status.*
8
- import kotlin.concurrent.AtomicNativePtr
9
5
import kotlin.concurrent.AtomicReference
10
- import kotlin.native.concurrent.*
11
6
12
7
public actual open class SynchronizedObject {
13
8
@@ -23,6 +18,7 @@ public actual open class SynchronizedObject {
23
18
if (lock.compareAndSet(state, thinLock))
24
19
return
25
20
}
21
+
26
22
THIN -> {
27
23
if (currentThreadId == state.ownerThreadId) {
28
24
// reentrant lock
@@ -46,13 +42,16 @@ public actual open class SynchronizedObject {
46
42
}
47
43
}
48
44
}
45
+
49
46
FAT -> {
50
47
if (currentThreadId == state.ownerThreadId) {
51
48
// reentrant lock
52
- val nestedFatLock = LockState (FAT , state.nestedLocks + 1 , state.waiters, state.ownerThreadId, state.mutex)
49
+ val nestedFatLock =
50
+ LockState (FAT , state.nestedLocks + 1 , state.waiters, state.ownerThreadId, state.mutex)
53
51
if (lock.compareAndSet(state, nestedFatLock)) return
54
52
} else if (state.ownerThreadId != null ) {
55
- val fatLock = LockState (FAT , state.nestedLocks, state.waiters + 1 , state.ownerThreadId, state.mutex)
53
+ val fatLock =
54
+ LockState (FAT , state.nestedLocks, state.waiters + 1 , state.ownerThreadId, state.mutex)
56
55
if (lock.compareAndSet(state, fatLock)) {
57
56
fatLock.mutex!! .lock()
58
57
tryLockAfterResume(currentThreadId)
@@ -74,7 +73,8 @@ public actual open class SynchronizedObject {
74
73
return true
75
74
} else {
76
75
if (currentThreadId == state.ownerThreadId) {
77
- val nestedLock = LockState (state.status, state.nestedLocks + 1 , state.waiters, currentThreadId, state.mutex)
76
+ val nestedLock =
77
+ LockState (state.status, state.nestedLocks + 1 , state.waiters, currentThreadId, state.mutex)
78
78
if (lock.compareAndSet(state, nestedLock))
79
79
return true
80
80
} else {
@@ -103,6 +103,7 @@ public actual open class SynchronizedObject {
103
103
return
104
104
}
105
105
}
106
+
106
107
FAT -> {
107
108
if (state.nestedLocks == 1 ) {
108
109
// last nested unlock -> release completely, resume some waiter
@@ -119,6 +120,7 @@ public actual open class SynchronizedObject {
119
120
return
120
121
}
121
122
}
123
+
122
124
else -> error(" It is not possible to unlock the mutex that is not obtained" )
123
125
}
124
126
}
@@ -146,14 +148,10 @@ public actual open class SynchronizedObject {
146
148
val nestedLocks : Int ,
147
149
val waiters : Int ,
148
150
val ownerThreadId : pthread_t? = null ,
149
- val mutex : CPointer <mutex_node_t> ? = null
151
+ val mutex : NativeMutexNode ? = null
150
152
)
151
153
152
154
protected enum class Status { UNLOCKED , THIN , FAT }
153
-
154
- private fun CPointer<mutex_node_t>.lock () = lock(this .pointed.mutex)
155
-
156
- private fun CPointer<mutex_node_t>.unlock () = unlock(this .pointed.mutex)
157
155
}
158
156
159
157
/** Creates a new [ReentrantLock] instance (Kotlin/Native `actual` for the common declaration). */
public actual fun reentrantLock() = ReentrantLock()
// Initial number of pre-allocated mutex nodes in the shared pool.
private const val INITIAL_POOL_CAPACITY = 64

// Process-wide pool of NativeMutexNode instances, shared by all SynchronizedObject
// fat locks. Created lazily so no native mutexes are allocated until the first
// lock inflation actually needs one.
private val mutexPool by lazy { MutexPool(INITIAL_POOL_CAPACITY) }
185
183
class MutexPool (capacity : Int ) {
186
- private val top = AtomicNativePtr ( NativePtr . NULL )
184
+ private val top = AtomicReference < NativeMutexNode ?>( null )
187
185
188
- private val mutexes = nativeHeap.allocArray < mutex_node_t > (capacity) { mutex_node_init(ptr ) }
186
+ private val mutexes = Array < NativeMutexNode >(capacity) { NativeMutexNode ( ) }
189
187
190
188
init {
191
- for (i in 0 until capacity) {
192
- release(interpretCPointer< mutex_node_t> (mutexes.rawValue.plus(i * sizeOf< mutex_node_t> ()))!! )
189
+ // Immediately form a stack
190
+ for (mutex in mutexes) {
191
+ release(mutex)
193
192
}
194
193
}
195
194
196
- private fun allocMutexNode () = nativeHeap.alloc < mutex_node_t > { mutex_node_init(ptr) }.ptr
195
+ private fun allocMutexNode () = NativeMutexNode ()
197
196
198
- fun allocate (): CPointer <mutex_node_t> = pop() ? : allocMutexNode()
197
+ fun allocate (): NativeMutexNode = pop() ? : allocMutexNode()
199
198
200
- fun release (mutexNode : CPointer <mutex_node_t> ) {
199
+ fun release (mutexNode : NativeMutexNode ) {
201
200
while (true ) {
202
- val oldTop = interpretCPointer < mutex_node_t > ( top.value)
203
- mutexNode.pointed. next = oldTop
204
- if (top.compareAndSet(oldTop.rawValue , mutexNode.rawValue))
201
+ val oldTop = top.value
202
+ mutexNode.next = oldTop
203
+ if (top.compareAndSet(oldTop, mutexNode)) {
205
204
return
205
+ }
206
206
}
207
207
}
208
208
209
- private fun pop (): CPointer <mutex_node_t> ? {
209
+ private fun pop (): NativeMutexNode ? {
210
210
while (true ) {
211
- val oldTop = interpretCPointer < mutex_node_t > ( top.value)
212
- if (oldTop.rawValue == = NativePtr . NULL )
211
+ val oldTop = top.value
212
+ if (oldTop == null )
213
213
return null
214
- val newHead = oldTop!! .pointed .next
215
- if (top.compareAndSet(oldTop.rawValue , newHead.rawValue))
214
+ val newHead = oldTop.next
215
+ if (top.compareAndSet(oldTop, newHead)) {
216
216
return oldTop
217
+ }
217
218
}
218
219
}
219
220
}
0 commit comments