Sat Jan 22 19:10:17 2011 UTC
First attempt at MP _lock_cas.


(skrll)
diff -r1.17 -r1.18 src/sys/arch/hppa/hppa/lock_stubs.S


--- src/sys/arch/hppa/hppa/lock_stubs.S 2011/01/20 19:51:54 1.17
+++ src/sys/arch/hppa/hppa/lock_stubs.S 2011/01/22 19:10:16 1.18
@@ -1,14 +1,14 @@
-/*	$NetBSD: lock_stubs.S,v 1.17 2011/01/20 19:51:54 skrll Exp $	*/
+/*	$NetBSD: lock_stubs.S,v 1.18 2011/01/22 19:10:16 skrll Exp $	*/
 
 /*-
  * Copyright (c) 2006, 2007 The NetBSD Foundation, Inc.
  * All rights reserved.
  *
  * This code is derived from software contributed to The NetBSD Foundation
  * by Andrew Doran and Nick Hudson.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
  * are met:
  * 1. Redistributions of source code must retain the above copyright
  *    notice, this list of conditions and the following disclaimer.
@@ -185,14 +185,116 @@ LEAF_ENTRY(mutex_enter)
 
 mutex_enter_crit_start:
 	comib,=	0,%ret0,.Lenter_slowpath
 
 	mfctl	CR_CURLWP, %t2
 
 	bv	%r0(%rp)
 mutex_enter_crit_end:
 	stw	%t2,MTX_OWNER(%arg0)
 EXIT(mutex_enter)
 
 #endif	/* !LOCKDEBUG */
 
-#endif	/* !MULTIPROCESSOR */
+#else	/* !MULTIPROCESSOR */
+
+/*
+ * uintptr_t _lock_cas(volatile uintptr_t *ptr, uintptr_t old, uintptr_t new);
+ *
+ * Perform an atomic compare-and-swap operation.
+ *
+ * On multi-CPU systems, this has to use an interlock and disable interrupts.
+ * The interlock is to protect against another CPU attempting to perform the
+ * cas. Disabling interrupts is to prevent deadlocks on the current CPU. That
+ * is, we don't want an interrupt attempting to perform a cas on the interlock
+ * at the same time.
+ *
+ */
+
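
Roughly, the routine below implements the following C semantics.  This is an
illustrative sketch only, not code from the commit: a single global flag
manipulated with GCC __sync builtins stands in for the hashed _lock_hash
interlock and the ldcw sequence used by the real code.

	#include <stdint.h>

	static volatile unsigned int interlock;	/* stand-in for the hashed _lock_hash slot */

	uintptr_t
	lock_cas_sketch(volatile uintptr_t *ptr, uintptr_t old, uintptr_t new)
	{
		uintptr_t ret;

		/* interrupts are assumed disabled here (mtctl %r0, %eiem below) */
		while (__sync_lock_test_and_set(&interlock, 1))	/* ldcw spin stand-in */
			continue;

		ret = *ptr;		/* always return the value that was found */
		if (ret == old)
			*ptr = new;	/* swap only when it matched the expected value */

		__sync_lock_release(&interlock);	/* stand-in for storing RAW_UNLOCKED */
		return ret;
	}
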
+#define	IL \
+	.word	__SIMPLELOCK_RAW_UNLOCKED	! \
+	.word	__SIMPLELOCK_RAW_UNLOCKED	! \
+	.word	__SIMPLELOCK_RAW_UNLOCKED	! \
+	.word	__SIMPLELOCK_RAW_UNLOCKED	! \
+
+#define	I8 \
+	IL IL IL IL IL IL IL IL
+
+#define	I64 \
+	I8 I8 I8 I8 I8 I8 I8 I8
+
+
+
+	.section .data
+	.align	4096
+	.export	_lock_hash, data
+_lock_hash:
+	I64 I64
+	I64 I64
+	I64 I64
+	I64 I64
+	I64 I64
+	I64 I64
+	I64 I64
+	I64 I64
+
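
Each IL above is one 16-byte bucket of four __SIMPLELOCK_RAW_UNLOCKED words
(16 bytes is the alignment ldcw needs), I8 is 8 buckets and I64 is 64, so the
table provides 16 * 64 = 1024 buckets starting on a 4096-byte boundary.  A
rough C picture, with names invented for illustration only:

	struct cas_interlock {
		volatile unsigned int word[4];	/* 16 bytes: a 16-byte-aligned word always fits */
	};
	static struct cas_interlock lock_hash_sketch[16 * 64]	/* IL = 1, I8 = 8, I64 = 64 buckets */
	    __attribute__((aligned(4096)));			/* matches the .align 4096 above */
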
+LEAF_ENTRY(_lock_cas)
+ALTENTRY(_lock_cas_mp)
+
+	mfctl	%eiem, %t1
+	mtctl	%r0, %eiem		/* disable interrupts */
+
+	extru	%arg0, 21+8-1, 8, %ret0
+	ldil	L%_lock_hash, %r1
+	zdep	%ret0, 27, 28, %ret0
+	ldo	R%_lock_hash(%r1), %r1
+
+	addl	%ret0, %r1, %ret0
+	ldo	15(%ret0), %ret0
+	copy	%ret0, %t3
+	depi	0, 31, 4, %t3
+
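
The extru/zdep/ldo/depi sequence above selects the interlock for this address:
an 8-bit hash of the pointer indexes a 16-byte bucket, and the result is
rounded up to the 16-byte alignment that ldcw requires.  A C sketch,
illustrative only, with the shift amounts read from the extru/zdep operands:

	#include <stdint.h>

	extern char _lock_hash[];			/* the table defined above */

	static volatile unsigned int *
	interlock_for(volatile uintptr_t *ptr)
	{
		uintptr_t addr = (uintptr_t)ptr;
		uintptr_t idx  = (addr >> 3) & 0xff;	/* extru: 8-bit hash of the address */
		uintptr_t slot = (uintptr_t)_lock_hash + (idx << 4);	/* zdep + addl: 16 bytes per bucket */

		/* ldo 15(...) + depi: round up to a 16-byte boundary */
		return (volatile unsigned int *)((slot + 15) & ~(uintptr_t)15);
	}
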
+	/* %t3 is the interlock address */
+	ldcw	0(%t3), %ret0
+	comib,<>,n 0,%ret0, _lock_cas_mp_interlocked
+_lock_cas_mp_spin:
+	ldw	0(%t3),%ret0
+	comib,=	0,%ret0, _lock_cas_mp_spin
+	nop
+	ldcw	0(%t3), %ret0
+	comib,=	0,%ret0, _lock_cas_mp_spin
+	nop
+
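
ldcw atomically loads a 16-byte-aligned word and stores zero into it, so a
nonzero result means the interlock was free and is now owned; a zero result
means another CPU holds it, and the loop above spins on plain loads (ldw)
until the word goes nonzero before retrying.  A C sketch, illustrative only,
where __sync_lock_test_and_set with 0 stands in for ldcw:

	/* Stand-in for ldcw: atomically fetch the old value and leave 0 behind. */
	static unsigned int
	ldcw_sketch(volatile unsigned int *p)
	{
		return __sync_lock_test_and_set(p, 0);
	}

	static void
	interlock_acquire_sketch(volatile unsigned int *il)
	{
		if (ldcw_sketch(il) != 0)		/* fast path: it was free, we now own it */
			return;
		for (;;) {
			while (*il == 0)		/* spin with plain loads while it is held */
				continue;
			if (ldcw_sketch(il) != 0)	/* retry the atomic grab */
				return;
		}
	}
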
+_lock_cas_mp_interlocked:
+	ldw	0(%arg0),%ret0
+	comclr,<> %arg1, %ret0, %r0	/* If *ptr != old, then nullify */
+	stw	%arg2, 0(%arg0)
+
+	sync
+
+	ldi	__SIMPLELOCK_RAW_UNLOCKED, %t4
+	stw	%t4, 0(%t3)
+	bv	%r0(%r2)
+	mtctl	%t1, %eiem		/* enable interrupts */
+
+EXIT(_lock_cas)
+
+STRONG_ALIAS(_atomic_cas_ulong,_lock_cas_mp)
+STRONG_ALIAS(atomic_cas_ulong,_lock_cas_mp)
+STRONG_ALIAS(_atomic_cas_32,_lock_cas_mp)
+STRONG_ALIAS(atomic_cas_32,_lock_cas_mp)
+STRONG_ALIAS(_atomic_cas_uint,_lock_cas_mp)
+STRONG_ALIAS(atomic_cas_uint,_lock_cas_mp)
+STRONG_ALIAS(_atomic_cas_ptr,_lock_cas_mp)
+STRONG_ALIAS(atomic_cas_ptr,_lock_cas_mp)
+
+STRONG_ALIAS(_atomic_cas_ulong_ni,_lock_cas_mp)
+STRONG_ALIAS(atomic_cas_ulong_ni,_lock_cas_mp)
+STRONG_ALIAS(_atomic_cas_32_ni,_lock_cas_mp)
+STRONG_ALIAS(atomic_cas_32_ni,_lock_cas_mp)
+STRONG_ALIAS(_atomic_cas_uint_ni,_lock_cas_mp)
+STRONG_ALIAS(atomic_cas_uint_ni,_lock_cas_mp)
+STRONG_ALIAS(_atomic_cas_ptr_ni,_lock_cas_mp)
+STRONG_ALIAS(atomic_cas_ptr_ni,_lock_cas_mp)
+
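
The aliases route every atomic_cas variant, including the _ni forms, to the
interlocked routine above.  A caller-side sketch using the standard
atomic_cas_uint() interface (illustrative only, not code from this commit):

	#include <sys/atomic.h>

	/* Atomically add to a counter by looping on compare-and-swap. */
	static void
	counter_add_sketch(volatile unsigned int *counter, unsigned int delta)
	{
		unsigned int old;

		do {
			old = *counter;
		} while (atomic_cas_uint(counter, old, old + delta) != old);
	}
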
+
+#endif	/* MULTIPROCESSOR */