Sun Feb 13 13:41:26 2022 UTC
acorn32: Partial __cpu_simple_lock membar audit.

For __cpu_simple_unlock, __insn_barrier is absolutely required.
Whether we need a stronger memory barrier, I don't know offhand, and
I'm not sure which architecture manual is the right one to consult --
it's not clear why this isn't just using the generic arm lock.h.


(riastradh)
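
To illustrate the point in the log message, here is a minimal sketch (not
NetBSD source; the lock type, data field, and function names below are made
up for the example) of why a compiler barrier is needed before the store
that releases the lock: without it, the compiler may sink ordinary stores
from the critical section below the volatile unlock store, so another CPU
that then takes the lock can observe stale data.  __insn_barrier() is
NetBSD's compiler-only barrier, an empty asm with a "memory" clobber.

/*
 * Illustrative sketch only -- not from the NetBSD tree.  The names
 * example_lock_t, example_unlock, and example_update are hypothetical.
 */
typedef volatile int example_lock_t;
#define	EXAMPLE_UNLOCKED	0
#define	EXAMPLE_LOCKED		1

static int shared_count;		/* data protected by the lock */

static inline void
example_unlock(example_lock_t *alp)
{

	/*
	 * Compiler-only barrier, as __insn_barrier() provides: keeps
	 * the compiler from sinking the caller's ordinary stores below
	 * the volatile store that releases the lock.
	 */
	__asm volatile("" ::: "memory");
	*alp = EXAMPLE_UNLOCKED;
}

void
example_update(example_lock_t *lock)
{

	/* ...lock already acquired... */
	shared_count++;			/* must be visible before release */
	example_unlock(lock);		/* without the barrier, the store
					   above could be reordered after
					   the unlock store */
}

Whether acorn32 additionally needs a hardware memory barrier here is exactly
the open question in the log message; the sketch only covers the
compiler-reordering half.
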
cvs diff -r1.9 -r1.10 src/sys/arch/acorn32/include/lock.h

--- src/sys/arch/acorn32/include/lock.h 2009/01/15 01:11:32 1.9
+++ src/sys/arch/acorn32/include/lock.h 2022/02/13 13:41:26 1.10
@@ -1,14 +1,14 @@
-/* $NetBSD: lock.h,v 1.9 2009/01/15 01:11:32 pooka Exp $ */
+/* $NetBSD: lock.h,v 1.10 2022/02/13 13:41:26 riastradh Exp $ */
 
 /*-
  * Copyright (c) 2000, 2001 The NetBSD Foundation, Inc.
  * All rights reserved.
  *
  * This code is derived from software contributed to The NetBSD Foundation
  * by Jason R. Thorpe.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
  * are met:
  * 1. Redistributions of source code must retain the above copyright
  *    notice, this list of conditions and the following disclaimer.
@@ -72,20 +72,21 @@ __cpu_simple_lock_try(__cpu_simple_lock_
 {
 	int __result;
 
 	__result = __swp(__SIMPLELOCK_LOCKED, alp) == __SIMPLELOCK_UNLOCKED;
 	if (__result)
 		cpu_idcache_wbinv_all();
 	return __result;
 }
 
 static __inline void __attribute__((__unused__))
 __cpu_simple_unlock(__cpu_simple_lock_t *alp)
 {
 
+	__insn_barrier();
 	*alp = __SIMPLELOCK_UNLOCKED;
 }
 
 #else /* !(_HARDKERNEL && MULTIPROCESSOR) */
 #include <arm/lock.h>
 #endif /* !(_HARDKERNEL && MULTIPROCESSOR) */
 #endif