Sun Feb 13 13:41:44 2022 UTC ()
m68k: __cpu_simple_unlock audit.

- Use `cc' clobbers in asm volatile because the tas instructions
  touch the condition codes.

- Use `memory' clobbers in asm volatile so the compiler doesn't hoist
  loads and stores in the critical section at addresses _other_ than
  the lock, which would make them happen before __cpu_simple_lock or
  __cpu_simple_lock_try.

- Not sure if we have any (or if there even are any?) multicore m68k
  systems out there, but __cpu_simple_unlock needs __insn_barrier
  either way so the compiler doesn't delay loads and stores issued
  before __cpu_simple_unlock, which would make them happen after it.


(riastradh)
diff -r1.16 -r1.17 src/sys/arch/m68k/include/lock.h

cvs diff -r1.16 -r1.17 src/sys/arch/m68k/include/lock.h (switch to unified diff)

--- src/sys/arch/m68k/include/lock.h 2019/11/29 20:05:49 1.16
+++ src/sys/arch/m68k/include/lock.h 2022/02/13 13:41:44 1.17
@@ -1,104 +1,109 @@ @@ -1,104 +1,109 @@
1/* $NetBSD: lock.h,v 1.16 2019/11/29 20:05:49 riastradh Exp $ */ 1/* $NetBSD: lock.h,v 1.17 2022/02/13 13:41:44 riastradh Exp $ */
2 2
3/*- 3/*-
4 * Copyright (c) 2000 The NetBSD Foundation, Inc. 4 * Copyright (c) 2000 The NetBSD Foundation, Inc.
5 * All rights reserved. 5 * All rights reserved.
6 * 6 *
7 * This code is derived from software contributed to The NetBSD Foundation 7 * This code is derived from software contributed to The NetBSD Foundation
8 * by Jason R. Thorpe. 8 * by Jason R. Thorpe.
9 * 9 *
10 * Redistribution and use in source and binary forms, with or without 10 * Redistribution and use in source and binary forms, with or without
11 * modification, are permitted provided that the following conditions 11 * modification, are permitted provided that the following conditions
12 * are met: 12 * are met:
13 * 1. Redistributions of source code must retain the above copyright 13 * 1. Redistributions of source code must retain the above copyright
14 * notice, this list of conditions and the following disclaimer. 14 * notice, this list of conditions and the following disclaimer.
15 * 2. Redistributions in binary form must reproduce the above copyright 15 * 2. Redistributions in binary form must reproduce the above copyright
16 * notice, this list of conditions and the following disclaimer in the 16 * notice, this list of conditions and the following disclaimer in the
17 * documentation and/or other materials provided with the distribution. 17 * documentation and/or other materials provided with the distribution.
18 * 18 *
19 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS 19 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
20 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED 20 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
21 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 21 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS 22 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
23 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 23 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
24 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 24 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
25 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 25 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
26 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 26 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
27 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 27 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
28 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 28 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
29 * POSSIBILITY OF SUCH DAMAGE. 29 * POSSIBILITY OF SUCH DAMAGE.
30 */ 30 */
31 31
32/* 32/*
33 * Machine-dependent spin lock operations. 33 * Machine-dependent spin lock operations.
34 */ 34 */
35 35
36#ifndef _M68K_LOCK_H_ 36#ifndef _M68K_LOCK_H_
37#define _M68K_LOCK_H_ 37#define _M68K_LOCK_H_
38 38
39static __inline int 39static __inline int
40__SIMPLELOCK_LOCKED_P(const __cpu_simple_lock_t *__ptr) 40__SIMPLELOCK_LOCKED_P(const __cpu_simple_lock_t *__ptr)
41{ 41{
42 return *__ptr == __SIMPLELOCK_LOCKED; 42 return *__ptr == __SIMPLELOCK_LOCKED;
43} 43}
44 44
45static __inline int 45static __inline int
46__SIMPLELOCK_UNLOCKED_P(const __cpu_simple_lock_t *__ptr) 46__SIMPLELOCK_UNLOCKED_P(const __cpu_simple_lock_t *__ptr)
47{ 47{
48 return *__ptr == __SIMPLELOCK_UNLOCKED; 48 return *__ptr == __SIMPLELOCK_UNLOCKED;
49} 49}
50 50
51static __inline void 51static __inline void
52__cpu_simple_lock_init(__cpu_simple_lock_t *alp) 52__cpu_simple_lock_init(__cpu_simple_lock_t *alp)
53{ 53{
54 54
55 *alp = __SIMPLELOCK_UNLOCKED; 55 *alp = __SIMPLELOCK_UNLOCKED;
56} 56}
57 57
58 58
59static __inline void 59static __inline void
60__cpu_simple_lock_clear(__cpu_simple_lock_t *__ptr) 60__cpu_simple_lock_clear(__cpu_simple_lock_t *__ptr)
61{ 61{
62 *__ptr = __SIMPLELOCK_UNLOCKED; 62 *__ptr = __SIMPLELOCK_UNLOCKED;
63} 63}
64 64
65static __inline void 65static __inline void
66__cpu_simple_lock_set(__cpu_simple_lock_t *__ptr) 66__cpu_simple_lock_set(__cpu_simple_lock_t *__ptr)
67{ 67{
68 *__ptr = __SIMPLELOCK_LOCKED; 68 *__ptr = __SIMPLELOCK_LOCKED;
69} 69}
70 70
71static __inline void 71static __inline void
72__cpu_simple_lock(__cpu_simple_lock_t *alp) 72__cpu_simple_lock(__cpu_simple_lock_t *alp)
73{ 73{
74 74
75 __asm volatile( 75 __asm volatile(
76 "1: tas %0 \n" 76 "1: tas %0 \n"
77 " jne 1b \n" 77 " jne 1b \n"
78 : "=m" (*alp)); 78 : "=m" (*alp)
 79 : /* no inputs */
 80 : "cc", "memory");
79} 81}
80 82
81static __inline int 83static __inline int
82__cpu_simple_lock_try(__cpu_simple_lock_t *alp) 84__cpu_simple_lock_try(__cpu_simple_lock_t *alp)
83{ 85{
84 int __rv; 86 int __rv;
85 87
86 __asm volatile( 88 __asm volatile(
87 " moveq #1, %1 \n" 89 " moveq #1, %1 \n"
88 " tas %0 \n" 90 " tas %0 \n"
89 " jeq 1f \n" 91 " jeq 1f \n"
90 " moveq #0, %1 \n" 92 " moveq #0, %1 \n"
91 "1: \n" 93 "1: \n"
92 : "=m" (*alp), "=d" (__rv)); 94 : "=m" (*alp), "=d" (__rv)
 95 : /* no inputs */
 96 : "cc", "memory");
93 97
94 return (__rv); 98 return (__rv);
95} 99}
96 100
97static __inline void 101static __inline void
98__cpu_simple_unlock(__cpu_simple_lock_t *alp) 102__cpu_simple_unlock(__cpu_simple_lock_t *alp)
99{ 103{
100 104
 105 __insn_barrier();
101 *alp = __SIMPLELOCK_UNLOCKED; 106 *alp = __SIMPLELOCK_UNLOCKED;
102} 107}
103 108
104#endif /* _M68K_LOCK_H_ */ 109#endif /* _M68K_LOCK_H_ */