Sun Feb 13 13:42:12 2022 UTC (riastradh)

or1k: __cpu_simple_lock membar audit.

cvs diff -r1.2 -r1.3 src/sys/arch/or1k/include/lock.h
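
For orientation before the patch: the ordering contract under audit is the acquire/release pairing that the file itself spells out in its "#if 0" compiler-builtin path. The sketch below restates that contract with the GCC/Clang __atomic_test_and_set/__atomic_clear builtins named in the diff; the demo_* names are hypothetical, not NetBSD's.

/*
 * Sketch only: the acquire/release contract of __cpu_simple_lock and
 * __cpu_simple_unlock, restated with the compiler builtins the file
 * keeps under "#if 0".  The demo_* names are hypothetical.
 */
typedef volatile unsigned char demo_lock_t;

static inline void
demo_lock(demo_lock_t *l)
{
	/*
	 * Spin until the flag is taken.  __ATOMIC_ACQUIRE keeps the
	 * critical section's memory accesses from being hoisted above
	 * the point where the lock is taken.
	 */
	while (__atomic_test_and_set(l, __ATOMIC_ACQUIRE)) {
		/* spin */
	}
}

static inline void
demo_unlock(demo_lock_t *l)
{
	/*
	 * __ATOMIC_RELEASE keeps the critical section's memory
	 * accesses from sinking below the unlocking store.  In the
	 * hand-written asm path of the patch, the new l.msync plays
	 * this role.
	 */
	__atomic_clear(l, __ATOMIC_RELEASE);
}

In the asm path, the hardware half of the acquire side comes from l.lwa/l.swa acting as synchronization points (per the spec quote the patch adds), while the compiler half comes from the new "memory" clobbers, which stop the compiler from caching globals in registers across the lock operations or reordering accesses around them.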

--- src/sys/arch/or1k/include/lock.h	2017/09/17 00:01:08	1.2
+++ src/sys/arch/or1k/include/lock.h	2022/02/13 13:42:12	1.3
@@ -1,14 +1,14 @@
-/*	$NetBSD: lock.h,v 1.2 2017/09/17 00:01:08 christos Exp $	*/
+/*	$NetBSD: lock.h,v 1.3 2022/02/13 13:42:12 riastradh Exp $	*/
 
 /*-
  * Copyright (c) 2014 The NetBSD Foundation, Inc.
  * All rights reserved.
  *
  * This code is derived from software contributed to The NetBSD Foundation
  * by Matt Thomas of 3am Software Foundry.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
  * are met:
  * 1. Redistributions of source code must retain the above copyright
  *    notice, this list of conditions and the following disclaimer.
@@ -76,61 +76,74 @@ __cpu_simple_lock_init(__cpu_simple_lock
 	*__ptr = __SIMPLELOCK_UNLOCKED;
 #endif
 }
 
 static __inline void __unused
 __cpu_simple_lock(__cpu_simple_lock_t *__ptr)
 {
 #if 0
 	while (__atomic_test_and_set(__ptr, __ATOMIC_ACQUIRE)) {
 		/* do nothing */
 	}
 #else
 	int tmp;
-	__asm(
+	/*
+	 * No explicit memory barrier needed around ll/sc:
+	 *
+	 * `In implementations that use a weakly-ordered memory model,
+	 *  l.swa and l.lwa will serve as synchronization points,
+	 *  similar to lsync.'
+	 *
+	 * https://openrisc.io/or1k.html#__RefHeading__341344_552419154
+	 */
+	__asm volatile(
 	"1:"
 	"\t"	"l.lwa	%[tmp],0(%[ptr])"
 	"\n\t"	"l.sfeqi\t%[tmp],%[unlocked]"
 	"\n\t"	"l.bnf	1b"
 	"\n\t"	"l.nop"
 
 	"\n\t"	"l.swa	0(%[ptr]),%[newval]"
 	"\n\t"	"l.bnf	1b"
 	"\n\t"	"l.nop"
 	   :	[tmp] "=&r" (tmp)
 	   :	[newval] "r" (__SIMPLELOCK_LOCKED),
 		[ptr] "r" (__ptr),
-		[unlocked] "n" (__SIMPLELOCK_UNLOCKED));
+		[unlocked] "n" (__SIMPLELOCK_UNLOCKED)
+	   :	"cc", "memory");
 #endif
 }
 
 static __inline int __unused
 __cpu_simple_lock_try(__cpu_simple_lock_t *__ptr)
 {
 #if 0
 	return !__atomic_test_and_set(__ptr, __ATOMIC_ACQUIRE);
 #else
 	int oldval;
-	__asm(
+	/* No explicit memory barrier needed, as in __cpu_simple_lock. */
+	__asm volatile(
 	"1:"
 	"\t"	"l.lwa	%[oldval],0(%[ptr])"
 	"\n\t"	"l.swa	0(%[ptr]),%[newval]"
 	"\n\t"	"l.bnf	1b"
 	"\n\t"	"l.nop"
 	   :	[oldval] "=&r" (oldval)
 	   :	[newval] "r" (__SIMPLELOCK_LOCKED),
-		[ptr] "r" (__ptr));
+		[ptr] "r" (__ptr)
+	   :	"cc", "memory");
 	return oldval == __SIMPLELOCK_UNLOCKED;
 #endif
 }
 
 static __inline void __unused
 __cpu_simple_unlock(__cpu_simple_lock_t *__ptr)
 {
 #if 0
 	__atomic_clear(__ptr, __ATOMIC_RELEASE);
 #else
+	__asm volatile("l.msync" ::: "memory");
 	*__ptr = __SIMPLELOCK_UNLOCKED;
 #endif
 }
 
 #endif /* _OR1K_LOCK_H_ */
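
A hedged caller-side sketch of why __cpu_simple_unlock now issues l.msync before its store: without release ordering, the CPU could make the unlocking store visible before the stores from the critical section, letting another CPU acquire the lock and still read stale data. The shared/producer/consumer names below are invented for illustration; the lock API is the one in the diff.

/* Hypothetical caller, assuming or1k's <machine/lock.h> is in scope. */
static __cpu_simple_lock_t lock = __SIMPLELOCK_UNLOCKED;
static int shared;

void
producer(void)
{
	__cpu_simple_lock(&lock);	/* acquire: l.lwa/l.swa sync point */
	shared = 42;			/* store under the lock */
	__cpu_simple_unlock(&lock);	/* release: l.msync, then store */
}

int
consumer(void)
{
	int v;

	__cpu_simple_lock(&lock);	/* pairs with producer's release */
	v = shared;			/* sees 42 once the unlock is visible */
	__cpu_simple_unlock(&lock);
	return v;
}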