Sun May 30 02:28:59 2021 UTC
Don't use V8 atomic instructions for AA32 mode.


(joerg)
diff -r1.38 -r1.39 src/sys/arch/arm/include/lock.h

cvs diff -r1.38 -r1.39 src/sys/arch/arm/include/lock.h

--- src/sys/arch/arm/include/lock.h 2021/04/27 06:03:09 1.38
+++ src/sys/arch/arm/include/lock.h 2021/05/30 02:28:59 1.39
@@ -1,14 +1,14 @@
-/* $NetBSD: lock.h,v 1.38 2021/04/27 06:03:09 skrll Exp $ */
+/* $NetBSD: lock.h,v 1.39 2021/05/30 02:28:59 joerg Exp $ */
 
 /*-
  * Copyright (c) 2000, 2001 The NetBSD Foundation, Inc.
  * All rights reserved.
  *
  * This code is derived from software contributed to The NetBSD Foundation
  * by Jason R. Thorpe.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
  * are met:
  * 1. Redistributions of source code must retain the above copyright
  *    notice, this list of conditions and the following disclaimer.
@@ -194,27 +194,27 @@ __cpu_simple_lock_try(__cpu_simple_lock_
 	return 1;
 #else
 	return (__swp(__SIMPLELOCK_LOCKED, __alp) == __SIMPLELOCK_UNLOCKED);
 #endif
 }
 #else
 int	__cpu_simple_lock_try(__cpu_simple_lock_t *);
 #endif
 
 static __inline void __unused
 __cpu_simple_unlock(__cpu_simple_lock_t *__alp)
 {
 
-#if defined(_ARM_ARCH_8)
+#if defined(_ARM_ARCH_8) && defined(__LP64__)
 	if (sizeof(*__alp) == 1) {
 		__asm __volatile("stlrb\t%w0, [%1]"
 		    :: "r"(__SIMPLELOCK_UNLOCKED), "r"(__alp) : "memory");
 	} else {
 		__asm __volatile("stlr\t%0, [%1]"
 		    :: "r"(__SIMPLELOCK_UNLOCKED), "r"(__alp) : "memory");
 	}
 #else
 	__arm_dmb_store();
 	*__alp = __SIMPLELOCK_UNLOCKED;
 #endif
 }
 
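
The effect of the new "#if defined(_ARM_ARCH_8) && defined(__LP64__)" guard is that the A64 release-store instructions (stlr/stlrb) are only emitted when building for AArch64 (LP64); AArch32 builds, even on an ARMv8 CPU, keep the existing __arm_dmb_store() barrier followed by a plain store, because those A64 encodings are not valid in the 32-bit instruction set. Below is a minimal standalone sketch, not part of the committed file, of the two resulting unlock paths. The names unlock_sketch, simple_lock_t and UNLOCKED are made up for illustration, and the "dmb ishst" in the fallback is only an assumption about what __arm_dmb_store() amounts to on ARMv7 and later; the real file additionally selects stlrb vs. stlr based on sizeof(*__alp), as seen in the diff above.

#include <stdint.h>

typedef volatile uint32_t simple_lock_t;	/* hypothetical stand-in for __cpu_simple_lock_t */
#define UNLOCKED	0			/* hypothetical stand-in for __SIMPLELOCK_UNLOCKED */

static inline void
unlock_sketch(simple_lock_t *lp)
{
#if defined(__aarch64__)
	/* A64 only: store-release orders all prior accesses before the store. */
	__asm__ __volatile__("stlr\t%w0, [%1]"
	    :: "r"(UNLOCKED), "r"(lp) : "memory");
#else
	/* AArch32 (ARMv7+) fallback: explicit store barrier, then a plain store. */
	__asm__ __volatile__("dmb ishst" ::: "memory");	/* assumed expansion of __arm_dmb_store() */
	*lp = UNLOCKED;
#endif
}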