Sat Apr 9 12:06:39 2022 UTC ()
sparc: Fix membar_sync with LDSTUB.

membar_sync is required to be a full sequential consistency barrier,
equivalent to MEMBAR #StoreStore|LoadStore|StoreLoad|LoadLoad on
sparcv9.  LDSTUB and SWAP are the only pre-v9 instructions that do
this and SWAP doesn't exist on all v7 hardware, so use LDSTUB.

Note: I'm having a hard time nailing down a reference for the
ordering implied by LDSTUB and SWAP.  I'm _pretty sure_ SWAP has to
imply store-load ordering since the SPARCv8 manual recommends it for
Dekker's algorithm (which notoriously requires store-load ordering),
and the formal memory model treats LDSTUB and SWAP the same for
ordering.  But the v8 and v9 manuals aren't clear.

GCC issues STBAR and LDSTUB, but (a) I don't see why STBAR is
necessary here, (b) STBAR doesn't exist on v7 so it'd be a pain to
use, and (c) from what I've heard (although again it's hard to nail
down authoritative references here) all actual SPARC hardware is TSO
or SC, so STBAR is a noop in all the silicon anyway.

Either way, certainly this is better than what we had before, which
was nothing implying ordering at all, just a store!


(riastradh)
diff -r1.5 -r1.6 src/common/lib/libc/arch/sparc/atomic/membar_ops.S

cvs diff -r1.5 -r1.6 src/common/lib/libc/arch/sparc/atomic/membar_ops.S (expand / switch to unified diff)

--- src/common/lib/libc/arch/sparc/atomic/membar_ops.S 2022/04/06 22:47:56 1.5
+++ src/common/lib/libc/arch/sparc/atomic/membar_ops.S 2022/04/09 12:06:39 1.6
@@ -1,14 +1,14 @@ @@ -1,14 +1,14 @@
1/* $NetBSD: membar_ops.S,v 1.5 2022/04/06 22:47:56 riastradh Exp $ */ 1/* $NetBSD: membar_ops.S,v 1.6 2022/04/09 12:06:39 riastradh Exp $ */
2 2
3/*- 3/*-
4 * Copyright (c) 2007 The NetBSD Foundation, Inc. 4 * Copyright (c) 2007 The NetBSD Foundation, Inc.
5 * All rights reserved. 5 * All rights reserved.
6 * 6 *
7 * This code is derived from software contributed to The NetBSD Foundation 7 * This code is derived from software contributed to The NetBSD Foundation
8 * by Jason R. Thorpe, and by Andrew Doran. 8 * by Jason R. Thorpe, and by Andrew Doran.
9 * 9 *
10 * Redistribution and use in source and binary forms, with or without 10 * Redistribution and use in source and binary forms, with or without
11 * modification, are permitted provided that the following conditions 11 * modification, are permitted provided that the following conditions
12 * are met: 12 * are met:
13 * 1. Redistributions of source code must retain the above copyright 13 * 1. Redistributions of source code must retain the above copyright
14 * notice, this list of conditions and the following disclaimer. 14 * notice, this list of conditions and the following disclaimer.
@@ -21,35 +21,53 @@ @@ -21,35 +21,53 @@
21 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 21 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS 22 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
23 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 23 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
24 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 24 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
25 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 25 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
26 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 26 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
27 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 27 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
28 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 28 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
29 * POSSIBILITY OF SUCH DAMAGE. 29 * POSSIBILITY OF SUCH DAMAGE.
30 */ 30 */
31 31
32#include "atomic_op_asm.h" 32#include "atomic_op_asm.h"
33 33
 34#ifdef _KERNEL_OPT
 35#include "opt_multiprocessor.h"
 36#endif
 37
34 .text 38 .text
35 39
36/* These assume Total Store Order (TSO) */ 40/*
 41 * These assume Total Store Order (TSO), which may reorder
 42 * store-before-load but nothing else. Hence, only membar_sync must
 43 * issue anything -- specifically, an LDSTUB, which (along with SWAP)
 44 * is the only instruction that implies a sequential consistency
 45 * barrier.
 46 *
 47 * If we ran with Partial Store Order (PSO), we would also need to
 48 * issue STBAR for membar_exit (load/store-before-store) and
 49 * membar_producer (store-before-store).
 50 */
37 51
38ENTRY(_membar_producer) 52ENTRY(_membar_consumer)
39 retl 53 retl
40 nop 54 nop
 55END(_membar_consumer)
41 56
42ENTRY(_membar_consumer) 57ENTRY(_membar_sync)
43 add %sp, -112, %sp 
44 st %g0, [%sp+100] 
45 retl 58 retl
46 sub %sp, -112, %sp 59#if !defined(_KERNEL) || defined(MULTIPROCESSOR)
 60 ldstub [%sp - 4], %g0
 61#else
 62 nop
 63#endif
 64END(_membar_sync)
47 65
48ATOMIC_OP_ALIAS(membar_producer,_membar_producer) 66ATOMIC_OP_ALIAS(membar_producer,_membar_consumer)
 67STRONG_ALIAS(_membar_producer,_membar_consumer)
49ATOMIC_OP_ALIAS(membar_consumer,_membar_consumer) 68ATOMIC_OP_ALIAS(membar_consumer,_membar_consumer)
50ATOMIC_OP_ALIAS(membar_enter,_membar_consumer) 69ATOMIC_OP_ALIAS(membar_enter,_membar_consumer)
51STRONG_ALIAS(_membar_enter,_membar_consumer) 70STRONG_ALIAS(_membar_enter,_membar_consumer)
52ATOMIC_OP_ALIAS(membar_exit,_membar_consumer) 71ATOMIC_OP_ALIAS(membar_exit,_membar_consumer)
53STRONG_ALIAS(_membar_exit,_membar_consumer) 72STRONG_ALIAS(_membar_exit,_membar_consumer)
54ATOMIC_OP_ALIAS(membar_sync,_membar_consumer) 73ATOMIC_OP_ALIAS(membar_sync,_membar_sync)
55STRONG_ALIAS(_membar_sync,_membar_consumer)