Pull up following revision(s) (requested by skrll in ticket #1314):

	common/lib/libc/arch/aarch64/atomic/atomic_nand_8.S: revision 1.3
	common/lib/libc/arch/aarch64/atomic/atomic_nand_8.S: revision 1.4
	common/lib/libc/arch/aarch64/atomic/atomic_nand_64.S: revision 1.3
	common/lib/libc/arch/aarch64/atomic/atomic_nand_64.S: revision 1.4
	common/lib/libc/arch/aarch64/atomic/atomic_nand_16.S: revision 1.3
	common/lib/libc/arch/aarch64/atomic/atomic_nand_32.S: revision 1.3
	common/lib/libc/arch/aarch64/atomic/atomic_nand_32.S: revision 1.4

Fix the logic operation for atomic_nand_{8,16,32,64}

From the gcc docs the operations are as follows

	{ tmp = *ptr; *ptr = ~(tmp & value); return tmp; }   // nand
	{ tmp = ~(*ptr & value); *ptr = tmp; return *ptr; }  // nand

yes, this is really rather strange.

typo in comment s/pte/ptr/

diff -r1.1 -r1.1.28.1 src/common/lib/libc/arch/aarch64/atomic/atomic_nand_16.S
(martin)
--- src/common/lib/libc/arch/aarch64/atomic/atomic_nand_16.S 2014/08/10 05:47:35 1.1
+++ src/common/lib/libc/arch/aarch64/atomic/atomic_nand_16.S 2021/07/06 04:13:50 1.1.28.1
@@ -1,14 +1,14 @@ | @@ -1,14 +1,14 @@ | |||
1 | /* $NetBSD: atomic_nand_16.S,v 1.1 2014/08/10 05:47:35 matt Exp $ */ | 1 | /* $NetBSD: atomic_nand_16.S,v 1.1.28.1 2021/07/06 04:13:50 martin Exp $ */ | |
2 | 2 | |||
3 | /*- | 3 | /*- | |
4 | * Copyright (c) 2014 The NetBSD Foundation, Inc. | 4 | * Copyright (c) 2014 The NetBSD Foundation, Inc. | |
5 | * All rights reserved. | 5 | * All rights reserved. | |
6 | * | 6 | * | |
7 | * This code is derived from software contributed to The NetBSD Foundation | 7 | * This code is derived from software contributed to The NetBSD Foundation | |
8 | * by Matt Thomas of 3am Software Foundry. | 8 | * by Matt Thomas of 3am Software Foundry. | |
9 | * | 9 | * | |
10 | * Redistribution and use in source and binary forms, with or without | 10 | * Redistribution and use in source and binary forms, with or without | |
11 | * modification, are permitted provided that the following conditions | 11 | * modification, are permitted provided that the following conditions | |
12 | * are met: | 12 | * are met: | |
13 | * 1. Redistributions of source code must retain the above copyright | 13 | * 1. Redistributions of source code must retain the above copyright | |
14 | * notice, this list of conditions and the following disclaimer. | 14 | * notice, this list of conditions and the following disclaimer. | |
@@ -21,44 +21,51 @@ | @@ -21,44 +21,51 @@ | |||
21 | * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR | 21 | * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR | |
22 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS | 22 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS | |
23 | * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR | 23 | * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR | |
24 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF | 24 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF | |
25 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS | 25 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS | |
26 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN | 26 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN | |
27 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) | 27 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) | |
28 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE | 28 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE | |
29 | * POSSIBILITY OF SUCH DAMAGE. | 29 | * POSSIBILITY OF SUCH DAMAGE. | |
30 | */ | 30 | */ | |
31 | 31 | |||
32 | #include "atomic_op_asm.h" | 32 | #include "atomic_op_asm.h" | |
33 | 33 | |||
34 | /* | |||
35 | * { tmp = *ptr; *ptr = ~(tmp & value); return tmp; } // nand | |||
36 | */ | |||
34 | ENTRY_NP(_atomic_nand_16) | 37 | ENTRY_NP(_atomic_nand_16) | |
35 | mov x4, x0 | 38 | mov x4, x0 | |
36 | 1: ldxrh w0, [x4] /* load old value (to be returned) */ | 39 | 1: ldxrh w0, [x4] /* load old value (*ptr) */ | |
37 | mvn w3, w0 /* complement source */ | 40 | and w3, w0, w1 /* w3 = (*ptr & value) */ | |
38 | and w3, w3, w1 /* calculate new value */ | 41 | mvn w3, w3 /* w3 = ~(*ptr & value) */ | 
39 | stxrh w2, w3, [x4] /* try to store */ | 42 | stxrh w2, w3, [x4] /* try to store */ | |
40 | cbnz w2, 1b /* succeed? no, try again */ | 43 | cbnz w2, 1b /* succeed? no, try again */ | |
41 | dmb st | 44 | dmb st | |
42 | ret /* return old value */ | 45 | ret /* return old value */ | |
43 | END(_atomic_nand_16) | 46 | END(_atomic_nand_16) | |
44 | 47 | |||
45 | ATOMIC_OP_ALIAS(atomic_nand_16,_atomic_nand_16) | 48 | ATOMIC_OP_ALIAS(atomic_nand_16,_atomic_nand_16) | |
46 | ATOMIC_OP_ALIAS(atomic_nand_ushort,_atomic_nand_16) | 49 | ATOMIC_OP_ALIAS(atomic_nand_ushort,_atomic_nand_16) | |
47 | STRONG_ALIAS(__sync_fetch_and_nand_2,_atomic_nand_16) | 50 | STRONG_ALIAS(__sync_fetch_and_nand_2,_atomic_nand_16) | |
48 | STRONG_ALIAS(_atomic_nand_ushort,_atomic_nand_16) | 51 | STRONG_ALIAS(_atomic_nand_ushort,_atomic_nand_16) | |
49 | 52 | |||
53 | ||||
54 | /* | |||
55 | * { tmp = ~(*ptr & value); *ptr = tmp; return *ptr; } // nand | |||
56 | */ | |||
50 | ENTRY_NP(_atomic_nand_16_nv) | 57 | ENTRY_NP(_atomic_nand_16_nv) | |
51 | mov x4, x0 /* need r0 for return value */ | 58 | mov x4, x0 /* need r0 for return value */ | |
52 | 1: ldxrh w0, [x4] /* load old value */ | 59 | 1: ldxrh w0, [x4] /* load old value (*ptr) */ | |
53 | mvn w0, w0 /* complement source */ | 60 | and w0, w0, w1 /* w0 = (*ptr & value) */ | |
54 | and w0, w0, w1 /* calculate new value (return value) */ | 61 | mvn w0, w0 /* w0 = ~(*ptr & value), return value */ | 
55 | stxrh w2, w0, [x4] /* try to store */ | 62 | stxrh w2, w0, [x4] /* try to store */ | |
56 | cbnz w2, 1b /* succeed? no, try again? */ | 63 | cbnz w2, 1b /* succeed? no, try again? */ | |
57 | dmb st | 64 | dmb st | |
58 | ret /* return new value */ | 65 | ret /* return new value */ | |
59 | END(_atomic_nand_16_nv) | 66 | END(_atomic_nand_16_nv) | |
60 | 67 | |||
61 | ATOMIC_OP_ALIAS(atomic_nand_16_nv,_atomic_nand_16_nv) | 68 | ATOMIC_OP_ALIAS(atomic_nand_16_nv,_atomic_nand_16_nv) | |
62 | ATOMIC_OP_ALIAS(atomic_nand_ushort_nv,_atomic_nand_16_nv) | 69 | ATOMIC_OP_ALIAS(atomic_nand_ushort_nv,_atomic_nand_16_nv) | |
63 | STRONG_ALIAS(__sync_nand_and_fetch_2,_atomic_nand_16_nv) | 70 | STRONG_ALIAS(__sync_nand_and_fetch_2,_atomic_nand_16_nv) | |
64 | STRONG_ALIAS(_atomic_nand_ushort_nv,_atomic_nand_16_nv) | 71 | STRONG_ALIAS(_atomic_nand_ushort_nv,_atomic_nand_16_nv) |
--- src/common/lib/libc/arch/aarch64/atomic/atomic_nand_32.S 2014/08/10 05:47:35 1.1
+++ src/common/lib/libc/arch/aarch64/atomic/atomic_nand_32.S 2021/07/06 04:13:50 1.1.28.1
@@ -1,14 +1,14 @@ | @@ -1,14 +1,14 @@ | |||
1 | /* $NetBSD: atomic_nand_32.S,v 1.1 2014/08/10 05:47:35 matt Exp $ */ | 1 | /* $NetBSD: atomic_nand_32.S,v 1.1.28.1 2021/07/06 04:13:50 martin Exp $ */ | |
2 | 2 | |||
3 | /*- | 3 | /*- | |
4 | * Copyright (c) 2014 The NetBSD Foundation, Inc. | 4 | * Copyright (c) 2014 The NetBSD Foundation, Inc. | |
5 | * All rights reserved. | 5 | * All rights reserved. | |
6 | * | 6 | * | |
7 | * This code is derived from software contributed to The NetBSD Foundation | 7 | * This code is derived from software contributed to The NetBSD Foundation | |
8 | * by Matt Thomas of 3am Software Foundry. | 8 | * by Matt Thomas of 3am Software Foundry. | |
9 | * | 9 | * | |
10 | * Redistribution and use in source and binary forms, with or without | 10 | * Redistribution and use in source and binary forms, with or without | |
11 | * modification, are permitted provided that the following conditions | 11 | * modification, are permitted provided that the following conditions | |
12 | * are met: | 12 | * are met: | |
13 | * 1. Redistributions of source code must retain the above copyright | 13 | * 1. Redistributions of source code must retain the above copyright | |
14 | * notice, this list of conditions and the following disclaimer. | 14 | * notice, this list of conditions and the following disclaimer. | |
@@ -21,44 +21,51 @@ | @@ -21,44 +21,51 @@ | |||
21 | * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR | 21 | * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR | |
22 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS | 22 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS | |
23 | * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR | 23 | * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR | |
24 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF | 24 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF | |
25 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS | 25 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS | |
26 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN | 26 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN | |
27 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) | 27 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) | |
28 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE | 28 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE | |
29 | * POSSIBILITY OF SUCH DAMAGE. | 29 | * POSSIBILITY OF SUCH DAMAGE. | |
30 | */ | 30 | */ | |
31 | 31 | |||
32 | #include "atomic_op_asm.h" | 32 | #include "atomic_op_asm.h" | |
33 | 33 | |||
34 | /* | |||
35 | * { tmp = *ptr; *ptr = ~(tmp & value); return tmp; } // nand | |||
36 | */ | |||
34 | ENTRY_NP(_atomic_nand_32) | 37 | ENTRY_NP(_atomic_nand_32) | |
35 | mov x4, x0 | 38 | mov x4, x0 | |
36 | 1: ldxr w0, [x4] /* load old value (to be returned) */ | 39 | 1: ldxr w0, [x4] /* load old value (to be returned) */ | |
37 | mvn w3, w0 /* complement source */ | 40 | and w3, w0, w1 /* w3 = (*ptr & value) */ | |
38 | and w3, w3, w1 /* calculate new value */ | 41 | mvn w3, w3 /* x3 = ~(*ptr & value) */ | |
39 | stxr w2, w3, [x4] /* try to store */ | 42 | stxr w2, w3, [x4] /* try to store */ | |
40 | cbnz w2, 1b /* succeed? no, try again */ | 43 | cbnz w2, 1b /* succeed? no, try again */ | |
41 | dmb st | 44 | dmb st | |
42 | ret /* return old value */ | 45 | ret /* return old value */ | |
43 | END(_atomic_nand_32) | 46 | END(_atomic_nand_32) | |
44 | 47 | |||
45 | ATOMIC_OP_ALIAS(atomic_nand_32,_atomic_nand_32) | 48 | ATOMIC_OP_ALIAS(atomic_nand_32,_atomic_nand_32) | |
46 | ATOMIC_OP_ALIAS(atomic_nand_uint,_atomic_nand_32) | 49 | ATOMIC_OP_ALIAS(atomic_nand_uint,_atomic_nand_32) | |
47 | STRONG_ALIAS(__sync_fetch_and_nand_4,_atomic_nand_32) | 50 | STRONG_ALIAS(__sync_fetch_and_nand_4,_atomic_nand_32) | |
48 | STRONG_ALIAS(_atomic_nand_uint,_atomic_nand_32) | 51 | STRONG_ALIAS(_atomic_nand_uint,_atomic_nand_32) | |
49 | 52 | |||
53 | ||||
54 | /* | |||
55 | * { tmp = ~(*ptr & value); *ptr = tmp; return *ptr; } // nand | |||
56 | */ | |||
50 | ENTRY_NP(_atomic_nand_32_nv) | 57 | ENTRY_NP(_atomic_nand_32_nv) | |
51 | mov x4, x0 /* need r0 for return value */ | 58 | mov x4, x0 /* need r0 for return value */ | |
52 | 1: ldxr w0, [x4] /* load old value */ | 59 | 1: ldxr w0, [x4] /* load old value (*ptr) */ | |
53 | mvn w0, w0 /* complement source */ | 60 | and w0, w0, w1 /* x0 = (*ptr & value) */ | |
54 | and w0, w0, w1 /* calculate new value (return value) */ | 61 | mvn w0, w0 /* x0 = ~(*ptr & value), return value */ | |
55 | stxr w2, w0, [x4] /* try to store */ | 62 | stxr w2, w0, [x4] /* try to store */ | |
56 | cbnz w2, 1b /* succeed? no, try again? */ | 63 | cbnz w2, 1b /* succeed? no, try again? */ | |
57 | dmb st | 64 | dmb st | |
58 | ret /* return new value */ | 65 | ret /* return new value */ | |
59 | END(_atomic_nand_32_nv) | 66 | END(_atomic_nand_32_nv) | |
60 | 67 | |||
61 | ATOMIC_OP_ALIAS(atomic_nand_32_nv,_atomic_nand_32_nv) | 68 | ATOMIC_OP_ALIAS(atomic_nand_32_nv,_atomic_nand_32_nv) | |
62 | ATOMIC_OP_ALIAS(atomic_nand_uint_nv,_atomic_nand_32_nv) | 69 | ATOMIC_OP_ALIAS(atomic_nand_uint_nv,_atomic_nand_32_nv) | |
63 | STRONG_ALIAS(__sync_nand_and_fetch_4,_atomic_nand_32_nv) | 70 | STRONG_ALIAS(__sync_nand_and_fetch_4,_atomic_nand_32_nv) | |
64 | STRONG_ALIAS(_atomic_nand_uint_nv,_atomic_nand_32_nv) | 71 | STRONG_ALIAS(_atomic_nand_uint_nv,_atomic_nand_32_nv) |
--- src/common/lib/libc/arch/aarch64/atomic/atomic_nand_64.S 2014/08/10 05:47:35 1.1
+++ src/common/lib/libc/arch/aarch64/atomic/atomic_nand_64.S 2021/07/06 04:13:50 1.1.28.1
@@ -1,14 +1,14 @@ | @@ -1,14 +1,14 @@ | |||
1 | /* $NetBSD: atomic_nand_64.S,v 1.1 2014/08/10 05:47:35 matt Exp $ */ | 1 | /* $NetBSD: atomic_nand_64.S,v 1.1.28.1 2021/07/06 04:13:50 martin Exp $ */ | |
2 | 2 | |||
3 | /*- | 3 | /*- | |
4 | * Copyright (c) 2014 The NetBSD Foundation, Inc. | 4 | * Copyright (c) 2014 The NetBSD Foundation, Inc. | |
5 | * All rights reserved. | 5 | * All rights reserved. | |
6 | * | 6 | * | |
7 | * This code is derived from software contributed to The NetBSD Foundation | 7 | * This code is derived from software contributed to The NetBSD Foundation | |
8 | * by Matt Thomas of 3am Software Foundry. | 8 | * by Matt Thomas of 3am Software Foundry. | |
9 | * | 9 | * | |
10 | * Redistribution and use in source and binary forms, with or without | 10 | * Redistribution and use in source and binary forms, with or without | |
11 | * modification, are permitted provided that the following conditions | 11 | * modification, are permitted provided that the following conditions | |
12 | * are met: | 12 | * are met: | |
13 | * 1. Redistributions of source code must retain the above copyright | 13 | * 1. Redistributions of source code must retain the above copyright | |
14 | * notice, this list of conditions and the following disclaimer. | 14 | * notice, this list of conditions and the following disclaimer. | |
@@ -21,44 +21,51 @@ | @@ -21,44 +21,51 @@ | |||
21 | * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR | 21 | * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR | |
22 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS | 22 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS | |
23 | * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR | 23 | * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR | |
24 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF | 24 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF | |
25 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS | 25 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS | |
26 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN | 26 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN | |
27 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) | 27 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) | |
28 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE | 28 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE | |
29 | * POSSIBILITY OF SUCH DAMAGE. | 29 | * POSSIBILITY OF SUCH DAMAGE. | |
30 | */ | 30 | */ | |
31 | 31 | |||
32 | #include "atomic_op_asm.h" | 32 | #include "atomic_op_asm.h" | |
33 | 33 | |||
34 | /* | |||
35 | * { tmp = *ptr; *ptr = ~(tmp & value); return tmp; } // nand | |||
36 | */ | |||
34 | ENTRY_NP(_atomic_nand_64) | 37 | ENTRY_NP(_atomic_nand_64) | |
35 | mov x4, x0 | 38 | mov x4, x0 | |
36 | 1: ldxr x0, [x4] /* load old value (to be returned) */ | 39 | 1: ldxr x0, [x4] /* load old value (*ptr) */ | |
37 | mvn x2, x0 /* complement source */ | 40 | and x2, x0, x1 /* x2 = (*ptr & value) */ | |
38 | and x2, x2, x1 /* calculate new value */ | 41 | mvn x2, x2 /* x2 = ~(*ptr & value) */ | |
39 | stxr w3, x2, [x4] /* try to store */ | 42 | stxr w3, x2, [x4] /* try to store */ | |
40 | cbnz w3, 1b /* succeed? no, try again */ | 43 | cbnz w3, 1b /* succeed? no, try again */ | |
41 | dmb st | 44 | dmb st | |
42 | ret /* return old value */ | 45 | ret /* return old value */ | |
43 | END(_atomic_nand_64) | 46 | END(_atomic_nand_64) | |
44 | 47 | |||
45 | ATOMIC_OP_ALIAS(atomic_nand_64,_atomic_nand_64) | 48 | ATOMIC_OP_ALIAS(atomic_nand_64,_atomic_nand_64) | |
46 | ATOMIC_OP_ALIAS(atomic_nand_ulong,_atomic_nand_64) | 49 | ATOMIC_OP_ALIAS(atomic_nand_ulong,_atomic_nand_64) | |
47 | STRONG_ALIAS(__sync_fetch_and_nand_8,_atomic_nand_64) | 50 | STRONG_ALIAS(__sync_fetch_and_nand_8,_atomic_nand_64) | |
48 | STRONG_ALIAS(_atomic_nand_ulong,_atomic_nand_64) | 51 | STRONG_ALIAS(_atomic_nand_ulong,_atomic_nand_64) | |
49 | 52 | |||
53 | ||||
54 | /* | |||
55 | * { tmp = ~(*ptr & value); *ptr = tmp; return *ptr; } // nand | |||
56 | */ | |||
50 | ENTRY_NP(_atomic_nand_64_nv) | 57 | ENTRY_NP(_atomic_nand_64_nv) | |
51 | mov x4, x0 /* need r0 for return value */ | 58 | mov x4, x0 /* need r0 for return value */ | |
52 | 1: ldxr x0, [x4] /* load old value */ | 59 | 1: ldxr x0, [x4] /* load old value (*ptr) */ | |
53 | mvn x0, x0 /* complement source */ | 60 | and x0, x0, x1 /* x0 = (*ptr & value) */ | |
54 | and x0, x0, x1 /* calculate new value (return value) */ | 61 | mvn x0, x0 /* x0 = ~(*ptr & value), return value */ | |
55 | stxr w3, x0, [x4] /* try to store */ | 62 | stxr w3, x0, [x4] /* try to store */ | |
56 | cbnz w3, 1b /* succeed? no, try again? */ | 63 | cbnz w3, 1b /* succeed? no, try again? */ | |
57 | dmb st | 64 | dmb st | |
58 | ret /* return new value */ | 65 | ret /* return new value */ | |
59 | END(_atomic_nand_64_nv) | 66 | END(_atomic_nand_64_nv) | |
60 | 67 | |||
61 | ATOMIC_OP_ALIAS(atomic_nand_64_nv,_atomic_nand_64_nv) | 68 | ATOMIC_OP_ALIAS(atomic_nand_64_nv,_atomic_nand_64_nv) | |
62 | ATOMIC_OP_ALIAS(atomic_nand_ulong_nv,_atomic_nand_64_nv) | 69 | ATOMIC_OP_ALIAS(atomic_nand_ulong_nv,_atomic_nand_64_nv) | |
63 | STRONG_ALIAS(__sync_nand_and_fetch_8,_atomic_nand_64_nv) | 70 | STRONG_ALIAS(__sync_nand_and_fetch_8,_atomic_nand_64_nv) | |
64 | STRONG_ALIAS(_atomic_nand_ulong_nv,_atomic_nand_64_nv) | 71 | STRONG_ALIAS(_atomic_nand_ulong_nv,_atomic_nand_64_nv) |
--- src/common/lib/libc/arch/aarch64/atomic/atomic_nand_8.S 2014/08/10 05:47:35 1.1
+++ src/common/lib/libc/arch/aarch64/atomic/atomic_nand_8.S 2021/07/06 04:13:50 1.1.28.1
@@ -1,14 +1,14 @@ | @@ -1,14 +1,14 @@ | |||
1 | /* $NetBSD: atomic_nand_8.S,v 1.1 2014/08/10 05:47:35 matt Exp $ */ | 1 | /* $NetBSD: atomic_nand_8.S,v 1.1.28.1 2021/07/06 04:13:50 martin Exp $ */ | |
2 | 2 | |||
3 | /*- | 3 | /*- | |
4 | * Copyright (c) 2014 The NetBSD Foundation, Inc. | 4 | * Copyright (c) 2014 The NetBSD Foundation, Inc. | |
5 | * All rights reserved. | 5 | * All rights reserved. | |
6 | * | 6 | * | |
7 | * This code is derived from software contributed to The NetBSD Foundation | 7 | * This code is derived from software contributed to The NetBSD Foundation | |
8 | * by Matt Thomas of 3am Software Foundry. | 8 | * by Matt Thomas of 3am Software Foundry. | |
9 | * | 9 | * | |
10 | * Redistribution and use in source and binary forms, with or without | 10 | * Redistribution and use in source and binary forms, with or without | |
11 | * modification, are permitted provided that the following conditions | 11 | * modification, are permitted provided that the following conditions | |
12 | * are met: | 12 | * are met: | |
13 | * 1. Redistributions of source code must retain the above copyright | 13 | * 1. Redistributions of source code must retain the above copyright | |
14 | * notice, this list of conditions and the following disclaimer. | 14 | * notice, this list of conditions and the following disclaimer. | |
@@ -21,44 +21,50 @@ | @@ -21,44 +21,50 @@ | |||
21 | * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR | 21 | * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR | |
22 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS | 22 | * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS | |
23 | * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR | 23 | * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR | |
24 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF | 24 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF | |
25 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS | 25 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS | |
26 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN | 26 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN | |
27 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) | 27 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) | |
28 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE | 28 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE | |
29 | * POSSIBILITY OF SUCH DAMAGE. | 29 | * POSSIBILITY OF SUCH DAMAGE. | |
30 | */ | 30 | */ | |
31 | 31 | |||
32 | #include "atomic_op_asm.h" | 32 | #include "atomic_op_asm.h" | |
33 | 33 | |||
34 | /* | |||
35 | * { tmp = *ptr; *ptr = ~(tmp & value); return tmp; } // nand | |||
36 | */ | |||
34 | ENTRY_NP(_atomic_nand_8) | 37 | ENTRY_NP(_atomic_nand_8) | |
35 | mov x4, x0 | 38 | mov x4, x0 | |
36 | 1: ldxrb w0, [x4] /* load old value (to be returned) */ | 39 | 1: ldxrb w0, [x4] /* load old value (*ptr) */ | |
37 | mvn w3, w0 /* complement source */ | 40 | and w3, w0, w1 /* w3 = (*ptr & value) */ | |
38 | and w3, w3, w1 /* calculate new value */ | 41 | mvn w3, w3 /* w3 = ~(*ptr & value) */ | |
39 | stxrb w2, w3, [x4] /* try to store */ | 42 | stxrb w2, w3, [x4] /* try to store */ | |
40 | cbnz w2, 1b /* succeed? no, try again */ | 43 | cbnz w2, 1b /* succeed? no, try again */ | |
41 | dmb st | 44 | dmb st | |
42 | ret /* return old value */ | 45 | ret /* return old value */ | |
43 | END(_atomic_nand_8) | 46 | END(_atomic_nand_8) | |
44 | 47 | |||
45 | ATOMIC_OP_ALIAS(atomic_nand_8,_atomic_nand_8) | 48 | ATOMIC_OP_ALIAS(atomic_nand_8,_atomic_nand_8) | |
46 | ATOMIC_OP_ALIAS(atomic_nand_uchar,_atomic_nand_8) | 49 | ATOMIC_OP_ALIAS(atomic_nand_uchar,_atomic_nand_8) | |
47 | STRONG_ALIAS(__sync_fetch_and_nand_1,_atomic_nand_8) | 50 | STRONG_ALIAS(__sync_fetch_and_nand_1,_atomic_nand_8) | |
48 | STRONG_ALIAS(_atomic_nand_uchar,_atomic_nand_8) | 51 | STRONG_ALIAS(_atomic_nand_uchar,_atomic_nand_8) | |
49 | 52 | |||
53 | /* | |||
54 | * { tmp = ~(*ptr & value); *ptr = tmp; return *ptr; } // nand | |||
55 | */ | |||
50 | ENTRY_NP(_atomic_nand_8_nv) | 56 | ENTRY_NP(_atomic_nand_8_nv) | |
51 | mov x4, x0 /* need r0 for return value */ | 57 | mov x4, x0 /* need r0 for return value */ | |
52 | 1: ldxrb w0, [x4] /* load old value */ | 58 | 1: ldxrb w0, [x4] /* load old value (*ptr) */ | |
53 | mvn w0, w0 /* complement source */ | 59 | and w0, w0, w1 /* w0 = (*ptr & value) */ | |
54 | and w0, w0, w1 /* calculate new value (return value) */ | 60 | mvn w0, w0 /* w0 = ~(*ptr & value), return value */ | |
55 | stxrb w2, w0, [x4] /* try to store */ | 61 | stxrb w2, w0, [x4] /* try to store */ | |
56 | cbnz w2, 1b /* succeed? no, try again? */ | 62 | cbnz w2, 1b /* succeed? no, try again? */ | |
57 | dmb st | 63 | dmb st | |
58 | ret /* return new value */ | 64 | ret /* return new value */ | |
59 | END(_atomic_nand_8_nv) | 65 | END(_atomic_nand_8_nv) | |
60 | 66 | |||
61 | ATOMIC_OP_ALIAS(atomic_nand_8_nv,_atomic_nand_8_nv) | 67 | ATOMIC_OP_ALIAS(atomic_nand_8_nv,_atomic_nand_8_nv) | |
62 | ATOMIC_OP_ALIAS(atomic_nand_uchar_nv,_atomic_nand_8_nv) | 68 | ATOMIC_OP_ALIAS(atomic_nand_uchar_nv,_atomic_nand_8_nv) | |
63 | STRONG_ALIAS(__sync_nand_and_fetch_1,_atomic_nand_8_nv) | 69 | STRONG_ALIAS(__sync_nand_and_fetch_1,_atomic_nand_8_nv) | |
64 | STRONG_ALIAS(_atomic_nand_uchar_nv,_atomic_nand_8_nv) | 70 | STRONG_ALIAS(_atomic_nand_uchar_nv,_atomic_nand_8_nv) |