Fri Nov 8 22:42:52 2013 UTC ()
Add support for the gcc __sync builtins.
Note that these need earmv6 or later to get the ldrex/strex instructions
(matt)
diff -r1.13 -r1.14 src/common/lib/libc/arch/arm/atomic/Makefile.inc
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/atomic_add_16.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/atomic_add_8.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/atomic_and_16.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/atomic_and_8.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/atomic_cas_16.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/atomic_nand_16.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/atomic_nand_32.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/atomic_nand_64.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/atomic_nand_8.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/atomic_or_16.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/atomic_or_8.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/atomic_sub_64.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/atomic_swap_16.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/atomic_xor_16.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/atomic_xor_32.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/atomic_xor_64.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/atomic_xor_8.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/sync_bool_compare_and_swap_1.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/sync_bool_compare_and_swap_2.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/sync_bool_compare_and_swap_4.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/sync_bool_compare_and_swap_8.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/sync_fetch_and_add_8.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/sync_fetch_and_and_8.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/sync_fetch_and_nand_8.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/sync_fetch_and_or_8.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/sync_fetch_and_sub_8.S
diff -r0 -r1.1 src/common/lib/libc/arch/arm/atomic/sync_fetch_and_xor_8.S
diff -r1.5 -r1.6 src/common/lib/libc/arch/arm/atomic/atomic_add_32.S
diff -r1.5 -r1.6 src/common/lib/libc/arch/arm/atomic/atomic_and_32.S
diff -r1.5 -r1.6 src/common/lib/libc/arch/arm/atomic/atomic_cas_32.S
diff -r1.5 -r1.6 src/common/lib/libc/arch/arm/atomic/atomic_cas_8.S
diff -r1.5 -r1.6 src/common/lib/libc/arch/arm/atomic/atomic_or_32.S
diff -r1.5 -r1.6 src/common/lib/libc/arch/arm/atomic/atomic_swap_64.S
diff -r1.8 -r1.9 src/common/lib/libc/arch/arm/atomic/atomic_add_64.S
diff -r1.7 -r1.8 src/common/lib/libc/arch/arm/atomic/atomic_and_64.S
diff -r1.7 -r1.8 src/common/lib/libc/arch/arm/atomic/atomic_or_64.S
diff -r1.7 -r1.8 src/common/lib/libc/arch/arm/atomic/atomic_swap.S
diff -r1.3 -r1.4 src/common/lib/libc/arch/arm/atomic/atomic_cas_64.S
diff -r1.3 -r1.4 src/common/lib/libc/arch/arm/atomic/atomic_op_asm.h
diff -r1.3 -r1.4 src/common/lib/libc/arch/arm/atomic/membar_ops.S
diff -r1.6 -r1.7 src/common/lib/libc/arch/arm/atomic/atomic_inc_32.S
--- src/common/lib/libc/arch/arm/atomic/Makefile.inc 2013/08/19 03:55:12 1.13
+++ src/common/lib/libc/arch/arm/atomic/Makefile.inc 2013/11/08 22:42:52 1.14
| @@ -1,46 +1,54 @@ | | | @@ -1,46 +1,54 @@ |
1 | # $NetBSD: Makefile.inc,v 1.13 2013/08/19 03:55:12 matt Exp $ | | 1 | # $NetBSD: Makefile.inc,v 1.14 2013/11/08 22:42:52 matt Exp $ |
2 | | | 2 | |
3 | ARMV6= ${CPUFLAGS:M-march=armv6*} ${CPUFLAGS:M-mcpu=arm11*} | | 3 | ARMV6= ${CPUFLAGS:M-march=armv6*} ${CPUFLAGS:M-mcpu=arm11*} |
4 | ARMV6+= ${CFLAGS:M-march=armv6*:} ${CFLAGS:M-mcpu=arm11*} | | 4 | ARMV6+= ${CFLAGS:M-march=armv6*:} ${CFLAGS:M-mcpu=arm11*} |
5 | ARMV6+= ${CPPFLAGS:M-march=armv6*:} ${CPPFLAGS:M-mcpu=arm11*} | | 5 | ARMV6+= ${CPPFLAGS:M-march=armv6*:} ${CPPFLAGS:M-mcpu=arm11*} |
6 | ARMV7= ${CPUFLAGS:M-march=armv7*} ${CPUFLAGS:M-mcpu=cortex*} | | 6 | ARMV7= ${CPUFLAGS:M-march=armv7*} ${CPUFLAGS:M-mcpu=cortex*} |
7 | ARMV7+= ${CFLAGS:M-march=armv7*:} ${CFLAGS:M-mcpu=cortex*} | | 7 | ARMV7+= ${CFLAGS:M-march=armv7*:} ${CFLAGS:M-mcpu=cortex*} |
8 | ARMV7+= ${CPPFLAGS:M-march=armv7*:} ${CPPFLAGS:M-mcpu=cortex*} | | 8 | ARMV7+= ${CPPFLAGS:M-march=armv7*:} ${CPPFLAGS:M-mcpu=cortex*} |
9 | .if empty(CPPFLAGS:M-D_STANDALONE) \ | | 9 | .if empty(CFLAGS:M-march=*) && empty(CFLAGS:M-mcpu=*) \ |
10 | && empty(CFLAGS:M-march=*) && empty(CFLAGS:M-mcpu=*) \ | | | |
11 | && empty(CPPFLAGS:M-march=*) && empty(CPPFLAGS:M-mcpu=*) \ | | 10 | && empty(CPPFLAGS:M-march=*) && empty(CPPFLAGS:M-mcpu=*) \ |
12 | && empty(CPUFLAGS:M-march=*) && empty(CPUFLAGS:M-mcpu=*) | | 11 | && empty(CPUFLAGS:M-march=*) && empty(CPUFLAGS:M-mcpu=*) |
13 | ARMV6+= ${MACHINE_ARCH:Mearmv6*} | | 12 | ARMV6+= ${MACHINE_ARCH:Mearmv6*} |
14 | ARMV7+= ${MACHINE_ARCH:Mearmv7*} | | 13 | ARMV7+= ${MACHINE_ARCH:Mearmv7*} |
15 | .endif | | 14 | .endif |
16 | | | 15 | |
17 | .if defined(LIB) && (${LIB} == "kern" || ${LIB} == "c" || ${LIB} == "pthread" \ | | 16 | .if defined(LIB) && (${LIB} == "kern" || ${LIB} == "c" || ${LIB} == "pthread" \ |
18 | || ${LIB} == "rump") | | 17 | || ${LIB} == "rump") |
19 | | | 18 | |
20 | .if empty(ARMV6) && empty(ARMV7) | | 19 | .if empty(ARMV6) && empty(ARMV7) |
21 | SRCS.atomic+= atomic_add_32_cas.c atomic_add_32_nv_cas.c \ | | 20 | SRCS.atomic+= atomic_add_32_cas.c atomic_add_32_nv_cas.c \ |
22 | atomic_and_32_cas.c atomic_and_32_nv_cas.c \ | | 21 | atomic_and_32_cas.c atomic_and_32_nv_cas.c \ |
23 | atomic_dec_32_cas.c atomic_dec_32_nv_cas.c \ | | 22 | atomic_dec_32_cas.c atomic_dec_32_nv_cas.c \ |
24 | atomic_inc_32_cas.c atomic_inc_32_nv_cas.c \ | | 23 | atomic_inc_32_cas.c atomic_inc_32_nv_cas.c \ |
25 | atomic_or_32_cas.c atomic_or_32_nv_cas.c \ | | 24 | atomic_or_32_cas.c atomic_or_32_nv_cas.c \ |
26 | atomic_swap_32_cas.c membar_ops_nop.c | | 25 | atomic_swap_32_cas.c membar_ops_nop.c |
27 | .else | | 26 | .else |
28 | SRCS.atomic+= atomic_add_32.S atomic_and_32.S atomic_cas_32.S | | 27 | .for op in add and cas nand or xor |
29 | SRCS.atomic+= atomic_dec_32.S atomic_inc_32.S atomic_or_32.S | | 28 | .for sz in 8 16 32 64 |
30 | SRCS.atomic+= atomic_swap.S membar_ops.S | | 29 | SRCS.atomic+= atomic_${op}_${sz}.S |
31 | SRCS.atomic+= atomic_add_64.S atomic_and_64.S atomic_cas_64.S | | 30 | .endfor |
32 | SRCS.atomic+= atomic_dec_64.S atomic_inc_64.S atomic_or_64.S | | 31 | .endfor |
33 | SRCS.atomic+= atomic_swap_64.S | | 32 | SRCS.atomic+= atomic_dec_32.S atomic_dec_64.S |
| | | 33 | SRCS.atomic+= atomic_inc_32.S atomic_inc_64.S |
| | | 34 | SRCS.atomic+= atomic_swap.S atomic_swap_16.S atomic_swap_64.S |
| | | 35 | SRCS.atomic+= membar_ops.S |
| | | 36 | .for op in add and nand or sub xor |
| | | 37 | SRCS.atomic+= sync_fetch_and_${op}_8.S |
| | | 38 | .endfor |
| | | 39 | .for sz in 1 2 4 8 |
| | | 40 | SRCS.atomic+= sync_bool_compare_and_swap_${sz}.S |
| | | 41 | .endfor |
34 | .endif | | 42 | .endif |
35 | | | 43 | |
36 | .endif | | 44 | .endif |
37 | | | 45 | |
38 | .if defined(LIB) && (${LIB} == "c" || ${LIB} == "pthread" || ${LIB} == "rump") | | 46 | .if defined(LIB) && (${LIB} == "c" || ${LIB} == "pthread" || ${LIB} == "rump") |
39 | | | 47 | |
40 | SRCS.atomic+= atomic_simplelock.c | | 48 | SRCS.atomic+= atomic_simplelock.c |
41 | .if empty(ARMV7) | | 49 | .if empty(ARMV7) |
42 | CPUFLAGS.atomic_simplelock.c+= -marm | | 50 | CPUFLAGS.atomic_simplelock.c+= -marm |
43 | .endif | | 51 | .endif |
44 | | | 52 | |
45 | .endif | | 53 | .endif |
46 | | | 54 | |
/* $NetBSD: atomic_add_16.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#ifdef _ARM_ARCH_6
/*
 * uint16_t _atomic_sub_16(volatile uint16_t *p, uint16_t v)
 * uint16_t _atomic_add_16(volatile uint16_t *p, uint16_t v)
 *
 * Atomically add (or subtract) v to/from *p with a ldrexh/strexh
 * retry loop and return the OLD value.  sub is implemented by
 * negating the addend and falling through into add.
 * In:  r0 = p, r1 = v.  Out: r0 = old *p.  Clobbers: r1-r3, ip, flags.
 */
ENTRY_NP(_atomic_sub_16)
	negs	r1, r1			/* sub == add of the negated addend */
	/* FALLTHROUGH */
ENTRY_NP(_atomic_add_16)
	mov	ip, r0			/* free r0 to hold the return value */
1:	ldrexh	r0, [ip]		/* load old value */
	adds	r3, r0, r1		/* calculate new value */
	strexh	r2, r3, [ip]		/* try to store */
	cmp	r2, #0			/* succeed? */
	bne	1b			/* no, try again */
#ifdef _ARM_ARCH_7
	dmb				/* ARMv7 has a real barrier insn */
#else
	mcr	p15, 0, r2, c7, c10, 5	/* data memory barrier */
#endif
	RET				/* return old value */
END(_atomic_add_16)
END(_atomic_sub_16)
ATOMIC_OP_ALIAS(atomic_add_16,_atomic_add_16)
ATOMIC_OP_ALIAS(atomic_add_short,_atomic_add_16)
ATOMIC_OP_ALIAS(atomic_add_ushort,_atomic_add_16)
STRONG_ALIAS(__sync_fetch_and_add_2,_atomic_add_16)
STRONG_ALIAS(_atomic_add_short,_atomic_add_16)
STRONG_ALIAS(_atomic_add_ushort,_atomic_add_16)
ATOMIC_OP_ALIAS(atomic_sub_16,_atomic_sub_16)
ATOMIC_OP_ALIAS(atomic_sub_short,_atomic_sub_16)
ATOMIC_OP_ALIAS(atomic_sub_ushort,_atomic_sub_16)
STRONG_ALIAS(__sync_fetch_and_sub_2,_atomic_sub_16)
STRONG_ALIAS(_atomic_sub_short,_atomic_sub_16)
STRONG_ALIAS(_atomic_sub_ushort,_atomic_sub_16)
/*
 * uint16_t _atomic_sub_16_nv(volatile uint16_t *p, uint16_t v)
 * uint16_t _atomic_add_16_nv(volatile uint16_t *p, uint16_t v)
 *
 * Same as _atomic_add_16/_atomic_sub_16 but returns the NEW value,
 * which is also what __sync_{add,sub}_and_fetch_2 want.
 * In:  r0 = p, r1 = v.  Out: r0 = new *p.  Clobbers: r1, r2, ip, flags.
 */
ENTRY_NP(_atomic_sub_16_nv)
	negs	r1, r1			/* sub == add of the negated addend */
	/* FALLTHROUGH */
ENTRY_NP(_atomic_add_16_nv)
	mov	ip, r0			/* need r0 for return value */
1:	ldrexh	r0, [ip]		/* load old value */
	adds	r0, r0, r1		/* calculate new value (return value) */
	strexh	r2, r0, [ip]		/* try to store */
	cmp	r2, #0			/* succeed? */
	bne	1b			/* no, try again? */
#ifdef _ARM_ARCH_7
	dmb				/* ARMv7 has a real barrier insn */
#else
	mcr	p15, 0, r2, c7, c10, 5	/* data memory barrier */
#endif
	RET				/* return new value */
END(_atomic_add_16_nv)
END(_atomic_sub_16_nv)
ATOMIC_OP_ALIAS(atomic_add_16_nv,_atomic_add_16_nv)
ATOMIC_OP_ALIAS(atomic_add_short_nv,_atomic_add_16_nv)
ATOMIC_OP_ALIAS(atomic_add_ushort_nv,_atomic_add_16_nv)
STRONG_ALIAS(__sync_add_and_fetch_2,_atomic_add_16_nv)
STRONG_ALIAS(_atomic_add_short_nv,_atomic_add_16_nv)
STRONG_ALIAS(_atomic_add_ushort_nv,_atomic_add_16_nv)
ATOMIC_OP_ALIAS(atomic_sub_16_nv,_atomic_sub_16_nv)
ATOMIC_OP_ALIAS(atomic_sub_short_nv,_atomic_sub_16_nv)
ATOMIC_OP_ALIAS(atomic_sub_ushort_nv,_atomic_sub_16_nv)
STRONG_ALIAS(__sync_sub_and_fetch_2,_atomic_sub_16_nv)
STRONG_ALIAS(_atomic_sub_short_nv,_atomic_sub_16_nv)
STRONG_ALIAS(_atomic_sub_ushort_nv,_atomic_sub_16_nv)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: atomic_add_8.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#ifdef _ARM_ARCH_6
/*
 * uint8_t _atomic_sub_8(volatile uint8_t *p, uint8_t v)
 * uint8_t _atomic_add_8(volatile uint8_t *p, uint8_t v)
 *
 * Atomically add (or subtract) v to/from *p with a ldrexb/strexb
 * retry loop and return the OLD value.  sub is implemented by
 * negating the addend and falling through into add.
 * In:  r0 = p, r1 = v.  Out: r0 = old *p.  Clobbers: r1-r3, ip, flags.
 */
ENTRY_NP(_atomic_sub_8)
	negs	r1, r1			/* sub == add of the negated addend */
	/* FALLTHROUGH */
ENTRY_NP(_atomic_add_8)
	mov	ip, r0			/* free r0 to hold the return value */
1:	ldrexb	r0, [ip]		/* load old value */
	adds	r3, r0, r1		/* calculate new value */
	strexb	r2, r3, [ip]		/* try to store */
	cmp	r2, #0			/* succeed? */
	bne	1b			/* no, try again */
#ifdef _ARM_ARCH_7
	dmb				/* ARMv7 has a real barrier insn */
#else
	mcr	p15, 0, r2, c7, c10, 5	/* data memory barrier */
#endif
	RET				/* return old value */
END(_atomic_add_8)
END(_atomic_sub_8)
ATOMIC_OP_ALIAS(atomic_add_8,_atomic_add_8)
ATOMIC_OP_ALIAS(atomic_add_char,_atomic_add_8)
ATOMIC_OP_ALIAS(atomic_add_uchar,_atomic_add_8)
STRONG_ALIAS(__sync_fetch_and_add_1,_atomic_add_8)
STRONG_ALIAS(_atomic_add_char,_atomic_add_8)
STRONG_ALIAS(_atomic_add_uchar,_atomic_add_8)
ATOMIC_OP_ALIAS(atomic_sub_8,_atomic_sub_8)
ATOMIC_OP_ALIAS(atomic_sub_char,_atomic_sub_8)
ATOMIC_OP_ALIAS(atomic_sub_uchar,_atomic_sub_8)
STRONG_ALIAS(__sync_fetch_and_sub_1,_atomic_sub_8)
STRONG_ALIAS(_atomic_sub_char,_atomic_sub_8)
STRONG_ALIAS(_atomic_sub_uchar,_atomic_sub_8)
/*
 * uint8_t _atomic_sub_8_nv(volatile uint8_t *p, uint8_t v)
 * uint8_t _atomic_add_8_nv(volatile uint8_t *p, uint8_t v)
 *
 * Same as _atomic_add_8/_atomic_sub_8 but returns the NEW value,
 * which is also what __sync_{add,sub}_and_fetch_1 want.
 * In:  r0 = p, r1 = v.  Out: r0 = new *p.  Clobbers: r1, r2, ip, flags.
 */
ENTRY_NP(_atomic_sub_8_nv)
	negs	r1, r1			/* sub == add of the negated addend */
	/* FALLTHROUGH */
ENTRY_NP(_atomic_add_8_nv)
	mov	ip, r0			/* need r0 for return value */
1:	ldrexb	r0, [ip]		/* load old value */
	adds	r0, r0, r1		/* calculate new value (return value) */
	strexb	r2, r0, [ip]		/* try to store */
	cmp	r2, #0			/* succeed? */
	bne	1b			/* no, try again? */
#ifdef _ARM_ARCH_7
	dmb				/* ARMv7 has a real barrier insn */
#else
	mcr	p15, 0, r2, c7, c10, 5	/* data memory barrier */
#endif
	RET				/* return new value */
END(_atomic_add_8_nv)
END(_atomic_sub_8_nv)
ATOMIC_OP_ALIAS(atomic_add_8_nv,_atomic_add_8_nv)
ATOMIC_OP_ALIAS(atomic_add_char_nv,_atomic_add_8_nv)
ATOMIC_OP_ALIAS(atomic_add_uchar_nv,_atomic_add_8_nv)
STRONG_ALIAS(__sync_add_and_fetch_1,_atomic_add_8_nv)
STRONG_ALIAS(_atomic_add_char_nv,_atomic_add_8_nv)
STRONG_ALIAS(_atomic_add_uchar_nv,_atomic_add_8_nv)
ATOMIC_OP_ALIAS(atomic_sub_8_nv,_atomic_sub_8_nv)
ATOMIC_OP_ALIAS(atomic_sub_char_nv,_atomic_sub_8_nv)
ATOMIC_OP_ALIAS(atomic_sub_uchar_nv,_atomic_sub_8_nv)
STRONG_ALIAS(__sync_sub_and_fetch_1,_atomic_sub_8_nv)
STRONG_ALIAS(_atomic_sub_char_nv,_atomic_sub_8_nv)
STRONG_ALIAS(_atomic_sub_uchar_nv,_atomic_sub_8_nv)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: atomic_and_16.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#ifdef _ARM_ARCH_6
/*
 * uint16_t _atomic_and_16(volatile uint16_t *p, uint16_t v)
 *
 * Atomically *p &= v with a ldrexh/strexh retry loop; returns the
 * OLD value (__sync_fetch_and_and_2 semantics).
 * In:  r0 = p, r1 = v.  Out: r0 = old *p.  Clobbers: r1-r3, ip, flags.
 */
ENTRY_NP(_atomic_and_16)
	mov	ip, r0			/* free r0 to hold the return value */
1:	ldrexh	r0, [ip]		/* load old value (to be returned) */
	ands	r3, r0, r1		/* calculate new value */
	strexh	r2, r3, [ip]		/* try to store */
	cmp	r2, #0			/* succeed? */
	bne	1b			/* no, try again */
#ifdef _ARM_ARCH_7
	dmb				/* ARMv7 has a real barrier insn */
#else
	mcr	p15, 0, r2, c7, c10, 5	/* data memory barrier */
#endif
	RET				/* return old value */
END(_atomic_and_16)
ATOMIC_OP_ALIAS(atomic_and_16,_atomic_and_16)
ATOMIC_OP_ALIAS(atomic_and_ushort,_atomic_and_16)
STRONG_ALIAS(__sync_fetch_and_and_2,_atomic_and_16)
STRONG_ALIAS(_atomic_and_ushort,_atomic_and_16)
/*
 * uint16_t _atomic_and_16_nv(volatile uint16_t *p, uint16_t v)
 *
 * Same as _atomic_and_16 but returns the NEW value
 * (__sync_and_and_fetch_2 semantics).
 * In:  r0 = p, r1 = v.  Out: r0 = new *p.  Clobbers: r1, r2, ip, flags.
 */
ENTRY_NP(_atomic_and_16_nv)
	mov	ip, r0			/* need r0 for return value */
1:	ldrexh	r0, [ip]		/* load old value */
	ands	r0, r0, r1		/* calculate new value (return value) */
	strexh	r2, r0, [ip]		/* try to store */
	cmp	r2, #0			/* succeed? */
	bne	1b			/* no, try again? */
#ifdef _ARM_ARCH_7
	dmb				/* ARMv7 has a real barrier insn */
#else
	mcr	p15, 0, r2, c7, c10, 5	/* data memory barrier */
#endif
	RET				/* return new value */
END(_atomic_and_16_nv)
ATOMIC_OP_ALIAS(atomic_and_16_nv,_atomic_and_16_nv)
ATOMIC_OP_ALIAS(atomic_and_ushort_nv,_atomic_and_16_nv)
STRONG_ALIAS(__sync_and_and_fetch_2,_atomic_and_16_nv)
STRONG_ALIAS(_atomic_and_ushort_nv,_atomic_and_16_nv)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: atomic_and_8.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#ifdef _ARM_ARCH_6
/*
 * uint8_t _atomic_and_8(volatile uint8_t *p, uint8_t v)
 *
 * Atomically *p &= v with a ldrexb/strexb retry loop; returns the
 * OLD value (__sync_fetch_and_and_1 semantics).
 * In:  r0 = p, r1 = v.  Out: r0 = old *p.  Clobbers: r1-r3, ip, flags.
 */
ENTRY_NP(_atomic_and_8)
	mov	ip, r0			/* free r0 to hold the return value */
1:	ldrexb	r0, [ip]		/* load old value (to be returned) */
	ands	r3, r0, r1		/* calculate new value */
	strexb	r2, r3, [ip]		/* try to store */
	cmp	r2, #0			/* succeed? */
	bne	1b			/* no, try again */
#ifdef _ARM_ARCH_7
	dmb				/* ARMv7 has a real barrier insn */
#else
	mcr	p15, 0, r2, c7, c10, 5	/* data memory barrier */
#endif
	RET				/* return old value */
END(_atomic_and_8)
ATOMIC_OP_ALIAS(atomic_and_8,_atomic_and_8)
ATOMIC_OP_ALIAS(atomic_and_uchar,_atomic_and_8)
STRONG_ALIAS(__sync_fetch_and_and_1,_atomic_and_8)
STRONG_ALIAS(_atomic_and_uchar,_atomic_and_8)
/*
 * uint8_t _atomic_and_8_nv(volatile uint8_t *p, uint8_t v)
 *
 * Same as _atomic_and_8 but returns the NEW value
 * (__sync_and_and_fetch_1 semantics).
 * In:  r0 = p, r1 = v.  Out: r0 = new *p.  Clobbers: r1, r2, ip, flags.
 */
ENTRY_NP(_atomic_and_8_nv)
	mov	ip, r0			/* need r0 for return value */
1:	ldrexb	r0, [ip]		/* load old value */
	ands	r0, r0, r1		/* calculate new value (return value) */
	strexb	r2, r0, [ip]		/* try to store */
	cmp	r2, #0			/* succeed? */
	bne	1b			/* no, try again? */
#ifdef _ARM_ARCH_7
	dmb				/* ARMv7 has a real barrier insn */
#else
	mcr	p15, 0, r2, c7, c10, 5	/* data memory barrier */
#endif
	RET				/* return new value */
END(_atomic_and_8_nv)
ATOMIC_OP_ALIAS(atomic_and_8_nv,_atomic_and_8_nv)
ATOMIC_OP_ALIAS(atomic_and_uchar_nv,_atomic_and_8_nv)
STRONG_ALIAS(__sync_and_and_fetch_1,_atomic_and_8_nv)
STRONG_ALIAS(_atomic_and_uchar_nv,_atomic_and_8_nv)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: atomic_cas_16.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#if defined(_ARM_ARCH_6)
/*
* ARMv6 has load-exclusive/store-exclusive which works for both user
* and kernel.
*/
/*
 * uint16_t _atomic_cas_16(volatile uint16_t *p, uint16_t old, uint16_t new)
 *
 * Atomically: if (*p == old) *p = new; always returns the value that
 * was observed in *p (equal to old on success).
 * In:  r0 = p, r1 = old, r2 = new.  Out: r0 = observed *p.
 * Clobbers: r3, ip, flags.
 *
 * NOTE(review): the compare-failed path returns WITHOUT executing the
 * synchronization barrier below — only a successful store is ordered.
 * Confirm this matches the other atomic_cas_* implementations.
 */
ENTRY_NP(_atomic_cas_16)
	mov	ip, r0			/* we need r0 for return value */
1:
	ldrexh	r0, [ip]		/* load old value */
	cmp	r0, r1			/* compare? */
#ifdef __thumb__
	bne	2f			/* thumb: RETc not usable, branch out */
#else
	RETc(ne)			/* return if different */
#endif
	strexh	r3, r2, [ip]		/* store new value */
	cmp	r3, #0			/* succeed? */
	bne	1b			/* nope, try again. */
#ifdef _ARM_ARCH_7
	dsb				/* data synchronization barrier */
#else
	mcr	p15, 0, r3, c7, c10, 4	/* data synchronization barrier */
#endif
2:	RET				/* return. */
END(_atomic_cas_16)
ATOMIC_OP_ALIAS(atomic_cas_16,_atomic_cas_16)
STRONG_ALIAS(_atomic_cas_short,_atomic_cas_16)
STRONG_ALIAS(_atomic_cas_ushort,_atomic_cas_16)
STRONG_ALIAS(__sync_val_compare_and_swap_2,_atomic_cas_16)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: atomic_nand_16.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#ifdef _ARM_ARCH_6
/*
 * uint16_t _atomic_nand_16(volatile uint16_t *p, uint16_t v)
 *
 * Atomically *p = ~(*p & v); returns the OLD value.
 *
 * Bug fix: GCC 4.4 and later define __sync_fetch_and_nand as
 * ~(old & val), NOT (~old) & val as this code originally computed,
 * so AND first and then complement the result.
 * In:  r0 = p, r1 = v.  Out: r0 = old *p.  Clobbers: r1-r3, ip, flags.
 */
ENTRY_NP(_atomic_nand_16)
	mov	ip, r0			/* free r0 to hold the return value */
1:	ldrexh	r0, [ip]		/* load old value (to be returned) */
	ands	r3, r0, r1		/* new = old & val ... */
	mvns	r3, r3			/* ... complemented: ~(old & val) */
	strexh	r2, r3, [ip]		/* try to store */
	cmp	r2, #0			/* succeed? */
	bne	1b			/* no, try again */
#ifdef _ARM_ARCH_7
	dmb				/* data memory barrier */
#else
	mcr	p15, 0, r2, c7, c10, 5	/* data memory barrier */
#endif
	RET				/* return old value */
END(_atomic_nand_16)
ATOMIC_OP_ALIAS(atomic_nand_16,_atomic_nand_16)
ATOMIC_OP_ALIAS(atomic_nand_ushort,_atomic_nand_16)
STRONG_ALIAS(__sync_fetch_and_nand_2,_atomic_nand_16)
STRONG_ALIAS(_atomic_nand_ushort,_atomic_nand_16)
/*
 * uint16_t _atomic_nand_16_nv(volatile uint16_t *p, uint16_t v)
 *
 * Atomically *p = ~(*p & v); returns the NEW value.
 *
 * Bug fix: GCC 4.4 and later define __sync_nand_and_fetch as
 * ~(old & val), NOT (~old) & val as this code originally computed,
 * so AND first and then complement the result.
 * In:  r0 = p, r1 = v.  Out: r0 = new *p.  Clobbers: r1, r2, ip, flags.
 */
ENTRY_NP(_atomic_nand_16_nv)
	mov	ip, r0			/* need r0 for return value */
1:	ldrexh	r0, [ip]		/* load old value */
	ands	r0, r0, r1		/* new = old & val ... */
	mvns	r0, r0			/* ... complemented: ~(old & val) */
	strexh	r2, r0, [ip]		/* try to store */
	cmp	r2, #0			/* succeed? */
	bne	1b			/* no, try again? */
#ifdef _ARM_ARCH_7
	dmb				/* data memory barrier */
#else
	mcr	p15, 0, r2, c7, c10, 5	/* data memory barrier */
#endif
	RET				/* return new value */
END(_atomic_nand_16_nv)
ATOMIC_OP_ALIAS(atomic_nand_16_nv,_atomic_nand_16_nv)
ATOMIC_OP_ALIAS(atomic_nand_ushort_nv,_atomic_nand_16_nv)
STRONG_ALIAS(__sync_nand_and_fetch_2,_atomic_nand_16_nv)
STRONG_ALIAS(_atomic_nand_ushort_nv,_atomic_nand_16_nv)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: atomic_nand_32.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#ifdef _ARM_ARCH_6
/*
 * uint32_t _atomic_nand_32(volatile uint32_t *p, uint32_t v)
 *
 * Atomically *p = ~(*p & v); returns the OLD value.
 *
 * Bug fix: GCC 4.4 and later define __sync_fetch_and_nand as
 * ~(old & val), NOT (~old) & val as this code originally computed,
 * so AND first and then complement the result.
 * In:  r0 = p, r1 = v.  Out: r0 = old *p.  Clobbers: r1-r3, ip, flags.
 */
ENTRY_NP(_atomic_nand_32)
	mov	ip, r0			/* free r0 to hold the return value */
1:	ldrex	r0, [ip]		/* load old value (to be returned) */
	ands	r3, r0, r1		/* new = old & val ... */
	mvns	r3, r3			/* ... complemented: ~(old & val) */
	strex	r2, r3, [ip]		/* try to store */
	cmp	r2, #0			/* succeed? */
	bne	1b			/* no, try again */
#ifdef _ARM_ARCH_7
	dmb				/* data memory barrier */
#else
	mcr	p15, 0, r2, c7, c10, 5	/* data memory barrier */
#endif
	RET				/* return old value */
END(_atomic_nand_32)
ATOMIC_OP_ALIAS(atomic_nand_32,_atomic_nand_32)
ATOMIC_OP_ALIAS(atomic_nand_uint,_atomic_nand_32)
ATOMIC_OP_ALIAS(atomic_nand_ulong,_atomic_nand_32)
STRONG_ALIAS(__sync_fetch_and_nand_4,_atomic_nand_32)
STRONG_ALIAS(_atomic_nand_uint,_atomic_nand_32)
STRONG_ALIAS(_atomic_nand_ulong,_atomic_nand_32)
/*
 * uint32_t _atomic_nand_32_nv(volatile uint32_t *p, uint32_t v)
 *
 * Atomically *p = ~(*p & v); returns the NEW value.
 *
 * Bug fix: GCC 4.4 and later define __sync_nand_and_fetch as
 * ~(old & val), NOT (~old) & val as this code originally computed,
 * so AND first and then complement the result.
 * In:  r0 = p, r1 = v.  Out: r0 = new *p.  Clobbers: r1, r2, ip, flags.
 */
ENTRY_NP(_atomic_nand_32_nv)
	mov	ip, r0			/* need r0 for return value */
1:	ldrex	r0, [ip]		/* load old value */
	ands	r0, r0, r1		/* new = old & val ... */
	mvns	r0, r0			/* ... complemented: ~(old & val) */
	strex	r2, r0, [ip]		/* try to store */
	cmp	r2, #0			/* succeed? */
	bne	1b			/* no, try again? */
#ifdef _ARM_ARCH_7
	dmb				/* data memory barrier */
#else
	mcr	p15, 0, r2, c7, c10, 5	/* data memory barrier */
#endif
	RET				/* return new value */
END(_atomic_nand_32_nv)
ATOMIC_OP_ALIAS(atomic_nand_32_nv,_atomic_nand_32_nv)
ATOMIC_OP_ALIAS(atomic_nand_uint_nv,_atomic_nand_32_nv)
ATOMIC_OP_ALIAS(atomic_nand_ulong_nv,_atomic_nand_32_nv)
STRONG_ALIAS(__sync_nand_and_fetch_4,_atomic_nand_32_nv)
STRONG_ALIAS(_atomic_nand_uint_nv,_atomic_nand_32_nv)
STRONG_ALIAS(_atomic_nand_ulong_nv,_atomic_nand_32_nv)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: atomic_nand_64.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#ifdef _ARM_ARCH_6
/*
 * uint64_t _atomic_nand_64_nv(volatile uint64_t *p, uint64_t v)
 *
 * Atomically *p = ~(*p & v); returns the NEW value.
 *
 * Bug fix: GCC 4.4 and later define __sync_nand_and_fetch as
 * ~(old & val), NOT (~old) & val as this code originally computed,
 * so AND the halves first and then complement them.
 *
 * In EABI, v arrives in r2/r3 (r0/r1 pair = p plus padding); in the
 * old ABI it arrives in r1/r2 and must be shuffled up to r2/r3.
 * ldrexd/strexd operate on the even/odd register pair r0/r1.
 * In:  r0 = p, v in r2/r3 (see above).  Out: r0/r1 = new *p.
 * Clobbers: r2, ip, flags (r3/r4 saved and restored).
 */
ENTRY_NP(_atomic_nand_64_nv)
	push	{r3,r4}			/* save temporary (keeps sp 8-aligned) */
#ifndef __ARM_EABI__
	mov	r3, r2			/* move value up to r2/r3 */
	mov	r2, r1
#endif
	mov	ip, r0			/* need r0 for return value */
1:	ldrexd	r0, [ip]		/* load old value into r0/r1 */
	ands	r0, r0, r2		/* new = old & val (lo) ... */
	ands	r1, r1, r3		/* new = old & val (hi) ... */
	mvns	r0, r0			/* ... complemented (lo) */
	mvns	r1, r1			/* ... complemented (hi) */
	strexd	r4, r0, [ip]		/* try to store r0/r1 */
	cmp	r4, #0			/* succeed? */
	bne	1b			/* no, try again? */
#ifdef _ARM_ARCH_7
	dmb				/* data memory barrier */
#else
	mcr	p15, 0, r4, c7, c10, 5	/* data memory barrier */
#endif
	pop	{r3,r4}			/* restore temporary */
	RET				/* return new value */
END(_atomic_nand_64_nv)
STRONG_ALIAS(_atomic_nand_64,_atomic_nand_64_nv)
ATOMIC_OP_ALIAS(atomic_nand_64_nv,_atomic_nand_64_nv)
ATOMIC_OP_ALIAS(atomic_nand_64,_atomic_nand_64_nv)
STRONG_ALIAS(__sync_nand_and_fetch_8,_atomic_nand_64_nv)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: atomic_nand_8.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#ifdef _ARM_ARCH_6
/*
 * 8-bit atomic NAND, returning the OLD value.
 * In:  r0 = pointer, r1 = operand.  Out: r0 = previous *ptr.
 * Clobbers: r2 (strexb status), r3 (new value), ip, flags.
 * GCC's __sync_fetch_and_nand (gcc >= 4.4) is defined as
 *   tmp = *ptr; *ptr = ~(tmp & value); return tmp;
 * FIX: the previous code computed (~old) & value, which is not NAND;
 * it now computes ~(old & value) per the builtin's documentation.
 * A data memory barrier is issued after the store succeeds.
 */
ENTRY_NP(_atomic_nand_8)
mov ip, r0
1: ldrexb r0, [ip] /* load old value (to be returned) */
ands r3, r0, r1 /* old & value */
mvns r3, r3 /* new value = ~(old & value) */
strexb r2, r3, [ip] /* try to store; r2 = 0 on success */
cmp r2, #0 /* succeed? */
bne 1b /* no, try again */
#ifdef _ARM_ARCH_7
dmb
#else
mcr p15, 0, r2, c7, c10, 5 /* data memory barrier (r2 == 0 here) */
#endif
RET /* return old value */
END(_atomic_nand_8)
ATOMIC_OP_ALIAS(atomic_nand_8,_atomic_nand_8)
ATOMIC_OP_ALIAS(atomic_nand_uchar,_atomic_nand_8)
STRONG_ALIAS(__sync_fetch_and_nand_1,_atomic_nand_8)
STRONG_ALIAS(_atomic_nand_uchar,_atomic_nand_8)
/*
 * 8-bit atomic NAND, returning the NEW value.
 * In:  r0 = pointer, r1 = operand.  Out: r0 = new *ptr.
 * Clobbers: r2 (strexb status), ip, flags.
 * FIX: the previous code computed (~old) & value; GCC's
 * __sync_nand_and_fetch semantics are ~(old & value), which is
 * what is computed now.
 */
ENTRY_NP(_atomic_nand_8_nv)
mov ip, r0 /* need r0 for return value */
1: ldrexb r0, [ip] /* load old value */
ands r0, r0, r1 /* old & value */
mvns r0, r0 /* new value = ~(old & value) (return value) */
strexb r2, r0, [ip] /* try to store; r2 = 0 on success */
cmp r2, #0 /* succeed? */
bne 1b /* no, try again */
#ifdef _ARM_ARCH_7
dmb
#else
mcr p15, 0, r2, c7, c10, 5 /* data memory barrier (r2 == 0 here) */
#endif
RET /* return new value */
END(_atomic_nand_8_nv)
ATOMIC_OP_ALIAS(atomic_nand_8_nv,_atomic_nand_8_nv)
ATOMIC_OP_ALIAS(atomic_nand_uchar_nv,_atomic_nand_8_nv)
STRONG_ALIAS(__sync_nand_and_fetch_1,_atomic_nand_8_nv)
STRONG_ALIAS(_atomic_nand_uchar_nv,_atomic_nand_8_nv)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: atomic_or_16.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#ifdef _ARM_ARCH_6
/*
 * 16-bit atomic OR, returning the OLD value (serves
 * __sync_fetch_and_or_2; the atomic_or_16 alias ignores the return).
 * In:  r0 = pointer, r1 = bits to set.  Out: r0 = previous value.
 * Clobbers: r2 (strexh status), r3 (new value), ip, flags.
 * Requires the ARMv6 ldrexh/strexh exclusives (hence the guard).
 */
ENTRY_NP(_atomic_or_16)
mov ip, r0 /* free r0 for the return value */
1: ldrexh r0, [ip] /* load old value (to be returned) */
orrs r3, r0, r1 /* calculate new value */
strexh r2, r3, [ip] /* try to store; r2 = 0 on success */
cmp r2, #0 /* succeed? */
bne 1b /* no, try again */
#ifdef _ARM_ARCH_7
dmb
#else
mcr p15, 0, r2, c7, c10, 5 /* data memory barrier (r2 == 0 here) */
#endif
RET /* return old value */
END(_atomic_or_16)
ATOMIC_OP_ALIAS(atomic_or_16,_atomic_or_16)
ATOMIC_OP_ALIAS(atomic_or_ushort,_atomic_or_16)
STRONG_ALIAS(__sync_fetch_and_or_2,_atomic_or_16)
STRONG_ALIAS(_atomic_or_ushort,_atomic_or_16)
/*
 * 16-bit atomic OR, returning the NEW value (serves
 * __sync_or_and_fetch_2 and atomic_or_16_nv).
 * In:  r0 = pointer, r1 = bits to set.  Out: r0 = new value.
 * Clobbers: r2 (strexh status), ip, flags.
 */
ENTRY_NP(_atomic_or_16_nv)
mov ip, r0 /* need r0 for return value */
1: ldrexh r0, [ip] /* load old value */
orrs r0, r0, r1 /* calculate new value (return value) */
strexh r2, r0, [ip] /* try to store; r2 = 0 on success */
cmp r2, #0 /* succeed? */
bne 1b /* no, try again */
#ifdef _ARM_ARCH_7
dmb
#else
mcr p15, 0, r2, c7, c10, 5 /* data memory barrier (r2 == 0 here) */
#endif
RET /* return new value */
END(_atomic_or_16_nv)
ATOMIC_OP_ALIAS(atomic_or_16_nv,_atomic_or_16_nv)
ATOMIC_OP_ALIAS(atomic_or_ushort_nv,_atomic_or_16_nv)
STRONG_ALIAS(__sync_or_and_fetch_2,_atomic_or_16_nv)
STRONG_ALIAS(_atomic_or_ushort_nv,_atomic_or_16_nv)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: atomic_or_8.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#ifdef _ARM_ARCH_6
/*
 * 8-bit atomic OR, returning the OLD value (serves
 * __sync_fetch_and_or_1; the atomic_or_8 alias ignores the return).
 * In:  r0 = pointer, r1 = bits to set.  Out: r0 = previous value.
 * Clobbers: r2 (strexb status), r3 (new value), ip, flags.
 * Requires the ARMv6 ldrexb/strexb exclusives (hence the guard).
 */
ENTRY_NP(_atomic_or_8)
mov ip, r0 /* free r0 for the return value */
1: ldrexb r0, [ip] /* load old value (to be returned) */
orrs r3, r0, r1 /* calculate new value */
strexb r2, r3, [ip] /* try to store; r2 = 0 on success */
cmp r2, #0 /* succeed? */
bne 1b /* no, try again */
#ifdef _ARM_ARCH_7
dmb
#else
mcr p15, 0, r2, c7, c10, 5 /* data memory barrier (r2 == 0 here) */
#endif
RET /* return old value */
END(_atomic_or_8)
ATOMIC_OP_ALIAS(atomic_or_8,_atomic_or_8)
ATOMIC_OP_ALIAS(atomic_or_char,_atomic_or_8)
STRONG_ALIAS(__sync_fetch_and_or_1,_atomic_or_8)
STRONG_ALIAS(_atomic_or_char,_atomic_or_8)
/*
 * 8-bit atomic OR, returning the NEW value (serves
 * __sync_or_and_fetch_1 and atomic_or_8_nv).
 * In:  r0 = pointer, r1 = bits to set.  Out: r0 = new value.
 * Clobbers: r2 (strexb status), ip, flags.
 */
ENTRY_NP(_atomic_or_8_nv)
mov ip, r0 /* need r0 for return value */
1: ldrexb r0, [ip] /* load old value */
orrs r0, r0, r1 /* calculate new value (return value) */
strexb r2, r0, [ip] /* try to store; r2 = 0 on success */
cmp r2, #0 /* succeed? */
bne 1b /* no, try again */
#ifdef _ARM_ARCH_7
dmb
#else
mcr p15, 0, r2, c7, c10, 5 /* data memory barrier (r2 == 0 here) */
#endif
RET /* return new value */
END(_atomic_or_8_nv)
ATOMIC_OP_ALIAS(atomic_or_8_nv,_atomic_or_8_nv)
ATOMIC_OP_ALIAS(atomic_or_char_nv,_atomic_or_8_nv)
STRONG_ALIAS(__sync_or_and_fetch_1,_atomic_or_8_nv)
STRONG_ALIAS(_atomic_or_char_nv,_atomic_or_8_nv)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: atomic_sub_64.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#ifdef _ARM_ARCH_6
/*
 * 64-bit atomic subtract, returning the NEW value in r0:r1
 * (serves __sync_sub_and_fetch_8 and atomic_sub_64_nv).
 * In:  r0 = pointer; 64-bit operand in r2:r3 on EABI (64-bit args
 *      use an even/odd register pair) or r1:r2 on the old ABI,
 *      which the #ifndef below shuffles into r2:r3.
 * Out: r0:r1 = new value.  Clobbers: r2, r3, ip, flags; r4 is
 *      saved/restored (strexd status register).
 * LO/HI/NLO/NHI come from atomic_op_asm.h and select the low/high
 * words of the r0:r1 / r2:r3 pairs (presumably endian-aware --
 * confirm against atomic_op_asm.h).
 * NOTE(review): r3 appears to be pushed along with r4 to keep the
 * stack 8-byte aligned -- confirm.
 */
ENTRY_NP(_atomic_sub_64_nv)
push {r3,r4} /* save temporary */
mov ip, r0 /* need r0 for return value */
#ifndef __ARM_EABI__
mov r3, r2 /* old ABI: move operand from r1:r2 ... */
mov r2, r1 /* ... into r2:r3, as on EABI */
#endif
1: ldrexd r0, [ip] /* load old value into r0:r1 */
subs LO, LO, NLO /* subtract low words, set borrow */
sbcs HI, HI, NHI /* subtract high words with borrow */
strexd r4, r0, [ip] /* try to store r0:r1; r4 = 0 on success */
cmp r4, #0 /* succeed? */
bne 1b /* no, try again */
#ifdef _ARM_ARCH_7
dmb
#else
mcr p15, 0, r4, c7, c10, 5 /* data memory barrier (r4 == 0 here) */
#endif
pop {r3,r4} /* restore temporary */
RET /* return new value */
END(_atomic_sub_64_nv)
STRONG_ALIAS(_atomic_sub_64,_atomic_sub_64_nv)
ATOMIC_OP_ALIAS(atomic_sub_64_nv,_atomic_sub_64_nv)
ATOMIC_OP_ALIAS(atomic_sub_64,_atomic_sub_64)
STRONG_ALIAS(__sync_sub_and_fetch_8,_atomic_sub_64_nv)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: atomic_swap_16.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas of 3am Software Foundry.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#ifdef _ARM_ARCH_6
/*
 * 16-bit atomic swap: store r1 into *r0, return the previous value
 * (serves __sync_lock_test_and_set_2 and atomic_swap_16).
 * In:  r0 = pointer, r1 = new value.  Out: r0 = old value.
 * Clobbers: r3 (strexh status), ip, flags.
 * FIX: the pre-v7 CP15 barrier (c7,c10,5) register operand is
 * "should be zero"; it previously passed ip, which holds the
 * pointer.  Pass r3 instead -- it is 0 after the successful strexh,
 * matching the sibling atomic_*.S files.
 */
ENTRY_NP(_atomic_swap_16)
mov ip, r0 /* free r0 for the return value */
1:
ldrexh r0, [ip] /* load old value */
strexh r3, r1, [ip] /* try to store new value; r3 = 0 on success */
cmp r3, #0 /* succeed? */
bne 1b /* no, try again */
#ifdef _ARM_ARCH_7
dmb
#else
mcr p15, 0, r3, c7, c10, 5 /* data memory barrier (r3 == 0 here) */
#endif
RET /* return old value */
END(_atomic_swap_16)
ATOMIC_OP_ALIAS(atomic_swap_16,_atomic_swap_16)
ATOMIC_OP_ALIAS(atomic_swap_short,_atomic_swap_16)
ATOMIC_OP_ALIAS(atomic_swap_ushort,_atomic_swap_16)
STRONG_ALIAS(__sync_lock_test_and_set_2,_atomic_swap_16)
STRONG_ALIAS(_atomic_swap_short,_atomic_swap_16)
STRONG_ALIAS(_atomic_swap_ushort,_atomic_swap_16)
/*
 * void __sync_lock_release_2(uint16_t *p)
 *
 * GCC documents __sync_lock_release as a RELEASE barrier followed by
 * storing 0 to the lock word.
 * FIX: issue the data memory barrier BEFORE the store so that all
 * prior accesses inside the critical section are visible before the
 * lock is seen as free; the previous code had no barrier at all.
 * Clobbers: r1.
 */
ENTRY_NP(__sync_lock_release_2)
mov r1, #0
#ifdef _ARM_ARCH_7
dmb /* release: order prior accesses before the store */
#else
mcr p15, 0, r1, c7, c10, 5 /* data memory barrier (r1 == 0) */
#endif
strh r1, [r0] /* *p = 0 */
RET
END(__sync_lock_release_2)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: atomic_xor_16.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#ifdef _ARM_ARCH_6
/*
 * 16-bit atomic XOR, returning the OLD value (serves
 * __sync_fetch_and_xor_2; the atomic_xor_16 alias ignores the return).
 * In:  r0 = pointer, r1 = bits to toggle.  Out: r0 = previous value.
 * Clobbers: r2 (strexh status), r3 (new value), ip, flags.
 */
ENTRY_NP(_atomic_xor_16)
mov ip, r0 /* free r0 for the return value */
1: ldrexh r0, [ip] /* load old value (to be returned) */
eors r3, r0, r1 /* calculate new value */
strexh r2, r3, [ip] /* try to store; r2 = 0 on success */
cmp r2, #0 /* succeed? */
bne 1b /* no, try again */
#ifdef _ARM_ARCH_7
dmb
#else
mcr p15, 0, r2, c7, c10, 5 /* data memory barrier (r2 == 0 here) */
#endif
RET /* return old value */
END(_atomic_xor_16)
ATOMIC_OP_ALIAS(atomic_xor_16,_atomic_xor_16)
ATOMIC_OP_ALIAS(atomic_xor_ushort,_atomic_xor_16)
STRONG_ALIAS(__sync_fetch_and_xor_2,_atomic_xor_16)
STRONG_ALIAS(_atomic_xor_ushort,_atomic_xor_16)
/*
 * 16-bit atomic XOR, returning the NEW value (serves
 * __sync_xor_and_fetch_2 and atomic_xor_16_nv).
 * In:  r0 = pointer, r1 = bits to toggle.  Out: r0 = new value.
 * Clobbers: r2 (strexh status), ip, flags.
 */
ENTRY_NP(_atomic_xor_16_nv)
mov ip, r0 /* need r0 for return value */
1: ldrexh r0, [ip] /* load old value */
eors r0, r0, r1 /* calculate new value (return value) */
strexh r2, r0, [ip] /* try to store; r2 = 0 on success */
cmp r2, #0 /* succeed? */
bne 1b /* no, try again */
#ifdef _ARM_ARCH_7
dmb
#else
mcr p15, 0, r2, c7, c10, 5 /* data memory barrier (r2 == 0 here) */
#endif
RET /* return new value */
END(_atomic_xor_16_nv)
ATOMIC_OP_ALIAS(atomic_xor_16_nv,_atomic_xor_16_nv)
ATOMIC_OP_ALIAS(atomic_xor_ushort_nv,_atomic_xor_16_nv)
STRONG_ALIAS(__sync_xor_and_fetch_2,_atomic_xor_16_nv)
STRONG_ALIAS(_atomic_xor_ushort_nv,_atomic_xor_16_nv)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: atomic_xor_32.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2008 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#ifdef _ARM_ARCH_6
/*
 * 32-bit atomic XOR, returning the OLD value (serves
 * __sync_fetch_and_xor_4; the atomic_xor_32/uint/ulong aliases
 * ignore the return).
 * In:  r0 = pointer, r1 = bits to toggle.  Out: r0 = previous value.
 * Clobbers: r2 (strex status), r3 (new value), ip, flags.
 */
ENTRY_NP(_atomic_xor_32)
mov ip, r0 /* free r0 for the return value */
1: ldrex r0, [ip] /* load old value (to be returned) */
eors r3, r0, r1 /* calculate new value */
strex r2, r3, [ip] /* try to store; r2 = 0 on success */
cmp r2, #0 /* succeed? */
bne 1b /* no, try again */
#ifdef _ARM_ARCH_7
dmb
#else
mcr p15, 0, r2, c7, c10, 5 /* data memory barrier (r2 == 0 here) */
#endif
RET /* return old value */
END(_atomic_xor_32)
ATOMIC_OP_ALIAS(atomic_xor_32,_atomic_xor_32)
ATOMIC_OP_ALIAS(atomic_xor_uint,_atomic_xor_32)
ATOMIC_OP_ALIAS(atomic_xor_ulong,_atomic_xor_32)
STRONG_ALIAS(__sync_fetch_and_xor_4,_atomic_xor_32)
STRONG_ALIAS(_atomic_xor_uint,_atomic_xor_32)
STRONG_ALIAS(_atomic_xor_ulong,_atomic_xor_32)
/*
 * 32-bit atomic XOR, returning the NEW value (serves
 * __sync_xor_and_fetch_4 and atomic_xor_32_nv).
 * In:  r0 = pointer, r1 = bits to toggle.  Out: r0 = new value.
 * Clobbers: r2 (strex status), ip, flags.
 */
ENTRY_NP(_atomic_xor_32_nv)
mov ip, r0 /* need r0 for return value */
1: ldrex r0, [ip] /* load old value */
eors r0, r0, r1 /* calculate new value (return value) */
strex r2, r0, [ip] /* try to store; r2 = 0 on success */
cmp r2, #0 /* succeed? */
bne 1b /* no, try again */
#ifdef _ARM_ARCH_7
dmb
#else
mcr p15, 0, r2, c7, c10, 5 /* data memory barrier (r2 == 0 here) */
#endif
RET /* return new value */
END(_atomic_xor_32_nv)
ATOMIC_OP_ALIAS(atomic_xor_32_nv,_atomic_xor_32_nv)
ATOMIC_OP_ALIAS(atomic_xor_uint_nv,_atomic_xor_32_nv)
ATOMIC_OP_ALIAS(atomic_xor_ulong_nv,_atomic_xor_32_nv)
STRONG_ALIAS(__sync_xor_and_fetch_4,_atomic_xor_32_nv)
STRONG_ALIAS(_atomic_xor_uint_nv,_atomic_xor_32_nv)
STRONG_ALIAS(_atomic_xor_ulong_nv,_atomic_xor_32_nv)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: atomic_xor_64.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#ifdef _ARM_ARCH_6
/*
 * 64-bit atomic XOR, returning the NEW value in r0:r1.
 * In:  r0 = pointer; 64-bit operand in r2:r3 on EABI (even/odd
 *      register pair) or r1:r2 on the old ABI (shuffled below).
 * Out: r0:r1 = new value.  Clobbers: r2, r3, ip, flags; r4 is
 *      saved/restored (strexd status register).
 * Unlike add/sub, XOR is applied word-by-word with no carries, so
 * the word order of the ldrexd pair does not affect correctness.
 * NOTE(review): r3 appears to be pushed with r4 to keep the stack
 * 8-byte aligned -- confirm.
 */
ENTRY_NP(_atomic_xor_64_nv)
push {r3,r4} /* save temporary */
#ifndef __ARM_EABI__
mov r3, r2 /* old ABI: move operand from r1:r2 ... */
mov r2, r1 /* ... into r2:r3, as on EABI */
#endif
mov ip, r0 /* need r0 for return value */
1: ldrexd r0, [ip] /* load old value into r0:r1 */
eors r0, r0, r2 /* xor 1st word (return value) */
eors r1, r1, r3 /* xor 2nd word (return value) */
strexd r4, r0, [ip] /* try to store r0:r1; r4 = 0 on success */
cmp r4, #0 /* succeed? */
bne 1b /* no, try again */
#ifdef _ARM_ARCH_7
dmb
#else
mcr p15, 0, r4, c7, c10, 5 /* data memory barrier (r4 == 0 here) */
#endif
pop {r3,r4} /* restore temporary */
RET /* return new value */
END(_atomic_xor_64_nv)
STRONG_ALIAS(_atomic_xor_64,_atomic_xor_64_nv)
ATOMIC_OP_ALIAS(atomic_xor_64_nv,_atomic_xor_64_nv)
ATOMIC_OP_ALIAS(atomic_xor_64,_atomic_xor_64)
/* NOTE(review): siblings alias the __sync_* symbol to the _nv entry
 * directly; here it goes through the _atomic_xor_64 alias.  Same
 * target either way, but inconsistent -- confirm intent. */
STRONG_ALIAS(__sync_xor_and_fetch_8,_atomic_xor_64)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: atomic_xor_8.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#ifdef _ARM_ARCH_6
/*
 * 8-bit atomic XOR, returning the OLD value (serves
 * __sync_fetch_and_xor_1; the atomic_xor_8 alias ignores the return).
 * In:  r0 = pointer, r1 = bits to toggle.  Out: r0 = previous value.
 * Clobbers: r2 (strexb status), r3 (new value), ip, flags.
 */
ENTRY_NP(_atomic_xor_8)
mov ip, r0 /* free r0 for the return value */
1: ldrexb r0, [ip] /* load old value (to be returned) */
eors r3, r0, r1 /* calculate new value */
strexb r2, r3, [ip] /* try to store; r2 = 0 on success */
cmp r2, #0 /* succeed? */
bne 1b /* no, try again */
#ifdef _ARM_ARCH_7
dmb
#else
mcr p15, 0, r2, c7, c10, 5 /* data memory barrier (r2 == 0 here) */
#endif
RET /* return old value */
END(_atomic_xor_8)
ATOMIC_OP_ALIAS(atomic_xor_8,_atomic_xor_8)
ATOMIC_OP_ALIAS(atomic_xor_uchar,_atomic_xor_8)
STRONG_ALIAS(__sync_fetch_and_xor_1,_atomic_xor_8)
STRONG_ALIAS(_atomic_xor_uchar,_atomic_xor_8)
/*
 * 8-bit atomic XOR, returning the NEW value (serves
 * __sync_xor_and_fetch_1 and atomic_xor_8_nv).
 * In:  r0 = pointer, r1 = bits to toggle.  Out: r0 = new value.
 * Clobbers: r2 (strexb status), ip, flags.
 */
ENTRY_NP(_atomic_xor_8_nv)
mov ip, r0 /* need r0 for return value */
1: ldrexb r0, [ip] /* load old value */
eors r0, r0, r1 /* calculate new value (return value) */
strexb r2, r0, [ip] /* try to store; r2 = 0 on success */
cmp r2, #0 /* succeed? */
bne 1b /* no, try again */
#ifdef _ARM_ARCH_7
dmb
#else
mcr p15, 0, r2, c7, c10, 5 /* data memory barrier (r2 == 0 here) */
#endif
RET /* return new value */
END(_atomic_xor_8_nv)
ATOMIC_OP_ALIAS(atomic_xor_8_nv,_atomic_xor_8_nv)
ATOMIC_OP_ALIAS(atomic_xor_uchar_nv,_atomic_xor_8_nv)
STRONG_ALIAS(__sync_xor_and_fetch_1,_atomic_xor_8_nv)
STRONG_ALIAS(_atomic_xor_uchar_nv,_atomic_xor_8_nv)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: sync_bool_compare_and_swap_1.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#if defined(_ARM_ARCH_6)
/*
 * ARMv6 has load-exclusive/store-exclusive which works for both user
 * and kernel.
 */
/*
 * bool __sync_bool_compare_and_swap_1(uint8_t *p, uint8_t old, uint8_t new)
 * In:  r0 = p, r1 = old, r2 = new.
 * Out: r0 = 1 if *p equalled old and was replaced by new, else 0.
 * Clobbers: r3, ip, flags.
 * FIX: the "assume no match" zeroing of r0 must happen on EVERY
 * iteration: a failed strexb leaves r0 == 1, so if a retry then saw
 * a mismatch the old code returned 1 without having swapped.  The
 * barrier is also issued while r0 is still 0, since the pre-v7 CP15
 * DSB register operand is "should be zero".
 * NOTE(review): the mismatch exit leaves the exclusive monitor set;
 * clrex would clear it but is ARMv6K+, so it is not used here.
 */
ENTRY_NP(__sync_bool_compare_and_swap_1)
mov ip, r0 /* we need r0 for return value */
1:
mov r0, #0 /* assume no match; redo each retry */
ldrexb r3, [ip] /* load old value */
cmp r3, r1 /* compare? */
#ifdef __thumb__
bne 2f
#else
RETc(ne) /* return 0 if different */
#endif
strexb r0, r2, [ip] /* store new value; r0 = 0 on success */
cmp r0, #0 /* succeed? */
bne 1b /* nope, try again. */
#ifdef _ARM_ARCH_7
dsb /* data synchronization barrier */
#else
mcr p15, 0, r0, c7, c10, 4 /* data synchronization barrier (r0 == 0) */
#endif
mov r0, #1 /* it was a success */
2: RET /* return. */
END(__sync_bool_compare_and_swap_1)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: sync_bool_compare_and_swap_2.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#if defined(_ARM_ARCH_6)
/*
 * ARMv6 has load-exclusive/store-exclusive which works for both user
 * and kernel.
 */
/*
 * bool __sync_bool_compare_and_swap_2(uint16_t *p, uint16_t old, uint16_t new)
 * In:  r0 = p, r1 = old, r2 = new.
 * Out: r0 = 1 if *p equalled old and was replaced by new, else 0.
 * Clobbers: r3, ip, flags.
 * FIX: the "assume failure" zeroing of r0 must happen on EVERY
 * iteration: a failed strexh leaves r0 == 1, so if a retry then saw
 * a mismatch the old code returned 1 without having swapped.  The
 * barrier is also issued while r0 is still 0 (CP15 DSB operand SBZ).
 * NOTE(review): the mismatch exit leaves the exclusive monitor set;
 * clrex would clear it but is ARMv6K+, so it is not used here.
 */
ENTRY_NP(__sync_bool_compare_and_swap_2)
mov ip, r0 /* we need r0 for return value */
1:
movs r0, #0 /* assume failure; redo each retry */
ldrexh r3, [ip] /* load old value */
cmp r3, r1 /* compare? */
#ifdef __thumb__
bne 2f
#else
RETc(ne) /* return 0 if different */
#endif
strexh r0, r2, [ip] /* store new value; r0 = 0 on success */
cmp r0, #0 /* succeed? */
bne 1b /* nope, try again. */
#ifdef _ARM_ARCH_7
dsb /* data synchronization barrier */
#else
mcr p15, 0, r0, c7, c10, 4 /* data synchronization barrier (r0 == 0) */
#endif
movs r0, #1 /* indicate success */
2: RET /* return. */
END(__sync_bool_compare_and_swap_2)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: sync_bool_compare_and_swap_4.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2008 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#if defined(_ARM_ARCH_6)
/*
 * ARMv6 has load-exclusive/store-exclusive which works for both user
 * and kernel.
 */
/*
 * bool __sync_bool_compare_and_swap_4(uint32_t *p, uint32_t old, uint32_t new)
 * In:  r0 = p, r1 = old, r2 = new.
 * Out: r0 = 1 if *p equalled old and was replaced by new, else 0.
 * Clobbers: r3, ip, flags.
 * FIX: the "assume failure" zeroing of r0 must happen on EVERY
 * iteration: a failed strex leaves r0 == 1, so if a retry then saw
 * a mismatch the old code returned 1 without having swapped.  The
 * barrier is also issued while r0 is still 0 (CP15 DSB operand SBZ).
 * NOTE(review): the mismatch exit leaves the exclusive monitor set;
 * clrex would clear it but is ARMv6K+, so it is not used here.
 */
ENTRY_NP(__sync_bool_compare_and_swap_4)
mov ip, r0 /* we need r0 for return value */
1:
movs r0, #0 /* assume failure; redo each retry */
ldrex r3, [ip] /* load old value */
cmp r3, r1 /* compare? */
#ifdef __thumb__
bne 2f /* return if different */
#else
RETc(ne) /* return if different */
#endif
strex r0, r2, [ip] /* store new value; r0 = 0 on success */
cmp r0, #0 /* succeed? */
bne 1b /* nope, try again. */
#ifdef _ARM_ARCH_7
dsb
#else
mcr p15, 0, r0, c7, c10, 4 /* data synchronization barrier (r0 == 0) */
#endif
movs r0, #1 /* indicate success */
2: RET /* return. */
END(__sync_bool_compare_and_swap_4)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: sync_bool_compare_and_swap_8.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2012 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#if defined(_ARM_ARCH_6)
/*
 * ARMv6 has load-exclusive/store-exclusive which works for both user
 * and kernel.
 */
/*
 * bool __sync_bool_compare_and_swap_8(uint64_t *p, uint64_t old, uint64_t new)
 * EABI:   r0 = p, r2:r3 = old (even/odd pair; r1 skipped),
 *         new on the caller's stack ([sp] at entry).
 * oldabi: r0 = p, r1:r2 = old, new in r3 + [sp] at entry (APCS packs
 *         64-bit args without register-pair alignment).
 * Out: r0 = 1 on successful swap, else 0.  r4-r7 saved/restored.
 * FIXES:
 *  1. push {r4-r7} moves sp down 16 bytes, so the caller's stack
 *     words are at [sp, #16], not [sp, #0] -- the old code reloaded
 *     the just-saved r4/r5 as the "new" value.
 *  2. The oldabi path now takes new's 1st word from r3 (where APCS
 *     delivers it) before r3 is reused for old's 2nd word.
 *     NOTE(review): confirm APCS argument packing if the old ABI is
 *     still supported.
 *  3. The "assume failure" zeroing of r0 is done on EVERY iteration:
 *     a failed strexd leaves r0 == 1, so a retry that then saw a
 *     mismatch used to return 1 without having swapped.
 *  4. The pre-v7 DSB is issued while r0 == 0 (CP15 operand SBZ)
 *     instead of passing ip (the pointer).
 */
ENTRY_NP(__sync_bool_compare_and_swap_8)
push {r4-r7} /* save temporaries (16 bytes) */
mov ip, r0 /* we need r0 for return value */
#ifdef __ARM_EABI__
ldrd r4, [sp, #16] /* fetch new value (was [sp] before push) */
#else
mov r4, r3 /* new 1st word arrives in r3 */
ldr r5, [sp, #16] /* new 2nd word (was [sp] before push) */
mov r3, r2 /* old 2nd word: r2 is about to be reused */
mov r2, r1 /* old 1st word: r1 will be overwritten by ldrexd */
#endif
1:
movs r0, #0 /* assume failure; redo each retry */
ldrexd r6, [ip] /* load current value into r6:r7 */
cmp r6, r2 /* compare to old? 1st word */
#ifdef __thumb__
bne 2f /* jump to return if different */
cmp r7, r3 /* compare to old? 2nd word */
#else
cmpeq r7, r3 /* compare to old? 2nd word */
#endif
bne 2f /* jump to return if different */
strexd r0, r4, [ip] /* store new value r4:r5; r0 = 0 on success */
cmp r0, #0 /* succeed? */
bne 1b /* nope, try again. */
#ifdef _ARM_ARCH_7
dsb
#else
mcr p15, 0, r0, c7, c10, 4 /* data synchronization barrier (r0 == 0) */
#endif
movs r0, #1 /* indicate success */
2: pop {r4-r7} /* restore temporaries */
RET /* return. */
END(__sync_bool_compare_and_swap_8)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: sync_fetch_and_add_8.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#ifdef _ARM_ARCH_6
/*
 * uint64_t __sync_fetch_and_add_8(uint64_t *ptr, uint64_t val)
 *
 * Atomically *ptr += val; returns the previous value of *ptr.
 *
 * In (EABI): r0 = ptr, r2:r3 = val   In (APCS): r0 = ptr, r1:r2 = val
 * Out:       r0:r1 = old value
 *
 * Fix: the conditional was "#ifndef __ARM__EABI__" (misspelled macro,
 * never defined), so the APCS register shuffle also ran on EABI builds
 * and corrupted the operand.  The correct macro is __ARM_EABI__.
 */
ENTRY_NP(__sync_fetch_and_add_8)
	push	{r4-r7}			/* save temporaries */
	mov	ip, r0			/* free r0:r1 for the old value */
#ifndef __ARM_EABI__
	mov	r3, r2			/* APCS: move val from r1:r2 ... */
	mov	r2, r1			/* ... into r2:r3 (EABI layout) */
#endif
1:	ldrexd	r0, [ip]		/* load old value (r0:r1) */
	adds	TLO, LO, NLO		/* calculate new value, low half (sets C) */
	adcs	THI, HI, NHI		/* calculate new value, high half + carry */
	strexd	r6, r4, [ip]		/* try to store r4:r5 */
	cmp	r6, #0			/* succeed? */
	bne	1b			/* no, try again */
#ifdef _ARM_ARCH_7
	dmb
#else
	mcr	p15, 0, r2, c7, c10, 5	/* data memory barrier */
#endif
	pop	{r4-r7}			/* restore temporaries */
	RET				/* return old value */
END(__sync_fetch_and_add_8)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: sync_fetch_and_and_8.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#ifdef _ARM_ARCH_6
/*
 * uint64_t __sync_fetch_and_and_8(uint64_t *ptr, uint64_t val)
 *
 * Atomically *ptr &= val; returns the previous value of *ptr.
 *
 * In (EABI): r0 = ptr, r2:r3 = val   In (APCS): r0 = ptr, r1:r2 = val
 * Out:       r0:r1 = old value
 *
 * Fix: "#ifndef __ARM__EABI__" misspelled __ARM_EABI__, so the APCS
 * shuffle also ran on EABI builds, corrupting the operand.
 */
ENTRY_NP(__sync_fetch_and_and_8)
	push	{r4-r7}			/* save temporaries */
	mov	ip, r0			/* free r0:r1 for the old value */
#ifndef __ARM_EABI__
	mov	r3, r2			/* APCS: move val from r1:r2 ... */
	mov	r2, r1			/* ... into r2:r3 (EABI layout) */
#endif
1:	ldrexd	r0, [ip]		/* load old value (r0:r1) */
	ands	r4, r0, r2		/* calculate new value, low half */
	ands	r5, r1, r3		/* calculate new value, high half */
	strexd	r6, r4, [ip]		/* try to store r4:r5 */
	cmp	r6, #0			/* succeed? */
	bne	1b			/* no, try again */
#ifdef _ARM_ARCH_7
	dmb
#else
	mcr	p15, 0, r2, c7, c10, 5	/* data memory barrier */
#endif
	pop	{r4-r7}			/* restore temporaries */
	RET				/* return old value */
END(__sync_fetch_and_and_8)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: sync_fetch_and_nand_8.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#ifdef _ARM_ARCH_6
/*
 * uint64_t __sync_fetch_and_nand_8(uint64_t *ptr, uint64_t val)
 *
 * Atomically *ptr = ~old & val; returns the previous value of *ptr.
 *
 * In (EABI): r0 = ptr, r2:r3 = val   In (APCS): r0 = ptr, r1:r2 = val
 * Out:       r0:r1 = old value
 *
 * Fixes: (1) "#ifndef __ARM__EABI__" misspelled __ARM_EABI__, so the
 * APCS shuffle also ran on EABI builds, corrupting the operand.
 * (2) the second complement was "mvns r5, r0", complementing the low
 * half of the old value twice; the high half (r1) must be complemented.
 */
ENTRY_NP(__sync_fetch_and_nand_8)
	push	{r4-r7}			/* save temporaries */
	mov	ip, r0			/* free r0:r1 for the old value */
#ifndef __ARM_EABI__
	mov	r3, r2			/* APCS: move val from r1:r2 ... */
	mov	r2, r1			/* ... into r2:r3 (EABI layout) */
#endif
1:	ldrexd	r0, [ip]		/* load old value (r0:r1) */
	mvns	r4, r0			/* complement old value, low half */
	mvns	r5, r1			/* complement old value, high half */
	ands	r4, r4, r2		/* calculate new value, low half */
	ands	r5, r5, r3		/* calculate new value, high half */
	strexd	r6, r4, [ip]		/* try to store r4:r5 */
	cmp	r6, #0			/* succeed? */
	bne	1b			/* no, try again */
#ifdef _ARM_ARCH_7
	dmb
#else
	mcr	p15, 0, r2, c7, c10, 5	/* data memory barrier */
#endif
	pop	{r4-r7}			/* restore temporaries */
	RET				/* return old value */
END(__sync_fetch_and_nand_8)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: sync_fetch_and_or_8.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#ifdef _ARM_ARCH_6
/*
 * uint64_t __sync_fetch_and_or_8(uint64_t *ptr, uint64_t val)
 *
 * Atomically *ptr |= val; returns the previous value of *ptr.
 *
 * In (EABI): r0 = ptr, r2:r3 = val   In (APCS): r0 = ptr, r1:r2 = val
 * Out:       r0:r1 = old value
 *
 * Fix: "#ifndef __ARM__EABI__" misspelled __ARM_EABI__, so the APCS
 * shuffle also ran on EABI builds, corrupting the operand.
 */
ENTRY_NP(__sync_fetch_and_or_8)
	push	{r4-r7}			/* save temporaries */
	mov	ip, r0			/* free r0:r1 for the old value */
#ifndef __ARM_EABI__
	mov	r3, r2			/* APCS: move val from r1:r2 ... */
	mov	r2, r1			/* ... into r2:r3 (EABI layout) */
#endif
1:	ldrexd	r0, [ip]		/* load old value (r0:r1) */
	orrs	r4, r0, r2		/* calculate new value, low half */
	orrs	r5, r1, r3		/* calculate new value, high half */
	strexd	r6, r4, [ip]		/* try to store r4:r5 */
	cmp	r6, #0			/* succeed? */
	bne	1b			/* no, try again */
#ifdef _ARM_ARCH_7
	dmb
#else
	mcr	p15, 0, r2, c7, c10, 5	/* data memory barrier */
#endif
	pop	{r4-r7}			/* restore temporaries */
	RET				/* return old value */
END(__sync_fetch_and_or_8)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: sync_fetch_and_sub_8.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#ifdef _ARM_ARCH_6
/*
 * uint64_t __sync_fetch_and_sub_8(uint64_t *ptr, uint64_t val)
 *
 * Atomically *ptr -= val; returns the previous value of *ptr.
 *
 * In (EABI): r0 = ptr, r2:r3 = val   In (APCS): r0 = ptr, r1:r2 = val
 * Out:       r0:r1 = old value
 *
 * Fix: "#ifndef __ARM__EABI__" misspelled __ARM_EABI__, so the APCS
 * shuffle also ran on EABI builds, corrupting the operand.
 */
ENTRY_NP(__sync_fetch_and_sub_8)
	push	{r4-r7}			/* save temporaries */
	mov	ip, r0			/* free r0:r1 for the old value */
#ifndef __ARM_EABI__
	mov	r3, r2			/* APCS: move val from r1:r2 ... */
	mov	r2, r1			/* ... into r2:r3 (EABI layout) */
#endif
1:	ldrexd	r0, [ip]		/* load old value (r0:r1) */
	subs	TLO, LO, NLO		/* calculate new value, low half (sets C) */
	sbcs	THI, HI, NHI		/* calculate new value, high half - borrow */
	strexd	r6, r4, [ip]		/* try to store r4:r5 */
	cmp	r6, #0			/* succeed? */
	bne	1b			/* no, try again */
#ifdef _ARM_ARCH_7
	dmb
#else
	mcr	p15, 0, r2, c7, c10, 5	/* data memory barrier */
#endif
	pop	{r4-r7}			/* restore temporaries */
	RET				/* return old value */
END(__sync_fetch_and_sub_8)
#endif /* _ARM_ARCH_6 */
/* $NetBSD: sync_fetch_and_xor_8.S,v 1.1 2013/11/08 22:42:52 matt Exp $ */
/*-
* Copyright (c) 2013 The NetBSD Foundation, Inc.
* All rights reserved.
*
* This code is derived from software contributed to The NetBSD Foundation
* by Matt Thomas <matt@3am-software.com>
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
* ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include "atomic_op_asm.h"
#ifdef _ARM_ARCH_6
/*
 * uint64_t __sync_fetch_and_xor_8(uint64_t *ptr, uint64_t val)
 *
 * Atomically *ptr ^= val; returns the previous value of *ptr.
 *
 * In (EABI): r0 = ptr, r2:r3 = val   In (APCS): r0 = ptr, r1:r2 = val
 * Out:       r0:r1 = old value
 *
 * Fix: "#ifndef __ARM__EABI__" misspelled __ARM_EABI__, so the APCS
 * shuffle also ran on EABI builds, corrupting the operand.
 */
ENTRY_NP(__sync_fetch_and_xor_8)
	push	{r4-r7}			/* save temporaries */
	mov	ip, r0			/* free r0:r1 for the old value */
#ifndef __ARM_EABI__
	mov	r3, r2			/* APCS: move val from r1:r2 ... */
	mov	r2, r1			/* ... into r2:r3 (EABI layout) */
#endif
1:	ldrexd	r0, [ip]		/* load old value (r0:r1) */
	eors	r4, r0, r2		/* calculate new value, low half */
	eors	r5, r1, r3		/* calculate new value, high half */
	strexd	r6, r4, [ip]		/* try to store r4:r5 */
	cmp	r6, #0			/* succeed? */
	bne	1b			/* no, try again */
#ifdef _ARM_ARCH_7
	dmb
#else
	mcr	p15, 0, r2, c7, c10, 5	/* data memory barrier */
#endif
	pop	{r4-r7}			/* restore temporaries */
	RET				/* return old value */
END(__sync_fetch_and_xor_8)
#endif /* _ARM_ARCH_6 */
--- src/common/lib/libc/arch/arm/atomic/atomic_add_32.S 2013/08/11 04:41:17 1.5
+++ src/common/lib/libc/arch/arm/atomic/atomic_add_32.S 2013/11/08 22:42:52 1.6
| @@ -1,14 +1,14 @@ | | | @@ -1,14 +1,14 @@ |
1 | /* $NetBSD: atomic_add_32.S,v 1.5 2013/08/11 04:41:17 matt Exp $ */ | | 1 | /* $NetBSD: atomic_add_32.S,v 1.6 2013/11/08 22:42:52 matt Exp $ */ |
2 | | | 2 | |
3 | /*- | | 3 | /*- |
4 | * Copyright (c) 2008 The NetBSD Foundation, Inc. | | 4 | * Copyright (c) 2008 The NetBSD Foundation, Inc. |
5 | * All rights reserved. | | 5 | * All rights reserved. |
6 | * | | 6 | * |
7 | * This code is derived from software contributed to The NetBSD Foundation | | 7 | * This code is derived from software contributed to The NetBSD Foundation |
8 | * by Matt Thomas <matt@3am-software.com> | | 8 | * by Matt Thomas <matt@3am-software.com> |
9 | * | | 9 | * |
10 | * Redistribution and use in source and binary forms, with or without | | 10 | * Redistribution and use in source and binary forms, with or without |
11 | * modification, are permitted provided that the following conditions | | 11 | * modification, are permitted provided that the following conditions |
12 | * are met: | | 12 | * are met: |
13 | * 1. Redistributions of source code must retain the above copyright | | 13 | * 1. Redistributions of source code must retain the above copyright |
14 | * notice, this list of conditions and the following disclaimer. | | 14 | * notice, this list of conditions and the following disclaimer. |
| @@ -23,58 +23,86 @@ | | | @@ -23,58 +23,86 @@ |
23 | * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR | | 23 | * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR |
24 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF | | 24 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF |
25 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS | | 25 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS |
26 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN | | 26 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN |
27 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) | | 27 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) |
28 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE | | 28 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE |
29 | * POSSIBILITY OF SUCH DAMAGE. | | 29 | * POSSIBILITY OF SUCH DAMAGE. |
30 | */ | | 30 | */ |
31 | | | 31 | |
32 | #include "atomic_op_asm.h" | | 32 | #include "atomic_op_asm.h" |
33 | | | 33 | |
34 | #ifdef _ARM_ARCH_6 | | 34 | #ifdef _ARM_ARCH_6 |
35 | | | 35 | |
| | | 36 | ENTRY_NP(_atomic_sub_32) |
| | | 37 | negs r1, r1 |
| | | 38 | /* FALLTHROUGH */ |
36 | ENTRY_NP(_atomic_add_32) | | 39 | ENTRY_NP(_atomic_add_32) |
37 | 1: ldrex r3, [r0] /* load old value */ | | 40 | mov ip, r0 |
38 | adds r3, r3, r1 /* calculate new value */ | | 41 | 1: ldrex r0, [ip] /* load old value */ |
39 | strex r2, r3, [r0] /* try to store */ | | 42 | adds r3, r0, r1 /* calculate new value */ |
| | | 43 | strex r2, r3, [ip] /* try to store */ |
40 | cmp r2, #0 /* succeed? */ | | 44 | cmp r2, #0 /* succeed? */ |
41 | bne 1b /* no, try again */ | | 45 | bne 1b /* no, try again */ |
42 | #ifdef _ARM_ARCH_7 | | 46 | #ifdef _ARM_ARCH_7 |
43 | dmb | | 47 | dmb |
44 | #else | | 48 | #else |
45 | mcr p15, 0, r2, c7, c10, 5 /* data memory barrier */ | | 49 | mcr p15, 0, r2, c7, c10, 5 /* data memory barrier */ |
46 | #endif | | 50 | #endif |
47 | RET /* return old value */ | | 51 | RET /* return old value */ |
48 | END(_atomic_add_32) | | 52 | END(_atomic_add_32) |
| | | 53 | END(_atomic_sub_32) |
49 | | | 54 | |
50 | ATOMIC_OP_ALIAS(atomic_add_32,_atomic_add_32) | | 55 | ATOMIC_OP_ALIAS(atomic_add_32,_atomic_add_32) |
51 | ATOMIC_OP_ALIAS(atomic_add_int,_atomic_add_32) | | 56 | ATOMIC_OP_ALIAS(atomic_add_int,_atomic_add_32) |
52 | ATOMIC_OP_ALIAS(atomic_add_long,_atomic_add_32) | | 57 | ATOMIC_OP_ALIAS(atomic_add_long,_atomic_add_32) |
53 | ATOMIC_OP_ALIAS(atomic_add_ptr,_atomic_add_32) | | 58 | ATOMIC_OP_ALIAS(atomic_add_ptr,_atomic_add_32) |
| | | 59 | STRONG_ALIAS(__sync_fetch_and_add_4,_atomic_add_32) |
54 | STRONG_ALIAS(_atomic_add_int,_atomic_add_32) | | 60 | STRONG_ALIAS(_atomic_add_int,_atomic_add_32) |
55 | STRONG_ALIAS(_atomic_add_long,_atomic_add_32) | | 61 | STRONG_ALIAS(_atomic_add_long,_atomic_add_32) |
56 | STRONG_ALIAS(_atomic_add_ptr,_atomic_add_32) | | 62 | STRONG_ALIAS(_atomic_add_ptr,_atomic_add_32) |
57 | | | 63 | |
| | | 64 | ATOMIC_OP_ALIAS(atomic_sub_32,_atomic_sub_32) |
| | | 65 | ATOMIC_OP_ALIAS(atomic_sub_int,_atomic_sub_32) |
| | | 66 | ATOMIC_OP_ALIAS(atomic_sub_long,_atomic_sub_32) |
| | | 67 | ATOMIC_OP_ALIAS(atomic_sub_ptr,_atomic_sub_32) |
| | | 68 | STRONG_ALIAS(__sync_fetch_and_sub_4,_atomic_sub_32) |
| | | 69 | STRONG_ALIAS(_atomic_sub_int,_atomic_sub_32) |
| | | 70 | STRONG_ALIAS(_atomic_sub_long,_atomic_sub_32) |
| | | 71 | STRONG_ALIAS(_atomic_sub_ptr,_atomic_sub_32) |
| | | 72 | |
| | | 73 | ENTRY_NP(_atomic_sub_32_nv) |
| | | 74 | negs r1, r1 |
| | | 75 | /* FALLTHROUGH */ |
58 | ENTRY_NP(_atomic_add_32_nv) | | 76 | ENTRY_NP(_atomic_add_32_nv) |
59 | mov ip, r0 /* need r0 for return value */ | | 77 | mov ip, r0 /* need r0 for return value */ |
60 | 1: ldrex r0, [ip] /* load old value */ | | 78 | 1: ldrex r0, [ip] /* load old value */ |
61 | adds r0, r0, r1 /* calculate new value (return value) */ | | 79 | adds r0, r0, r1 /* calculate new value (return value) */ |
62 | strex r2, r0, [ip] /* try to store */ | | 80 | strex r2, r0, [ip] /* try to store */ |
63 | cmp r2, #0 /* succeed? */ | | 81 | cmp r2, #0 /* succeed? */ |
64 | bne 1b /* no, try again? */ | | 82 | bne 1b /* no, try again? */ |
65 | #ifdef _ARM_ARCH_7 | | 83 | #ifdef _ARM_ARCH_7 |
66 | dmb | | 84 | dmb |
67 | #else | | 85 | #else |
68 | mcr p15, 0, r2, c7, c10, 5 /* data memory barrier */ | | 86 | mcr p15, 0, r2, c7, c10, 5 /* data memory barrier */ |
69 | #endif | | 87 | #endif |
70 | RET /* return new value */ | | 88 | RET /* return new value */ |
71 | END(_atomic_add_32_nv) | | 89 | END(_atomic_add_32_nv) |
| | | 90 | END(_atomic_sub_32_nv) |
72 | ATOMIC_OP_ALIAS(atomic_add_32_nv,_atomic_add_32_nv) | | 91 | ATOMIC_OP_ALIAS(atomic_add_32_nv,_atomic_add_32_nv) |
73 | ATOMIC_OP_ALIAS(atomic_add_int_nv,_atomic_add_32_nv) | | 92 | ATOMIC_OP_ALIAS(atomic_add_int_nv,_atomic_add_32_nv) |
74 | ATOMIC_OP_ALIAS(atomic_add_long_nv,_atomic_add_32_nv) | | 93 | ATOMIC_OP_ALIAS(atomic_add_long_nv,_atomic_add_32_nv) |
75 | ATOMIC_OP_ALIAS(atomic_add_ptr_nv,_atomic_add_32_nv) | | 94 | ATOMIC_OP_ALIAS(atomic_add_ptr_nv,_atomic_add_32_nv) |
| | | 95 | STRONG_ALIAS(__sync_add_and_fetch_4,_atomic_add_32_nv) |
76 | STRONG_ALIAS(_atomic_add_int_nv,_atomic_add_32_nv) | | 96 | STRONG_ALIAS(_atomic_add_int_nv,_atomic_add_32_nv) |
77 | STRONG_ALIAS(_atomic_add_long_nv,_atomic_add_32_nv) | | 97 | STRONG_ALIAS(_atomic_add_long_nv,_atomic_add_32_nv) |
78 | STRONG_ALIAS(_atomic_add_ptr_nv,_atomic_add_32_nv) | | 98 | STRONG_ALIAS(_atomic_add_ptr_nv,_atomic_add_32_nv) |
79 | | | 99 | |
| | | 100 | ATOMIC_OP_ALIAS(atomic_sub_32_nv,_atomic_sub_32_nv) |
| | | 101 | ATOMIC_OP_ALIAS(atomic_sub_int_nv,_atomic_sub_32_nv) |
| | | 102 | ATOMIC_OP_ALIAS(atomic_sub_long_nv,_atomic_sub_32_nv) |
| | | 103 | ATOMIC_OP_ALIAS(atomic_sub_ptr_nv,_atomic_sub_32_nv) |
| | | 104 | STRONG_ALIAS(__sync_sub_and_fetch_4,_atomic_sub_32_nv) |
| | | 105 | STRONG_ALIAS(_atomic_sub_int_nv,_atomic_sub_32_nv) |
| | | 106 | STRONG_ALIAS(_atomic_sub_long_nv,_atomic_sub_32_nv) |
| | | 107 | STRONG_ALIAS(_atomic_sub_ptr_nv,_atomic_sub_32_nv) |
80 | #endif /* _ARM_ARCH_6 */ | | 108 | #endif /* _ARM_ARCH_6 */ |
--- src/common/lib/libc/arch/arm/atomic/atomic_and_32.S 2013/08/11 04:41:17 1.5
+++ src/common/lib/libc/arch/arm/atomic/atomic_and_32.S 2013/11/08 22:42:52 1.6
| @@ -1,14 +1,14 @@ | | | @@ -1,14 +1,14 @@ |
1 | /* $NetBSD: atomic_and_32.S,v 1.5 2013/08/11 04:41:17 matt Exp $ */ | | 1 | /* $NetBSD: atomic_and_32.S,v 1.6 2013/11/08 22:42:52 matt Exp $ */ |
2 | | | 2 | |
3 | /*- | | 3 | /*- |
4 | * Copyright (c) 2008 The NetBSD Foundation, Inc. | | 4 | * Copyright (c) 2008 The NetBSD Foundation, Inc. |
5 | * All rights reserved. | | 5 | * All rights reserved. |
6 | * | | 6 | * |
7 | * This code is derived from software contributed to The NetBSD Foundation | | 7 | * This code is derived from software contributed to The NetBSD Foundation |
8 | * by Matt Thomas <matt@3am-software.com> | | 8 | * by Matt Thomas <matt@3am-software.com> |
9 | * | | 9 | * |
10 | * Redistribution and use in source and binary forms, with or without | | 10 | * Redistribution and use in source and binary forms, with or without |
11 | * modification, are permitted provided that the following conditions | | 11 | * modification, are permitted provided that the following conditions |
12 | * are met: | | 12 | * are met: |
13 | * 1. Redistributions of source code must retain the above copyright | | 13 | * 1. Redistributions of source code must retain the above copyright |
14 | * notice, this list of conditions and the following disclaimer. | | 14 | * notice, this list of conditions and the following disclaimer. |
| @@ -24,54 +24,57 @@ | | | @@ -24,54 +24,57 @@ |
24 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF | | 24 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF |
25 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS | | 25 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS |
26 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN | | 26 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN |
27 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) | | 27 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) |
28 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE | | 28 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE |
29 | * POSSIBILITY OF SUCH DAMAGE. | | 29 | * POSSIBILITY OF SUCH DAMAGE. |
30 | */ | | 30 | */ |
31 | | | 31 | |
32 | #include "atomic_op_asm.h" | | 32 | #include "atomic_op_asm.h" |
33 | | | 33 | |
34 | #ifdef _ARM_ARCH_6 | | 34 | #ifdef _ARM_ARCH_6 |
35 | | | 35 | |
36 | ENTRY_NP(_atomic_and_32) | | 36 | ENTRY_NP(_atomic_and_32) |
37 | 1: ldrex r3, [r0] /* load old value (to be returned) */ | | 37 | mov ip, r0 |
38 | ands r3, r3, r1 /* calculate new value */ | | 38 | 1: ldrex r0, [ip] /* load old value (to be returned) */ |
39 | strex r2, r3, [r0] /* try to store */ | | 39 | ands r3, r0, r1 /* calculate new value */ |
| | | 40 | strex r2, r3, [ip] /* try to store */ |
40 | cmp r2, #0 /* succeed? */ | | 41 | cmp r2, #0 /* succeed? */ |
41 | bne 1b /* no, try again */ | | 42 | bne 1b /* no, try again */ |
42 | #ifdef _ARM_ARCH_7 | | 43 | #ifdef _ARM_ARCH_7 |
43 | dmb | | 44 | dmb |
44 | #else | | 45 | #else |
45 | mcr p15, 0, r2, c7, c10, 5 /* data memory barrier */ | | 46 | mcr p15, 0, r2, c7, c10, 5 /* data memory barrier */ |
46 | #endif | | 47 | #endif |
47 | RET /* return old value */ | | 48 | RET /* return old value */ |
48 | END(_atomic_and_32) | | 49 | END(_atomic_and_32) |
49 | | | 50 | |
50 | ATOMIC_OP_ALIAS(atomic_and_32,_atomic_and_32) | | 51 | ATOMIC_OP_ALIAS(atomic_and_32,_atomic_and_32) |
51 | ATOMIC_OP_ALIAS(atomic_and_uint,_atomic_and_32) | | 52 | ATOMIC_OP_ALIAS(atomic_and_uint,_atomic_and_32) |
52 | ATOMIC_OP_ALIAS(atomic_and_ulong,_atomic_and_32) | | 53 | ATOMIC_OP_ALIAS(atomic_and_ulong,_atomic_and_32) |
| | | 54 | STRONG_ALIAS(__sync_fetch_and_and_4,_atomic_and_32) |
53 | STRONG_ALIAS(_atomic_and_uint,_atomic_and_32) | | 55 | STRONG_ALIAS(_atomic_and_uint,_atomic_and_32) |
54 | STRONG_ALIAS(_atomic_and_ulong,_atomic_and_32) | | 56 | STRONG_ALIAS(_atomic_and_ulong,_atomic_and_32) |
55 | | | 57 | |
56 | ENTRY_NP(_atomic_and_32_nv) | | 58 | ENTRY_NP(_atomic_and_32_nv) |
57 | mov ip, r0 /* need r0 for return value */ | | 59 | mov ip, r0 /* need r0 for return value */ |
58 | 1: ldrex r0, [ip] /* load old value */ | | 60 | 1: ldrex r0, [ip] /* load old value */ |
59 | ands r0, r0, r1 /* calculate new value (return value) */ | | 61 | ands r0, r0, r1 /* calculate new value (return value) */ |
60 | strex r2, r0, [ip] /* try to store */ | | 62 | strex r2, r0, [ip] /* try to store */ |
61 | cmp r2, #0 /* succeed? */ | | 63 | cmp r2, #0 /* succeed? */ |
62 | bne 1b /* no, try again? */ | | 64 | bne 1b /* no, try again? */ |
63 | #ifdef _ARM_ARCH_7 | | 65 | #ifdef _ARM_ARCH_7 |
64 | dmb | | 66 | dmb |
65 | #else | | 67 | #else |
66 | mcr p15, 0, r2, c7, c10, 5 /* data memory barrier */ | | 68 | mcr p15, 0, r2, c7, c10, 5 /* data memory barrier */ |
67 | #endif | | 69 | #endif |
68 | RET /* return new value */ | | 70 | RET /* return new value */ |
69 | END(_atomic_and_32_nv) | | 71 | END(_atomic_and_32_nv) |
70 | | | 72 | |
71 | ATOMIC_OP_ALIAS(atomic_and_32_nv,_atomic_and_32_nv) | | 73 | ATOMIC_OP_ALIAS(atomic_and_32_nv,_atomic_and_32_nv) |
72 | ATOMIC_OP_ALIAS(atomic_and_uint_nv,_atomic_and_32_nv) | | 74 | ATOMIC_OP_ALIAS(atomic_and_uint_nv,_atomic_and_32_nv) |
73 | ATOMIC_OP_ALIAS(atomic_and_ulong_nv,_atomic_and_32_nv) | | 75 | ATOMIC_OP_ALIAS(atomic_and_ulong_nv,_atomic_and_32_nv) |
| | | 76 | STRONG_ALIAS(__sync_and_and_fetch_4,_atomic_and_32_nv) |
74 | STRONG_ALIAS(_atomic_and_uint_nv,_atomic_and_32_nv) | | 77 | STRONG_ALIAS(_atomic_and_uint_nv,_atomic_and_32_nv) |
75 | STRONG_ALIAS(_atomic_and_ulong_nv,_atomic_and_32_nv) | | 78 | STRONG_ALIAS(_atomic_and_ulong_nv,_atomic_and_32_nv) |
76 | | | 79 | |
77 | #endif /* _ARM_ARCH_6 */ | | 80 | #endif /* _ARM_ARCH_6 */ |
--- src/common/lib/libc/arch/arm/atomic/atomic_cas_32.S 2013/08/10 19:59:21 1.5
+++ src/common/lib/libc/arch/arm/atomic/atomic_cas_32.S 2013/11/08 22:42:52 1.6
| @@ -1,14 +1,14 @@ | | | @@ -1,14 +1,14 @@ |
1 | /* $NetBSD: atomic_cas_32.S,v 1.5 2013/08/10 19:59:21 matt Exp $ */ | | 1 | /* $NetBSD: atomic_cas_32.S,v 1.6 2013/11/08 22:42:52 matt Exp $ */ |
2 | /*- | | 2 | /*- |
3 | * Copyright (c) 2008 The NetBSD Foundation, Inc. | | 3 | * Copyright (c) 2008 The NetBSD Foundation, Inc. |
4 | * All rights reserved. | | 4 | * All rights reserved. |
5 | * | | 5 | * |
6 | * This code is derived from software contributed to The NetBSD Foundation | | 6 | * This code is derived from software contributed to The NetBSD Foundation |
7 | * by Matt Thomas <matt@3am-software.com> | | 7 | * by Matt Thomas <matt@3am-software.com> |
8 | * | | 8 | * |
9 | * Redistribution and use in source and binary forms, with or without | | 9 | * Redistribution and use in source and binary forms, with or without |
10 | * modification, are permitted provided that the following conditions | | 10 | * modification, are permitted provided that the following conditions |
11 | * are met: | | 11 | * are met: |
12 | * 1. Redistributions of source code must retain the above copyright | | 12 | * 1. Redistributions of source code must retain the above copyright |
13 | * notice, this list of conditions and the following disclaimer. | | 13 | * notice, this list of conditions and the following disclaimer. |
14 | * 2. Redistributions in binary form must reproduce the above copyright | | 14 | * 2. Redistributions in binary form must reproduce the above copyright |
| @@ -54,22 +54,23 @@ ENTRY_NP(_atomic_cas_32) | | | @@ -54,22 +54,23 @@ ENTRY_NP(_atomic_cas_32) |
54 | mcr p15, 0, r3, c7, c10, 4 /* data synchronization barrier */ | | 54 | mcr p15, 0, r3, c7, c10, 4 /* data synchronization barrier */ |
55 | #endif | | 55 | #endif |
56 | 2: RET /* return. */ | | 56 | 2: RET /* return. */ |
57 | END(_atomic_cas_32) | | 57 | END(_atomic_cas_32) |
58 | | | 58 | |
59 | ATOMIC_OP_ALIAS(atomic_cas_32,_atomic_cas_32) | | 59 | ATOMIC_OP_ALIAS(atomic_cas_32,_atomic_cas_32) |
60 | ATOMIC_OP_ALIAS(atomic_cas_uint,_atomic_cas_32) | | 60 | ATOMIC_OP_ALIAS(atomic_cas_uint,_atomic_cas_32) |
61 | ATOMIC_OP_ALIAS(atomic_cas_ulong,_atomic_cas_32) | | 61 | ATOMIC_OP_ALIAS(atomic_cas_ulong,_atomic_cas_32) |
62 | ATOMIC_OP_ALIAS(atomic_cas_ptr,_atomic_cas_32) | | 62 | ATOMIC_OP_ALIAS(atomic_cas_ptr,_atomic_cas_32) |
63 | ATOMIC_OP_ALIAS(atomic_cas_32_ni,_atomic_cas_32) | | 63 | ATOMIC_OP_ALIAS(atomic_cas_32_ni,_atomic_cas_32) |
64 | ATOMIC_OP_ALIAS(atomic_cas_uint_ni,_atomic_cas_32) | | 64 | ATOMIC_OP_ALIAS(atomic_cas_uint_ni,_atomic_cas_32) |
65 | ATOMIC_OP_ALIAS(atomic_cas_ulong_ni,_atomic_cas_32) | | 65 | ATOMIC_OP_ALIAS(atomic_cas_ulong_ni,_atomic_cas_32) |
66 | ATOMIC_OP_ALIAS(atomic_cas_ptr_ni,_atomic_cas_32) | | 66 | ATOMIC_OP_ALIAS(atomic_cas_ptr_ni,_atomic_cas_32) |
| | | 67 | STRONG_ALIAS(__sync_val_compare_and_swap_4,_atomic_cas_32) |
67 | STRONG_ALIAS(_atomic_cas_uint,_atomic_cas_32) | | 68 | STRONG_ALIAS(_atomic_cas_uint,_atomic_cas_32) |
68 | STRONG_ALIAS(_atomic_cas_ulong,_atomic_cas_32) | | 69 | STRONG_ALIAS(_atomic_cas_ulong,_atomic_cas_32) |
69 | STRONG_ALIAS(_atomic_cas_32_ni,_atomic_cas_32) | | 70 | STRONG_ALIAS(_atomic_cas_32_ni,_atomic_cas_32) |
70 | STRONG_ALIAS(_atomic_cas_ptr_ni,_atomic_cas_32) | | 71 | STRONG_ALIAS(_atomic_cas_ptr_ni,_atomic_cas_32) |
71 | STRONG_ALIAS(_atomic_cas_uint_ni,_atomic_cas_32) | | 72 | STRONG_ALIAS(_atomic_cas_uint_ni,_atomic_cas_32) |
72 | STRONG_ALIAS(_atomic_cas_ulong_ni,_atomic_cas_32) | | 73 | STRONG_ALIAS(_atomic_cas_ulong_ni,_atomic_cas_32) |
73 | STRONG_ALIAS(_atomic_cas_ptr,_atomic_cas_32) | | 74 | STRONG_ALIAS(_atomic_cas_ptr,_atomic_cas_32) |
74 | | | 75 | |
75 | #endif /* _ARM_ARCH_6 */ | | 76 | #endif /* _ARM_ARCH_6 */ |
--- src/common/lib/libc/arch/arm/atomic/atomic_cas_8.S 2013/08/10 19:59:21 1.5
+++ src/common/lib/libc/arch/arm/atomic/atomic_cas_8.S 2013/11/08 22:42:52 1.6
| @@ -1,16 +1,17 @@ | | | @@ -1,16 +1,17 @@ |
1 | /* $NetBSD: atomic_cas_8.S,v 1.5 2013/08/10 19:59:21 matt Exp $ */ | | 1 | /* $NetBSD: atomic_cas_8.S,v 1.6 2013/11/08 22:42:52 matt Exp $ */ |
| | | 2 | |
2 | /*- | | 3 | /*- |
3 | * Copyright (c) 2008 The NetBSD Foundation, Inc. | | 4 | * Copyright (c) 2013 The NetBSD Foundation, Inc. |
4 | * All rights reserved. | | 5 | * All rights reserved. |
5 | * | | 6 | * |
6 | * This code is derived from software contributed to The NetBSD Foundation | | 7 | * This code is derived from software contributed to The NetBSD Foundation |
7 | * by Matt Thomas <matt@3am-software.com> | | 8 | * by Matt Thomas <matt@3am-software.com> |
8 | * | | 9 | * |
9 | * Redistribution and use in source and binary forms, with or without | | 10 | * Redistribution and use in source and binary forms, with or without |
10 | * modification, are permitted provided that the following conditions | | 11 | * modification, are permitted provided that the following conditions |
11 | * are met: | | 12 | * are met: |
12 | * 1. Redistributions of source code must retain the above copyright | | 13 | * 1. Redistributions of source code must retain the above copyright |
13 | * notice, this list of conditions and the following disclaimer. | | 14 | * notice, this list of conditions and the following disclaimer. |
14 | * 2. Redistributions in binary form must reproduce the above copyright | | 15 | * 2. Redistributions in binary form must reproduce the above copyright |
15 | * notice, this list of conditions and the following disclaimer in the | | 16 | * notice, this list of conditions and the following disclaimer in the |
16 | * documentation and/or other materials provided with the distribution. | | 17 | * documentation and/or other materials provided with the distribution. |
| @@ -49,15 +50,16 @@ ENTRY_NP(_atomic_cas_8) | | | @@ -49,15 +50,16 @@ ENTRY_NP(_atomic_cas_8) |
49 | cmp r3, #0 /* succeed? */ | | 50 | cmp r3, #0 /* succeed? */ |
50 | bne 1b /* nope, try again. */ | | 51 | bne 1b /* nope, try again. */ |
51 | #ifdef _ARM_ARCH_7 | | 52 | #ifdef _ARM_ARCH_7 |
52 | dsb /* data synchronization barrier */ | | 53 | dsb /* data synchronization barrier */ |
53 | #else | | 54 | #else |
54 | mcr p15, 0, r3, c7, c10, 4 /* data synchronization barrier */ | | 55 | mcr p15, 0, r3, c7, c10, 4 /* data synchronization barrier */ |
55 | #endif | | 56 | #endif |
56 | 2: RET /* return. */ | | 57 | 2: RET /* return. */ |
57 | END(_atomic_cas_8) | | 58 | END(_atomic_cas_8) |
58 | | | 59 | |
59 | ATOMIC_OP_ALIAS(atomic_cas_8,_atomic_cas_8) | | 60 | ATOMIC_OP_ALIAS(atomic_cas_8,_atomic_cas_8) |
60 | STRONG_ALIAS(_atomic_cas_char,_atomic_cas_8) | | 61 | STRONG_ALIAS(_atomic_cas_char,_atomic_cas_8) |
61 | STRONG_ALIAS(_atomic_cas_uchar,_atomic_cas_8) | | 62 | STRONG_ALIAS(_atomic_cas_uchar,_atomic_cas_8) |
| | | 63 | STRONG_ALIAS(__sync_val_compare_and_swap_1,_atomic_cas_8) |
62 | | | 64 | |
63 | #endif /* _ARM_ARCH_6 */ | | 65 | #endif /* _ARM_ARCH_6 */ |
--- src/common/lib/libc/arch/arm/atomic/atomic_or_32.S 2013/08/11 04:41:17 1.5
+++ src/common/lib/libc/arch/arm/atomic/atomic_or_32.S 2013/11/08 22:42:52 1.6
| @@ -1,14 +1,14 @@ | | | @@ -1,14 +1,14 @@ |
1 | /* $NetBSD: atomic_or_32.S,v 1.5 2013/08/11 04:41:17 matt Exp $ */ | | 1 | /* $NetBSD: atomic_or_32.S,v 1.6 2013/11/08 22:42:52 matt Exp $ */ |
2 | /*- | | 2 | /*- |
3 | * Copyright (c) 2008 The NetBSD Foundation, Inc. | | 3 | * Copyright (c) 2008 The NetBSD Foundation, Inc. |
4 | * All rights reserved. | | 4 | * All rights reserved. |
5 | * | | 5 | * |
6 | * This code is derived from software contributed to The NetBSD Foundation | | 6 | * This code is derived from software contributed to The NetBSD Foundation |
7 | * by Matt Thomas <matt@3am-software.com> | | 7 | * by Matt Thomas <matt@3am-software.com> |
8 | * | | 8 | * |
9 | * Redistribution and use in source and binary forms, with or without | | 9 | * Redistribution and use in source and binary forms, with or without |
10 | * modification, are permitted provided that the following conditions | | 10 | * modification, are permitted provided that the following conditions |
11 | * are met: | | 11 | * are met: |
12 | * 1. Redistributions of source code must retain the above copyright | | 12 | * 1. Redistributions of source code must retain the above copyright |
13 | * notice, this list of conditions and the following disclaimer. | | 13 | * notice, this list of conditions and the following disclaimer. |
14 | * 2. Redistributions in binary form must reproduce the above copyright | | 14 | * 2. Redistributions in binary form must reproduce the above copyright |
| @@ -23,54 +23,57 @@ | | | @@ -23,54 +23,57 @@ |
23 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF | | 23 | * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF |
24 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS | | 24 | * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS |
25 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN | | 25 | * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN |
26 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) | | 26 | * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) |
27 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE | | 27 | * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE |
28 | * POSSIBILITY OF SUCH DAMAGE. | | 28 | * POSSIBILITY OF SUCH DAMAGE. |
29 | */ | | 29 | */ |
30 | | | 30 | |
31 | #include "atomic_op_asm.h" | | 31 | #include "atomic_op_asm.h" |
32 | | | 32 | |
33 | #ifdef _ARM_ARCH_6 | | 33 | #ifdef _ARM_ARCH_6 |
34 | | | 34 | |
35 | ENTRY_NP(_atomic_or_32) | | 35 | ENTRY_NP(_atomic_or_32) |
36 | 1: ldrex r3, [r0] /* load old value (to be returned) */ | | 36 | mov ip, r0 |
37 | orrs r3, r3, r1 /* calculate new value */ | | 37 | 1: ldrex r0, [ip] /* load old value (to be returned) */ |
38 | strex r2, r3, [r0] /* try to store */ | | 38 | orrs r3, r0, r1 /* calculate new value */ |
| | | 39 | strex r2, r3, [ip] /* try to store */ |
39 | cmp r2, #0 /* succeed? */ | | 40 | cmp r2, #0 /* succeed? */ |
40 | bne 1b /* no, try again */ | | 41 | bne 1b /* no, try again */ |
41 | #ifdef _ARM_ARCH_7 | | 42 | #ifdef _ARM_ARCH_7 |
42 | dmb | | 43 | dmb |
43 | #else | | 44 | #else |
44 | mcr p15, 0, r2, c7, c10, 5 /* data memory barrier */ | | 45 | mcr p15, 0, r2, c7, c10, 5 /* data memory barrier */ |
45 | #endif | | 46 | #endif |
46 | RET /* return old value */ | | 47 | RET /* return old value */ |
47 | END(_atomic_or_32) | | 48 | END(_atomic_or_32) |
48 | | | 49 | |
49 | ATOMIC_OP_ALIAS(atomic_or_32,_atomic_or_32) | | 50 | ATOMIC_OP_ALIAS(atomic_or_32,_atomic_or_32) |
50 | ATOMIC_OP_ALIAS(atomic_or_uint,_atomic_or_32) | | 51 | ATOMIC_OP_ALIAS(atomic_or_uint,_atomic_or_32) |
51 | ATOMIC_OP_ALIAS(atomic_or_ulong,_atomic_or_32) | | 52 | ATOMIC_OP_ALIAS(atomic_or_ulong,_atomic_or_32) |
| | | 53 | STRONG_ALIAS(__sync_fetch_and_or_4,_atomic_or_32) |
52 | STRONG_ALIAS(_atomic_or_uint,_atomic_or_32) | | 54 | STRONG_ALIAS(_atomic_or_uint,_atomic_or_32) |
53 | STRONG_ALIAS(_atomic_or_ulong,_atomic_or_32) | | 55 | STRONG_ALIAS(_atomic_or_ulong,_atomic_or_32) |
54 | | | 56 | |
55 | ENTRY_NP(_atomic_or_32_nv) | | 57 | ENTRY_NP(_atomic_or_32_nv) |
56 | mov ip, r0 /* need r0 for return value */ | | 58 | mov ip, r0 /* need r0 for return value */ |
57 | 1: ldrex r0, [ip] /* load old value */ | | 59 | 1: ldrex r0, [ip] /* load old value */ |
58 | orrs r0, r0, r1 /* calculate new value (return value) */ | | 60 | orrs r0, r0, r1 /* calculate new value (return value) */ |
59 | strex r2, r0, [ip] /* try to store */ | | 61 | strex r2, r0, [ip] /* try to store */ |
60 | cmp r2, #0 /* succeed? */ | | 62 | cmp r2, #0 /* succeed? */ |
61 | bne 1b /* no, try again? */ | | 63 | bne 1b /* no, try again? */ |
62 | #ifdef _ARM_ARCH_7 | | 64 | #ifdef _ARM_ARCH_7 |
63 | dmb | | 65 | dmb |
64 | #else | | 66 | #else |
65 | mcr p15, 0, r2, c7, c10, 5 /* data memory barrier */ | | 67 | mcr p15, 0, r2, c7, c10, 5 /* data memory barrier */ |
66 | #endif | | 68 | #endif |
67 | RET /* return new value */ | | 69 | RET /* return new value */ |
68 | END(_atomic_or_32_nv) | | 70 | END(_atomic_or_32_nv) |
69 | | | 71 | |
70 | ATOMIC_OP_ALIAS(atomic_or_32_nv,_atomic_or_32_nv) | | 72 | ATOMIC_OP_ALIAS(atomic_or_32_nv,_atomic_or_32_nv) |
71 | ATOMIC_OP_ALIAS(atomic_or_uint_nv,_atomic_or_32_nv) | | 73 | ATOMIC_OP_ALIAS(atomic_or_uint_nv,_atomic_or_32_nv) |
72 | ATOMIC_OP_ALIAS(atomic_or_ulong_nv,_atomic_or_32_nv) | | 74 | ATOMIC_OP_ALIAS(atomic_or_ulong_nv,_atomic_or_32_nv) |
| | | 75 | STRONG_ALIAS(__sync_or_and_fetch_4,_atomic_or_32_nv) |
73 | STRONG_ALIAS(_atomic_or_uint_nv,_atomic_or_32_nv) | | 76 | STRONG_ALIAS(_atomic_or_uint_nv,_atomic_or_32_nv) |
74 | STRONG_ALIAS(_atomic_or_ulong_nv,_atomic_or_32_nv) | | 77 | STRONG_ALIAS(_atomic_or_ulong_nv,_atomic_or_32_nv) |
75 | | | 78 | |
76 | #endif /* _ARM_ARCH_6 */ | | 79 | #endif /* _ARM_ARCH_6 */ |
--- src/common/lib/libc/arch/arm/atomic/atomic_swap_64.S 2013/08/20 07:52:31 1.5
+++ src/common/lib/libc/arch/arm/atomic/atomic_swap_64.S 2013/11/08 22:42:52 1.6
| @@ -1,14 +1,14 @@ | | | @@ -1,14 +1,14 @@ |
1 | /* $NetBSD: atomic_swap_64.S,v 1.5 2013/08/20 07:52:31 matt Exp $ */ | | 1 | /* $NetBSD: atomic_swap_64.S,v 1.6 2013/11/08 22:42:52 matt Exp $ */ |
2 | /*- | | 2 | /*- |
3 | * Copyright (c) 2012 The NetBSD Foundation, Inc. | | 3 | * Copyright (c) 2012 The NetBSD Foundation, Inc. |
4 | * All rights reserved. | | 4 | * All rights reserved. |
5 | * | | 5 | * |
6 | * This code is derived from software contributed to The NetBSD Foundation | | 6 | * This code is derived from software contributed to The NetBSD Foundation |
7 | * by Matt Thomas. | | 7 | * by Matt Thomas. |
8 | * | | 8 | * |
9 | * Redistribution and use in source and binary forms, with or without | | 9 | * Redistribution and use in source and binary forms, with or without |
10 | * modification, are permitted provided that the following conditions | | 10 | * modification, are permitted provided that the following conditions |
11 | * are met: | | 11 | * are met: |
12 | * 1. Redistributions of source code must retain the above copyright | | 12 | * 1. Redistributions of source code must retain the above copyright |
13 | * notice, this list of conditions and the following disclaimer. | | 13 | * notice, this list of conditions and the following disclaimer. |
14 | * 2. Redistributions in binary form must reproduce the above copyright | | 14 | * 2. Redistributions in binary form must reproduce the above copyright |
| @@ -38,15 +38,23 @@ ENTRY_NP(_atomic_swap_64) | | | @@ -38,15 +38,23 @@ ENTRY_NP(_atomic_swap_64) |
38 | mov r2, r1 /* and r1 will be overwritten by ldrexd */ | | 38 | mov r2, r1 /* and r1 will be overwritten by ldrexd */ |
39 | #endif | | 39 | #endif |
40 | 1: ldrexd r0, [ip] /* load old value */ | | 40 | 1: ldrexd r0, [ip] /* load old value */ |
41 | strexd r4, r2, [ip] /* store new value */ | | 41 | strexd r4, r2, [ip] /* store new value */ |
42 | cmp r4, #0 /* succeed? */ | | 42 | cmp r4, #0 /* succeed? */ |
43 | bne 1b /* no, try again */ | | 43 | bne 1b /* no, try again */ |
44 | #ifdef _ARM_ARCH_7 | | 44 | #ifdef _ARM_ARCH_7 |
45 | dmb | | 45 | dmb |
46 | #else | | 46 | #else |
47 | mcr p15, 0, r4, c7, c10, 5 /* data memory barrier */ | | 47 | mcr p15, 0, r4, c7, c10, 5 /* data memory barrier */ |
48 | #endif | | 48 | #endif |
49 | pop {r3,r4} /* restore temporary */ | | 49 | pop {r3,r4} /* restore temporary */ |
50 | RET | | 50 | RET |
51 | END(_atomic_swap_64) | | 51 | END(_atomic_swap_64) |
52 | ATOMIC_OP_ALIAS(atomic_swap_64,_atomic_swap_64) | | 52 | ATOMIC_OP_ALIAS(atomic_swap_64,_atomic_swap_64) |
| | | 53 | STRONG_ALIAS(__sync_lock_test_and_set_8,_atomic_swap_64) |
| | | 54 | |
| | | 55 | ENTRY_NP(__sync_lock_release_8) |
| | | 56 | mov r2, #0 |
| | | 57 | mov r3, #0 |
| | | 58 | strd r2, [r0] |
| | | 59 | RET |
| | | 60 | END(__sync_lock_release_8) |
--- src/common/lib/libc/arch/arm/atomic/atomic_add_64.S 2013/08/20 07:52:31 1.8
+++ src/common/lib/libc/arch/arm/atomic/atomic_add_64.S 2013/11/08 22:42:52 1.9
| @@ -1,14 +1,14 @@ | | | @@ -1,14 +1,14 @@ |
1 | /* $NetBSD: atomic_add_64.S,v 1.8 2013/08/20 07:52:31 matt Exp $ */ | | 1 | /* $NetBSD: atomic_add_64.S,v 1.9 2013/11/08 22:42:52 matt Exp $ */ |
2 | /*- | | 2 | /*- |
3 | * Copyright (c) 2012 The NetBSD Foundation, Inc. | | 3 | * Copyright (c) 2012 The NetBSD Foundation, Inc. |
4 | * All rights reserved. | | 4 | * All rights reserved. |
5 | * | | 5 | * |
6 | * This code is derived from software contributed to The NetBSD Foundation | | 6 | * This code is derived from software contributed to The NetBSD Foundation |
7 | * by Matt Thomas <matt@3am-software.com> | | 7 | * by Matt Thomas <matt@3am-software.com> |
8 | * | | 8 | * |
9 | * Redistribution and use in source and binary forms, with or without | | 9 | * Redistribution and use in source and binary forms, with or without |
10 | * modification, are permitted provided that the following conditions | | 10 | * modification, are permitted provided that the following conditions |
11 | * are met: | | 11 | * are met: |
12 | * 1. Redistributions of source code must retain the above copyright | | 12 | * 1. Redistributions of source code must retain the above copyright |
13 | * notice, this list of conditions and the following disclaimer. | | 13 | * notice, this list of conditions and the following disclaimer. |
14 | * 2. Redistributions in binary form must reproduce the above copyright | | 14 | * 2. Redistributions in binary form must reproduce the above copyright |
| @@ -47,15 +47,16 @@ ENTRY_NP(_atomic_add_64_nv) | | | @@ -47,15 +47,16 @@ ENTRY_NP(_atomic_add_64_nv) |
47 | bne 1b /* no, try again? */ | | 47 | bne 1b /* no, try again? */ |
48 | #ifdef _ARM_ARCH_7 | | 48 | #ifdef _ARM_ARCH_7 |
49 | dmb | | 49 | dmb |
50 | #else | | 50 | #else |
51 | mcr p15, 0, r4, c7, c10, 5 /* data memory barrier */ | | 51 | mcr p15, 0, r4, c7, c10, 5 /* data memory barrier */ |
52 | #endif | | 52 | #endif |
53 | pop {r3,r4} /* restore temporary */ | | 53 | pop {r3,r4} /* restore temporary */ |
54 | RET /* return new value */ | | 54 | RET /* return new value */ |
55 | END(_atomic_add_64_nv) | | 55 | END(_atomic_add_64_nv) |
56 | | | 56 | |
57 | STRONG_ALIAS(_atomic_add_64,_atomic_add_64_nv) | | 57 | STRONG_ALIAS(_atomic_add_64,_atomic_add_64_nv) |
58 | ATOMIC_OP_ALIAS(atomic_add_64_nv,_atomic_add_64_nv) | | 58 | ATOMIC_OP_ALIAS(atomic_add_64_nv,_atomic_add_64_nv) |
59 | ATOMIC_OP_ALIAS(atomic_add_64,_atomic_add_64) | | 59 | ATOMIC_OP_ALIAS(atomic_add_64,_atomic_add_64) |
| | | 60 | STRONG_ALIAS(__sync_add_and_fetch_8,_atomic_add_64_nv) |
60 | | | 61 | |
61 | #endif /* _ARM_ARCH_6 */ | | 62 | #endif /* _ARM_ARCH_6 */ |
--- src/common/lib/libc/arch/arm/atomic/atomic_and_64.S 2013/08/20 07:52:31 1.7
+++ src/common/lib/libc/arch/arm/atomic/atomic_and_64.S 2013/11/08 22:42:52 1.8
| @@ -1,14 +1,14 @@ | | | @@ -1,14 +1,14 @@ |
1 | /* $NetBSD: atomic_and_64.S,v 1.7 2013/08/20 07:52:31 matt Exp $ */ | | 1 | /* $NetBSD: atomic_and_64.S,v 1.8 2013/11/08 22:42:52 matt Exp $ */ |
2 | /*- | | 2 | /*- |
3 | * Copyright (c) 2012 The NetBSD Foundation, Inc. | | 3 | * Copyright (c) 2012 The NetBSD Foundation, Inc. |
4 | * All rights reserved. | | 4 | * All rights reserved. |
5 | * | | 5 | * |
6 | * This code is derived from software contributed to The NetBSD Foundation | | 6 | * This code is derived from software contributed to The NetBSD Foundation |
7 | * by Matt Thomas <matt@3am-software.com> | | 7 | * by Matt Thomas <matt@3am-software.com> |
8 | * | | 8 | * |
9 | * Redistribution and use in source and binary forms, with or without | | 9 | * Redistribution and use in source and binary forms, with or without |
10 | * modification, are permitted provided that the following conditions | | 10 | * modification, are permitted provided that the following conditions |
11 | * are met: | | 11 | * are met: |
12 | * 1. Redistributions of source code must retain the above copyright | | 12 | * 1. Redistributions of source code must retain the above copyright |
13 | * notice, this list of conditions and the following disclaimer. | | 13 | * notice, this list of conditions and the following disclaimer. |
14 | * 2. Redistributions in binary form must reproduce the above copyright | | 14 | * 2. Redistributions in binary form must reproduce the above copyright |
| @@ -47,15 +47,16 @@ ENTRY_NP(_atomic_and_64_nv) | | | @@ -47,15 +47,16 @@ ENTRY_NP(_atomic_and_64_nv) |
47 | bne 1b /* no, try again? */ | | 47 | bne 1b /* no, try again? */ |
48 | #ifdef _ARM_ARCH_7 | | 48 | #ifdef _ARM_ARCH_7 |
49 | dmb | | 49 | dmb |
50 | #else | | 50 | #else |
51 | mcr p15, 0, r4, c7, c10, 5 /* data memory barrier */ | | 51 | mcr p15, 0, r4, c7, c10, 5 /* data memory barrier */ |
52 | #endif | | 52 | #endif |
53 | pop {r3,r4} /* restore temporary */ | | 53 | pop {r3,r4} /* restore temporary */ |
54 | RET /* return new value */ | | 54 | RET /* return new value */ |
55 | END(_atomic_and_64_nv) | | 55 | END(_atomic_and_64_nv) |
56 | | | 56 | |
57 | STRONG_ALIAS(_atomic_and_64,_atomic_and_64_nv) | | 57 | STRONG_ALIAS(_atomic_and_64,_atomic_and_64_nv) |
58 | ATOMIC_OP_ALIAS(atomic_and_64_nv,_atomic_and_64_nv) | | 58 | ATOMIC_OP_ALIAS(atomic_and_64_nv,_atomic_and_64_nv) |
59 | ATOMIC_OP_ALIAS(atomic_and_64,_atomic_and_64_nv) | | 59 | ATOMIC_OP_ALIAS(atomic_and_64,_atomic_and_64_nv) |
| | | 60 | STRONG_ALIAS(__sync_and_and_fetch_8,_atomic_and_64_nv) |
60 | | | 61 | |
61 | #endif /* _ARM_ARCH_6 */ | | 62 | #endif /* _ARM_ARCH_6 */ |
--- src/common/lib/libc/arch/arm/atomic/atomic_or_64.S 2013/08/20 07:52:31 1.7
+++ src/common/lib/libc/arch/arm/atomic/atomic_or_64.S 2013/11/08 22:42:52 1.8
| @@ -1,14 +1,14 @@ | | | @@ -1,14 +1,14 @@ |
1 | /* $NetBSD: atomic_or_64.S,v 1.7 2013/08/20 07:52:31 matt Exp $ */ | | 1 | /* $NetBSD: atomic_or_64.S,v 1.8 2013/11/08 22:42:52 matt Exp $ */ |
2 | /*- | | 2 | /*- |
3 | * Copyright (c) 2012 The NetBSD Foundation, Inc. | | 3 | * Copyright (c) 2012 The NetBSD Foundation, Inc. |
4 | * All rights reserved. | | 4 | * All rights reserved. |
5 | * | | 5 | * |
6 | * This code is derived from software contributed to The NetBSD Foundation | | 6 | * This code is derived from software contributed to The NetBSD Foundation |
7 | * by Matt Thomas <matt@3am-software.com> | | 7 | * by Matt Thomas <matt@3am-software.com> |
8 | * | | 8 | * |
9 | * Redistribution and use in source and binary forms, with or without | | 9 | * Redistribution and use in source and binary forms, with or without |
10 | * modification, are permitted provided that the following conditions | | 10 | * modification, are permitted provided that the following conditions |
11 | * are met: | | 11 | * are met: |
12 | * 1. Redistributions of source code must retain the above copyright | | 12 | * 1. Redistributions of source code must retain the above copyright |
13 | * notice, this list of conditions and the following disclaimer. | | 13 | * notice, this list of conditions and the following disclaimer. |
14 | * 2. Redistributions in binary form must reproduce the above copyright | | 14 | * 2. Redistributions in binary form must reproduce the above copyright |
| @@ -47,15 +47,16 @@ ENTRY_NP(_atomic_or_64_nv) | | | @@ -47,15 +47,16 @@ ENTRY_NP(_atomic_or_64_nv) |
47 | bne 1b /* no, try again? */ | | 47 | bne 1b /* no, try again? */ |
48 | #ifdef _ARM_ARCH_7 | | 48 | #ifdef _ARM_ARCH_7 |
49 | dmb | | 49 | dmb |
50 | #else | | 50 | #else |
51 | mcr p15, 0, r4, c7, c10, 5 /* data memory barrier */ | | 51 | mcr p15, 0, r4, c7, c10, 5 /* data memory barrier */ |
52 | #endif | | 52 | #endif |
53 | pop {r3,r4} /* restore temporary */ | | 53 | pop {r3,r4} /* restore temporary */ |
54 | RET /* return new value */ | | 54 | RET /* return new value */ |
55 | END(_atomic_or_64_nv) | | 55 | END(_atomic_or_64_nv) |
56 | | | 56 | |
57 | STRONG_ALIAS(_atomic_or_64,_atomic_or_64_nv) | | 57 | STRONG_ALIAS(_atomic_or_64,_atomic_or_64_nv) |
58 | ATOMIC_OP_ALIAS(atomic_or_64_nv,_atomic_or_64_nv) | | 58 | ATOMIC_OP_ALIAS(atomic_or_64_nv,_atomic_or_64_nv) |
59 | ATOMIC_OP_ALIAS(atomic_or_64,_atomic_or_64) | | 59 | ATOMIC_OP_ALIAS(atomic_or_64,_atomic_or_64) |
| | | 60 | STRONG_ALIAS(__sync_or_and_fetch_8,_atomic_or_64) |
60 | | | 61 | |
61 | #endif /* _ARM_ARCH_6 */ | | 62 | #endif /* _ARM_ARCH_6 */ |
--- src/common/lib/libc/arch/arm/atomic/atomic_swap.S 2013/08/11 04:41:17 1.7
+++ src/common/lib/libc/arch/arm/atomic/atomic_swap.S 2013/11/08 22:42:52 1.8
| @@ -1,14 +1,14 @@ | | | @@ -1,14 +1,14 @@ |
1 | /* $NetBSD: atomic_swap.S,v 1.7 2013/08/11 04:41:17 matt Exp $ */ | | 1 | /* $NetBSD: atomic_swap.S,v 1.8 2013/11/08 22:42:52 matt Exp $ */ |
2 | | | 2 | |
3 | /*- | | 3 | /*- |
4 | * Copyright (c) 2007,2012 The NetBSD Foundation, Inc. | | 4 | * Copyright (c) 2007,2012 The NetBSD Foundation, Inc. |
5 | * All rights reserved. | | 5 | * All rights reserved. |
6 | * | | 6 | * |
7 | * This code is derived from software contributed to The NetBSD Foundation | | 7 | * This code is derived from software contributed to The NetBSD Foundation |
8 | * by Jason R. Thorpe and Matt Thomas. | | 8 | * by Jason R. Thorpe and Matt Thomas. |
9 | * | | 9 | * |
10 | * Redistribution and use in source and binary forms, with or without | | 10 | * Redistribution and use in source and binary forms, with or without |
11 | * modification, are permitted provided that the following conditions | | 11 | * modification, are permitted provided that the following conditions |
12 | * are met: | | 12 | * are met: |
13 | * 1. Redistributions of source code must retain the above copyright | | 13 | * 1. Redistributions of source code must retain the above copyright |
14 | * notice, this list of conditions and the following disclaimer. | | 14 | * notice, this list of conditions and the following disclaimer. |
| @@ -68,42 +68,56 @@ ENTRY_NP(_atomic_swap_32) | | | @@ -68,42 +68,56 @@ ENTRY_NP(_atomic_swap_32) |
68 | #ifdef _ARM_ARCH_7 | | 68 | #ifdef _ARM_ARCH_7 |
69 | dmb | | 69 | dmb |
70 | #else | | 70 | #else |
71 | mcr p15, 0, r3, c7, c10, 5 /* data memory barrier */ | | 71 | mcr p15, 0, r3, c7, c10, 5 /* data memory barrier */ |
72 | #endif | | 72 | #endif |
73 | 99: | | 73 | 99: |
74 | RET | | 74 | RET |
75 | END(_atomic_swap_32) | | 75 | END(_atomic_swap_32) |
76 | | | 76 | |
77 | ATOMIC_OP_ALIAS(atomic_swap_32,_atomic_swap_32) | | 77 | ATOMIC_OP_ALIAS(atomic_swap_32,_atomic_swap_32) |
78 | ATOMIC_OP_ALIAS(atomic_swap_uint,_atomic_swap_32) | | 78 | ATOMIC_OP_ALIAS(atomic_swap_uint,_atomic_swap_32) |
79 | ATOMIC_OP_ALIAS(atomic_swap_ulong,_atomic_swap_32) | | 79 | ATOMIC_OP_ALIAS(atomic_swap_ulong,_atomic_swap_32) |
80 | ATOMIC_OP_ALIAS(atomic_swap_ptr,_atomic_swap_32) | | 80 | ATOMIC_OP_ALIAS(atomic_swap_ptr,_atomic_swap_32) |
| | | 81 | STRONG_ALIAS(__sync_lock_test_and_set_4,_atomic_swap_32) |
81 | STRONG_ALIAS(_atomic_swap_uint,_atomic_swap_32) | | 82 | STRONG_ALIAS(_atomic_swap_uint,_atomic_swap_32) |
82 | STRONG_ALIAS(_atomic_swap_ulong,_atomic_swap_32) | | 83 | STRONG_ALIAS(_atomic_swap_ulong,_atomic_swap_32) |
83 | STRONG_ALIAS(_atomic_swap_ptr,_atomic_swap_32) | | 84 | STRONG_ALIAS(_atomic_swap_ptr,_atomic_swap_32) |
84 | | | 85 | |
| | | 86 | ENTRY_NP(__sync_lock_release_4) |
| | | 87 | mov r1, #0 |
| | | 88 | strb r1, [r0] |
| | | 89 | RET |
| | | 90 | END(__sync_lock_release_4) |
| | | 91 | |
85 | ENTRY_NP(_atomic_swap_8) | | 92 | ENTRY_NP(_atomic_swap_8) |
86 | mov ip, r0 | | 93 | mov ip, r0 |
87 | 1: | | 94 | 1: |
88 | #ifdef _ARM_ARCH_6 | | 95 | #ifdef _ARM_ARCH_6 |
89 | ldrexb r0, [ip] | | 96 | ldrexb r0, [ip] |
90 | strexb r3, r1, [ip] | | 97 | strexb r3, r1, [ip] |
91 | #else | | 98 | #else |
92 | swpb r0, r1, [ip] | | 99 | swpb r0, r1, [ip] |
93 | mov r3, #0 | | 100 | mov r3, #0 |
94 | #endif | | 101 | #endif |
95 | cmp r3, #0 | | 102 | cmp r3, #0 |
96 | bne 1b | | 103 | bne 1b |
97 | #ifdef _ARM_ARCH_7 | | 104 | #ifdef _ARM_ARCH_7 |
98 | dmb | | 105 | dmb |
99 | #else | | 106 | #else |
100 | mcr p15, 0, ip, c7, c10, 5 /* data memory barrier */ | | 107 | mcr p15, 0, ip, c7, c10, 5 /* data memory barrier */ |
101 | #endif | | 108 | #endif |
102 | RET | | 109 | RET |
103 | END(_atomic_swap_8) | | 110 | END(_atomic_swap_8) |
104 | | | 111 | |
105 | ATOMIC_OP_ALIAS(atomic_swap_8,_atomic_swap_8) | | 112 | ATOMIC_OP_ALIAS(atomic_swap_8,_atomic_swap_8) |
106 | ATOMIC_OP_ALIAS(atomic_swap_char,_atomic_swap_8) | | 113 | ATOMIC_OP_ALIAS(atomic_swap_char,_atomic_swap_8) |
107 | ATOMIC_OP_ALIAS(atomic_swap_uchar,_atomic_swap_8) | | 114 | ATOMIC_OP_ALIAS(atomic_swap_uchar,_atomic_swap_8) |
| | | 115 | STRONG_ALIAS(__sync_lock_test_and_set_1,_atomic_swap_8) |
108 | STRONG_ALIAS(_atomic_swap_char,_atomic_swap_8) | | 116 | STRONG_ALIAS(_atomic_swap_char,_atomic_swap_8) |
109 | STRONG_ALIAS(_atomic_swap_uchar,_atomic_swap_8) | | 117 | STRONG_ALIAS(_atomic_swap_uchar,_atomic_swap_8) |
| | | 118 | |
| | | 119 | ENTRY_NP(__sync_lock_release_1) |
| | | 120 | mov r1, #0 |
| | | 121 | strb r1, [r0] |
| | | 122 | RET |
| | | 123 | END(__sync_lock_release_1) |
--- src/common/lib/libc/arch/arm/atomic/atomic_cas_64.S 2013/08/10 19:59:21 1.3
+++ src/common/lib/libc/arch/arm/atomic/atomic_cas_64.S 2013/11/08 22:42:52 1.4
| @@ -1,14 +1,14 @@ | | | @@ -1,14 +1,14 @@ |
1 | /* $NetBSD: atomic_cas_64.S,v 1.3 2013/08/10 19:59:21 matt Exp $ */ | | 1 | /* $NetBSD: atomic_cas_64.S,v 1.4 2013/11/08 22:42:52 matt Exp $ */ |
2 | /*- | | 2 | /*- |
3 | * Copyright (c) 2012 The NetBSD Foundation, Inc. | | 3 | * Copyright (c) 2012 The NetBSD Foundation, Inc. |
4 | * All rights reserved. | | 4 | * All rights reserved. |
5 | * | | 5 | * |
6 | * This code is derived from software contributed to The NetBSD Foundation | | 6 | * This code is derived from software contributed to The NetBSD Foundation |
7 | * by Matt Thomas <matt@3am-software.com> | | 7 | * by Matt Thomas <matt@3am-software.com> |
8 | * | | 8 | * |
9 | * Redistribution and use in source and binary forms, with or without | | 9 | * Redistribution and use in source and binary forms, with or without |
10 | * modification, are permitted provided that the following conditions | | 10 | * modification, are permitted provided that the following conditions |
11 | * are met: | | 11 | * are met: |
12 | * 1. Redistributions of source code must retain the above copyright | | 12 | * 1. Redistributions of source code must retain the above copyright |
13 | * notice, this list of conditions and the following disclaimer. | | 13 | * notice, this list of conditions and the following disclaimer. |
14 | * 2. Redistributions in binary form must reproduce the above copyright | | 14 | * 2. Redistributions in binary form must reproduce the above copyright |
| @@ -60,15 +60,16 @@ ENTRY_NP(_atomic_cas_64) | | | @@ -60,15 +60,16 @@ ENTRY_NP(_atomic_cas_64) |
60 | cmp r6, #0 /* succeed? */ | | 60 | cmp r6, #0 /* succeed? */ |
61 | bne 1b /* nope, try again. */ | | 61 | bne 1b /* nope, try again. */ |
62 | #ifdef _ARM_ARCH_7 | | 62 | #ifdef _ARM_ARCH_7 |
63 | dsb | | 63 | dsb |
64 | #else | | 64 | #else |
65 | mcr p15, 0, ip, c7, c10, 4 /* data synchronization barrier */ | | 65 | mcr p15, 0, ip, c7, c10, 4 /* data synchronization barrier */ |
66 | #endif | | 66 | #endif |
67 | 2: | | 67 | 2: |
68 | pop {r4, r5, r6} /* restore temporaries */ | | 68 | pop {r4, r5, r6} /* restore temporaries */ |
69 | RET /* return. */ | | 69 | RET /* return. */ |
70 | END(_atomic_cas_64) | | 70 | END(_atomic_cas_64) |
71 | | | 71 | |
72 | ATOMIC_OP_ALIAS(atomic_cas_64,_atomic_cas_64) | | 72 | ATOMIC_OP_ALIAS(atomic_cas_64,_atomic_cas_64) |
| | | 73 | STRONG_ALIAS(__sync_val_compare_and_swap_8,_atomic_cas_64) |
73 | | | 74 | |
74 | #endif /* _ARM_ARCH_6 */ | | 75 | #endif /* _ARM_ARCH_6 */ |
--- src/common/lib/libc/arch/arm/atomic/atomic_op_asm.h 2012/09/11 20:51:25 1.3
+++ src/common/lib/libc/arch/arm/atomic/atomic_op_asm.h 2013/11/08 22:42:52 1.4
| @@ -1,14 +1,14 @@ | | | @@ -1,14 +1,14 @@ |
1 | /* $NetBSD: atomic_op_asm.h,v 1.3 2012/09/11 20:51:25 matt Exp $ */ | | 1 | /* $NetBSD: atomic_op_asm.h,v 1.4 2013/11/08 22:42:52 matt Exp $ */ |
2 | | | 2 | |
3 | /*- | | 3 | /*- |
4 | * Copyright (c) 2007 The NetBSD Foundation, Inc. | | 4 | * Copyright (c) 2007 The NetBSD Foundation, Inc. |
5 | * All rights reserved. | | 5 | * All rights reserved. |
6 | * | | 6 | * |
7 | * This code is derived from software contributed to The NetBSD Foundation | | 7 | * This code is derived from software contributed to The NetBSD Foundation |
8 | * by Jason R. Thorpe. | | 8 | * by Jason R. Thorpe. |
9 | * | | 9 | * |
10 | * Redistribution and use in source and binary forms, with or without | | 10 | * Redistribution and use in source and binary forms, with or without |
11 | * modification, are permitted provided that the following conditions | | 11 | * modification, are permitted provided that the following conditions |
12 | * are met: | | 12 | * are met: |
13 | * 1. Redistributions of source code must retain the above copyright | | 13 | * 1. Redistributions of source code must retain the above copyright |
14 | * notice, this list of conditions and the following disclaimer. | | 14 | * notice, this list of conditions and the following disclaimer. |
| @@ -46,21 +46,25 @@ | | | @@ -46,21 +46,25 @@ |
46 | #define ATOMIC_OP_ALIAS(a,s) STRONG_ALIAS(a,s) | | 46 | #define ATOMIC_OP_ALIAS(a,s) STRONG_ALIAS(a,s) |
47 | | | 47 | |
48 | #else /* _KERNEL */ | | 48 | #else /* _KERNEL */ |
49 | | | 49 | |
50 | #define ATOMIC_OP_ALIAS(a,s) WEAK_ALIAS(a,s) | | 50 | #define ATOMIC_OP_ALIAS(a,s) WEAK_ALIAS(a,s) |
51 | | | 51 | |
52 | #endif /* _KERNEL */ | | 52 | #endif /* _KERNEL */ |
53 | | | 53 | |
54 | #ifdef __ARMEB__ | | 54 | #ifdef __ARMEB__ |
55 | #define HI r0 | | 55 | #define HI r0 |
56 | #define LO r1 | | 56 | #define LO r1 |
57 | #define NHI r2 | | 57 | #define NHI r2 |
58 | #define NLO r3 | | 58 | #define NLO r3 |
| | | 59 | #define THI r4 |
| | | 60 | #define TLO r5 |
59 | #else | | 61 | #else |
60 | #define LO r0 | | 62 | #define LO r0 |
61 | #define HI r1 | | 63 | #define HI r1 |
62 | #define NLO r2 | | 64 | #define NLO r2 |
63 | #define NHI r3 | | 65 | #define NHI r3 |
| | | 66 | #define TLO r4 |
| | | 67 | #define THI r5 |
64 | #endif | | 68 | #endif |
65 | | | 69 | |
66 | #endif /* _ATOMIC_OP_ASM_H_ */ | | 70 | #endif /* _ATOMIC_OP_ASM_H_ */ |
--- src/common/lib/libc/arch/arm/atomic/membar_ops.S 2012/08/16 16:49:10 1.3
+++ src/common/lib/libc/arch/arm/atomic/membar_ops.S 2013/11/08 22:42:52 1.4
| @@ -1,14 +1,14 @@ | | | @@ -1,14 +1,14 @@ |
1 | /* $NetBSD: membar_ops.S,v 1.3 2012/08/16 16:49:10 matt Exp $ */ | | 1 | /* $NetBSD: membar_ops.S,v 1.4 2013/11/08 22:42:52 matt Exp $ */ |
2 | /*- | | 2 | /*- |
3 | * Copyright (c) 2008 The NetBSD Foundation, Inc. | | 3 | * Copyright (c) 2008 The NetBSD Foundation, Inc. |
4 | * All rights reserved. | | 4 | * All rights reserved. |
5 | * | | 5 | * |
6 | * This code is derived from software contributed to The NetBSD Foundation | | 6 | * This code is derived from software contributed to The NetBSD Foundation |
7 | * by Matt Thomas <matt@3am-software.com> | | 7 | * by Matt Thomas <matt@3am-software.com> |
8 | * | | 8 | * |
9 | * Redistribution and use in source and binary forms, with or without | | 9 | * Redistribution and use in source and binary forms, with or without |
10 | * modification, are permitted provided that the following conditions | | 10 | * modification, are permitted provided that the following conditions |
11 | * are met: | | 11 | * are met: |
12 | * 1. Redistributions of source code must retain the above copyright | | 12 | * 1. Redistributions of source code must retain the above copyright |
13 | * notice, this list of conditions and the following disclaimer. | | 13 | * notice, this list of conditions and the following disclaimer. |
14 | * 2. Redistributions in binary form must reproduce the above copyright | | 14 | * 2. Redistributions in binary form must reproduce the above copyright |
| @@ -29,37 +29,38 @@ | | | @@ -29,37 +29,38 @@ |
29 | */ | | 29 | */ |
30 | | | 30 | |
31 | #include "atomic_op_asm.h" | | 31 | #include "atomic_op_asm.h" |
32 | | | 32 | |
33 | #ifdef _ARM_ARCH_6 | | 33 | #ifdef _ARM_ARCH_6 |
34 | | | 34 | |
35 | ENTRY_NP(_membar_producer) | | 35 | ENTRY_NP(_membar_producer) |
36 | #ifdef _ARM_ARCH_7 | | 36 | #ifdef _ARM_ARCH_7 |
37 | dsb | | 37 | dsb |
38 | #else | | 38 | #else |
39 | mcr p15, 0, r0, c7, c10, 4 /* Data Synchronization Barrier */ | | 39 | mcr p15, 0, r0, c7, c10, 4 /* Data Synchronization Barrier */ |
40 | #endif | | 40 | #endif |
41 | RET | | 41 | RET |
42 | END(_membar_producer) | | 42 | END(_membar_producer) |
43 | ATOMIC_OP_ALIAS(membar_producer,_membar_producer) | | 43 | ATOMIC_OP_ALIAS(membar_producer,_membar_producer) |
44 | ATOMIC_OP_ALIAS(membar_write,_membar_producer) | | 44 | ATOMIC_OP_ALIAS(membar_write,_membar_producer) |
45 | STRONG_ALIAS(_membar_write,_membar_producer) | | 45 | STRONG_ALIAS(_membar_write,_membar_producer) |
46 | | | 46 | |
47 | ENTRY_NP(_membar_sync) | | 47 | ENTRY_NP(_membar_sync) |
48 | #ifdef _ARM_ARCH_7 | | 48 | #ifdef _ARM_ARCH_7 |
49 | dmb | | 49 | dmb |
50 | #else | | 50 | #else |
51 | mcr p15, 0, r0, c7, c10, 5 /* Data Memory Barrier */ | | 51 | mcr p15, 0, r0, c7, c10, 5 /* Data Memory Barrier */ |
52 | #endif | | 52 | #endif |
53 | RET | | 53 | RET |
54 | END(_membar_sync) | | 54 | END(_membar_sync) |
55 | ATOMIC_OP_ALIAS(membar_sync,_membar_sync) | | 55 | ATOMIC_OP_ALIAS(membar_sync,_membar_sync) |
56 | ATOMIC_OP_ALIAS(membar_enter,_membar_sync) | | 56 | ATOMIC_OP_ALIAS(membar_enter,_membar_sync) |
57 | ATOMIC_OP_ALIAS(membar_exit,_membar_sync) | | 57 | ATOMIC_OP_ALIAS(membar_exit,_membar_sync) |
58 | ATOMIC_OP_ALIAS(membar_consumer,_membar_sync) | | 58 | ATOMIC_OP_ALIAS(membar_consumer,_membar_sync) |
59 | ATOMIC_OP_ALIAS(membar_read,_membar_sync) | | 59 | ATOMIC_OP_ALIAS(membar_read,_membar_sync) |
| | | 60 | STRONG_ALIAS(__sync_synchronize,_membar_sync) |
60 | STRONG_ALIAS(_membar_enter,_membar_sync) | | 61 | STRONG_ALIAS(_membar_enter,_membar_sync) |
61 | STRONG_ALIAS(_membar_exit,_membar_sync) | | 62 | STRONG_ALIAS(_membar_exit,_membar_sync) |
62 | STRONG_ALIAS(_membar_consumer,_membar_sync) | | 63 | STRONG_ALIAS(_membar_consumer,_membar_sync) |
63 | STRONG_ALIAS(_membar_read,_membar_sync) | | 64 | STRONG_ALIAS(_membar_read,_membar_sync) |
64 | | | 65 | |
65 | #endif /* _ARM_ARCH_6 */ | | 66 | #endif /* _ARM_ARCH_6 */ |
--- src/common/lib/libc/arch/arm/atomic/atomic_inc_32.S 2013/08/11 04:41:17 1.6
+++ src/common/lib/libc/arch/arm/atomic/atomic_inc_32.S 2013/11/08 22:42:52 1.7
| @@ -1,14 +1,14 @@ | | | @@ -1,14 +1,14 @@ |
1 | /* $NetBSD: atomic_inc_32.S,v 1.6 2013/08/11 04:41:17 matt Exp $ */ | | 1 | /* $NetBSD: atomic_inc_32.S,v 1.7 2013/11/08 22:42:52 matt Exp $ */ |
2 | /*- | | 2 | /*- |
3 | * Copyright (c) 2008 The NetBSD Foundation, Inc. | | 3 | * Copyright (c) 2008 The NetBSD Foundation, Inc. |
4 | * All rights reserved. | | 4 | * All rights reserved. |
5 | * | | 5 | * |
6 | * This code is derived from software contributed to The NetBSD Foundation | | 6 | * This code is derived from software contributed to The NetBSD Foundation |
7 | * by Matt Thomas <matt@3am-software.com> | | 7 | * by Matt Thomas <matt@3am-software.com> |
8 | * | | 8 | * |
9 | * Redistribution and use in source and binary forms, with or without | | 9 | * Redistribution and use in source and binary forms, with or without |
10 | * modification, are permitted provided that the following conditions | | 10 | * modification, are permitted provided that the following conditions |
11 | * are met: | | 11 | * are met: |
12 | * 1. Redistributions of source code must retain the above copyright | | 12 | * 1. Redistributions of source code must retain the above copyright |
13 | * notice, this list of conditions and the following disclaimer. | | 13 | * notice, this list of conditions and the following disclaimer. |
14 | * 2. Redistributions in binary form must reproduce the above copyright | | 14 | * 2. Redistributions in binary form must reproduce the above copyright |
| @@ -34,45 +34,45 @@ | | | @@ -34,45 +34,45 @@ |
34 | | | 34 | |
35 | ENTRY_NP(_atomic_inc_32) | | 35 | ENTRY_NP(_atomic_inc_32) |
36 | 1: ldrex r3, [r0] /* load old value (return value) */ | | 36 | 1: ldrex r3, [r0] /* load old value (return value) */ |
37 | adds r3, r3, #1 /* calculate new value */ | | 37 | adds r3, r3, #1 /* calculate new value */ |
38 | strex r2, r3, [r0] /* try to store */ | | 38 | strex r2, r3, [r0] /* try to store */ |
39 | cmp r2, #0 /* succeed? */ | | 39 | cmp r2, #0 /* succeed? */ |
40 | bne 1b /* no, try again? */ | | 40 | bne 1b /* no, try again? */ |
41 | #ifdef _ARM_ARCH_7 | | 41 | #ifdef _ARM_ARCH_7 |
42 | dmb | | 42 | dmb |
43 | #else | | 43 | #else |
44 | mcr p15, 0, r2, c7, c10, 5 /* data memory barrier */ | | 44 | mcr p15, 0, r2, c7, c10, 5 /* data memory barrier */ |
45 | #endif | | 45 | #endif |
46 | RET /* return new value */ | | 46 | RET /* return new value */ |
47 | END(_atomic_inc_32) | | 47 | END(_atomic_inc_32) |
48 | ATOMIC_OP_ALIAS(atomic_inc_32,_atomic_inc_32) | | 48 | ATOMIC_OP_ALIAS(atomic_inc_32,_atomic_inc_32) |
49 | ATOMIC_OP_ALIAS(atomic_inc_uint,_atomic_inc_32) | | 49 | ATOMIC_OP_ALIAS(atomic_inc_uint,_atomic_inc_32) |
50 | ATOMIC_OP_ALIAS(atomic_inc_ulong,_atomic_inc_32) | | 50 | ATOMIC_OP_ALIAS(atomic_inc_ulong,_atomic_inc_32) |
51 | ATOMIC_OP_ALIAS(atomic_inc_ptr,_atomic_inc_32) | | 51 | ATOMIC_OP_ALIAS(atomic_inc_ptr,_atomic_inc_32) |
52 | STRONG_ALIAS(_atomic_inc_uint,_atomic_inc_32) | | 52 | STRONG_ALIAS(_atomic_inc_uint,_atomic_inc_32) |
53 | STRONG_ALIAS(_atomic_inc_ulong,_atomic_inc_32) | | 53 | STRONG_ALIAS(_atomic_inc_ulong,_atomic_inc_32) |
54 | STRONG_ALIAS(_atomic_inc_ptr,_atomic_inc_32) | | 54 | STRONG_ALIAS(_atomic_inc_ptr,_atomic_inc_32) |
55 | | | 55 | |
56 | ENTRY_NP(_atomic_inc_32_nv) | | 56 | ENTRY_NP(_atomic_inc_32_nv) |
57 | mov ip, r0 /* need r0 for return value */ | | 57 | mov ip, r0 /* need r0 for return value */ |
58 | 1: ldrex r0, [ip] /* load old value */ | | 58 | 1: ldrex r0, [ip] /* load old value */ |
59 | adds r0, r0, #1 /* calculate new value (return value) */ | | 59 | adds r0, r0, #1 /* calculate new value (return value) */ |
60 | strex r2, r0, [ip] /* try to store */ | | 60 | strex r2, r0, [ip] /* try to store */ |
61 | cmp r2, #0 /* succeed? */ | | 61 | cmp r2, #0 /* succeed? */ |
62 | bne 1b /* no, try again? */ | | 62 | bne 1b /* no, try again? */ |
63 | #ifdef _ARM_ARCH_7 | | 63 | #ifdef _ARM_ARCH_7 |
64 | dmb | | 64 | dmb |
65 | #else | | 65 | #else |
66 | mcr p15, 0, r2, c7, c10, 5 /* data memory barrier */ | | 66 | mcr p15, 0, r2, c7, c10, 5 /* data memory barrier */ |
67 | #endif | | 67 | #endif |
68 | RET /* return new value */ | | 68 | RET /* return new value */ |
69 | END(_atomic_inc_32_nv) | | 69 | END(_atomic_inc_32_nv) |
70 | ATOMIC_OP_ALIAS(atomic_inc_32_nv,_atomic_inc_32_nv) | | 70 | ATOMIC_OP_ALIAS(atomic_inc_32_nv,_atomic_inc_32_nv) |
71 | ATOMIC_OP_ALIAS(atomic_inc_uint_nv,_atomic_inc_32_nv) | | 71 | ATOMIC_OP_ALIAS(atomic_inc_uint_nv,_atomic_inc_32_nv) |
72 | ATOMIC_OP_ALIAS(atomic_inc_ulong_nv,_atomic_inc_32_nv) | | 72 | ATOMIC_OP_ALIAS(atomic_inc_ulong_nv,_atomic_inc_32_nv) |
73 | ATOMIC_OP_ALIAS(atomic_inc_ptr_nv,_atomic_inc_32_nv) | | 73 | ATOMIC_OP_ALIAS(atomic_inc_ptr_nv,_atomic_inc_32_nv) |
74 | STRONG_ALIAS(_atomic_inc_uint_nv,_atomic_inc_32_nv) | | 74 | STRONG_ALIAS(_atomic_inc_uint_nv,_atomic_inc_32_nv) |
75 | STRONG_ALIAS(_atomic_inc_ulong_nv,_atomic_inc_32_nv) | | 75 | STRONG_ALIAS(_atomic_inc_ulong_nv,_atomic_inc_32_nv) |
76 | STRONG_ALIAS(_atomic_inc_ptr_nv,_atomic_inc_32_nv) | | 76 | STRONG_ALIAS(_atomic_inc_ptr_nv,_atomic_inc_32_nv) |
77 | | | 77 | |
78 | #endif /* _ARM_ARCH_6 */ | | 78 | #endif /* _ARM_ARCH_6 */ |