Add dmb/dsb instructions as required by the armv7 arch manual.

diff -r1.2 -r1.3 src/common/lib/libc/arch/arm/atomic/atomic_add_32.S
(matt)
--- src/common/lib/libc/arch/arm/atomic/atomic_add_32.S 2008/08/16 07:12:39 1.2
+++ src/common/lib/libc/arch/arm/atomic/atomic_add_32.S 2012/08/31 23:41:52 1.3
@@ -1,14 +1,14 @@ | @@ -1,14 +1,14 @@ | |||
1 | /* $NetBSD: atomic_add_32.S,v 1.2 2008/08/16 07:12:39 matt Exp $ */ | 1 | /* $NetBSD: atomic_add_32.S,v 1.3 2012/08/31 23:41:52 matt Exp $ */ | |
2 | 2 | |||
3 | /*- | 3 | /*- | |
4 | * Copyright (c) 2008 The NetBSD Foundation, Inc. | 4 | * Copyright (c) 2008 The NetBSD Foundation, Inc. | |
5 | * All rights reserved. | 5 | * All rights reserved. | |
6 | * | 6 | * | |
7 | * This code is derived from software contributed to The NetBSD Foundation | 7 | * This code is derived from software contributed to The NetBSD Foundation | |
8 | * by Matt Thomas <matt@3am-software.com> | 8 | * by Matt Thomas <matt@3am-software.com> | |
9 | * | 9 | * | |
10 | * Redistribution and use in source and binary forms, with or without | 10 | * Redistribution and use in source and binary forms, with or without | |
11 | * modification, are permitted provided that the following conditions | 11 | * modification, are permitted provided that the following conditions | |
12 | * are met: | 12 | * are met: | |
13 | * 1. Redistributions of source code must retain the above copyright | 13 | * 1. Redistributions of source code must retain the above copyright | |
14 | * notice, this list of conditions and the following disclaimer. | 14 | * notice, this list of conditions and the following disclaimer. | |
@@ -30,41 +30,51 @@ | @@ -30,41 +30,51 @@ | |||
30 | */ | 30 | */ | |
31 | 31 | |||
32 | #include "atomic_op_asm.h" | 32 | #include "atomic_op_asm.h" | |
33 | 33 | |||
34 | #ifdef _ARM_ARCH_6 | 34 | #ifdef _ARM_ARCH_6 | |
35 | 35 | |||
36 | ENTRY_NP(_atomic_add_32) | 36 | ENTRY_NP(_atomic_add_32) | |
37 | mov r3, r0 /* need r0 for return value */ | 37 | mov r3, r0 /* need r0 for return value */ | |
38 | 1: ldrex r0, [r3] /* load old value (to be returned) */ | 38 | 1: ldrex r0, [r3] /* load old value (to be returned) */ | |
39 | add r2, r0, r1 /* calculate new value */ | 39 | add r2, r0, r1 /* calculate new value */ | |
40 | strex ip, r2, [r3] /* try to store */ | 40 | strex ip, r2, [r3] /* try to store */ | |
41 | cmp ip, #0 /* succeed? */ | 41 | cmp ip, #0 /* succeed? */ | |
42 | bne 1b /* no, try again */ | 42 | bne 1b /* no, try again */ | |
43 | #ifdef _ARM_ARCH_7 | |||
44 | dmb | |||
45 | #else | |||
46 | mcr p15, 0, ip, c7, c10, 5 /* data memory barrier */ | |||
47 | #endif | |||
43 | RET /* return old value */ | 48 | RET /* return old value */ | |
44 | END(_atomic_add_32) | 49 | END(_atomic_add_32) | |
45 | ATOMIC_OP_ALIAS(atomic_add_32,_atomic_add_32) | 50 | ATOMIC_OP_ALIAS(atomic_add_32,_atomic_add_32) | |
46 | ATOMIC_OP_ALIAS(atomic_add_int,_atomic_add_32) | 51 | ATOMIC_OP_ALIAS(atomic_add_int,_atomic_add_32) | |
47 | ATOMIC_OP_ALIAS(atomic_add_long,_atomic_add_32) | 52 | ATOMIC_OP_ALIAS(atomic_add_long,_atomic_add_32) | |
48 | ATOMIC_OP_ALIAS(atomic_add_ptr,_atomic_add_32) | 53 | ATOMIC_OP_ALIAS(atomic_add_ptr,_atomic_add_32) | |
49 | STRONG_ALIAS(_atomic_add_int,_atomic_add_32) | 54 | STRONG_ALIAS(_atomic_add_int,_atomic_add_32) | |
50 | STRONG_ALIAS(_atomic_add_long,_atomic_add_32) | 55 | STRONG_ALIAS(_atomic_add_long,_atomic_add_32) | |
51 | STRONG_ALIAS(_atomic_add_ptr,_atomic_add_32) | 56 | STRONG_ALIAS(_atomic_add_ptr,_atomic_add_32) | |
52 | 57 | |||
53 | ENTRY_NP(_atomic_add_32_nv) | 58 | ENTRY_NP(_atomic_add_32_nv) | |
54 | mov r3, r0 /* need r0 for return value */ | 59 | mov r3, r0 /* need r0 for return value */ | |
55 | 1: ldrex r0, [r3] /* load old value */ | 60 | 1: ldrex r0, [r3] /* load old value */ | |
56 | add r0, r0, r1 /* calculate new value (return value) */ | 61 | add r0, r0, r1 /* calculate new value (return value) */ | |
57 | strex r2, r0, [r3] /* try to store */ | 62 | strex r2, r0, [r3] /* try to store */ | |
58 | cmp r2, #0 /* succeed? */ | 63 | cmp r2, #0 /* succeed? */ | |
59 | bne 1b /* no, try again? */ | 64 | bne 1b /* no, try again? */ | |
65 | #ifdef _ARM_ARCH_7 | |||
66 | dmb | |||
67 | #else | |||
68 | mcr p15, 0, ip, c7, c10, 5 /* data memory barrier */ | |||
69 | #endif | |||
60 | RET /* return new value */ | 70 | RET /* return new value */ | |
61 | END(_atomic_add_32_nv) | 71 | END(_atomic_add_32_nv) | |
62 | ATOMIC_OP_ALIAS(atomic_add_32_nv,_atomic_add_32_nv) | 72 | ATOMIC_OP_ALIAS(atomic_add_32_nv,_atomic_add_32_nv) | |
63 | ATOMIC_OP_ALIAS(atomic_add_int_nv,_atomic_add_32_nv) | 73 | ATOMIC_OP_ALIAS(atomic_add_int_nv,_atomic_add_32_nv) | |
64 | ATOMIC_OP_ALIAS(atomic_add_long_nv,_atomic_add_32_nv) | 74 | ATOMIC_OP_ALIAS(atomic_add_long_nv,_atomic_add_32_nv) | |
65 | ATOMIC_OP_ALIAS(atomic_add_ptr_nv,_atomic_add_32_nv) | 75 | ATOMIC_OP_ALIAS(atomic_add_ptr_nv,_atomic_add_32_nv) | |
66 | STRONG_ALIAS(_atomic_add_int_nv,_atomic_add_32_nv) | 76 | STRONG_ALIAS(_atomic_add_int_nv,_atomic_add_32_nv) | |
67 | STRONG_ALIAS(_atomic_add_long_nv,_atomic_add_32_nv) | 77 | STRONG_ALIAS(_atomic_add_long_nv,_atomic_add_32_nv) | |
68 | STRONG_ALIAS(_atomic_add_ptr_nv,_atomic_add_32_nv) | 78 | STRONG_ALIAS(_atomic_add_ptr_nv,_atomic_add_32_nv) | |
69 | 79 | |||
70 | #endif /* _ARM_ARCH_6 */ | 80 | #endif /* _ARM_ARCH_6 */ |
--- src/common/lib/libc/arch/arm/atomic/atomic_and_32.S 2008/08/16 07:12:39 1.2
+++ src/common/lib/libc/arch/arm/atomic/atomic_and_32.S 2012/08/31 23:41:52 1.3
@@ -1,14 +1,14 @@ | @@ -1,14 +1,14 @@ | |||
1 | /* $NetBSD: atomic_and_32.S,v 1.2 2008/08/16 07:12:39 matt Exp $ */ | 1 | /* $NetBSD: atomic_and_32.S,v 1.3 2012/08/31 23:41:52 matt Exp $ */ | |
2 | 2 | |||
3 | /*- | 3 | /*- | |
4 | * Copyright (c) 2008 The NetBSD Foundation, Inc. | 4 | * Copyright (c) 2008 The NetBSD Foundation, Inc. | |
5 | * All rights reserved. | 5 | * All rights reserved. | |
6 | * | 6 | * | |
7 | * This code is derived from software contributed to The NetBSD Foundation | 7 | * This code is derived from software contributed to The NetBSD Foundation | |
8 | * by Matt Thomas <matt@3am-software.com> | 8 | * by Matt Thomas <matt@3am-software.com> | |
9 | * | 9 | * | |
10 | * Redistribution and use in source and binary forms, with or without | 10 | * Redistribution and use in source and binary forms, with or without | |
11 | * modification, are permitted provided that the following conditions | 11 | * modification, are permitted provided that the following conditions | |
12 | * are met: | 12 | * are met: | |
13 | * 1. Redistributions of source code must retain the above copyright | 13 | * 1. Redistributions of source code must retain the above copyright | |
14 | * notice, this list of conditions and the following disclaimer. | 14 | * notice, this list of conditions and the following disclaimer. | |
@@ -30,37 +30,47 @@ | @@ -30,37 +30,47 @@ | |||
30 | */ | 30 | */ | |
31 | 31 | |||
32 | #include "atomic_op_asm.h" | 32 | #include "atomic_op_asm.h" | |
33 | 33 | |||
34 | #ifdef _ARM_ARCH_6 | 34 | #ifdef _ARM_ARCH_6 | |
35 | 35 | |||
36 | ENTRY_NP(_atomic_and_32) | 36 | ENTRY_NP(_atomic_and_32) | |
37 | mov r3, r0 /* need r0 for return value */ | 37 | mov r3, r0 /* need r0 for return value */ | |
38 | 1: ldrex r0, [r3] /* load old value (to be returned) */ | 38 | 1: ldrex r0, [r3] /* load old value (to be returned) */ | |
39 | and r2, r0, r1 /* calculate new value */ | 39 | and r2, r0, r1 /* calculate new value */ | |
40 | strex ip, r2, [r3] /* try to store */ | 40 | strex ip, r2, [r3] /* try to store */ | |
41 | cmp ip, #0 /* succeed? */ | 41 | cmp ip, #0 /* succeed? */ | |
42 | bne 1b /* no, try again */ | 42 | bne 1b /* no, try again */ | |
43 | #ifdef _ARM_ARCH_7 | |||
44 | dmb | |||
45 | #else | |||
46 | mcr p15, 0, ip, c7, c10, 5 /* data memory barrier */ | |||
47 | #endif | |||
43 | RET /* return old value */ | 48 | RET /* return old value */ | |
44 | END(_atomic_and_32) | 49 | END(_atomic_and_32) | |
45 | ATOMIC_OP_ALIAS(atomic_and_32,_atomic_and_32) | 50 | ATOMIC_OP_ALIAS(atomic_and_32,_atomic_and_32) | |
46 | ATOMIC_OP_ALIAS(atomic_and_uint,_atomic_and_32) | 51 | ATOMIC_OP_ALIAS(atomic_and_uint,_atomic_and_32) | |
47 | ATOMIC_OP_ALIAS(atomic_and_ulong,_atomic_and_32) | 52 | ATOMIC_OP_ALIAS(atomic_and_ulong,_atomic_and_32) | |
48 | STRONG_ALIAS(_atomic_and_uint,_atomic_and_32) | 53 | STRONG_ALIAS(_atomic_and_uint,_atomic_and_32) | |
49 | STRONG_ALIAS(_atomic_and_ulong,_atomic_and_32) | 54 | STRONG_ALIAS(_atomic_and_ulong,_atomic_and_32) | |
50 | 55 | |||
51 | ENTRY_NP(_atomic_and_32_nv) | 56 | ENTRY_NP(_atomic_and_32_nv) | |
52 | mov r3, r0 /* need r0 for return value */ | 57 | mov r3, r0 /* need r0 for return value */ | |
53 | 1: ldrex r0, [r3] /* load old value */ | 58 | 1: ldrex r0, [r3] /* load old value */ | |
54 | and r0, r0, r1 /* calculate new value (return value) */ | 59 | and r0, r0, r1 /* calculate new value (return value) */ | |
55 | strex r2, r0, [r3] /* try to store */ | 60 | strex r2, r0, [r3] /* try to store */ | |
56 | cmp r2, #0 /* succeed? */ | 61 | cmp r2, #0 /* succeed? */ | |
57 | bne 1b /* no, try again? */ | 62 | bne 1b /* no, try again? */ | |
63 | #ifdef _ARM_ARCH_7 | |||
64 | dmb | |||
65 | #else | |||
66 | mcr p15, 0, ip, c7, c10, 5 /* data memory barrier */ | |||
67 | #endif | |||
58 | RET /* return new value */ | 68 | RET /* return new value */ | |
59 | END(_atomic_and_32_nv) | 69 | END(_atomic_and_32_nv) | |
60 | ATOMIC_OP_ALIAS(atomic_and_32_nv,_atomic_and_32_nv) | 70 | ATOMIC_OP_ALIAS(atomic_and_32_nv,_atomic_and_32_nv) | |
61 | ATOMIC_OP_ALIAS(atomic_and_uint_nv,_atomic_and_32_nv) | 71 | ATOMIC_OP_ALIAS(atomic_and_uint_nv,_atomic_and_32_nv) | |
62 | ATOMIC_OP_ALIAS(atomic_and_ulong_nv,_atomic_and_32_nv) | 72 | ATOMIC_OP_ALIAS(atomic_and_ulong_nv,_atomic_and_32_nv) | |
63 | STRONG_ALIAS(_atomic_and_uint_nv,_atomic_and_32_nv) | 73 | STRONG_ALIAS(_atomic_and_uint_nv,_atomic_and_32_nv) | |
64 | STRONG_ALIAS(_atomic_and_ulong_nv,_atomic_and_32_nv) | 74 | STRONG_ALIAS(_atomic_and_ulong_nv,_atomic_and_32_nv) | |
65 | 75 | |||
66 | #endif /* _ARM_ARCH_6 */ | 76 | #endif /* _ARM_ARCH_6 */ |
--- src/common/lib/libc/arch/arm/atomic/atomic_cas_32.S 2008/08/16 07:12:39 1.2
+++ src/common/lib/libc/arch/arm/atomic/atomic_cas_32.S 2012/08/31 23:41:52 1.3
@@ -1,14 +1,14 @@ | @@ -1,14 +1,14 @@ | |||
1 | /* $NetBSD: atomic_cas_32.S,v 1.2 2008/08/16 07:12:39 matt Exp $ */ | 1 | /* $NetBSD: atomic_cas_32.S,v 1.3 2012/08/31 23:41:52 matt Exp $ */ | |
2 | /*- | 2 | /*- | |
3 | * Copyright (c) 2008 The NetBSD Foundation, Inc. | 3 | * Copyright (c) 2008 The NetBSD Foundation, Inc. | |
4 | * All rights reserved. | 4 | * All rights reserved. | |
5 | * | 5 | * | |
6 | * This code is derived from software contributed to The NetBSD Foundation | 6 | * This code is derived from software contributed to The NetBSD Foundation | |
7 | * by Matt Thomas <matt@3am-software.com> | 7 | * by Matt Thomas <matt@3am-software.com> | |
8 | * | 8 | * | |
9 | * Redistribution and use in source and binary forms, with or without | 9 | * Redistribution and use in source and binary forms, with or without | |
10 | * modification, are permitted provided that the following conditions | 10 | * modification, are permitted provided that the following conditions | |
11 | * are met: | 11 | * are met: | |
12 | * 1. Redistributions of source code must retain the above copyright | 12 | * 1. Redistributions of source code must retain the above copyright | |
13 | * notice, this list of conditions and the following disclaimer. | 13 | * notice, this list of conditions and the following disclaimer. | |
14 | * 2. Redistributions in binary form must reproduce the above copyright | 14 | * 2. Redistributions in binary form must reproduce the above copyright | |
@@ -34,25 +34,38 @@ | @@ -34,25 +34,38 @@ | |||
34 | /* | 34 | /* | |
35 | * ARMv6 has load-exclusive/store-exclusive which works for both user | 35 | * ARMv6 has load-exclusive/store-exclusive which works for both user | |
36 | * and kernel. | 36 | * and kernel. | |
37 | */ | 37 | */ | |
38 | ENTRY_NP(_atomic_cas_32) | 38 | ENTRY_NP(_atomic_cas_32) | |
39 | mov r3, r0 /* we need r0 for return value */ | 39 | mov r3, r0 /* we need r0 for return value */ | |
40 | 1: | 40 | 1: | |
41 | ldrex r0, [r3] /* load old value */ | 41 | ldrex r0, [r3] /* load old value */ | |
42 | teq r0, r1 /* compare? */ | 42 | teq r0, r1 /* compare? */ | |
43 | RETc(ne) /* return if different */ | 43 | RETc(ne) /* return if different */ | |
44 | strex ip, r2, [r3] /* store new value */ | 44 | strex ip, r2, [r3] /* store new value */ | |
45 | cmp ip, #0 /* succeed? */ | 45 | cmp ip, #0 /* succeed? */ | |
46 | bne 1b /* nope, try again. */ | 46 | bne 1b /* nope, try again. */ | |
47 | RET /* yes, return. */ | 47 | #ifdef _ARM_ARCH_7 | |
48 | dsb | |||
49 | #else | |||
50 | mcr p15, 0, ip, c7, c10, 4 /* data synchronization barrier */ | |||
51 | #endif | |||
52 | RET /* return. */ | |||
48 | END(_atomic_cas_32) | 53 | END(_atomic_cas_32) | |
49 | 54 | |||
50 | ATOMIC_OP_ALIAS(atomic_cas_32,_atomic_cas_32) | 55 | ATOMIC_OP_ALIAS(atomic_cas_32,_atomic_cas_32) | |
51 | ATOMIC_OP_ALIAS(atomic_cas_uint,_atomic_cas_32) | 56 | ATOMIC_OP_ALIAS(atomic_cas_uint,_atomic_cas_32) | |
52 | ATOMIC_OP_ALIAS(atomic_cas_ulong,_atomic_cas_32) | 57 | ATOMIC_OP_ALIAS(atomic_cas_ulong,_atomic_cas_32) | |
53 | ATOMIC_OP_ALIAS(atomic_cas_ptr,_atomic_cas_32) | 58 | ATOMIC_OP_ALIAS(atomic_cas_ptr,_atomic_cas_32) | |
59 | ATOMIC_OP_ALIAS(atomic_cas_32_ni,_atomic_cas_32) | |||
60 | ATOMIC_OP_ALIAS(atomic_cas_uint_ni,_atomic_cas_32) | |||
61 | ATOMIC_OP_ALIAS(atomic_cas_ulong_ni,_atomic_cas_32) | |||
62 | ATOMIC_OP_ALIAS(atomic_cas_ptr_ni,_atomic_cas_32) | |||
54 | STRONG_ALIAS(_atomic_cas_uint,_atomic_cas_32) | 63 | STRONG_ALIAS(_atomic_cas_uint,_atomic_cas_32) | |
55 | STRONG_ALIAS(_atomic_cas_ulong,_atomic_cas_32) | 64 | STRONG_ALIAS(_atomic_cas_ulong,_atomic_cas_32) | |
65 | STRONG_ALIAS(_atomic_cas_32_ni,_atomic_cas_32) | |||
66 | STRONG_ALIAS(_atomic_cas_ptr_ni,_atomic_cas_32) | |||
67 | STRONG_ALIAS(_atomic_cas_uint_ni,_atomic_cas_32) | |||
68 | STRONG_ALIAS(_atomic_cas_ulong_ni,_atomic_cas_32) | |||
56 | STRONG_ALIAS(_atomic_cas_ptr,_atomic_cas_32) | 69 | STRONG_ALIAS(_atomic_cas_ptr,_atomic_cas_32) | |
57 | 70 | |||
58 | #endif /* _ARCH_ARM_6 */ | 71 | #endif /* _ARCH_ARM_6 */ |
--- src/common/lib/libc/arch/arm/atomic/atomic_cas_8.S 2012/08/16 16:49:10 1.2
+++ src/common/lib/libc/arch/arm/atomic/atomic_cas_8.S 2012/08/31 23:41:52 1.3
@@ -1,14 +1,14 @@ | @@ -1,14 +1,14 @@ | |||
1 | /* $NetBSD: atomic_cas_8.S,v 1.2 2012/08/16 16:49:10 matt Exp $ */ | 1 | /* $NetBSD: atomic_cas_8.S,v 1.3 2012/08/31 23:41:52 matt Exp $ */ | |
2 | /*- | 2 | /*- | |
3 | * Copyright (c) 2008 The NetBSD Foundation, Inc. | 3 | * Copyright (c) 2008 The NetBSD Foundation, Inc. | |
4 | * All rights reserved. | 4 | * All rights reserved. | |
5 | * | 5 | * | |
6 | * This code is derived from software contributed to The NetBSD Foundation | 6 | * This code is derived from software contributed to The NetBSD Foundation | |
7 | * by Matt Thomas <matt@3am-software.com> | 7 | * by Matt Thomas <matt@3am-software.com> | |
8 | * | 8 | * | |
9 | * Redistribution and use in source and binary forms, with or without | 9 | * Redistribution and use in source and binary forms, with or without | |
10 | * modification, are permitted provided that the following conditions | 10 | * modification, are permitted provided that the following conditions | |
11 | * are met: | 11 | * are met: | |
12 | * 1. Redistributions of source code must retain the above copyright | 12 | * 1. Redistributions of source code must retain the above copyright | |
13 | * notice, this list of conditions and the following disclaimer. | 13 | * notice, this list of conditions and the following disclaimer. | |
14 | * 2. Redistributions in binary form must reproduce the above copyright | 14 | * 2. Redistributions in binary form must reproduce the above copyright | |
@@ -34,21 +34,26 @@ | @@ -34,21 +34,26 @@ | |||
34 | /* | 34 | /* | |
35 | * ARMv6 has load-exclusive/store-exclusive which works for both user | 35 | * ARMv6 has load-exclusive/store-exclusive which works for both user | |
36 | * and kernel. | 36 | * and kernel. | |
37 | */ | 37 | */ | |
38 | ENTRY_NP(_atomic_cas_8) | 38 | ENTRY_NP(_atomic_cas_8) | |
39 | mov r3, r0 /* we need r0 for return value */ | 39 | mov r3, r0 /* we need r0 for return value */ | |
40 | 1: | 40 | 1: | |
41 | ldrexb r0, [r3] /* load old value */ | 41 | ldrexb r0, [r3] /* load old value */ | |
42 | teq r0, r1 /* compare? */ | 42 | teq r0, r1 /* compare? */ | |
43 | RETc(ne) /* return if different */ | 43 | RETc(ne) /* return if different */ | |
44 | strexb ip, r2, [r3] /* store new value */ | 44 | strexb ip, r2, [r3] /* store new value */ | |
45 | cmp ip, #0 /* succeed? */ | 45 | cmp ip, #0 /* succeed? */ | |
46 | bne 1b /* nope, try again. */ | 46 | bne 1b /* nope, try again. */ | |
47 | RET /* yes, return. */ | 47 | #ifdef _ARM_ARCH_7 | |
48 | dsb /* data synchronization barrier */ | |||
49 | #else | |||
50 | mcr p15, 0, ip, c7, c10, 4 /* data synchronization barrier */ | |||
51 | #endif | |||
52 | RET /* return. */ | |||
48 | END(_atomic_cas_8) | 53 | END(_atomic_cas_8) | |
49 | 54 | |||
50 | ATOMIC_OP_ALIAS(atomic_cas_8,_atomic_cas_8) | 55 | ATOMIC_OP_ALIAS(atomic_cas_8,_atomic_cas_8) | |
51 | STRONG_ALIAS(_atomic_cas_char,_atomic_cas_8) | 56 | STRONG_ALIAS(_atomic_cas_char,_atomic_cas_8) | |
52 | STRONG_ALIAS(_atomic_cas_uchar,_atomic_cas_8) | 57 | STRONG_ALIAS(_atomic_cas_uchar,_atomic_cas_8) | |
53 | 58 | |||
54 | #endif /* _ARCH_ARM_6 */ | 59 | #endif /* _ARCH_ARM_6 */ |
--- src/common/lib/libc/arch/arm/atomic/atomic_dec_32.S 2008/08/16 07:12:39 1.2
+++ src/common/lib/libc/arch/arm/atomic/atomic_dec_32.S 2012/08/31 23:41:52 1.3
@@ -1,14 +1,14 @@ | @@ -1,14 +1,14 @@ | |||
1 | /* $NetBSD: atomic_dec_32.S,v 1.2 2008/08/16 07:12:39 matt Exp $ */ | 1 | /* $NetBSD: atomic_dec_32.S,v 1.3 2012/08/31 23:41:52 matt Exp $ */ | |
2 | /*- | 2 | /*- | |
3 | * Copyright (c) 2008 The NetBSD Foundation, Inc. | 3 | * Copyright (c) 2008 The NetBSD Foundation, Inc. | |
4 | * All rights reserved. | 4 | * All rights reserved. | |
5 | * | 5 | * | |
6 | * This code is derived from software contributed to The NetBSD Foundation | 6 | * This code is derived from software contributed to The NetBSD Foundation | |
7 | * by Matt Thomas <matt@3am-software.com> | 7 | * by Matt Thomas <matt@3am-software.com> | |
8 | * | 8 | * | |
9 | * Redistribution and use in source and binary forms, with or without | 9 | * Redistribution and use in source and binary forms, with or without | |
10 | * modification, are permitted provided that the following conditions | 10 | * modification, are permitted provided that the following conditions | |
11 | * are met: | 11 | * are met: | |
12 | * 1. Redistributions of source code must retain the above copyright | 12 | * 1. Redistributions of source code must retain the above copyright | |
13 | * notice, this list of conditions and the following disclaimer. | 13 | * notice, this list of conditions and the following disclaimer. | |
14 | * 2. Redistributions in binary form must reproduce the above copyright | 14 | * 2. Redistributions in binary form must reproduce the above copyright | |
@@ -29,41 +29,51 @@ | @@ -29,41 +29,51 @@ | |||
29 | */ | 29 | */ | |
30 | 30 | |||
31 | #include "atomic_op_asm.h" | 31 | #include "atomic_op_asm.h" | |
32 | 32 | |||
33 | #ifdef _ARM_ARCH_6 | 33 | #ifdef _ARM_ARCH_6 | |
34 | 34 | |||
35 | ENTRY_NP(_atomic_dec_32) | 35 | ENTRY_NP(_atomic_dec_32) | |
36 | mov r2, r0 /* need r0 for return value */ | 36 | mov r2, r0 /* need r0 for return value */ | |
37 | 1: ldrex r0, [r2] /* load old value (return value) */ | 37 | 1: ldrex r0, [r2] /* load old value (return value) */ | |
38 | sub r1, r0, #1 /* calculate new value */ | 38 | sub r1, r0, #1 /* calculate new value */ | |
39 | strex r3, r1, [r2] /* try to store */ | 39 | strex r3, r1, [r2] /* try to store */ | |
40 | cmp r3, #0 /* succeed? */ | 40 | cmp r3, #0 /* succeed? */ | |
41 | bne 1b /* no, try again? */ | 41 | bne 1b /* no, try again? */ | |
42 | #ifdef _ARM_ARCH_7 | |||
43 | dmb | |||
44 | #else | |||
45 | mcr p15, 0, ip, c7, c10, 5 /* data memory barrier */ | |||
46 | #endif | |||
42 | RET /* return new value */ | 47 | RET /* return new value */ | |
43 | END(_atomic_dec_32) | 48 | END(_atomic_dec_32) | |
44 | ATOMIC_OP_ALIAS(atomic_dec_32,_atomic_dec_32) | 49 | ATOMIC_OP_ALIAS(atomic_dec_32,_atomic_dec_32) | |
45 | ATOMIC_OP_ALIAS(atomic_dec_uint,_atomic_dec_32) | 50 | ATOMIC_OP_ALIAS(atomic_dec_uint,_atomic_dec_32) | |
46 | ATOMIC_OP_ALIAS(atomic_dec_ulong,_atomic_dec_32) | 51 | ATOMIC_OP_ALIAS(atomic_dec_ulong,_atomic_dec_32) | |
47 | ATOMIC_OP_ALIAS(atomic_dec_ptr,_atomic_dec_32) | 52 | ATOMIC_OP_ALIAS(atomic_dec_ptr,_atomic_dec_32) | |
48 | STRONG_ALIAS(_atomic_dec_uint,_atomic_dec_32) | 53 | STRONG_ALIAS(_atomic_dec_uint,_atomic_dec_32) | |
49 | STRONG_ALIAS(_atomic_dec_ulong,_atomic_dec_32) | 54 | STRONG_ALIAS(_atomic_dec_ulong,_atomic_dec_32) | |
50 | STRONG_ALIAS(_atomic_dec_ptr,_atomic_dec_32) | 55 | STRONG_ALIAS(_atomic_dec_ptr,_atomic_dec_32) | |
51 | 56 | |||
52 | ENTRY_NP(_atomic_dec_32_nv) | 57 | ENTRY_NP(_atomic_dec_32_nv) | |
53 | mov r2, r0 /* need r0 for return value */ | 58 | mov r2, r0 /* need r0 for return value */ | |
54 | 1: ldrex r0, [r2] /* load old value */ | 59 | 1: ldrex r0, [r2] /* load old value */ | |
55 | sub r0, r0, #1 /* calculate new value (return value) */ | 60 | sub r0, r0, #1 /* calculate new value (return value) */ | |
56 | strex r1, r0, [r2] /* try to store */ | 61 | strex r1, r0, [r2] /* try to store */ | |
57 | cmp r1, #0 /* succeed? */ | 62 | cmp r1, #0 /* succeed? */ | |
58 | bne 1b /* no, try again? */ | 63 | bne 1b /* no, try again? */ | |
64 | #ifdef _ARM_ARCH_7 | |||
65 | dmb | |||
66 | #else | |||
67 | mcr p15, 0, ip, c7, c10, 5 /* data memory barrier */ | |||
68 | #endif | |||
59 | RET /* return new value */ | 69 | RET /* return new value */ | |
60 | END(_atomic_dec_32_nv) | 70 | END(_atomic_dec_32_nv) | |
61 | ATOMIC_OP_ALIAS(atomic_dec_32_nv,_atomic_dec_32_nv) | 71 | ATOMIC_OP_ALIAS(atomic_dec_32_nv,_atomic_dec_32_nv) | |
62 | ATOMIC_OP_ALIAS(atomic_dec_uint_nv,_atomic_dec_32_nv) | 72 | ATOMIC_OP_ALIAS(atomic_dec_uint_nv,_atomic_dec_32_nv) | |
63 | ATOMIC_OP_ALIAS(atomic_dec_ulong_nv,_atomic_dec_32_nv) | 73 | ATOMIC_OP_ALIAS(atomic_dec_ulong_nv,_atomic_dec_32_nv) | |
64 | ATOMIC_OP_ALIAS(atomic_dec_ptr_nv,_atomic_dec_32_nv) | 74 | ATOMIC_OP_ALIAS(atomic_dec_ptr_nv,_atomic_dec_32_nv) | |
65 | STRONG_ALIAS(_atomic_dec_uint_nv,_atomic_dec_32_nv) | 75 | STRONG_ALIAS(_atomic_dec_uint_nv,_atomic_dec_32_nv) | |
66 | STRONG_ALIAS(_atomic_dec_ulong_nv,_atomic_dec_32_nv) | 76 | STRONG_ALIAS(_atomic_dec_ulong_nv,_atomic_dec_32_nv) | |
67 | STRONG_ALIAS(_atomic_dec_ptr_nv,_atomic_dec_32_nv) | 77 | STRONG_ALIAS(_atomic_dec_ptr_nv,_atomic_dec_32_nv) | |
68 | 78 | |||
69 | #endif /* _ARM_ARCH_6 */ | 79 | #endif /* _ARM_ARCH_6 */ |
--- src/common/lib/libc/arch/arm/atomic/atomic_inc_32.S 2008/08/16 07:12:39 1.2
+++ src/common/lib/libc/arch/arm/atomic/atomic_inc_32.S 2012/08/31 23:41:52 1.3
@@ -1,14 +1,14 @@ | @@ -1,14 +1,14 @@ | |||
1 | /* $NetBSD: atomic_inc_32.S,v 1.2 2008/08/16 07:12:39 matt Exp $ */ | 1 | /* $NetBSD: atomic_inc_32.S,v 1.3 2012/08/31 23:41:52 matt Exp $ */ | |
2 | /*- | 2 | /*- | |
3 | * Copyright (c) 2008 The NetBSD Foundation, Inc. | 3 | * Copyright (c) 2008 The NetBSD Foundation, Inc. | |
4 | * All rights reserved. | 4 | * All rights reserved. | |
5 | * | 5 | * | |
6 | * This code is derived from software contributed to The NetBSD Foundation | 6 | * This code is derived from software contributed to The NetBSD Foundation | |
7 | * by Matt Thomas <matt@3am-software.com> | 7 | * by Matt Thomas <matt@3am-software.com> | |
8 | * | 8 | * | |
9 | * Redistribution and use in source and binary forms, with or without | 9 | * Redistribution and use in source and binary forms, with or without | |
10 | * modification, are permitted provided that the following conditions | 10 | * modification, are permitted provided that the following conditions | |
11 | * are met: | 11 | * are met: | |
12 | * 1. Redistributions of source code must retain the above copyright | 12 | * 1. Redistributions of source code must retain the above copyright | |
13 | * notice, this list of conditions and the following disclaimer. | 13 | * notice, this list of conditions and the following disclaimer. | |
14 | * 2. Redistributions in binary form must reproduce the above copyright | 14 | * 2. Redistributions in binary form must reproduce the above copyright | |
@@ -29,41 +29,51 @@ | @@ -29,41 +29,51 @@ | |||
29 | */ | 29 | */ | |
30 | 30 | |||
31 | #include "atomic_op_asm.h" | 31 | #include "atomic_op_asm.h" | |
32 | 32 | |||
33 | #ifdef _ARM_ARCH_6 | 33 | #ifdef _ARM_ARCH_6 | |
34 | 34 | |||
35 | ENTRY_NP(_atomic_inc_32) | 35 | ENTRY_NP(_atomic_inc_32) | |
36 | mov r2, r0 /* need r0 for return value */ | 36 | mov r2, r0 /* need r0 for return value */ | |
37 | 1: ldrex r0, [r2] /* load old value (return value) */ | 37 | 1: ldrex r0, [r2] /* load old value (return value) */ | |
38 | add r1, r0, #1 /* calculate new value */ | 38 | add r1, r0, #1 /* calculate new value */ | |
39 | strex r3, r1, [r2] /* try to store */ | 39 | strex r3, r1, [r2] /* try to store */ | |
40 | cmp r3, #0 /* succeed? */ | 40 | cmp r3, #0 /* succeed? */ | |
41 | bne 1b /* no, try again? */ | 41 | bne 1b /* no, try again? */ | |
42 | #ifdef _ARM_ARCH_7 | |||
43 | dmb | |||
44 | #else | |||
45 | mcr p15, 0, ip, c7, c10, 5 /* data memory barrier */ | |||
46 | #endif | |||
42 | RET /* return new value */ | 47 | RET /* return new value */ | |
43 | END(_atomic_inc_32) | 48 | END(_atomic_inc_32) | |
44 | ATOMIC_OP_ALIAS(atomic_inc_32,_atomic_inc_32) | 49 | ATOMIC_OP_ALIAS(atomic_inc_32,_atomic_inc_32) | |
45 | ATOMIC_OP_ALIAS(atomic_inc_uint,_atomic_inc_32) | 50 | ATOMIC_OP_ALIAS(atomic_inc_uint,_atomic_inc_32) | |
46 | ATOMIC_OP_ALIAS(atomic_inc_ulong,_atomic_inc_32) | 51 | ATOMIC_OP_ALIAS(atomic_inc_ulong,_atomic_inc_32) | |
47 | ATOMIC_OP_ALIAS(atomic_inc_ptr,_atomic_inc_32) | 52 | ATOMIC_OP_ALIAS(atomic_inc_ptr,_atomic_inc_32) | |
48 | STRONG_ALIAS(_atomic_inc_uint,_atomic_inc_32) | 53 | STRONG_ALIAS(_atomic_inc_uint,_atomic_inc_32) | |
49 | STRONG_ALIAS(_atomic_inc_ulong,_atomic_inc_32) | 54 | STRONG_ALIAS(_atomic_inc_ulong,_atomic_inc_32) | |
50 | STRONG_ALIAS(_atomic_inc_ptr,_atomic_inc_32) | 55 | STRONG_ALIAS(_atomic_inc_ptr,_atomic_inc_32) | |
51 | 56 | |||
52 | ENTRY_NP(_atomic_inc_32_nv) | 57 | ENTRY_NP(_atomic_inc_32_nv) | |
53 | mov r2, r0 /* need r0 for return value */ | 58 | mov r2, r0 /* need r0 for return value */ | |
54 | 1: ldrex r0, [r2] /* load old value */ | 59 | 1: ldrex r0, [r2] /* load old value */ | |
55 | add r0, r0, #1 /* calculate new value (return value) */ | 60 | add r0, r0, #1 /* calculate new value (return value) */ | |
56 | strex r1, r0, [r2] /* try to store */ | 61 | strex r1, r0, [r2] /* try to store */ | |
57 | cmp r1, #0 /* succeed? */ | 62 | cmp r1, #0 /* succeed? */ | |
58 | bne 1b /* no, try again? */ | 63 | bne 1b /* no, try again? */ | |
64 | #ifdef _ARM_ARCH_7 | |||
65 | dmb | |||
66 | #else | |||
67 | mcr p15, 0, ip, c7, c10, 5 /* data memory barrier */ | |||
68 | #endif | |||
59 | RET /* return new value */ | 69 | RET /* return new value */ | |
60 | END(_atomic_inc_32_nv) | 70 | END(_atomic_inc_32_nv) | |
61 | ATOMIC_OP_ALIAS(atomic_inc_32_nv,_atomic_inc_32_nv) | 71 | ATOMIC_OP_ALIAS(atomic_inc_32_nv,_atomic_inc_32_nv) | |
62 | ATOMIC_OP_ALIAS(atomic_inc_uint_nv,_atomic_inc_32_nv) | 72 | ATOMIC_OP_ALIAS(atomic_inc_uint_nv,_atomic_inc_32_nv) | |
63 | ATOMIC_OP_ALIAS(atomic_inc_ulong_nv,_atomic_inc_32_nv) | 73 | ATOMIC_OP_ALIAS(atomic_inc_ulong_nv,_atomic_inc_32_nv) | |
64 | ATOMIC_OP_ALIAS(atomic_inc_ptr_nv,_atomic_inc_32_nv) | 74 | ATOMIC_OP_ALIAS(atomic_inc_ptr_nv,_atomic_inc_32_nv) | |
65 | STRONG_ALIAS(_atomic_inc_uint_nv,_atomic_inc_32_nv) | 75 | STRONG_ALIAS(_atomic_inc_uint_nv,_atomic_inc_32_nv) | |
66 | STRONG_ALIAS(_atomic_inc_ulong_nv,_atomic_inc_32_nv) | 76 | STRONG_ALIAS(_atomic_inc_ulong_nv,_atomic_inc_32_nv) | |
67 | STRONG_ALIAS(_atomic_inc_ptr_nv,_atomic_inc_32_nv) | 77 | STRONG_ALIAS(_atomic_inc_ptr_nv,_atomic_inc_32_nv) | |
68 | 78 | |||
69 | #endif /* _ARCH_ARM_6 */ | 79 | #endif /* _ARCH_ARM_6 */ |
--- src/common/lib/libc/arch/arm/atomic/atomic_or_32.S 2008/08/16 07:12:39 1.2
+++ src/common/lib/libc/arch/arm/atomic/atomic_or_32.S 2012/08/31 23:41:52 1.3
@@ -1,14 +1,14 @@ | @@ -1,14 +1,14 @@ | |||
1 | /* $NetBSD: atomic_or_32.S,v 1.2 2008/08/16 07:12:39 matt Exp $ */ | 1 | /* $NetBSD: atomic_or_32.S,v 1.3 2012/08/31 23:41:52 matt Exp $ */ | |
2 | /*- | 2 | /*- | |
3 | * Copyright (c) 2008 The NetBSD Foundation, Inc. | 3 | * Copyright (c) 2008 The NetBSD Foundation, Inc. | |
4 | * All rights reserved. | 4 | * All rights reserved. | |
5 | * | 5 | * | |
6 | * This code is derived from software contributed to The NetBSD Foundation | 6 | * This code is derived from software contributed to The NetBSD Foundation | |
7 | * by Matt Thomas <matt@3am-software.com> | 7 | * by Matt Thomas <matt@3am-software.com> | |
8 | * | 8 | * | |
9 | * Redistribution and use in source and binary forms, with or without | 9 | * Redistribution and use in source and binary forms, with or without | |
10 | * modification, are permitted provided that the following conditions | 10 | * modification, are permitted provided that the following conditions | |
11 | * are met: | 11 | * are met: | |
12 | * 1. Redistributions of source code must retain the above copyright | 12 | * 1. Redistributions of source code must retain the above copyright | |
13 | * notice, this list of conditions and the following disclaimer. | 13 | * notice, this list of conditions and the following disclaimer. | |
14 | * 2. Redistributions in binary form must reproduce the above copyright | 14 | * 2. Redistributions in binary form must reproduce the above copyright | |
@@ -29,37 +29,47 @@ | @@ -29,37 +29,47 @@ | |||
29 | */ | 29 | */ | |
30 | 30 | |||
31 | #include "atomic_op_asm.h" | 31 | #include "atomic_op_asm.h" | |
32 | 32 | |||
33 | #ifdef _ARM_ARCH_6 | 33 | #ifdef _ARM_ARCH_6 | |
34 | 34 | |||
35 | ENTRY_NP(_atomic_or_32) | 35 | ENTRY_NP(_atomic_or_32) | |
36 | mov r3, r0 /* need r0 for return value */ | 36 | mov r3, r0 /* need r0 for return value */ | |
37 | 1: ldrex r0, [r3] /* load old value (to be returned) */ | 37 | 1: ldrex r0, [r3] /* load old value (to be returned) */ | |
38 | orr r2, r0, r1 /* calculate new value */ | 38 | orr r2, r0, r1 /* calculate new value */ | |
39 | strex ip, r2, [r3] /* try to store */ | 39 | strex ip, r2, [r3] /* try to store */ | |
40 | cmp ip, #0 /* succeed? */ | 40 | cmp ip, #0 /* succeed? */ | |
41 | bne 1b /* no, try again */ | 41 | bne 1b /* no, try again */ | |
42 | #ifdef _ARM_ARCH_7 | |||
43 | dmb | |||
44 | #else | |||
45 | mcr p15, 0, ip, c7, c10, 5 /* data memory barrier */ | |||
46 | #endif | |||
42 | RET /* return old value */ | 47 | RET /* return old value */ | |
43 | END(_atomic_or_32) | 48 | END(_atomic_or_32) | |
44 | ATOMIC_OP_ALIAS(atomic_or_32,_atomic_or_32) | 49 | ATOMIC_OP_ALIAS(atomic_or_32,_atomic_or_32) | |
45 | ATOMIC_OP_ALIAS(atomic_or_uint,_atomic_or_32) | 50 | ATOMIC_OP_ALIAS(atomic_or_uint,_atomic_or_32) | |
46 | ATOMIC_OP_ALIAS(atomic_or_ulong,_atomic_or_32) | 51 | ATOMIC_OP_ALIAS(atomic_or_ulong,_atomic_or_32) | |
47 | STRONG_ALIAS(_atomic_or_uint,_atomic_or_32) | 52 | STRONG_ALIAS(_atomic_or_uint,_atomic_or_32) | |
48 | STRONG_ALIAS(_atomic_or_ulong,_atomic_or_32) | 53 | STRONG_ALIAS(_atomic_or_ulong,_atomic_or_32) | |
49 | 54 | |||
50 | ENTRY_NP(_atomic_or_32_nv) | 55 | ENTRY_NP(_atomic_or_32_nv) | |
51 | mov r3, r0 /* need r0 for return value */ | 56 | mov r3, r0 /* need r0 for return value */ | |
52 | 1: ldrex r0, [r3] /* load old value */ | 57 | 1: ldrex r0, [r3] /* load old value */ | |
53 | orr r0, r0, r1 /* calculate new value (return value) */ | 58 | orr r0, r0, r1 /* calculate new value (return value) */ | |
54 | strex r2, r0, [r3] /* try to store */ | 59 | strex r2, r0, [r3] /* try to store */ | |
55 | cmp r2, #0 /* succeed? */ | 60 | cmp r2, #0 /* succeed? */ | |
56 | bne 1b /* no, try again? */ | 61 | bne 1b /* no, try again? */ | |
62 | #ifdef _ARM_ARCH_7 | |||
63 | dmb | |||
64 | #else | |||
65 | mcr p15, 0, ip, c7, c10, 5 /* data memory barrier */ | |||
66 | #endif | |||
57 | RET /* return new value */ | 67 | RET /* return new value */ | |
58 | END(_atomic_or_32_nv) | 68 | END(_atomic_or_32_nv) | |
59 | ATOMIC_OP_ALIAS(atomic_or_32_nv,_atomic_or_32_nv) | 69 | ATOMIC_OP_ALIAS(atomic_or_32_nv,_atomic_or_32_nv) | |
60 | ATOMIC_OP_ALIAS(atomic_or_uint_nv,_atomic_or_32_nv) | 70 | ATOMIC_OP_ALIAS(atomic_or_uint_nv,_atomic_or_32_nv) | |
61 | ATOMIC_OP_ALIAS(atomic_or_ulong_nv,_atomic_or_32_nv) | 71 | ATOMIC_OP_ALIAS(atomic_or_ulong_nv,_atomic_or_32_nv) | |
62 | STRONG_ALIAS(_atomic_or_uint_nv,_atomic_or_32_nv) | 72 | STRONG_ALIAS(_atomic_or_uint_nv,_atomic_or_32_nv) | |
63 | STRONG_ALIAS(_atomic_or_ulong_nv,_atomic_or_32_nv) | 73 | STRONG_ALIAS(_atomic_or_ulong_nv,_atomic_or_32_nv) | |
64 | 74 | |||
65 | #endif /* _ARM_ARCH_6 */ | 75 | #endif /* _ARM_ARCH_6 */ |
--- src/common/lib/libc/arch/arm/atomic/atomic_swap.S 2012/08/16 16:49:10 1.3
+++ src/common/lib/libc/arch/arm/atomic/atomic_swap.S 2012/08/31 23:41:52 1.4
@@ -1,14 +1,14 @@ | @@ -1,14 +1,14 @@ | |||
1 | /* $NetBSD: atomic_swap.S,v 1.3 2012/08/16 16:49:10 matt Exp $ */ | 1 | /* $NetBSD: atomic_swap.S,v 1.4 2012/08/31 23:41:52 matt Exp $ */ | |
2 | 2 | |||
3 | /*- | 3 | /*- | |
4 | * Copyright (c) 2007,2012 The NetBSD Foundation, Inc. | 4 | * Copyright (c) 2007,2012 The NetBSD Foundation, Inc. | |
5 | * All rights reserved. | 5 | * All rights reserved. | |
6 | * | 6 | * | |
7 | * This code is derived from software contributed to The NetBSD Foundation | 7 | * This code is derived from software contributed to The NetBSD Foundation | |
8 | * by Jason R. Thorpe and Matt Thomas. | 8 | * by Jason R. Thorpe and Matt Thomas. | |
9 | * | 9 | * | |
10 | * Redistribution and use in source and binary forms, with or without | 10 | * Redistribution and use in source and binary forms, with or without | |
11 | * modification, are permitted provided that the following conditions | 11 | * modification, are permitted provided that the following conditions | |
12 | * are met: | 12 | * are met: | |
13 | * 1. Redistributions of source code must retain the above copyright | 13 | * 1. Redistributions of source code must retain the above copyright | |
14 | * notice, this list of conditions and the following disclaimer. | 14 | * notice, this list of conditions and the following disclaimer. | |
@@ -39,49 +39,61 @@ | @@ -39,49 +39,61 @@ | |||
39 | * So if we use the LDREX/STREX template, but use a SWP instruction followed | 39 | * So if we use the LDREX/STREX template, but use a SWP instruction followed | |
40 | * by a MOV instruction (using a temporary register), that gives a handler | 40 | * by a MOV instruction (using a temporary register), that gives a handler | |
41 | * for the SWP UNDEFINED exception enough information to "patch" this instance | 41 | * for the SWP UNDEFINED exception enough information to "patch" this instance | |
42 | * SWP with correct forms of LDREX/STREX. (note that this would happen even | 42 | * SWP with correct forms of LDREX/STREX. (note that this would happen even | |
43 | * "read-only" pages. If the page gets tossed, we will get another exception | 43 | * "read-only" pages. If the page gets tossed, we will get another exception | |
44 | * and fix yet again). | 44 | * and fix yet again). | |
45 | */ | 45 | */ | |
46 | 46 | |||
47 | ENTRY_NP(_atomic_swap_32) | 47 | ENTRY_NP(_atomic_swap_32) | |
48 | mov r2, r0 | 48 | mov r2, r0 | |
49 | 1: | 49 | 1: | |
50 | #ifdef _ARM_ARCH_6 | 50 | #ifdef _ARM_ARCH_6 | |
51 | ldrex r0, [r2] | 51 | ldrex r0, [r2] | |
52 | strex r3, r1, [r2] | 52 | cmp r0, r1 | |
53 | strexne ip, r1, [r2] | |||
53 | #else | 54 | #else | |
54 | swp r0, r1, [r2] | 55 | swp r0, r1, [r2] | |
55 | mov r3, #0 | 56 | cmp r0, r1 | |
57 | movsne ip, #0 | |||
56 | #endif | 58 | #endif | |
57 | cmp r3, #0 | 59 | cmpne ip, #0 | |
58 | bne 1b | 60 | bne 1b | |
61 | #ifdef _ARM_ARCH_7 | |||
62 | dmb | |||
63 | #else | |||
64 | mcr p15, 0, ip, c7, c10, 5 /* data memory barrier */ | |||
65 | #endif | |||
59 | RET | 66 | RET | |
60 | END(_atomic_swap_32) | 67 | END(_atomic_swap_32) | |
61 | ATOMIC_OP_ALIAS(atomic_swap_32,_atomic_swap_32) | 68 | ATOMIC_OP_ALIAS(atomic_swap_32,_atomic_swap_32) | |
62 | ATOMIC_OP_ALIAS(atomic_swap_uint,_atomic_swap_32) | 69 | ATOMIC_OP_ALIAS(atomic_swap_uint,_atomic_swap_32) | |
63 | ATOMIC_OP_ALIAS(atomic_swap_ulong,_atomic_swap_32) | 70 | ATOMIC_OP_ALIAS(atomic_swap_ulong,_atomic_swap_32) | |
64 | ATOMIC_OP_ALIAS(atomic_swap_ptr,_atomic_swap_32) | 71 | ATOMIC_OP_ALIAS(atomic_swap_ptr,_atomic_swap_32) | |
65 | STRONG_ALIAS(_atomic_swap_uint,_atomic_swap_32) | 72 | STRONG_ALIAS(_atomic_swap_uint,_atomic_swap_32) | |
66 | STRONG_ALIAS(_atomic_swap_ulong,_atomic_swap_32) | 73 | STRONG_ALIAS(_atomic_swap_ulong,_atomic_swap_32) | |
67 | STRONG_ALIAS(_atomic_swap_ptr,_atomic_swap_32) | 74 | STRONG_ALIAS(_atomic_swap_ptr,_atomic_swap_32) | |
68 | 75 | |||
69 | ENTRY_NP(_atomic_swap_8) | 76 | ENTRY_NP(_atomic_swap_8) | |
70 | mov r2, r0 | 77 | mov r2, r0 | |
71 | 1: | 78 | 1: | |
72 | #ifdef _ARM_ARCH_6 | 79 | #ifdef _ARM_ARCH_6 | |
73 | ldrexb r0, [r2] | 80 | ldrexb r0, [r2] | |
74 | strexb r3, r1, [r2] | 81 | strexb r3, r1, [r2] | |
75 | #else | 82 | #else | |
76 | swpb r0, r1, [r2] | 83 | swpb r0, r1, [r2] | |
77 | mov r3, #0 | 84 | mov r3, #0 | |
78 | #endif | 85 | #endif | |
79 | cmp r3, #0 | 86 | cmp r3, #0 | |
80 | bne 1b | 87 | bne 1b | |
88 | #ifdef _ARM_ARCH_7 | |||
89 | dmb | |||
90 | #else | |||
91 | mcr p15, 0, ip, c7, c10, 5 /* data memory barrier */ | |||
92 | #endif | |||
81 | RET | 93 | RET | |
82 | END(_atomic_swap_8) | 94 | END(_atomic_swap_8) | |
83 | ATOMIC_OP_ALIAS(atomic_swap_8,_atomic_swap_8) | 95 | ATOMIC_OP_ALIAS(atomic_swap_8,_atomic_swap_8) | |
84 | ATOMIC_OP_ALIAS(atomic_swap_char,_atomic_swap_8) | 96 | ATOMIC_OP_ALIAS(atomic_swap_char,_atomic_swap_8) | |
85 | ATOMIC_OP_ALIAS(atomic_swap_uchar,_atomic_swap_8) | 97 | ATOMIC_OP_ALIAS(atomic_swap_uchar,_atomic_swap_8) | |
86 | STRONG_ALIAS(_atomic_swap_char,_atomic_swap_8) | 98 | STRONG_ALIAS(_atomic_swap_char,_atomic_swap_8) | |
87 | STRONG_ALIAS(_atomic_swap_uchar,_atomic_swap_8) | 99 | STRONG_ALIAS(_atomic_swap_uchar,_atomic_swap_8) |