/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_CMPXCHG_64_H
#define _ASM_X86_CMPXCHG_64_H

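/*
 * On x86-64 the word-size cmpxchg primitives already handle 8-byte
 * operands, so the 64-bit variants below only assert the operand size
 * at build time and forward to the generic implementations.
 */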
#define arch_cmpxchg64(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg((ptr), (o), (n));					\
})

#define arch_cmpxchg64_local(ptr, o, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_cmpxchg_local((ptr), (o), (n));				\
})

#define arch_try_cmpxchg64(ptr, po, n)					\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_try_cmpxchg((ptr), (po), (n));				\
})

#define arch_try_cmpxchg64_local(ptr, po, n)				\
({									\
	BUILD_BUG_ON(sizeof(*(ptr)) != 8);				\
	arch_try_cmpxchg_local((ptr), (po), (n));			\
})

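/*
 * CMPXCHG16B compares and exchanges a 16-byte memory operand, but takes
 * the expected and replacement values split across the RDX:RAX and
 * RCX:RBX register pairs.  This union gives access to the two 64-bit
 * halves of a u128 so they can be wired up to those pairs.
 */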
union __u128_halves {
	u128 full;
	struct {
		u64 low, high;
	};
};

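/*
 * Core 16-byte cmpxchg: the CPU compares RDX:RAX ("+a"/"+d") against
 * *_ptr and, if they match, stores RCX:RBX ("b"/"c").  On a mismatch it
 * instead loads the current memory contents into RDX:RAX, so o.full
 * always ends up holding the value that was found in memory.
 */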
#define __arch_cmpxchg128(_ptr, _old, _new, _lock)			\
({									\
	union __u128_halves o = { .full = (_old), },			\
			    n = { .full = (_new), };			\
									\
	asm_inline volatile(_lock "cmpxchg16b %[ptr]"			\
			    : [ptr] "+m" (*(_ptr)),			\
			      "+a" (o.low), "+d" (o.high)		\
			    : "b" (n.low), "c" (n.high)			\
			    : "memory");				\
									\
	o.full;								\
})

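/*
 * SMP-safe variant: LOCK_PREFIX expands to the LOCK prefix, which the
 * alternatives machinery patches out on uniprocessor kernels.
 */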
static __always_inline u128 arch_cmpxchg128(volatile u128 *ptr, u128 old, u128 new)
{
	return __arch_cmpxchg128(ptr, old, new, LOCK_PREFIX);
}
#define arch_cmpxchg128 arch_cmpxchg128

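/*
 * Local (not SMP-safe) variant: note the deliberately empty _lock
 * argument, which drops the LOCK prefix entirely.
 */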
static __always_inline u128 arch_cmpxchg128_local(volatile u128 *ptr, u128 old, u128 new)
{
	return __arch_cmpxchg128(ptr, old, new,);
}
#define arch_cmpxchg128_local arch_cmpxchg128_local

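/*
 * try_cmpxchg flavour: CMPXCHG16B sets ZF on success, which CC_SET()/
 * CC_OUT() turn into the boolean result.  On failure the value observed
 * in memory is written back through _oldp, which is exactly what a
 * cmpxchg retry loop wants.
 */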
#define __arch_try_cmpxchg128(_ptr, _oldp, _new, _lock)			\
({									\
	union __u128_halves o = { .full = *(_oldp), },			\
			    n = { .full = (_new), };			\
	bool ret;							\
									\
	asm_inline volatile(_lock "cmpxchg16b %[ptr]"			\
			    CC_SET(e)					\
			    : CC_OUT(e) (ret),				\
			      [ptr] "+m" (*(_ptr)),			\
			      "+a" (o.low), "+d" (o.high)		\
			    : "b" (n.low), "c" (n.high)			\
			    : "memory");				\
									\
	if (unlikely(!ret))						\
		*(_oldp) = o.full;					\
									\
	likely(ret);							\
})

static __always_inline bool arch_try_cmpxchg128(volatile u128 *ptr, u128 *oldp, u128 new)
{
	return __arch_try_cmpxchg128(ptr, oldp, new, LOCK_PREFIX);
}
#define arch_try_cmpxchg128 arch_try_cmpxchg128
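/*
 * Usage sketch for arch_try_cmpxchg128() (hypothetical caller, not part
 * of this header): because failure refreshes 'old' from memory, a
 * lock-free update loop does not need to re-read *ptr by hand:
 *
 *	u128 old = *ptr, new;
 *
 *	do {
 *		new = old + 1;
 *	} while (!arch_try_cmpxchg128(ptr, &old, new));
 */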

static __always_inline bool arch_try_cmpxchg128_local(volatile u128 *ptr, u128 *oldp, u128 new)
{
	return __arch_try_cmpxchg128(ptr, oldp, new,);
}
#define arch_try_cmpxchg128_local arch_try_cmpxchg128_local

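/*
 * CMPXCHG16B is an optional x86-64 extension (missing on the earliest
 * AMD64 CPUs), so its availability is reported via the CX16 CPUID bit.
 */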
#define system_has_cmpxchg128()		boot_cpu_has(X86_FEATURE_CX16)

#endif /* _ASM_X86_CMPXCHG_64_H */