1 | /* SPDX-License-Identifier: GPL-2.0 */ |
2 | #ifndef _ASM_X86_FUTEX_H |
3 | #define _ASM_X86_FUTEX_H |
4 | |
5 | #ifdef __KERNEL__ |
6 | |
7 | #include <linux/futex.h> |
8 | #include <linux/uaccess.h> |
9 | |
10 | #include <asm/asm.h> |
11 | #include <asm/errno.h> |
12 | #include <asm/processor.h> |
13 | #include <asm/smap.h> |
14 | |
/*
 * unsafe_atomic_op1() - perform a futex op that maps to a single x86
 * instruction on the user word *@uaddr.
 *
 * @insn must be one instruction that combines the value in %0 (preloaded
 * with @oparg via the "0" matching constraint) with *@uaddr (%2) and
 * leaves the word's previous value in %0 — e.g. "xchgl" or a locked
 * "xaddl".
 *
 * The _ASM_EXTABLE_TYPE_REG entry makes a fault on the user access
 * resume at the "2:" label with %1 (ret) set to -EFAULT; in that case
 * we branch to @label. On success (ret still 0, preloaded via "1" (0))
 * the old value is stored through @oval.
 *
 * NOTE(review): caller is expected to hold a user_access_begin()
 * section open, as arch_futex_atomic_op_inuser() below does.
 */
#define unsafe_atomic_op1(insn, oval, uaddr, oparg, label)	\
do {								\
	int oldval = 0, ret;					\
	asm volatile("1:\t" insn "\n"				\
		     "2:\n"					\
		     _ASM_EXTABLE_TYPE_REG(1b, 2b, EX_TYPE_EFAULT_REG, %1) \
		     : "=r" (oldval), "=r" (ret), "+m" (*uaddr)	\
		     : "0" (oparg), "1" (0));			\
	if (ret)						\
		goto label;					\
	*oval = oldval;						\
} while(0)
27 | |
28 | |
/*
 * unsafe_atomic_op2() - perform a futex op with no single-instruction
 * form (or/andn/xor) via a cmpxchg retry loop on the user word *@uaddr.
 *
 * Loop structure:
 *   1: load the current user value into %0 (forced into %eax by "=&a",
 *      as required by cmpxchg),
 *   2: copy it into the scratch register %3 (tem) and apply @insn,
 *      which combines %4 (@oparg) into %3,
 *   3: LOCK cmpxchgl the new value back; if another task modified the
 *      word since the load, ZF is clear and we retry from 2:.
 *
 * A fault at either user access — the load (1:) or the cmpxchg (3:) —
 * resumes at 4: with %1 (ret) set to -EFAULT by the extable fixup, in
 * which case we branch to @label. On success the value observed before
 * the update is stored through @oval.
 *
 * NOTE(review): like unsafe_atomic_op1(), this must run inside a
 * user_access_begin()/user_access_end() section.
 */
#define unsafe_atomic_op2(insn, oval, uaddr, oparg, label)	\
do {								\
	int oldval = 0, ret, tem;				\
	asm volatile("1:\tmovl	%2, %0\n"			\
		     "2:\tmovl\t%0, %3\n"			\
		     "\t" insn "\n"				\
		     "3:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"	\
		     "\tjnz\t2b\n"				\
		     "4:\n"					\
		     _ASM_EXTABLE_TYPE_REG(1b, 4b, EX_TYPE_EFAULT_REG, %1) \
		     _ASM_EXTABLE_TYPE_REG(3b, 4b, EX_TYPE_EFAULT_REG, %1) \
		     : "=&a" (oldval), "=&r" (ret),		\
		       "+m" (*uaddr), "=&r" (tem)		\
		     : "r" (oparg), "1" (0));			\
	if (ret)						\
		goto label;					\
	*oval = oldval;						\
} while(0)
47 | |
/*
 * arch_futex_atomic_op_inuser() - atomically apply a FUTEX_OP_* to the
 * user-space futex word @uaddr and return its previous value in *@oval.
 *
 * @op:    one of FUTEX_OP_{SET,ADD,OR,ANDN,XOR}
 * @oparg: operand for @op (inverted here for ANDN so plain "andl" works)
 * @oval:  out: value of the word before the operation
 * @uaddr: user-space address of the futex word
 *
 * Returns 0 on success, -EFAULT if the user access faults (via the
 * Efault label the unsafe_* macros branch to), or -ENOSYS for an
 * unknown @op. Every exit path closes the user_access_begin() section.
 *
 * SET and ADD use single instructions (xchgl is implicitly locked;
 * xaddl needs LOCK_PREFIX); OR/ANDN/XOR go through the cmpxchg loop.
 */
static __always_inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
		u32 __user *uaddr)
{
	if (!user_access_begin(uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
		unsafe_atomic_op1("xchgl %0, %2", oval, uaddr, oparg, Efault);
		break;
	case FUTEX_OP_ADD:
		unsafe_atomic_op1(LOCK_PREFIX "xaddl %0, %2", oval,
				   uaddr, oparg, Efault);
		break;
	case FUTEX_OP_OR:
		unsafe_atomic_op2("orl %4, %3", oval, uaddr, oparg, Efault);
		break;
	case FUTEX_OP_ANDN:
		/* and with the complement implements "and-not" */
		unsafe_atomic_op2("andl %4, %3", oval, uaddr, ~oparg, Efault);
		break;
	case FUTEX_OP_XOR:
		unsafe_atomic_op2("xorl %4, %3", oval, uaddr, oparg, Efault);
		break;
	default:
		user_access_end();
		return -ENOSYS;
	}
	user_access_end();
	return 0;
Efault:
	user_access_end();
	return -EFAULT;
}
81 | |
/*
 * futex_atomic_cmpxchg_inatomic() - compare-and-exchange on the
 * user-space futex word @uaddr.
 *
 * Atomically: if *@uaddr == @oldval, set it to @newval. The value
 * actually observed in the word is stored through @uval either way.
 *
 * Returns 0 on success (whether or not the exchange happened — callers
 * compare *@uval against their expected value) or -EFAULT if the user
 * access faults.
 *
 * cmpxchg requires the expected value in %eax: the "1" (oldval) input
 * ties it to the "=a" output, which receives the observed value. On a
 * fault the extable fixup sets %0 (ret) to -EFAULT and resumes at 2:;
 * *@uval is still written unconditionally afterwards.
 */
static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
						u32 oldval, u32 newval)
{
	int ret = 0;

	if (!user_access_begin(uaddr, sizeof(u32)))
		return -EFAULT;
	asm volatile("\n"
		"1:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"
		"2:\n"
		_ASM_EXTABLE_TYPE_REG(1b, 2b, EX_TYPE_EFAULT_REG, %0) \
		: "+r" (ret), "=a" (oldval), "+m" (*uaddr)
		: "r" (newval), "1" (oldval)
		: "memory"
	);
	user_access_end();
	*uval = oldval;
	return ret;
}
101 | |
102 | #endif |
103 | #endif /* _ASM_X86_FUTEX_H */ |
104 | |