1/* SPDX-License-Identifier: GPL-2.0-or-later */
2/*
3 * atomic64_t for 586+
4 *
5 * Copyright © 2010 Luca Barbieri
6 */
7
8#include <linux/linkage.h>
9#include <asm/alternative.h>
10
/*
 * read64 reg -- atomically load the 64-bit value at (\reg) into %edx:%eax.
 *
 * Copies %ecx:%ebx into %edx:%eax so that the compare operand equals the
 * "new" operand: cmpxchg8b then either fails and fetches the current value
 * into %edx:%eax, or "succeeds" and stores back the identical bytes.
 * Either way %edx:%eax ends up holding *(\reg) and memory is unchanged.
 *
 * In:       \reg = pointer to the 64-bit value
 * Out:      %edx:%eax = current value
 * Clobbers: flags (%ecx:%ebx are only read; their content is irrelevant)
 */
.macro read64 reg
	movl %ebx, %eax
	movl %ecx, %edx
/* we need LOCK_PREFIX since otherwise cmpxchg8b always does the write */
	LOCK_PREFIX
	cmpxchg8b (\reg)
.endm
18
/*
 * s64 atomic64_read_cx8(const atomic64_t *v)
 *
 * In:       %ecx = pointer v
 * Out:      %edx:%eax = *v, read atomically (see read64)
 * Clobbers: flags
 */
SYM_FUNC_START(atomic64_read_cx8)
	read64 %ecx
	RET
SYM_FUNC_END(atomic64_read_cx8)
23
/*
 * void atomic64_set_cx8(atomic64_t *v, s64 n)
 *
 * In:       %esi = pointer v
 *           %ecx:%ebx = new value (high:low) -- cmpxchg8b stores %ecx:%ebx
 * Out:      *v = new value
 * Clobbers: %eax, %edx, flags
 *
 * %edx:%eax initially hold a stale guess, so the first cmpxchg8b normally
 * fails and loads the current value; the retry then succeeds unless
 * another CPU races in between.
 */
SYM_FUNC_START(atomic64_set_cx8)
1:
/* we don't need LOCK_PREFIX since aligned 64-bit writes
 * are atomic on 586 and newer */
	cmpxchg8b (%esi)
	jne 1b

	RET
SYM_FUNC_END(atomic64_set_cx8)
33
/*
 * s64 atomic64_xchg_cx8(atomic64_t *v, s64 n)
 *
 * In:       %esi = pointer v
 *           %ecx:%ebx = new value (high:low)
 *           %edx:%eax = caller's guess of the current value
 * Out:      *v = new value, %edx:%eax = previous value
 * Clobbers: flags
 *
 * Loops until the locked cmpxchg8b succeeds; a failed attempt reloads the
 * current value into %edx:%eax for the next try.
 */
SYM_FUNC_START(atomic64_xchg_cx8)
1:
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

	RET
SYM_FUNC_END(atomic64_xchg_cx8)
42
/*
 * addsub_return func ins insc -- emit atomic64_<func>_return_cx8:
 *
 *     s64 atomic64_<func>_return_cx8(s64 delta, atomic64_t *v)
 *
 * \ins / \insc are the low/high-word instruction stems (add/adc or
 * sub/sbb; the macro appends the 'l' size suffix).
 *
 * In:       %edx:%eax = delta (high:low), %ecx = pointer v
 * Out:      %edx:%eax = resulting value of *v
 */
.macro addsub_return func ins insc
SYM_FUNC_START(atomic64_\func\()_return_cx8)
	pushl %ebp
	pushl %ebx
	pushl %esi
	pushl %edi

	/* Free %edx:%eax/%ecx:%ebx for the cmpxchg8b loop. */
	movl %eax, %esi			/* %esi = delta low */
	movl %edx, %edi			/* %edi = delta high */
	movl %ecx, %ebp			/* %ebp = v */

	read64 %ecx			/* %edx:%eax = current *v */
1:
	movl %eax, %ebx
	movl %edx, %ecx
	\ins\()l %esi, %ebx		/* %ecx:%ebx = old value op delta */
	\insc\()l %edi, %ecx
	LOCK_PREFIX
	cmpxchg8b (%ebp)
	jne 1b				/* raced: %edx:%eax reloaded, retry */

/* NOTE(review): label 10 looks unused within this view -- possibly a
 * leftover from an older alternative-patching scheme; confirm before
 * removing. */
10:
	movl %ebx, %eax			/* return the value just stored */
	movl %ecx, %edx
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	RET
SYM_FUNC_END(atomic64_\func\()_return_cx8)
.endm

addsub_return add add adc
addsub_return sub sub sbb
77
/*
 * incdec_return func ins insc -- emit atomic64_<func>_return_cx8:
 *
 *     s64 atomic64_<func>_return_cx8(atomic64_t *v)
 *
 * \ins / \insc are the low/high-word instruction stems (add/adc for inc,
 * sub/sbb for dec), applied with immediates 1 and 0 so the carry/borrow
 * propagates into the high word.
 *
 * In:       %esi = pointer v
 * Out:      %edx:%eax = value of *v after the inc/dec
 * Clobbers: %ecx, flags
 */
.macro incdec_return func ins insc
SYM_FUNC_START(atomic64_\func\()_return_cx8)
	pushl %ebx

	read64 %esi			/* %edx:%eax = current *v */
1:
	movl %eax, %ebx
	movl %edx, %ecx
	\ins\()l $1, %ebx		/* %ecx:%ebx = old value +/- 1 */
	\insc\()l $0, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b				/* raced: %edx:%eax reloaded, retry */

/* NOTE(review): label 10 looks unused within this view -- possibly a
 * leftover from an older alternative-patching scheme; confirm before
 * removing. */
10:
	movl %ebx, %eax			/* return the value just stored */
	movl %ecx, %edx
	popl %ebx
	RET
SYM_FUNC_END(atomic64_\func\()_return_cx8)
.endm

incdec_return inc add adc
incdec_return dec sub sbb
102
/*
 * s64 atomic64_dec_if_positive_cx8(atomic64_t *v)
 *
 * Atomically decrement *v only if the result stays non-negative.
 *
 * In:       %esi = pointer v
 * Out:      %edx:%eax = *v - 1; the store happens only when that result
 *           is >= 0, but the (possibly negative) candidate value is
 *           returned either way
 * Clobbers: %ecx, flags
 *
 * Fix: spell the high-word borrow as "sbbl" with an explicit operand-size
 * suffix, matching the rest of the file (subl/adcl and the macro-generated
 * sbbl). Encoding and behavior are unchanged -- gas infers the 32-bit size
 * from %ecx -- this is a consistency cleanup only.
 */
SYM_FUNC_START(atomic64_dec_if_positive_cx8)
	pushl %ebx

	read64 %esi			/* %edx:%eax = current *v */
1:
	movl %eax, %ebx
	movl %edx, %ecx
	subl $1, %ebx			/* %ecx:%ebx = old value - 1 */
	sbbl $0, %ecx
	js 2f				/* result negative: skip the store */
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b				/* raced: %edx:%eax reloaded, retry */

2:
	movl %ebx, %eax			/* return the decremented value */
	movl %ecx, %edx
	popl %ebx
	RET
SYM_FUNC_END(atomic64_dec_if_positive_cx8)
123
/*
 * int atomic64_add_unless_cx8(atomic64_t *v, s64 a, s64 u)
 *
 * Add a to *v, unless *v currently equals u.
 *
 * In:       %esi = pointer v
 *           %edx:%eax = a (high:low)
 *           %edi:%ecx = u (high:low) -- inferred from the stack compares
 *                       below; confirm against the C-side constraints
 * Out:      %eax = 1 if the add was performed, 0 if *v == u
 * Clobbers: %edx, %ecx, flags
 */
SYM_FUNC_START(atomic64_add_unless_cx8)
	pushl %ebp
	pushl %ebx
/* these just push these two parameters on the stack */
	pushl %edi
	pushl %ecx
	/* From here: 0(%esp) = u low, 4(%esp) = u high. */

	movl %eax, %ebp			/* %ebp = a low */
	movl %edx, %edi			/* %edi = a high (old %edi is on stack) */

	read64 %esi			/* %edx:%eax = current *v */
1:
	cmpl %eax, 0(%esp)		/* low words equal? then check high at 4: */
	je 4f
2:
	movl %eax, %ebx
	movl %edx, %ecx
	addl %ebp, %ebx			/* %ecx:%ebx = old value + a */
	adcl %edi, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b				/* raced: %edx:%eax reloaded, retry */

	movl $1, %eax			/* added: return 1 */
3:
	addl $8, %esp			/* drop the two pushed u words */
	popl %ebx
	popl %ebp
	RET
4:
	cmpl %edx, 4(%esp)		/* high words too? */
	jne 2b				/* no: only low matched, do the add */
	xorl %eax, %eax			/* *v == u: return 0 */
	jmp 3b
SYM_FUNC_END(atomic64_add_unless_cx8)
159
/*
 * int atomic64_inc_not_zero_cx8(atomic64_t *v)
 *
 * Increment *v unless it is zero.
 *
 * In:       %esi = pointer v
 * Out:      %eax = 1 if incremented, 0 if *v was zero
 * Clobbers: %edx, %ecx, flags
 */
SYM_FUNC_START(atomic64_inc_not_zero_cx8)
	pushl %ebx

	read64 %esi			/* %edx:%eax = current *v */
1:
	movl %eax, %ecx
	orl %edx, %ecx			/* 64-bit value zero? */
	jz 3f				/* yes: return 0 (%eax is 0 here) */
	movl %eax, %ebx
	xorl %ecx, %ecx
	addl $1, %ebx			/* %ecx:%ebx = old value + 1 ... */
	adcl %edx, %ecx			/* ... high word = %edx + carry */
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b				/* raced: %edx:%eax reloaded, retry */

	movl $1, %eax			/* incremented: return 1 */
3:
	popl %ebx
	RET
SYM_FUNC_END(atomic64_inc_not_zero_cx8)
181

/* Source: linux/arch/x86/lib/atomic64_cx8_32.S -- atomic64_t primitives for x86-32 CPUs with cmpxchg8b (586+). */