/* GCC instrumentation plugin for ThreadSanitizer.
   Copyright (C) 2011-2023 Free Software Foundation, Inc.
   Contributed by Dmitry Vyukov <dvyukov@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "fold-const.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-cfg.h"
#include "tree-iterator.h"
#include "gimple-fold.h"
#include "tree-ssa-loop-ivopts.h"
#include "tree-eh.h"
#include "tsan.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "builtins.h"
#include "target.h"
#include "diagnostic-core.h"

/* Builds the following decl
   void __tsan_read/writeX (void *addr);  */

static tree
get_memory_access_decl (bool is_write, unsigned size, bool volatilep)
{
  enum built_in_function fcode;
  int pos;

  if (size <= 1)
    pos = 0;
  else if (size <= 3)
    pos = 1;
  else if (size <= 7)
    pos = 2;
  else if (size <= 15)
    pos = 3;
  else
    pos = 4;

  if (param_tsan_distinguish_volatile && volatilep)
    fcode = is_write ? BUILT_IN_TSAN_VOLATILE_WRITE1
		     : BUILT_IN_TSAN_VOLATILE_READ1;
  else
    fcode = is_write ? BUILT_IN_TSAN_WRITE1
		     : BUILT_IN_TSAN_READ1;
  fcode = (built_in_function) (fcode + pos);

  return builtin_decl_implicit (fcode);
}
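
/* For illustration (not emitted verbatim): a plain 4-byte read selects
   BUILT_IN_TSAN_READ4, i.e. a call to __tsan_read4 (addr), and a volatile
   8-byte store compiled with --param tsan-distinguish-volatile=1 selects
   BUILT_IN_TSAN_VOLATILE_WRITE8.  The caller (instrument_expr below) only
   passes power-of-two sizes up to 16, so POS picks among the 1-, 2-, 4-,
   8- and 16-byte entry points; everything else goes through the
   __tsan_read_range/__tsan_write_range builtins instead.  */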

/* If EXPR is a store to a vptr (the virtual table pointer of a C++
   object), return the value being stored; otherwise return NULL.  */

static tree
is_vptr_store (gimple *stmt, tree expr, bool is_write)
{
  if (is_write
      && gimple_assign_single_p (stmt)
      && TREE_CODE (expr) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (expr, 1);
      if (TREE_CODE (field) == FIELD_DECL
	  && DECL_VIRTUAL_P (field))
	return gimple_assign_rhs1 (stmt);
    }
  return NULL;
}
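
/* A typical match (sketch, names hypothetical) is the vptr initialization
   in a C++ constructor:
     this->_vptr.C = &_ZTV1C + 16;
   For such stores, instrument_expr emits __tsan_vptr_update with the
   address of the field and the stored value instead of a plain
   __tsan_writeN call.  */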

/* Instruments EXPR if needed.  If any instrumentation is inserted,
   return true.  */

static bool
instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
{
  tree base, rhs, expr_ptr, builtin_decl;
  basic_block bb;
  HOST_WIDE_INT size;
  gimple *stmt, *g;
  gimple_seq seq;
  location_t loc;
  unsigned int align;

  size = int_size_in_bytes (TREE_TYPE (expr));
  if (size <= 0)
    return false;

  poly_int64 unused_bitsize, unused_bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  base = get_inner_reference (expr, &unused_bitsize, &unused_bitpos, &offset,
			      &mode, &unsignedp, &reversep, &volatilep);

  /* There is no need to instrument accesses to decls that don't escape:
     they cannot be reached from other threads.  */
  if (DECL_P (base) && !is_global_var (base))
    {
      struct pt_solution pt;
      memset (&pt, 0, sizeof (pt));
      pt.escaped = 1;
      pt.ipa_escaped = flag_ipa_pta != 0;
      if (!pt_solution_includes (&pt, base))
	return false;
      if (!may_be_aliased (base))
	return false;
    }

  if (TREE_READONLY (base) || (VAR_P (base) && DECL_HARD_REGISTER (base)))
    return false;

  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  rhs = is_vptr_store (stmt, expr, is_write);

  if ((TREE_CODE (expr) == COMPONENT_REF
       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (expr, 1)))
      || TREE_CODE (expr) == BIT_FIELD_REF)
    {
      HOST_WIDE_INT bitpos, bitsize;
      base = TREE_OPERAND (expr, 0);
      if (TREE_CODE (expr) == COMPONENT_REF)
	{
	  expr = TREE_OPERAND (expr, 1);
	  if (is_write && DECL_BIT_FIELD_REPRESENTATIVE (expr))
	    expr = DECL_BIT_FIELD_REPRESENTATIVE (expr);
	  if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (expr))
	      || !tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (expr))
	      || !tree_fits_uhwi_p (DECL_SIZE (expr)))
	    return false;
	  bitpos = tree_to_uhwi (DECL_FIELD_OFFSET (expr)) * BITS_PER_UNIT
		   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (expr));
	  bitsize = tree_to_uhwi (DECL_SIZE (expr));
	}
      else
	{
	  if (!tree_fits_uhwi_p (TREE_OPERAND (expr, 2))
	      || !tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
	    return false;
	  bitpos = tree_to_uhwi (TREE_OPERAND (expr, 2));
	  bitsize = tree_to_uhwi (TREE_OPERAND (expr, 1));
	}
      if (bitpos < 0 || bitsize <= 0)
	return false;
      size = (bitpos % BITS_PER_UNIT + bitsize + BITS_PER_UNIT - 1)
	     / BITS_PER_UNIT;
      if (may_be_nonaddressable_p (base))
	return false;
      align = get_object_alignment (base);
      if (align < BITS_PER_UNIT)
	return false;
      bitpos = bitpos & ~(BITS_PER_UNIT - 1);
      if ((align - 1) & bitpos)
	{
	  align = (align - 1) & bitpos;
	  align = least_bit_hwi (align);
	}
      expr = build_fold_addr_expr (unshare_expr (base));
      expr = build2 (MEM_REF, char_type_node, expr,
		     build_int_cst (TREE_TYPE (expr), bitpos / BITS_PER_UNIT));
      expr_ptr = build_fold_addr_expr (expr);
    }
  else
    {
      if (may_be_nonaddressable_p (expr))
	return false;
      align = get_object_alignment (expr);
      if (align < BITS_PER_UNIT)
	return false;
      expr_ptr = build_fold_addr_expr (unshare_expr (expr));
    }
  expr_ptr = force_gimple_operand (expr_ptr, &seq, true, NULL_TREE);
  if ((size & (size - 1)) != 0 || size > 16
      || align < MIN (size, 8) * BITS_PER_UNIT)
    {
      builtin_decl = builtin_decl_implicit (is_write
					    ? BUILT_IN_TSAN_WRITE_RANGE
					    : BUILT_IN_TSAN_READ_RANGE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, size_int (size));
    }
  else if (rhs == NULL)
    g = gimple_build_call (get_memory_access_decl (is_write, size,
						   TREE_THIS_VOLATILE (expr)),
			   1, expr_ptr);
  else
    {
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_VPTR_UPDATE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, unshare_expr (rhs));
    }
  gimple_set_location (g, loc);
  gimple_seq_add_stmt_without_update (&seq, g);
  /* Instrumentation for a store of a function's result must be inserted
     after the call, while instrumentation for reads of the function's
     arguments must be inserted before the call, because the call itself
     can contain synchronization.  */
  if (is_gimple_call (stmt) && is_write)
    {
      /* If the call can throw, it must be the last stmt in
	 a basic block, so the instrumented stmts need to be
	 inserted in successor bbs.  */
      if (is_ctrl_altering_stmt (stmt))
	{
	  edge e;

	  bb = gsi_bb (gsi);
	  e = find_fallthru_edge (bb->succs);
	  if (e)
	    gsi_insert_seq_on_edge_immediate (e, seq);
	}
      else
	gsi_insert_seq_after (&gsi, seq, GSI_NEW_STMT);
    }
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  return true;
}
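
/* End-to-end sketch of the transformation above (illustrative only):
   for a global `int g;' the store
     g = 1;
   becomes
     __tsan_write4 (&g);
     g = 1;
   while a misaligned or oddly sized access, e.g. a 6-byte struct copy,
   is covered by __tsan_read_range/__tsan_write_range (ptr, 6) instead.  */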

/* Actions for sync/atomic builtin transformations:
   check_last: validate the trailing memory-model argument and redirect
     the call to its tsan equivalent;
   add_seq_cst, add_acquire: append an explicit memory-model argument;
   weak_cas, strong_cas, bool_cas, val_cas: map the compare-and-swap
     flavors onto the tsan compare_exchange entry points;
   lock_release: rewrite as an atomic store of 0 with MEMMODEL_RELEASE;
   fetch_op, fetch_op_seq_cst: map op-and-fetch builtins onto
     fetch-and-op calls and recompute the result;
   bool_clear, bool_test_and_set: handle __atomic_clear and
     __atomic_test_and_set.  */
enum tsan_atomic_action
{
  check_last, add_seq_cst, add_acquire, weak_cas, strong_cas,
  bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst,
  bool_clear, bool_test_and_set
};

/* Table describing how sync/atomic builtins map to their corresponding
   tsan equivalents.  */
static const struct tsan_map_atomic
{
  enum built_in_function fcode, tsan_fcode;
  enum tsan_atomic_action action;
  enum tree_code code;
} tsan_atomic_table[] =
{
#define TRANSFORM(fcode, tsan_fcode, action, code) \
  { BUILT_IN_##fcode, BUILT_IN_##tsan_fcode, action, code }
#define CHECK_LAST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, check_last, ERROR_MARK)
#define ADD_SEQ_CST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_seq_cst, ERROR_MARK)
#define ADD_ACQUIRE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_acquire, ERROR_MARK)
#define WEAK_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, weak_cas, ERROR_MARK)
#define STRONG_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, strong_cas, ERROR_MARK)
#define BOOL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_cas, ERROR_MARK)
#define VAL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, val_cas, ERROR_MARK)
#define LOCK_RELEASE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, lock_release, ERROR_MARK)
#define FETCH_OP(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op, code)
#define FETCH_OPS(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code)
#define BOOL_CLEAR(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_clear, ERROR_MARK)
#define BOOL_TEST_AND_SET(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_test_and_set, ERROR_MARK)

  CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD),
  CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD),
  CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD),
  CHECK_LAST (ATOMIC_LOAD_8, TSAN_ATOMIC64_LOAD),
  CHECK_LAST (ATOMIC_LOAD_16, TSAN_ATOMIC128_LOAD),
  CHECK_LAST (ATOMIC_STORE_1, TSAN_ATOMIC8_STORE),
  CHECK_LAST (ATOMIC_STORE_2, TSAN_ATOMIC16_STORE),
  CHECK_LAST (ATOMIC_STORE_4, TSAN_ATOMIC32_STORE),
  CHECK_LAST (ATOMIC_STORE_8, TSAN_ATOMIC64_STORE),
  CHECK_LAST (ATOMIC_STORE_16, TSAN_ATOMIC128_STORE),
  CHECK_LAST (ATOMIC_EXCHANGE_1, TSAN_ATOMIC8_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_2, TSAN_ATOMIC16_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_4, TSAN_ATOMIC32_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_8, TSAN_ATOMIC64_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_16, TSAN_ATOMIC128_EXCHANGE),
  CHECK_LAST (ATOMIC_FETCH_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_AND_1, TSAN_ATOMIC8_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_2, TSAN_ATOMIC16_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_4, TSAN_ATOMIC32_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_8, TSAN_ATOMIC64_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_16, TSAN_ATOMIC128_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_OR_1, TSAN_ATOMIC8_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_2, TSAN_ATOMIC16_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_4, TSAN_ATOMIC32_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_8, TSAN_ATOMIC64_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_16, TSAN_ATOMIC128_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  CHECK_LAST (ATOMIC_THREAD_FENCE, TSAN_ATOMIC_THREAD_FENCE),
  CHECK_LAST (ATOMIC_SIGNAL_FENCE, TSAN_ATOMIC_SIGNAL_FENCE),

  FETCH_OP (ATOMIC_ADD_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_1, TSAN_ATOMIC8_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_2, TSAN_ATOMIC16_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_4, TSAN_ATOMIC32_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_8, TSAN_ATOMIC64_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_16, TSAN_ATOMIC128_EXCHANGE),

  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_1, TSAN_ATOMIC8_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_2, TSAN_ATOMIC16_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_4, TSAN_ATOMIC32_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_8, TSAN_ATOMIC64_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_16, TSAN_ATOMIC128_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_1, TSAN_ATOMIC8_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_2, TSAN_ATOMIC16_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_4, TSAN_ATOMIC32_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_8, TSAN_ATOMIC64_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_16, TSAN_ATOMIC128_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  ADD_SEQ_CST (SYNC_SYNCHRONIZE, TSAN_ATOMIC_THREAD_FENCE),

  FETCH_OPS (SYNC_ADD_AND_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK),

  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_2,
	      TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_4,
	      TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_8,
	      TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_16,
	      TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_1,
	    TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_2,
	    TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_4,
	    TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_8,
	    TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_16,
	    TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_16,
	   TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  LOCK_RELEASE (SYNC_LOCK_RELEASE_1, TSAN_ATOMIC8_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE),

  BOOL_CLEAR (ATOMIC_CLEAR, TSAN_ATOMIC8_STORE),

  BOOL_TEST_AND_SET (ATOMIC_TEST_AND_SET, TSAN_ATOMIC8_EXCHANGE)
};
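
/* Reading the table (illustrative): __atomic_load_4 (p, mo) is simply
   redirected to __tsan_atomic32_load (p, mo) (a CHECK_LAST entry),
   whereas the legacy __sync_fetch_and_add_4 (p, v) carries no
   memory-model argument, so its ADD_SEQ_CST entry rewrites it as
   __tsan_atomic32_fetch_add (p, v, MEMMODEL_SEQ_CST).  */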

/* Instrument an atomic builtin.  */

static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi), *g;
  tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  unsigned int i, num = gimple_call_num_args (stmt), j;
  for (j = 0; j < 6 && j < num; j++)
    args[j] = gimple_call_arg (stmt, j);
  for (i = 0; i < ARRAY_SIZE (tsan_atomic_table); i++)
    if (fcode != tsan_atomic_table[i].fcode)
      continue;
    else
      {
	if (fcode == BUILT_IN_ATOMIC_THREAD_FENCE)
	  warning_at (gimple_location (stmt), OPT_Wtsan,
		      "%qs is not supported with %qs", "atomic_thread_fence",
		      "-fsanitize=thread");

	tree decl = builtin_decl_implicit (tsan_atomic_table[i].tsan_fcode);
	if (decl == NULL_TREE)
	  return;
	switch (tsan_atomic_table[i].action)
	  {
	  case check_last:
	  case fetch_op:
	    last_arg = gimple_call_arg (stmt, num - 1);
	    if (tree_fits_uhwi_p (last_arg)
		&& memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
	      return;
	    gimple_call_set_fndecl (stmt, decl);
	    update_stmt (stmt);
	    maybe_clean_eh_stmt (stmt);
	    if (tsan_atomic_table[i].action == fetch_op)
	      {
		args[1] = gimple_call_arg (stmt, 1);
		goto adjust_result;
	      }
	    return;
	  case add_seq_cst:
	  case add_acquire:
	  case fetch_op_seq_cst:
	    gcc_assert (num <= 2);
	    for (j = 0; j < num; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    for (; j < 2; j++)
	      args[j] = NULL_TREE;
	    args[num] = build_int_cst (NULL_TREE,
				       tsan_atomic_table[i].action
				       != add_acquire
				       ? MEMMODEL_SEQ_CST
				       : MEMMODEL_ACQUIRE);
	    update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]);
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    stmt = gsi_stmt (*gsi);
	    if (tsan_atomic_table[i].action == fetch_op_seq_cst)
	      {
	      adjust_result:
		lhs = gimple_call_lhs (stmt);
		if (lhs == NULL_TREE)
		  return;
		if (!useless_type_conversion_p (TREE_TYPE (lhs),
						TREE_TYPE (args[1])))
		  {
		    tree var = make_ssa_name (TREE_TYPE (lhs));
		    g = gimple_build_assign (var, NOP_EXPR, args[1]);
		    gsi_insert_after (gsi, g, GSI_NEW_STMT);
		    args[1] = var;
		  }
		gimple_call_set_lhs (stmt, make_ssa_name (TREE_TYPE (lhs)));
		/* BIT_NOT_EXPR stands for NAND.  */
		if (tsan_atomic_table[i].code == BIT_NOT_EXPR)
		  {
		    tree var = make_ssa_name (TREE_TYPE (lhs));
		    g = gimple_build_assign (var, BIT_AND_EXPR,
					     gimple_call_lhs (stmt), args[1]);
		    gsi_insert_after (gsi, g, GSI_NEW_STMT);
		    g = gimple_build_assign (lhs, BIT_NOT_EXPR, var);
		  }
		else
		  g = gimple_build_assign (lhs, tsan_atomic_table[i].code,
					   gimple_call_lhs (stmt), args[1]);
		update_stmt (stmt);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
	      }
	    return;
	  case weak_cas:
	    if (!integer_nonzerop (gimple_call_arg (stmt, 3)))
	      continue;
	    /* FALLTHRU */
	  case strong_cas:
	    gcc_assert (num == 6);
	    for (j = 0; j < 6; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    if (tree_fits_uhwi_p (args[4])
		&& memmodel_base (tree_to_uhwi (args[4])) >= MEMMODEL_LAST)
	      return;
	    if (tree_fits_uhwi_p (args[5])
		&& memmodel_base (tree_to_uhwi (args[5])) >= MEMMODEL_LAST)
	      return;
	    update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
				args[4], args[5]);
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    return;
	  case bool_cas:
	  case val_cas:
	    gcc_assert (num == 3);
	    for (j = 0; j < 3; j++)
	      args[j] = gimple_call_arg (stmt, j);
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (t)));
	    t = create_tmp_var (t);
	    mark_addressable (t);
	    if (!useless_type_conversion_p (TREE_TYPE (t),
					    TREE_TYPE (args[1])))
	      {
		g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)),
					 NOP_EXPR, args[1]);
		gsi_insert_before (gsi, g, GSI_SAME_STMT);
		args[1] = gimple_assign_lhs (g);
	      }
	    g = gimple_build_assign (t, args[1]);
	    gsi_insert_before (gsi, g, GSI_SAME_STMT);
	    lhs = gimple_call_lhs (stmt);
	    update_gimple_call (gsi, decl, 5, args[0],
				build_fold_addr_expr (t), args[2],
				build_int_cst (NULL_TREE,
					       MEMMODEL_SEQ_CST),
				build_int_cst (NULL_TREE,
					       MEMMODEL_SEQ_CST));
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    if (tsan_atomic_table[i].action == val_cas && lhs)
	      {
		stmt = gsi_stmt (*gsi);
		tree t2 = make_ssa_name (TREE_TYPE (t));
		g = gimple_build_assign (t2, t);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
		t = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)), stmt);
		tree cond = make_ssa_name (boolean_type_node);
		g = gimple_build_assign (cond, NE_EXPR,
					 t, build_zero_cst (TREE_TYPE (t)));
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
		g = gimple_build_assign (lhs, COND_EXPR, cond, args[1], t2);
		gimple_call_set_lhs (stmt, t);
		update_stmt (stmt);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
	      }
	    return;
	  case lock_release:
	    gcc_assert (num == 1);
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (t));
	    update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
				build_int_cst (t, 0),
				build_int_cst (NULL_TREE,
					       MEMMODEL_RELEASE));
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    return;
	  case bool_clear:
	  case bool_test_and_set:
	    if (BOOL_TYPE_SIZE != 8)
	      {
		decl = NULL_TREE;
		for (j = 1; j < 5; j++)
		  if (BOOL_TYPE_SIZE == (8 << j))
		    {
		      enum built_in_function tsan_fcode
			= (enum built_in_function)
			  (tsan_atomic_table[i].tsan_fcode + j);
		      decl = builtin_decl_implicit (tsan_fcode);
		      break;
		    }
		if (decl == NULL_TREE)
		  return;
	      }
	    last_arg = gimple_call_arg (stmt, num - 1);
	    if (tree_fits_uhwi_p (last_arg)
		&& memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
	      return;
	    t = TYPE_ARG_TYPES (TREE_TYPE (decl));
	    t = TREE_VALUE (TREE_CHAIN (t));
	    if (tsan_atomic_table[i].action == bool_clear)
	      {
		update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
				    build_int_cst (t, 0), last_arg);
		maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
		return;
	      }
	    t = build_int_cst (t, targetm.atomic_test_and_set_trueval);
	    update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
				t, last_arg);
	    maybe_clean_or_replace_eh_stmt (stmt, gsi_stmt (*gsi));
	    stmt = gsi_stmt (*gsi);
	    lhs = gimple_call_lhs (stmt);
	    if (lhs == NULL_TREE)
	      return;
	    if (targetm.atomic_test_and_set_trueval != 1
		|| !useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (t)))
	      {
		tree new_lhs = make_ssa_name (TREE_TYPE (t));
		gimple_call_set_lhs (stmt, new_lhs);
		if (targetm.atomic_test_and_set_trueval != 1)
		  g = gimple_build_assign (lhs, NE_EXPR, new_lhs,
					   build_int_cst (TREE_TYPE (t), 0));
		else
		  g = gimple_build_assign (lhs, NOP_EXPR, new_lhs);
		gsi_insert_after (gsi, g, GSI_NEW_STMT);
		update_stmt (stmt);
	      }
	    return;
	  default:
	    continue;
	  }
      }
}
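
/* Sketch of the fetch_op adjustment above (illustrative): tsan provides
   only fetch-and-op entry points, so
     res = __atomic_add_fetch_4 (p, v, mo);
   is rewritten as
     tmp = __tsan_atomic32_fetch_add (p, v, mo);
     res = tmp + v;
   and NAND, marked BIT_NOT_EXPR in the table, instead recomputes
   res = ~(tmp & v).  */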

/* Instruments the gimple pointed to by GSI.  Return
   true if func entry/exit should be instrumented.  */

static bool
instrument_gimple (gimple_stmt_iterator *gsi)
{
  gimple *stmt;
  tree rhs, lhs;
  bool instrumented = false;

  stmt = gsi_stmt (*gsi);
  if (is_gimple_call (stmt)
      && (gimple_call_fndecl (stmt)
	  != builtin_decl_implicit (BUILT_IN_TSAN_INIT)))
    {
      /* Any function that contains a call will have its exit
	 instrumented, and a tail call would bypass that
	 __tsan_func_exit call, so force a regular call.  */
      gimple_call_set_tail (as_a <gcall *> (stmt), false);
      if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	instrument_builtin_call (gsi);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && !gimple_clobber_p (stmt))
    {
      if (gimple_store_p (stmt))
	{
	  lhs = gimple_assign_lhs (stmt);
	  instrumented = instrument_expr (*gsi, lhs, true);
	}
      if (gimple_assign_load_p (stmt))
	{
	  rhs = gimple_assign_rhs1 (stmt);
	  instrumented = instrument_expr (*gsi, rhs, false);
	}
    }
  return instrumented;
}

/* Replace TSAN_FUNC_EXIT internal call with function exit tsan builtin.  */

static void
replace_func_exit (gimple *stmt)
{
  tree builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
  gimple *g = gimple_build_call (builtin_decl, 0);
  gimple_set_location (g, cfun->function_end_locus);
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  gsi_replace (&gsi, g, true);
}

/* Instrument function exit.  Used when TSAN_FUNC_EXIT does not exist.  */

static void
instrument_func_exit (void)
{
  location_t loc;
  basic_block exit_bb;
  gimple_stmt_iterator gsi;
  gimple *stmt, *g;
  tree builtin_decl;
  edge e;
  edge_iterator ei;

  /* Find all function exits.  */
  exit_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_RETURN
		  || gimple_call_builtin_p (stmt, BUILT_IN_RETURN));
      loc = gimple_location (stmt);
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
      g = gimple_build_call (builtin_decl, 0);
      gimple_set_location (g, loc);
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
    }
}
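
/* Illustratively, each
     return ...;
   reached above gains a preceding call
     __tsan_func_exit ();
   pairing it with the __tsan_func_entry call emitted by
   instrument_func_entry below.  */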

/* Instruments all interesting memory accesses in the current function.
   Return true if func entry/exit should be instrumented.  */

static bool
instrument_memory_accesses (bool *cfg_changed)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool fentry_exit_instrument = false;
  bool func_exit_seen = false;
  auto_vec<gimple *> tsan_func_exits;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  if (gimple_call_internal_p (stmt, IFN_TSAN_FUNC_EXIT))
	    {
	      if (fentry_exit_instrument)
		replace_func_exit (stmt);
	      else
		tsan_func_exits.safe_push (stmt);
	      func_exit_seen = true;
	    }
	  else
	    fentry_exit_instrument
	      |= (instrument_gimple (&gsi)
		  && param_tsan_instrument_func_entry_exit);
	}
      if (gimple_purge_dead_eh_edges (bb))
	*cfg_changed = true;
    }
  unsigned int i;
  gimple *stmt;
  FOR_EACH_VEC_ELT (tsan_func_exits, i, stmt)
    if (fentry_exit_instrument)
      replace_func_exit (stmt);
    else
      {
	gsi = gsi_for_stmt (stmt);
	gsi_remove (&gsi, true);
      }
  if (fentry_exit_instrument && !func_exit_seen)
    instrument_func_exit ();
  return fentry_exit_instrument;
}

/* Instruments function entry.  */

static void
instrument_func_entry (void)
{
  tree ret_addr, builtin_decl;
  gimple *g;
  gimple_seq seq = NULL;

  builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  g = gimple_build_call (builtin_decl, 1, integer_zero_node);
  ret_addr = make_ssa_name (ptr_type_node);
  gimple_call_set_lhs (g, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_ENTRY);
  g = gimple_build_call (builtin_decl, 1, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gsi_insert_seq_on_edge_immediate (e, seq);
}
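
/* The emitted prologue is, in effect:
     ret_addr_N = __builtin_return_address (0);
     __tsan_func_entry (ret_addr_N);
   (ret_addr_N being a fresh SSA name), inserted on the single edge
   leaving the entry block.  */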

/* ThreadSanitizer instrumentation pass.  */

static unsigned
tsan_pass (void)
{
  initialize_sanitizer_builtins ();
  bool cfg_changed = false;
  if (instrument_memory_accesses (&cfg_changed))
    instrument_func_entry ();
  return cfg_changed ? TODO_cleanup_cfg : 0;
}

/* Inserts __tsan_init () into the list of CTORs.  */

void
tsan_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  initialize_sanitizer_builtins ();
  tree init_decl = builtin_decl_implicit (BUILT_IN_TSAN_INIT);
  append_to_statement_list (build_call_expr (init_decl, 0),
			    &ctor_statements);
  cgraph_build_static_cdtor ('I', ctor_statements,
			     MAX_RESERVED_INIT_PRIORITY - 1);
}
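
/* The constructor built here is roughly equivalent to (tsan_ctor being
   a hypothetical name):
     __attribute__ ((constructor)) static void tsan_ctor (void)
     { __tsan_init (); }
   registered just below the highest reserved initialization priority so
   the tsan runtime is up before ordinary constructors run.  */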

/* The pass descriptor.  */

namespace {

const pass_data pass_data_tsan =
{
  GIMPLE_PASS, /* type */
  "tsan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan : public gimple_opt_pass
{
public:
  pass_tsan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_tsan (m_ctxt); }
  bool gate (function *) final override
  {
    return sanitize_flags_p (SANITIZE_THREAD);
  }

  unsigned int execute (function *) final override { return tsan_pass (); }

}; // class pass_tsan

} // anon namespace

gimple_opt_pass *
make_pass_tsan (gcc::context *ctxt)
{
  return new pass_tsan (ctxt);
}

namespace {

const pass_data pass_data_tsan_O0 =
{
  GIMPLE_PASS, /* type */
  "tsan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan_O0 : public gimple_opt_pass
{
public:
  pass_tsan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan_O0, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
  {
    return (sanitize_flags_p (SANITIZE_THREAD) && !optimize);
  }

  unsigned int execute (function *) final override { return tsan_pass (); }

}; // class pass_tsan_O0

} // anon namespace

gimple_opt_pass *
make_pass_tsan_O0 (gcc::context *ctxt)
{
  return new pass_tsan_O0 (ctxt);
}