/* GCC instrumentation plugin for ThreadSanitizer.
   Copyright (C) 2011-2015 Free Software Foundation, Inc.
   Contributed by Dmitry Vyukov <dvyukov@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alias.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "rtl.h"
#include "ssa.h"
#include "options.h"
#include "fold-const.h"
#include "flags.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "intl.h"
#include "internal-fn.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "tree-pass.h"
#include "tree-iterator.h"
#include "langhooks.h"
#include "output.h"
#include "target.h"
#include "diagnostic.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-loop-ivopts.h"
#include "tsan.h"
#include "asan.h"
#include "builtins.h"

/* Number of instrumented memory accesses in the current function.  */

/* Builds the following decl
   void __tsan_read/writeX (void *addr);  */
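/* Note: as used by instrument_expr below, only power-of-two sizes up to 16
   reach this helper (other sizes are routed to the *_range builtins), so the
   <= comparisons merely make it tolerant of in-between sizes.  */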

static tree
get_memory_access_decl (bool is_write, unsigned size)
{
  enum built_in_function fcode;

  if (size <= 1)
    fcode = is_write ? BUILT_IN_TSAN_WRITE1
                     : BUILT_IN_TSAN_READ1;
  else if (size <= 3)
    fcode = is_write ? BUILT_IN_TSAN_WRITE2
                     : BUILT_IN_TSAN_READ2;
  else if (size <= 7)
    fcode = is_write ? BUILT_IN_TSAN_WRITE4
                     : BUILT_IN_TSAN_READ4;
  else if (size <= 15)
    fcode = is_write ? BUILT_IN_TSAN_WRITE8
                     : BUILT_IN_TSAN_READ8;
  else
    fcode = is_write ? BUILT_IN_TSAN_WRITE16
                     : BUILT_IN_TSAN_READ16;

  return builtin_decl_implicit (fcode);
}

/* Check whether EXPR is a store to a vptr.  If so, return the value
   being stored, otherwise return NULL.  */

static tree
is_vptr_store (gimple *stmt, tree expr, bool is_write)
{
  if (is_write == true
      && gimple_assign_single_p (stmt)
      && TREE_CODE (expr) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (expr, 1);
      if (TREE_CODE (field) == FIELD_DECL
          && DECL_VIRTUAL_P (field))
        return gimple_assign_rhs1 (stmt);
    }
  return NULL;
}
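
/* For example, a C++ constructor typically emits a store along the lines of
   this->_vptr.C = &_ZTV1C + 16; the _vptr field is DECL_VIRTUAL_P, so the
   stored value (&_ZTV1C + 16) is returned here and later passed to
   __tsan_vptr_update.  */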

/* Instruments EXPR if needed. If any instrumentation is inserted,
   return true.  */

static bool
instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
{
  tree base, rhs, expr_ptr, builtin_decl;
  basic_block bb;
  HOST_WIDE_INT size;
  gimple *stmt, *g;
  gimple_seq seq;
  location_t loc;
  unsigned int align;

  size = int_size_in_bytes (TREE_TYPE (expr));
  if (size <= 0)
    return false;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  base = get_inner_reference (expr, &bitsize, &bitpos, &offset,
                              &mode, &unsignedp, &volatilep, false);

  /* No need to instrument accesses to decls that don't escape;
     they cannot be accessed from other threads.  */
  if (DECL_P (base) && !is_global_var (base))
    {
      struct pt_solution pt;
      memset (&pt, 0, sizeof (pt));
      pt.escaped = 1;
      pt.ipa_escaped = flag_ipa_pta != 0;
      if (!pt_solution_includes (&pt, base))
        return false;
      if (!may_be_aliased (base))
        return false;
    }

  if (TREE_READONLY (base)
      || (TREE_CODE (base) == VAR_DECL
          && DECL_HARD_REGISTER (base)))
    return false;

  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  rhs = is_vptr_store (stmt, expr, is_write);

  if ((TREE_CODE (expr) == COMPONENT_REF
       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (expr, 1)))
      || TREE_CODE (expr) == BIT_FIELD_REF)
    {
      base = TREE_OPERAND (expr, 0);
      if (TREE_CODE (expr) == COMPONENT_REF)
        {
          expr = TREE_OPERAND (expr, 1);
          if (is_write && DECL_BIT_FIELD_REPRESENTATIVE (expr))
            expr = DECL_BIT_FIELD_REPRESENTATIVE (expr);
          if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (expr))
              || !tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (expr))
              || !tree_fits_uhwi_p (DECL_SIZE (expr)))
            return false;
          bitpos = tree_to_uhwi (DECL_FIELD_OFFSET (expr)) * BITS_PER_UNIT
                   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (expr));
          bitsize = tree_to_uhwi (DECL_SIZE (expr));
        }
      else
        {
          if (!tree_fits_uhwi_p (TREE_OPERAND (expr, 2))
              || !tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
            return false;
          bitpos = tree_to_uhwi (TREE_OPERAND (expr, 2));
          bitsize = tree_to_uhwi (TREE_OPERAND (expr, 1));
        }
      if (bitpos < 0 || bitsize <= 0)
        return false;
      size = (bitpos % BITS_PER_UNIT + bitsize + BITS_PER_UNIT - 1)
             / BITS_PER_UNIT;
      if (may_be_nonaddressable_p (base))
        return false;
      align = get_object_alignment (base);
      if (align < BITS_PER_UNIT)
        return false;
      bitpos = bitpos & ~(BITS_PER_UNIT - 1);
      if ((align - 1) & bitpos)
        {
          align = (align - 1) & bitpos;
          align = align & -align;
        }
      expr = build_fold_addr_expr (unshare_expr (base));
      expr = build2 (MEM_REF, char_type_node, expr,
                     build_int_cst (TREE_TYPE (expr), bitpos / BITS_PER_UNIT));
      expr_ptr = build_fold_addr_expr (expr);
    }
  else
    {
      if (may_be_nonaddressable_p (expr))
        return false;
      align = get_object_alignment (expr);
      if (align < BITS_PER_UNIT)
        return false;
      expr_ptr = build_fold_addr_expr (unshare_expr (expr));
    }
  expr_ptr = force_gimple_operand (expr_ptr, &seq, true, NULL_TREE);
  if ((size & (size - 1)) != 0 || size > 16
      || align < MIN (size, 8) * BITS_PER_UNIT)
    {
      builtin_decl = builtin_decl_implicit (is_write
                                            ? BUILT_IN_TSAN_WRITE_RANGE
                                            : BUILT_IN_TSAN_READ_RANGE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, size_int (size));
    }
  else if (rhs == NULL)
    g = gimple_build_call (get_memory_access_decl (is_write, size),
                           1, expr_ptr);
  else
    {
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_VPTR_UPDATE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, unshare_expr (rhs));
    }
  gimple_set_location (g, loc);
  gimple_seq_add_stmt_without_update (&seq, g);
  /* Instrumentation for assignment of a function result
     must be inserted after the call.  Instrumentation for
     reads of function arguments must be inserted before the call.
     That's because the call can contain synchronization.  */
  if (is_gimple_call (stmt) && is_write)
    {
      /* If the call can throw, it must be the last stmt in
         a basic block, so the instrumented stmts need to be
         inserted in successor bbs.  */
      if (is_ctrl_altering_stmt (stmt))
        {
          edge e;

          bb = gsi_bb (gsi);
          e = find_fallthru_edge (bb->succs);
          if (e)
            gsi_insert_seq_on_edge_immediate (e, seq);
        }
      else
        gsi_insert_seq_after (&gsi, seq, GSI_NEW_STMT);
    }
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  return true;
}
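
/* As a rough sketch, for an escaping int field the store x.f = 1 becomes

     __tsan_write4 (&x.f);
     x.f = 1;

   while unaligned, over-long or non-power-of-two accesses are reported
   through __tsan_read_range/__tsan_write_range instead.  */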

/* Actions for sync/atomic builtin transformations.  */
enum tsan_atomic_action
{
  check_last, add_seq_cst, add_acquire, weak_cas, strong_cas,
  bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst
};
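
/* Roughly: check_last validates the trailing memory model argument and just
   swaps in the tsan callee; add_seq_cst/add_acquire append a memory model
   argument to a __sync_* builtin; weak_cas/strong_cas/bool_cas/val_cas map
   the various compare-and-swap builtins onto the tsan compare_exchange entry
   points; lock_release becomes an atomic store of 0 with MEMMODEL_RELEASE;
   fetch_op/fetch_op_seq_cst rewrite an op-and-fetch builtin as fetch-and-op
   and recompute the result afterwards.  */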

/* Table describing how sync/atomic builtins map to their corresponding
   tsan equivalents.  */
static const struct tsan_map_atomic
{
  enum built_in_function fcode, tsan_fcode;
  enum tsan_atomic_action action;
  enum tree_code code;
} tsan_atomic_table[] =
{
#define TRANSFORM(fcode, tsan_fcode, action, code) \
  { BUILT_IN_##fcode, BUILT_IN_##tsan_fcode, action, code }
#define CHECK_LAST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, check_last, ERROR_MARK)
#define ADD_SEQ_CST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_seq_cst, ERROR_MARK)
#define ADD_ACQUIRE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_acquire, ERROR_MARK)
#define WEAK_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, weak_cas, ERROR_MARK)
#define STRONG_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, strong_cas, ERROR_MARK)
#define BOOL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_cas, ERROR_MARK)
#define VAL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, val_cas, ERROR_MARK)
#define LOCK_RELEASE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, lock_release, ERROR_MARK)
#define FETCH_OP(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op, code)
#define FETCH_OPS(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code)

  CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD),
  CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD),
  CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD),
  CHECK_LAST (ATOMIC_LOAD_8, TSAN_ATOMIC64_LOAD),
  CHECK_LAST (ATOMIC_LOAD_16, TSAN_ATOMIC128_LOAD),
  CHECK_LAST (ATOMIC_STORE_1, TSAN_ATOMIC8_STORE),
  CHECK_LAST (ATOMIC_STORE_2, TSAN_ATOMIC16_STORE),
  CHECK_LAST (ATOMIC_STORE_4, TSAN_ATOMIC32_STORE),
  CHECK_LAST (ATOMIC_STORE_8, TSAN_ATOMIC64_STORE),
  CHECK_LAST (ATOMIC_STORE_16, TSAN_ATOMIC128_STORE),
  CHECK_LAST (ATOMIC_EXCHANGE_1, TSAN_ATOMIC8_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_2, TSAN_ATOMIC16_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_4, TSAN_ATOMIC32_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_8, TSAN_ATOMIC64_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_16, TSAN_ATOMIC128_EXCHANGE),
  CHECK_LAST (ATOMIC_FETCH_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_AND_1, TSAN_ATOMIC8_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_2, TSAN_ATOMIC16_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_4, TSAN_ATOMIC32_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_8, TSAN_ATOMIC64_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_16, TSAN_ATOMIC128_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_OR_1, TSAN_ATOMIC8_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_2, TSAN_ATOMIC16_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_4, TSAN_ATOMIC32_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_8, TSAN_ATOMIC64_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_16, TSAN_ATOMIC128_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  CHECK_LAST (ATOMIC_THREAD_FENCE, TSAN_ATOMIC_THREAD_FENCE),
  CHECK_LAST (ATOMIC_SIGNAL_FENCE, TSAN_ATOMIC_SIGNAL_FENCE),

  FETCH_OP (ATOMIC_ADD_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_1, TSAN_ATOMIC8_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_2, TSAN_ATOMIC16_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_4, TSAN_ATOMIC32_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_8, TSAN_ATOMIC64_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_16, TSAN_ATOMIC128_EXCHANGE),

  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_1, TSAN_ATOMIC8_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_2, TSAN_ATOMIC16_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_4, TSAN_ATOMIC32_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_8, TSAN_ATOMIC64_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_16, TSAN_ATOMIC128_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_1, TSAN_ATOMIC8_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_2, TSAN_ATOMIC16_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_4, TSAN_ATOMIC32_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_8, TSAN_ATOMIC64_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_16, TSAN_ATOMIC128_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  ADD_SEQ_CST (SYNC_SYNCHRONIZE, TSAN_ATOMIC_THREAD_FENCE),

  FETCH_OPS (SYNC_ADD_AND_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK),

  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_2,
              TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_4,
              TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_8,
              TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_16,
              TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_1,
            TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_2,
            TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_4,
            TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_8,
            TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_16,
            TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_16,
           TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  LOCK_RELEASE (SYNC_LOCK_RELEASE_1, TSAN_ATOMIC8_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE)
};
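
/* For example, __atomic_fetch_add_4 (p, v, model) simply has its callee
   replaced by __tsan_atomic32_fetch_add (a check_last entry), whereas
   __sync_fetch_and_add_4 (p, v) additionally gets MEMMODEL_SEQ_CST appended
   (an add_seq_cst entry).  */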

/* Instrument an atomic builtin.  */

static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi), *g;
  tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  unsigned int i, num = gimple_call_num_args (stmt), j;
  for (j = 0; j < 6 && j < num; j++)
    args[j] = gimple_call_arg (stmt, j);
  for (i = 0; i < ARRAY_SIZE (tsan_atomic_table); i++)
    if (fcode != tsan_atomic_table[i].fcode)
      continue;
    else
      {
        tree decl = builtin_decl_implicit (tsan_atomic_table[i].tsan_fcode);
        if (decl == NULL_TREE)
          return;
        switch (tsan_atomic_table[i].action)
          {
          case check_last:
          case fetch_op:
            last_arg = gimple_call_arg (stmt, num - 1);
            if (!tree_fits_uhwi_p (last_arg)
                || memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
              return;
            gimple_call_set_fndecl (stmt, decl);
            update_stmt (stmt);
            if (tsan_atomic_table[i].action == fetch_op)
              {
                args[1] = gimple_call_arg (stmt, 1);
                goto adjust_result;
              }
            return;
          case add_seq_cst:
          case add_acquire:
          case fetch_op_seq_cst:
            gcc_assert (num <= 2);
            for (j = 0; j < num; j++)
              args[j] = gimple_call_arg (stmt, j);
            for (; j < 2; j++)
              args[j] = NULL_TREE;
            args[num] = build_int_cst (NULL_TREE,
                                       tsan_atomic_table[i].action
                                       != add_acquire
                                       ? MEMMODEL_SEQ_CST
                                       : MEMMODEL_ACQUIRE);
            update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]);
            stmt = gsi_stmt (*gsi);
            if (tsan_atomic_table[i].action == fetch_op_seq_cst)
              {
              adjust_result:
                lhs = gimple_call_lhs (stmt);
                if (lhs == NULL_TREE)
                  return;
                if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                TREE_TYPE (args[1])))
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs));
                    g = gimple_build_assign (var, NOP_EXPR, args[1]);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    args[1] = var;
                  }
                gimple_call_set_lhs (stmt, make_ssa_name (TREE_TYPE (lhs)));
                /* BIT_NOT_EXPR stands for NAND.  */
                if (tsan_atomic_table[i].code == BIT_NOT_EXPR)
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs));
                    g = gimple_build_assign (var, BIT_AND_EXPR,
                                             gimple_call_lhs (stmt), args[1]);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    g = gimple_build_assign (lhs, BIT_NOT_EXPR, var);
                  }
                else
                  g = gimple_build_assign (lhs, tsan_atomic_table[i].code,
                                           gimple_call_lhs (stmt), args[1]);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
          case weak_cas:
            if (!integer_nonzerop (gimple_call_arg (stmt, 3)))
              continue;
            /* FALLTHRU */
          case strong_cas:
            gcc_assert (num == 6);
            for (j = 0; j < 6; j++)
              args[j] = gimple_call_arg (stmt, j);
            if (!tree_fits_uhwi_p (args[4])
                || memmodel_base (tree_to_uhwi (args[4])) >= MEMMODEL_LAST)
              return;
            if (!tree_fits_uhwi_p (args[5])
                || memmodel_base (tree_to_uhwi (args[5])) >= MEMMODEL_LAST)
              return;
            update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
                                args[4], args[5]);
            return;
          case bool_cas:
          case val_cas:
            gcc_assert (num == 3);
            for (j = 0; j < 3; j++)
              args[j] = gimple_call_arg (stmt, j);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (t)));
            t = create_tmp_var (t);
            mark_addressable (t);
            if (!useless_type_conversion_p (TREE_TYPE (t),
                                            TREE_TYPE (args[1])))
              {
                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)),
                                         NOP_EXPR, args[1]);
                gsi_insert_before (gsi, g, GSI_SAME_STMT);
                args[1] = gimple_assign_lhs (g);
              }
            g = gimple_build_assign (t, args[1]);
            gsi_insert_before (gsi, g, GSI_SAME_STMT);
            lhs = gimple_call_lhs (stmt);
            update_gimple_call (gsi, decl, 5, args[0],
                                build_fold_addr_expr (t), args[2],
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST));
            if (tsan_atomic_table[i].action == val_cas && lhs)
              {
                tree cond;
                stmt = gsi_stmt (*gsi);
                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
                t = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)), stmt);
                cond = build2 (NE_EXPR, boolean_type_node, t,
                               build_int_cst (TREE_TYPE (t), 0));
                g = gimple_build_assign (lhs, COND_EXPR, cond, args[1],
                                         gimple_assign_lhs (g));
                gimple_call_set_lhs (stmt, t);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
          case lock_release:
            gcc_assert (num == 1);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (t));
            update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
                                build_int_cst (t, 0),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_RELEASE));
            return;
          default:
            continue;
          }
      }
}
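
/* A sketch of the fetch_op rewrite: lhs = __atomic_add_fetch_4 (p, v, model)
   becomes

     tmp = __tsan_atomic32_fetch_add (p, v, model);
     lhs = tmp + v;

   and for the NAND variants the result is recomputed as lhs = ~(tmp & v).  */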

/* Instruments the gimple pointed to by GSI. Return
   true if func entry/exit should be instrumented.  */

static bool
instrument_gimple (gimple_stmt_iterator *gsi)
{
  gimple *stmt;
  tree rhs, lhs;
  bool instrumented = false;

  stmt = gsi_stmt (*gsi);
  if (is_gimple_call (stmt)
      && (gimple_call_fndecl (stmt)
          != builtin_decl_implicit (BUILT_IN_TSAN_INIT)))
    {
      /* Every function that contains a call will have its exit
         instrumented, therefore no call other than __tsan_func_exit
         may remain a tail call.  */
      gimple_call_set_tail (as_a <gcall *> (stmt), false);
      if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
        instrument_builtin_call (gsi);
      return true;
    }
  else if (is_gimple_assign (stmt)
           && !gimple_clobber_p (stmt))
    {
      if (gimple_store_p (stmt))
        {
          lhs = gimple_assign_lhs (stmt);
          instrumented = instrument_expr (*gsi, lhs, true);
        }
      if (gimple_assign_load_p (stmt))
        {
          rhs = gimple_assign_rhs1 (stmt);
          instrumented = instrument_expr (*gsi, rhs, false);
        }
    }
  return instrumented;
}

/* Replace TSAN_FUNC_EXIT internal call with function exit tsan builtin.  */

static void
replace_func_exit (gimple *stmt)
{
  tree builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
  gimple *g = gimple_build_call (builtin_decl, 0);
  gimple_set_location (g, cfun->function_end_locus);
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  gsi_replace (&gsi, g, true);
}

/* Instrument function exit.  Used when TSAN_FUNC_EXIT does not exist.  */

static void
instrument_func_exit (void)
{
  location_t loc;
  basic_block exit_bb;
  gimple_stmt_iterator gsi;
  gimple *stmt, *g;
  tree builtin_decl;
  edge e;
  edge_iterator ei;

  /* Find all function exits.  */
  exit_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_RETURN
                  || gimple_call_builtin_p (stmt, BUILT_IN_RETURN));
      loc = gimple_location (stmt);
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
      g = gimple_build_call (builtin_decl, 0);
      gimple_set_location (g, loc);
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
    }
}

/* Instruments all interesting memory accesses in the current function.
   Return true if func entry/exit should be instrumented.  */

static bool
instrument_memory_accesses (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool fentry_exit_instrument = false;
  bool func_exit_seen = false;
  auto_vec<gimple *> tsan_func_exits;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple *stmt = gsi_stmt (gsi);
        if (is_gimple_call (stmt)
            && gimple_call_internal_p (stmt)
            && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
          {
            if (fentry_exit_instrument)
              replace_func_exit (stmt);
            else
              tsan_func_exits.safe_push (stmt);
            func_exit_seen = true;
          }
        else
          fentry_exit_instrument |= instrument_gimple (&gsi);
      }
  unsigned int i;
  gimple *stmt;
  FOR_EACH_VEC_ELT (tsan_func_exits, i, stmt)
    if (fentry_exit_instrument)
      replace_func_exit (stmt);
    else
      {
        gsi = gsi_for_stmt (stmt);
        gsi_remove (&gsi, true);
      }
  if (fentry_exit_instrument && !func_exit_seen)
    instrument_func_exit ();
  return fentry_exit_instrument;
}
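
/* In other words: any IFN_TSAN_FUNC_EXIT markers already present are turned
   into real __tsan_func_exit calls when something in the function was
   instrumented, and are simply removed otherwise; if instrumentation is
   needed but no marker was seen, the exits are instrumented from scratch.  */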

/* Instruments function entry.  */

static void
instrument_func_entry (void)
{
  tree ret_addr, builtin_decl;
  gimple *g;
  gimple_seq seq = NULL;

  builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  g = gimple_build_call (builtin_decl, 1, integer_zero_node);
  ret_addr = make_ssa_name (ptr_type_node);
  gimple_call_set_lhs (g, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_ENTRY);
  g = gimple_build_call (builtin_decl, 1, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gsi_insert_seq_on_edge_immediate (e, seq);
}
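
/* The net effect is roughly

     void foo (void)
     {
       __tsan_func_entry (__builtin_return_address (0));
       ...
       __tsan_func_exit ();
     }

   with the entry call placed on the edge out of the entry block.  */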

/* ThreadSanitizer instrumentation pass.  */

static unsigned
tsan_pass (void)
{
  initialize_sanitizer_builtins ();
  if (instrument_memory_accesses ())
    instrument_func_entry ();
  return 0;
}

/* Inserts __tsan_init () into the list of CTORs.  */

void
tsan_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  initialize_sanitizer_builtins ();
  tree init_decl = builtin_decl_implicit (BUILT_IN_TSAN_INIT);
  append_to_statement_list (build_call_expr (init_decl, 0),
                            &ctor_statements);
  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY - 1);
}

/* The pass descriptor.  */

namespace {

const pass_data pass_data_tsan =
{
  GIMPLE_PASS, /* type */
  "tsan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan : public gimple_opt_pass
{
public:
  pass_tsan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tsan (m_ctxt); }
  virtual bool gate (function *)
    {
      return ((flag_sanitize & SANITIZE_THREAD) != 0
              && !lookup_attribute ("no_sanitize_thread",
                                    DECL_ATTRIBUTES (current_function_decl)));
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan

} // anon namespace

gimple_opt_pass *
make_pass_tsan (gcc::context *ctxt)
{
  return new pass_tsan (ctxt);
}

namespace {

const pass_data pass_data_tsan_O0 =
{
  GIMPLE_PASS, /* type */
  "tsan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan_O0 : public gimple_opt_pass
{
public:
  pass_tsan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return ((flag_sanitize & SANITIZE_THREAD) != 0 && !optimize
              && !lookup_attribute ("no_sanitize_thread",
                                    DECL_ATTRIBUTES (current_function_decl)));
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan_O0

} // anon namespace

gimple_opt_pass *
make_pass_tsan_O0 (gcc::context *ctxt)
{
  return new pass_tsan_O0 (ctxt);
}