/* GCC instrumentation plugin for ThreadSanitizer.
   Copyright (C) 2011-2015 Free Software Foundation, Inc.
   Contributed by Dmitry Vyukov <dvyukov@google.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "hashtab.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "function.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "intl.h"
#include "predict.h"
#include "dominance.h"
#include "cfg.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-ssa.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "tree-cfg.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "tree-iterator.h"
#include "langhooks.h"
#include "output.h"
#include "target.h"
#include "diagnostic.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-loop-ivopts.h"
#include "tsan.h"
#include "asan.h"
#include "builtins.h"

/* Builds the following decl
     void __tsan_read/writeX (void *addr);  */

static tree
get_memory_access_decl (bool is_write, unsigned size)
{
  enum built_in_function fcode;

  if (size <= 1)
    fcode = is_write ? BUILT_IN_TSAN_WRITE1
                     : BUILT_IN_TSAN_READ1;
  else if (size <= 3)
    fcode = is_write ? BUILT_IN_TSAN_WRITE2
                     : BUILT_IN_TSAN_READ2;
  else if (size <= 7)
    fcode = is_write ? BUILT_IN_TSAN_WRITE4
                     : BUILT_IN_TSAN_READ4;
  else if (size <= 15)
    fcode = is_write ? BUILT_IN_TSAN_WRITE8
                     : BUILT_IN_TSAN_READ8;
  else
    fcode = is_write ? BUILT_IN_TSAN_WRITE16
                     : BUILT_IN_TSAN_READ16;

  return builtin_decl_implicit (fcode);
}
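/* For instance, a 4-byte access yields __tsan_read4/__tsan_write4.
   instrument_expr only calls this for power-of-two, sufficiently
   aligned sizes up to 16 bytes; everything else is routed to the
   __tsan_read_range/__tsan_write_range builtins instead.  */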
/* Check whether EXPR refers to a store to the vptr.  If so, return
   the value being stored, otherwise return NULL.  */

static tree
is_vptr_store (gimple stmt, tree expr, bool is_write)
{
  if (is_write == true
      && gimple_assign_single_p (stmt)
      && TREE_CODE (expr) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (expr, 1);
      if (TREE_CODE (field) == FIELD_DECL
          && DECL_VIRTUAL_P (field))
        return gimple_assign_rhs1 (stmt);
    }
  return NULL;
}

/* Instrument the memory access EXPR if needed.  Return true if any
   instrumentation was inserted.  */

static bool
instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
{
  tree base, rhs, expr_ptr, builtin_decl;
  basic_block bb;
  HOST_WIDE_INT size;
  gimple stmt, g;
  gimple_seq seq;
  location_t loc;
  unsigned int align;

  size = int_size_in_bytes (TREE_TYPE (expr));
  if (size <= 0)
    return false;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  base = get_inner_reference (expr, &bitsize, &bitpos, &offset,
                              &mode, &unsignedp, &volatilep, false);

  /* No need to instrument accesses to decls that don't escape;
     they cannot be accessed by other threads.  */
  if (DECL_P (base) && !is_global_var (base))
    {
      struct pt_solution pt;
      memset (&pt, 0, sizeof (pt));
      pt.escaped = 1;
      pt.ipa_escaped = flag_ipa_pta != 0;
      if (!pt_solution_includes (&pt, base))
        return false;
      if (!may_be_aliased (base))
        return false;
    }

  if (TREE_READONLY (base)
      || (TREE_CODE (base) == VAR_DECL
          && DECL_HARD_REGISTER (base)))
    return false;

  stmt = gsi_stmt (gsi);
  loc = gimple_location (stmt);
  rhs = is_vptr_store (stmt, expr, is_write);

  if ((TREE_CODE (expr) == COMPONENT_REF
       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (expr, 1)))
      || TREE_CODE (expr) == BIT_FIELD_REF)
    {
      base = TREE_OPERAND (expr, 0);
      if (TREE_CODE (expr) == COMPONENT_REF)
        {
          expr = TREE_OPERAND (expr, 1);
          if (is_write && DECL_BIT_FIELD_REPRESENTATIVE (expr))
            expr = DECL_BIT_FIELD_REPRESENTATIVE (expr);
          if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (expr))
              || !tree_fits_uhwi_p (DECL_FIELD_BIT_OFFSET (expr))
              || !tree_fits_uhwi_p (DECL_SIZE (expr)))
            return false;
          bitpos = tree_to_uhwi (DECL_FIELD_OFFSET (expr)) * BITS_PER_UNIT
                   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (expr));
          bitsize = tree_to_uhwi (DECL_SIZE (expr));
        }
      else
        {
          if (!tree_fits_uhwi_p (TREE_OPERAND (expr, 2))
              || !tree_fits_uhwi_p (TREE_OPERAND (expr, 1)))
            return false;
          bitpos = tree_to_uhwi (TREE_OPERAND (expr, 2));
          bitsize = tree_to_uhwi (TREE_OPERAND (expr, 1));
        }
      if (bitpos < 0 || bitsize <= 0)
        return false;
      size = (bitpos % BITS_PER_UNIT + bitsize + BITS_PER_UNIT - 1)
             / BITS_PER_UNIT;
      if (may_be_nonaddressable_p (base))
        return false;
      align = get_object_alignment (base);
      if (align < BITS_PER_UNIT)
        return false;
      bitpos = bitpos & ~(BITS_PER_UNIT - 1);
      if ((align - 1) & bitpos)
        {
          align = (align - 1) & bitpos;
          align = align & -align;
        }
      expr = build_fold_addr_expr (unshare_expr (base));
      expr = build2 (MEM_REF, char_type_node, expr,
                     build_int_cst (TREE_TYPE (expr), bitpos / BITS_PER_UNIT));
      expr_ptr = build_fold_addr_expr (expr);
    }
  else
    {
      if (may_be_nonaddressable_p (expr))
        return false;
      align = get_object_alignment (expr);
      if (align < BITS_PER_UNIT)
        return false;
      expr_ptr = build_fold_addr_expr (unshare_expr (expr));
    }
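  /* EXPR_PTR now holds the address of the (possibly bit-field
     adjusted) access.  Below, the emitted call is chosen as follows:
     __tsan_read_range/__tsan_write_range for non-power-of-two,
     oversized or under-aligned accesses, __tsan_vptr_update for vptr
     stores, and the sized __tsan_readN/__tsan_writeN otherwise.  */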
  expr_ptr = force_gimple_operand (expr_ptr, &seq, true, NULL_TREE);
  if ((size & (size - 1)) != 0 || size > 16
      || align < MIN (size, 8) * BITS_PER_UNIT)
    {
      builtin_decl = builtin_decl_implicit (is_write
                                            ? BUILT_IN_TSAN_WRITE_RANGE
                                            : BUILT_IN_TSAN_READ_RANGE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, size_int (size));
    }
  else if (rhs == NULL)
    g = gimple_build_call (get_memory_access_decl (is_write, size),
                           1, expr_ptr);
  else
    {
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_VPTR_UPDATE);
      g = gimple_build_call (builtin_decl, 2, expr_ptr, unshare_expr (rhs));
    }
  gimple_set_location (g, loc);
  gimple_seq_add_stmt_without_update (&seq, g);
  /* Instrumentation for assignment of a function result
     must be inserted after the call.  Instrumentation for
     reads of function arguments must be inserted before the call.
     That's because the call can contain synchronization.  */
  if (is_gimple_call (stmt) && is_write)
    {
      /* If the call can throw, it must be the last stmt in
         a basic block, so the instrumented stmts need to be
         inserted in successor bbs.  */
      if (is_ctrl_altering_stmt (stmt))
        {
          edge e;

          bb = gsi_bb (gsi);
          e = find_fallthru_edge (bb->succs);
          if (e)
            gsi_insert_seq_on_edge_immediate (e, seq);
        }
      else
        gsi_insert_seq_after (&gsi, seq, GSI_NEW_STMT);
    }
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  return true;
}

/* Actions for sync/atomic builtin transformations.  */
enum tsan_atomic_action
{
  check_last, add_seq_cst, add_acquire, weak_cas, strong_cas,
  bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst,
  bool_clear, bool_test_and_set
};

/* Table mapping sync/atomic builtins to their corresponding
   tsan equivalents.  */
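/* For example, the CHECK_LAST entry below rewrites
     __atomic_load_4 (ptr, model)
   into
     __tsan_atomic32_load (ptr, model)
   once the memory model argument has been verified to be a
   known-constant, in-range value.  */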
static const struct tsan_map_atomic
{
  enum built_in_function fcode, tsan_fcode;
  enum tsan_atomic_action action;
  enum tree_code code;
} tsan_atomic_table[] =
{
#define TRANSFORM(fcode, tsan_fcode, action, code) \
  { BUILT_IN_##fcode, BUILT_IN_##tsan_fcode, action, code }
#define CHECK_LAST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, check_last, ERROR_MARK)
#define ADD_SEQ_CST(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_seq_cst, ERROR_MARK)
#define ADD_ACQUIRE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, add_acquire, ERROR_MARK)
#define WEAK_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, weak_cas, ERROR_MARK)
#define STRONG_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, strong_cas, ERROR_MARK)
#define BOOL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_cas, ERROR_MARK)
#define VAL_CAS(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, val_cas, ERROR_MARK)
#define LOCK_RELEASE(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, lock_release, ERROR_MARK)
#define FETCH_OP(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op, code)
#define FETCH_OPS(fcode, tsan_fcode, code) \
  TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code)
#define BOOL_CLEAR(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_clear, ERROR_MARK)
#define BOOL_TEST_AND_SET(fcode, tsan_fcode) \
  TRANSFORM (fcode, tsan_fcode, bool_test_and_set, ERROR_MARK)

  CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD),
  CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD),
  CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD),
  CHECK_LAST (ATOMIC_LOAD_8, TSAN_ATOMIC64_LOAD),
  CHECK_LAST (ATOMIC_LOAD_16, TSAN_ATOMIC128_LOAD),
  CHECK_LAST (ATOMIC_STORE_1, TSAN_ATOMIC8_STORE),
  CHECK_LAST (ATOMIC_STORE_2, TSAN_ATOMIC16_STORE),
  CHECK_LAST (ATOMIC_STORE_4, TSAN_ATOMIC32_STORE),
  CHECK_LAST (ATOMIC_STORE_8, TSAN_ATOMIC64_STORE),
  CHECK_LAST (ATOMIC_STORE_16, TSAN_ATOMIC128_STORE),
  CHECK_LAST (ATOMIC_EXCHANGE_1, TSAN_ATOMIC8_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_2, TSAN_ATOMIC16_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_4, TSAN_ATOMIC32_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_8, TSAN_ATOMIC64_EXCHANGE),
  CHECK_LAST (ATOMIC_EXCHANGE_16, TSAN_ATOMIC128_EXCHANGE),
  CHECK_LAST (ATOMIC_FETCH_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  CHECK_LAST (ATOMIC_FETCH_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  CHECK_LAST (ATOMIC_FETCH_AND_1, TSAN_ATOMIC8_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_2, TSAN_ATOMIC16_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_4, TSAN_ATOMIC32_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_8, TSAN_ATOMIC64_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_AND_16, TSAN_ATOMIC128_FETCH_AND),
  CHECK_LAST (ATOMIC_FETCH_OR_1, TSAN_ATOMIC8_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_2, TSAN_ATOMIC16_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_4, TSAN_ATOMIC32_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_8, TSAN_ATOMIC64_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_OR_16, TSAN_ATOMIC128_FETCH_OR),
  CHECK_LAST (ATOMIC_FETCH_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  CHECK_LAST (ATOMIC_FETCH_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  CHECK_LAST (ATOMIC_FETCH_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  CHECK_LAST (ATOMIC_THREAD_FENCE, TSAN_ATOMIC_THREAD_FENCE),
  CHECK_LAST (ATOMIC_SIGNAL_FENCE, TSAN_ATOMIC_SIGNAL_FENCE),

  FETCH_OP (ATOMIC_ADD_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_ADD_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_SUB_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_OR_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_XOR_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OP (ATOMIC_NAND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_1, TSAN_ATOMIC8_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_2, TSAN_ATOMIC16_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_4, TSAN_ATOMIC32_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_8, TSAN_ATOMIC64_EXCHANGE),
  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_16, TSAN_ATOMIC128_EXCHANGE),

  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_1, TSAN_ATOMIC8_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_2, TSAN_ATOMIC16_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_4, TSAN_ATOMIC32_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_8, TSAN_ATOMIC64_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_AND_16, TSAN_ATOMIC128_FETCH_AND),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_1, TSAN_ATOMIC8_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_2, TSAN_ATOMIC16_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_4, TSAN_ATOMIC32_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_8, TSAN_ATOMIC64_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_OR_16, TSAN_ATOMIC128_FETCH_OR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_16, TSAN_ATOMIC128_FETCH_NAND),

  ADD_SEQ_CST (SYNC_SYNCHRONIZE, TSAN_ATOMIC_THREAD_FENCE),

  FETCH_OPS (SYNC_ADD_AND_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_ADD_AND_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_SUB_AND_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_AND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_OR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_XOR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
  FETCH_OPS (SYNC_NAND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),

  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK),
  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK),

  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_2,
	      TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_4,
	      TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_8,
	      TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_16,
	      TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_1,
	    TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_2,
	    TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_4,
	    TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_8,
	    TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_16,
	    TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_16,
	   TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),

  LOCK_RELEASE (SYNC_LOCK_RELEASE_1, TSAN_ATOMIC8_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE),
  LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE),

  BOOL_CLEAR (ATOMIC_CLEAR, TSAN_ATOMIC8_STORE),

  BOOL_TEST_AND_SET (ATOMIC_TEST_AND_SET, TSAN_ATOMIC8_EXCHANGE)
};

/* Instrument an atomic builtin.  */
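/* For example, the add_seq_cst action turns
     __sync_fetch_and_add_4 (ptr, val)
   into
     __tsan_atomic32_fetch_add (ptr, val, __ATOMIC_SEQ_CST)
   while the fetch_op action for __atomic_add_fetch_4 additionally
   recomputes the add-fetch result from the returned fetch-add
   value.  */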
static void
instrument_builtin_call (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi), g;
  tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  unsigned int i, num = gimple_call_num_args (stmt), j;
  for (j = 0; j < 6 && j < num; j++)
    args[j] = gimple_call_arg (stmt, j);
  for (i = 0; i < ARRAY_SIZE (tsan_atomic_table); i++)
    if (fcode != tsan_atomic_table[i].fcode)
      continue;
    else
      {
        tree decl = builtin_decl_implicit (tsan_atomic_table[i].tsan_fcode);
        if (decl == NULL_TREE)
          return;
        switch (tsan_atomic_table[i].action)
          {
          case check_last:
          case fetch_op:
            last_arg = gimple_call_arg (stmt, num - 1);
            if (!tree_fits_uhwi_p (last_arg)
                || memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
              return;
            gimple_call_set_fndecl (stmt, decl);
            update_stmt (stmt);
            if (tsan_atomic_table[i].action == fetch_op)
              {
                args[1] = gimple_call_arg (stmt, 1);
                goto adjust_result;
              }
            return;
          case add_seq_cst:
          case add_acquire:
          case fetch_op_seq_cst:
            gcc_assert (num <= 2);
            for (j = 0; j < num; j++)
              args[j] = gimple_call_arg (stmt, j);
            for (; j < 2; j++)
              args[j] = NULL_TREE;
            args[num] = build_int_cst (NULL_TREE,
                                       tsan_atomic_table[i].action
                                       != add_acquire
                                       ? MEMMODEL_SEQ_CST
                                       : MEMMODEL_ACQUIRE);
            update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]);
            stmt = gsi_stmt (*gsi);
            if (tsan_atomic_table[i].action == fetch_op_seq_cst)
              {
              adjust_result:
                lhs = gimple_call_lhs (stmt);
                if (lhs == NULL_TREE)
                  return;
                if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                TREE_TYPE (args[1])))
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs));
                    g = gimple_build_assign (var, NOP_EXPR, args[1]);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    args[1] = var;
                  }
                gimple_call_set_lhs (stmt, make_ssa_name (TREE_TYPE (lhs)));
                /* BIT_NOT_EXPR stands for NAND.  */
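                /* The tsan builtin returns the value before the
                   operation, so the NAND result is reconstructed
                   below as  lhs = ~(old & arg)  using two statements
                   rather than a single tree code.  */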
                if (tsan_atomic_table[i].code == BIT_NOT_EXPR)
                  {
                    tree var = make_ssa_name (TREE_TYPE (lhs));
                    g = gimple_build_assign (var, BIT_AND_EXPR,
                                             gimple_call_lhs (stmt), args[1]);
                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
                    g = gimple_build_assign (lhs, BIT_NOT_EXPR, var);
                  }
                else
                  g = gimple_build_assign (lhs, tsan_atomic_table[i].code,
                                           gimple_call_lhs (stmt), args[1]);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
          case weak_cas:
            if (!integer_nonzerop (gimple_call_arg (stmt, 3)))
              continue;
            /* FALLTHRU */
          case strong_cas:
            gcc_assert (num == 6);
            for (j = 0; j < 6; j++)
              args[j] = gimple_call_arg (stmt, j);
            if (!tree_fits_uhwi_p (args[4])
                || memmodel_base (tree_to_uhwi (args[4])) >= MEMMODEL_LAST)
              return;
            if (!tree_fits_uhwi_p (args[5])
                || memmodel_base (tree_to_uhwi (args[5])) >= MEMMODEL_LAST)
              return;
            update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
                                args[4], args[5]);
            return;
          case bool_cas:
          case val_cas:
            gcc_assert (num == 3);
            for (j = 0; j < 3; j++)
              args[j] = gimple_call_arg (stmt, j);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (t)));
            t = create_tmp_var (t);
            mark_addressable (t);
            if (!useless_type_conversion_p (TREE_TYPE (t),
                                            TREE_TYPE (args[1])))
              {
                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)),
                                         NOP_EXPR, args[1]);
                gsi_insert_before (gsi, g, GSI_SAME_STMT);
                args[1] = gimple_assign_lhs (g);
              }
            g = gimple_build_assign (t, args[1]);
            gsi_insert_before (gsi, g, GSI_SAME_STMT);
            lhs = gimple_call_lhs (stmt);
            update_gimple_call (gsi, decl, 5, args[0],
                                build_fold_addr_expr (t), args[2],
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_SEQ_CST));
            if (tsan_atomic_table[i].action == val_cas && lhs)
              {
                tree cond;
                stmt = gsi_stmt (*gsi);
                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t)), t);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
                t = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)), stmt);
                cond = build2 (NE_EXPR, boolean_type_node, t,
                               build_int_cst (TREE_TYPE (t), 0));
                g = gimple_build_assign (lhs, COND_EXPR, cond, args[1],
                                         gimple_assign_lhs (g));
                gimple_call_set_lhs (stmt, t);
                update_stmt (stmt);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
              }
            return;
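          /* lock_release: __sync_lock_release (ptr) is modelled as a
             release store of zero, i.e.
             __tsan_atomicN_store (ptr, 0, __ATOMIC_RELEASE).  */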
          case lock_release:
            gcc_assert (num == 1);
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (t));
            update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
                                build_int_cst (t, 0),
                                build_int_cst (NULL_TREE,
                                               MEMMODEL_RELEASE));
            return;
          case bool_clear:
          case bool_test_and_set:
            if (BOOL_TYPE_SIZE != 8)
              {
                decl = NULL_TREE;
                for (j = 1; j < 5; j++)
                  if (BOOL_TYPE_SIZE == (8 << j))
                    {
                      enum built_in_function tsan_fcode
                        = (enum built_in_function)
                          (tsan_atomic_table[i].tsan_fcode + j);
                      decl = builtin_decl_implicit (tsan_fcode);
                      break;
                    }
                if (decl == NULL_TREE)
                  return;
              }
            last_arg = gimple_call_arg (stmt, num - 1);
            if (!tree_fits_uhwi_p (last_arg)
                || memmodel_base (tree_to_uhwi (last_arg)) >= MEMMODEL_LAST)
              return;
            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
            t = TREE_VALUE (TREE_CHAIN (t));
            if (tsan_atomic_table[i].action == bool_clear)
              {
                update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
                                    build_int_cst (t, 0), last_arg);
                return;
              }
            t = build_int_cst (t, targetm.atomic_test_and_set_trueval);
            update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
                                t, last_arg);
            stmt = gsi_stmt (*gsi);
            lhs = gimple_call_lhs (stmt);
            if (lhs == NULL_TREE)
              return;
            if (targetm.atomic_test_and_set_trueval != 1
                || !useless_type_conversion_p (TREE_TYPE (lhs),
                                               TREE_TYPE (t)))
              {
                tree new_lhs = make_ssa_name (TREE_TYPE (t));
                gimple_call_set_lhs (stmt, new_lhs);
                if (targetm.atomic_test_and_set_trueval != 1)
                  g = gimple_build_assign (lhs, NE_EXPR, new_lhs,
                                           build_int_cst (TREE_TYPE (t), 0));
                else
                  g = gimple_build_assign (lhs, NOP_EXPR, new_lhs);
                gsi_insert_after (gsi, g, GSI_NEW_STMT);
                update_stmt (stmt);
              }
            return;
          default:
            continue;
          }
      }
}

/* Instruments the gimple pointed to by GSI.  Return
   true if func entry/exit should be instrumented.  */

static bool
instrument_gimple (gimple_stmt_iterator *gsi)
{
  gimple stmt;
  tree rhs, lhs;
  bool instrumented = false;

  stmt = gsi_stmt (*gsi);
  if (is_gimple_call (stmt)
      && (gimple_call_fndecl (stmt)
          != builtin_decl_implicit (BUILT_IN_TSAN_INIT)))
    {
      /* Functions that contain a call have their exit instrumented,
         so no call may remain a tail call: a tail call would bypass
         the __tsan_func_exit instrumentation.  */
      gimple_call_set_tail (as_a <gcall *> (stmt), false);
      if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
        instrument_builtin_call (gsi);
      return true;
    }
  else if (is_gimple_assign (stmt)
           && !gimple_clobber_p (stmt))
    {
      if (gimple_store_p (stmt))
        {
          lhs = gimple_assign_lhs (stmt);
          instrumented = instrument_expr (*gsi, lhs, true);
        }
      if (gimple_assign_load_p (stmt))
        {
          rhs = gimple_assign_rhs1 (stmt);
          instrumented |= instrument_expr (*gsi, rhs, false);
        }
    }
  return instrumented;
}

/* Replace TSAN_FUNC_EXIT internal call with function exit tsan builtin.  */

static void
replace_func_exit (gimple stmt)
{
  tree builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
  gimple g = gimple_build_call (builtin_decl, 0);
  gimple_set_location (g, cfun->function_end_locus);
  gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
  gsi_replace (&gsi, g, true);
}

/* Instrument function exit.  Used when TSAN_FUNC_EXIT does not exist.  */

static void
instrument_func_exit (void)
{
  location_t loc;
  basic_block exit_bb;
  gimple_stmt_iterator gsi;
  gimple stmt, g;
  tree builtin_decl;
  edge e;
  edge_iterator ei;

  /* Find all function exits.  */
  exit_bb = EXIT_BLOCK_PTR_FOR_FN (cfun);
  FOR_EACH_EDGE (e, ei, exit_bb->preds)
    {
      gsi = gsi_last_bb (e->src);
      stmt = gsi_stmt (gsi);
      gcc_assert (gimple_code (stmt) == GIMPLE_RETURN
                  || gimple_call_builtin_p (stmt, BUILT_IN_RETURN));
      loc = gimple_location (stmt);
      builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_EXIT);
      g = gimple_build_call (builtin_decl, 0);
      gimple_set_location (g, loc);
      gsi_insert_before (&gsi, g, GSI_SAME_STMT);
    }
}
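/* For instance, a bare  return;  that ends a basic block gets
     __tsan_func_exit ();
   inserted immediately before it.  */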
/* Instruments all interesting memory accesses in the current function.
   Return true if func entry/exit should be instrumented.  */

static bool
instrument_memory_accesses (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;
  bool fentry_exit_instrument = false;
  bool func_exit_seen = false;
  auto_vec<gimple> tsan_func_exits;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple stmt = gsi_stmt (gsi);
        if (is_gimple_call (stmt)
            && gimple_call_internal_p (stmt)
            && gimple_call_internal_fn (stmt) == IFN_TSAN_FUNC_EXIT)
          {
            if (fentry_exit_instrument)
              replace_func_exit (stmt);
            else
              tsan_func_exits.safe_push (stmt);
            func_exit_seen = true;
          }
        else
          fentry_exit_instrument |= instrument_gimple (&gsi);
      }
  unsigned int i;
  gimple stmt;
  FOR_EACH_VEC_ELT (tsan_func_exits, i, stmt)
    if (fentry_exit_instrument)
      replace_func_exit (stmt);
    else
      {
        gsi = gsi_for_stmt (stmt);
        gsi_remove (&gsi, true);
      }
  if (fentry_exit_instrument && !func_exit_seen)
    instrument_func_exit ();
  return fentry_exit_instrument;
}

/* Instruments function entry.  */

static void
instrument_func_entry (void)
{
  tree ret_addr, builtin_decl;
  gimple g;
  gimple_seq seq = NULL;

  builtin_decl = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
  g = gimple_build_call (builtin_decl, 1, integer_zero_node);
  ret_addr = make_ssa_name (ptr_type_node);
  gimple_call_set_lhs (g, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  builtin_decl = builtin_decl_implicit (BUILT_IN_TSAN_FUNC_ENTRY);
  g = gimple_build_call (builtin_decl, 1, ret_addr);
  gimple_set_location (g, cfun->function_start_locus);
  gimple_seq_add_stmt_without_update (&seq, g);

  edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  gsi_insert_seq_on_edge_immediate (e, seq);
}

/* ThreadSanitizer instrumentation pass.  */

static unsigned
tsan_pass (void)
{
  initialize_sanitizer_builtins ();
  if (instrument_memory_accesses ())
    instrument_func_entry ();
  return 0;
}

/* Inserts __tsan_init () into the list of CTORs.  */

void
tsan_finish_file (void)
{
  tree ctor_statements = NULL_TREE;

  initialize_sanitizer_builtins ();
  tree init_decl = builtin_decl_implicit (BUILT_IN_TSAN_INIT);
  append_to_statement_list (build_call_expr (init_decl, 0),
                            &ctor_statements);
  cgraph_build_static_cdtor ('I', ctor_statements,
                             MAX_RESERVED_INIT_PRIORITY - 1);
}
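/* A sketch of the net effect on an instrumented function (assuming a
   4-byte global X; the exact builtins depend on access size and
   alignment as described above):

     foo ()
     {
       __tsan_func_entry (__builtin_return_address (0));
       __tsan_write4 (&X);
       X = 1;
       __tsan_func_exit ();
     }

   together with a static constructor that calls __tsan_init ().  */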
/* The pass descriptor.  */

namespace {

const pass_data pass_data_tsan =
{
  GIMPLE_PASS, /* type */
  "tsan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan : public gimple_opt_pass
{
public:
  pass_tsan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_tsan (m_ctxt); }
  virtual bool gate (function *)
    {
      return ((flag_sanitize & SANITIZE_THREAD) != 0
              && !lookup_attribute ("no_sanitize_thread",
                                    DECL_ATTRIBUTES (current_function_decl)));
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan

} // anon namespace

gimple_opt_pass *
make_pass_tsan (gcc::context *ctxt)
{
  return new pass_tsan (ctxt);
}

namespace {

const pass_data pass_data_tsan_O0 =
{
  GIMPLE_PASS, /* type */
  "tsan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_tsan_O0 : public gimple_opt_pass
{
public:
  pass_tsan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_tsan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return ((flag_sanitize & SANITIZE_THREAD) != 0 && !optimize
              && !lookup_attribute ("no_sanitize_thread",
                                    DECL_ATTRIBUTES (current_function_decl)));
    }

  virtual unsigned int execute (function *) { return tsan_pass (); }

}; // class pass_tsan_O0

} // anon namespace

gimple_opt_pass *
make_pass_tsan_O0 (gcc::context *ctxt)
{
  return new pass_tsan_O0 (ctxt);
}