re PR sanitizer/55439 (ThreadSanitizer: handle atomic operations)
	PR sanitizer/55439
	* Makefile.in (tsan.o): Depend on tree-ssa-propagate.h.
	* sanitizer.def: Add __tsan_atomic* builtins.
	* asan.c (initialize_sanitizer_builtins): Adjust to also
	initialize __tsan_atomic* builtins.
	* tsan.c: Include tree-ssa-propagate.h.
	(enum tsan_atomic_action): New enum.
	(tsan_atomic_table): New table.
	(instrument_builtin_call): New function.
	(instrument_gimple): Take pointer to gimple_stmt_iterator
	instead of gimple_stmt_iterator.  Call instrument_builtin_call
	on builtin call stmts.
	(instrument_memory_accesses): Adjust instrument_gimple caller.
	* builtin-types.def (BT_FN_BOOL_VPTR_PTR_I1_INT_INT,
	BT_FN_BOOL_VPTR_PTR_I2_INT_INT, BT_FN_BOOL_VPTR_PTR_I4_INT_INT,
	BT_FN_BOOL_VPTR_PTR_I8_INT_INT, BT_FN_BOOL_VPTR_PTR_I16_INT_INT): New.

From-SVN: r194133
parent 41d37c5656
commit c954bddd5f
6 changed files with 685 additions and 8 deletions
gcc/ChangeLog
@@ -1,3 +1,22 @@
+2012-12-03  Jakub Jelinek  <jakub@redhat.com>
+
+	PR sanitizer/55439
+	* Makefile.in (tsan.o): Depend on tree-ssa-propagate.h.
+	* sanitizer.def: Add __tsan_atomic* builtins.
+	* asan.c (initialize_sanitizer_builtins): Adjust to also
+	initialize __tsan_atomic* builtins.
+	* tsan.c: Include tree-ssa-propagate.h.
+	(enum tsan_atomic_action): New enum.
+	(tsan_atomic_table): New table.
+	(instrument_builtin_call): New function.
+	(instrument_gimple): Take pointer to gimple_stmt_iterator
+	instead of gimple_stmt_iterator.  Call instrument_builtin_call
+	on builtin call stmts.
+	(instrument_memory_accesses): Adjust instrument_gimple caller.
+	* builtin-types.def (BT_FN_BOOL_VPTR_PTR_I1_INT_INT,
+	BT_FN_BOOL_VPTR_PTR_I2_INT_INT, BT_FN_BOOL_VPTR_PTR_I4_INT_INT,
+	BT_FN_BOOL_VPTR_PTR_I8_INT_INT, BT_FN_BOOL_VPTR_PTR_I16_INT_INT): New.
+
 2012-12-04  Tejas Belagod  <tejas.belagod@arm.com>
 
 	* config/aarch64/aarch64.c (aarch64_simd_vector_alignment,
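Illustration (not part of the commit): with this change, -fsanitize=thread no longer treats the __atomic_*/__sync_* builtins as opaque calls; instrument_builtin_call rewrites them into the matching __tsan_atomic* runtime entry points so the race detector sees their atomic semantics. A minimal sketch, assuming a 4-byte int so the *_4 builtins and the atomic32 runtime variants apply:

    /* User code.  */
    int counter;

    void
    bump (void)
    {
      __atomic_fetch_add (&counter, 1, __ATOMIC_SEQ_CST);
    }

    /* After the tsan pass the call is redirected to the runtime, roughly
       __tsan_atomic32_fetch_add (&counter, 1, __ATOMIC_SEQ_CST);
       with the prototype implied by BT_FN_I4_VPTR_I4_INT below.  */

The new tree-ssa-propagate.h include (and the matching Makefile.in dependency) is presumably what provides update_gimple_call, which instrument_builtin_call uses to swap a builtin call for its __tsan_atomic* replacement.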
gcc/Makefile.in
@@ -2234,7 +2234,8 @@ tsan.o : $(CONFIG_H) $(SYSTEM_H) $(TREE_H) $(TREE_INLINE_H) \
    $(TM_H) coretypes.h $(TREE_DUMP_H) $(TREE_PASS_H) $(CGRAPH_H) $(GGC_H) \
    $(BASIC_BLOCK_H) $(FLAGS_H) $(FUNCTION_H) \
    $(TM_P_H) $(TREE_FLOW_H) $(DIAGNOSTIC_CORE_H) $(GIMPLE_H) tree-iterator.h \
-   intl.h cfghooks.h output.h options.h c-family/c-common.h tsan.h asan.h
+   intl.h cfghooks.h output.h options.h c-family/c-common.h tsan.h asan.h \
+   tree-ssa-propagate.h
 tree-ssa-tail-merge.o: tree-ssa-tail-merge.c \
    $(SYSTEM_H) $(CONFIG_H) coretypes.h $(TM_H) $(BITMAP_H) \
    $(FLAGS_H) $(TM_P_H) $(BASIC_BLOCK_H) \
gcc/asan.c (52 changed lines)
@@ -1505,6 +1505,58 @@ initialize_sanitizer_builtins (void)
     = build_function_type_list (void_type_node, ptr_type_node,
                                 build_nonstandard_integer_type (POINTER_SIZE,
                                                                 1), NULL_TREE);
+  tree BT_FN_VOID_INT
+    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
+  tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
+  tree BT_FN_IX_CONST_VPTR_INT[5];
+  tree BT_FN_IX_VPTR_IX_INT[5];
+  tree BT_FN_VOID_VPTR_IX_INT[5];
+  tree vptr
+    = build_pointer_type (build_qualified_type (void_type_node,
+                                                TYPE_QUAL_VOLATILE));
+  tree cvptr
+    = build_pointer_type (build_qualified_type (void_type_node,
+                                                TYPE_QUAL_VOLATILE
+                                                |TYPE_QUAL_CONST));
+  tree boolt
+    = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
+  int i;
+  for (i = 0; i < 5; i++)
+    {
+      tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
+      BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
+        = build_function_type_list (boolt, vptr, ptr_type_node, ix,
+                                    integer_type_node, integer_type_node,
+                                    NULL_TREE);
+      BT_FN_IX_CONST_VPTR_INT[i]
+        = build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
+      BT_FN_IX_VPTR_IX_INT[i]
+        = build_function_type_list (ix, vptr, ix, integer_type_node,
+                                    NULL_TREE);
+      BT_FN_VOID_VPTR_IX_INT[i]
+        = build_function_type_list (void_type_node, vptr, ix,
+                                    integer_type_node, NULL_TREE);
+    }
+#define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
+#define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
+#define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
+#define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
+#define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
+#define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
+#define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
+#define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
+#define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
+#define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
+#define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
+#define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
+#define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
+#define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
+#define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
+#define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
+#define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
+#define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
+#define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
+#define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
 #undef ATTR_NOTHROW_LEAF_LIST
 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
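For reference (an illustration, not part of the commit): i = 0 .. 4 in the loop above covers 1, 2, 4, 8 and 16 byte accesses, which the defines expose under the I1/I2/I4/I8/I16 names. In C terms the 4-byte variants describe runtime prototypes roughly like the following sketch, where uint32_t stands in for the unsigned IX type:

    uint32_t __tsan_atomic32_load (const volatile void *ptr, int memmodel);
    void __tsan_atomic32_store (volatile void *ptr, uint32_t val, int memmodel);
    uint32_t __tsan_atomic32_fetch_add (volatile void *ptr, uint32_t val, int memmodel);
    _Bool __tsan_atomic32_compare_exchange_strong (volatile void *ptr, void *expected,
                                                   uint32_t desired, int success_memmodel,
                                                   int failure_memmodel);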
gcc/builtin-types.def
@@ -456,7 +456,16 @@ DEF_FUNCTION_TYPE_5 (BT_FN_BOOL_LONG_LONG_LONG_LONGPTR_LONGPTR,
                      BT_PTR_LONG, BT_PTR_LONG)
 DEF_FUNCTION_TYPE_5 (BT_FN_VOID_SIZE_VPTR_PTR_PTR_INT, BT_VOID, BT_SIZE,
                      BT_VOLATILE_PTR, BT_PTR, BT_PTR, BT_INT)
+
+DEF_FUNCTION_TYPE_5 (BT_FN_BOOL_VPTR_PTR_I1_INT_INT,
+                     BT_BOOL, BT_VOLATILE_PTR, BT_PTR, BT_I1, BT_INT, BT_INT)
+DEF_FUNCTION_TYPE_5 (BT_FN_BOOL_VPTR_PTR_I2_INT_INT,
+                     BT_BOOL, BT_VOLATILE_PTR, BT_PTR, BT_I2, BT_INT, BT_INT)
+DEF_FUNCTION_TYPE_5 (BT_FN_BOOL_VPTR_PTR_I4_INT_INT,
+                     BT_BOOL, BT_VOLATILE_PTR, BT_PTR, BT_I4, BT_INT, BT_INT)
+DEF_FUNCTION_TYPE_5 (BT_FN_BOOL_VPTR_PTR_I8_INT_INT,
+                     BT_BOOL, BT_VOLATILE_PTR, BT_PTR, BT_I8, BT_INT, BT_INT)
+DEF_FUNCTION_TYPE_5 (BT_FN_BOOL_VPTR_PTR_I16_INT_INT,
+                     BT_BOOL, BT_VOLATILE_PTR, BT_PTR, BT_I16, BT_INT, BT_INT)
 
 DEF_FUNCTION_TYPE_6 (BT_FN_INT_STRING_SIZE_INT_SIZE_CONST_STRING_VALIST_ARG,
                      BT_INT, BT_STRING, BT_SIZE, BT_INT, BT_SIZE,
gcc/sanitizer.def
@@ -85,3 +85,196 @@ DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_WRITE8, "__tsan_write8",
                       BT_FN_VOID_PTR, ATTR_NOTHROW_LEAF_LIST)
 DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_WRITE16, "__tsan_write16",
                       BT_FN_VOID_PTR, ATTR_NOTHROW_LEAF_LIST)
+
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC8_LOAD, "__tsan_atomic8_load",
+                      BT_FN_I1_CONST_VPTR_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC16_LOAD, "__tsan_atomic16_load",
+                      BT_FN_I2_CONST_VPTR_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC32_LOAD, "__tsan_atomic32_load",
+                      BT_FN_I4_CONST_VPTR_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC64_LOAD, "__tsan_atomic64_load",
+                      BT_FN_I8_CONST_VPTR_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC128_LOAD, "__tsan_atomic128_load",
+                      BT_FN_I16_CONST_VPTR_INT, ATTR_NOTHROW_LEAF_LIST)
+
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC8_STORE, "__tsan_atomic8_store",
+                      BT_FN_VOID_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC16_STORE, "__tsan_atomic16_store",
+                      BT_FN_VOID_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC32_STORE, "__tsan_atomic32_store",
+                      BT_FN_VOID_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC64_STORE, "__tsan_atomic64_store",
+                      BT_FN_VOID_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC128_STORE, "__tsan_atomic128_store",
+                      BT_FN_VOID_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
+
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC8_EXCHANGE, "__tsan_atomic8_exchange",
+                      BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC16_EXCHANGE, "__tsan_atomic16_exchange",
+                      BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC32_EXCHANGE, "__tsan_atomic32_exchange",
+                      BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC64_EXCHANGE, "__tsan_atomic64_exchange",
+                      BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC128_EXCHANGE, "__tsan_atomic128_exchange",
+                      BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
+
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC8_FETCH_ADD, "__tsan_atomic8_fetch_add",
+                      BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC16_FETCH_ADD, "__tsan_atomic16_fetch_add",
+                      BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC32_FETCH_ADD, "__tsan_atomic32_fetch_add",
+                      BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC64_FETCH_ADD, "__tsan_atomic64_fetch_add",
+                      BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC128_FETCH_ADD, "__tsan_atomic128_fetch_add",
+                      BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
+
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC8_FETCH_SUB, "__tsan_atomic8_fetch_sub",
+                      BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC16_FETCH_SUB, "__tsan_atomic16_fetch_sub",
+                      BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC32_FETCH_SUB, "__tsan_atomic32_fetch_sub",
+                      BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC64_FETCH_SUB, "__tsan_atomic64_fetch_sub",
+                      BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC128_FETCH_SUB, "__tsan_atomic128_fetch_sub",
+                      BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
+
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC8_FETCH_AND, "__tsan_atomic8_fetch_and",
+                      BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC16_FETCH_AND, "__tsan_atomic16_fetch_and",
+                      BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC32_FETCH_AND, "__tsan_atomic32_fetch_and",
+                      BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC64_FETCH_AND, "__tsan_atomic64_fetch_and",
+                      BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC128_FETCH_AND, "__tsan_atomic128_fetch_and",
+                      BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
+
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC8_FETCH_OR, "__tsan_atomic8_fetch_or",
+                      BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC16_FETCH_OR, "__tsan_atomic16_fetch_or",
+                      BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC32_FETCH_OR, "__tsan_atomic32_fetch_or",
+                      BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC64_FETCH_OR, "__tsan_atomic64_fetch_or",
+                      BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC128_FETCH_OR, "__tsan_atomic128_fetch_or",
+                      BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
+
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC8_FETCH_XOR, "__tsan_atomic8_fetch_xor",
+                      BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC16_FETCH_XOR, "__tsan_atomic16_fetch_xor",
+                      BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC32_FETCH_XOR, "__tsan_atomic32_fetch_xor",
+                      BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC64_FETCH_XOR, "__tsan_atomic64_fetch_xor",
+                      BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC128_FETCH_XOR, "__tsan_atomic128_fetch_xor",
+                      BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
+
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC8_FETCH_NAND, "__tsan_atomic8_fetch_nand",
+                      BT_FN_I1_VPTR_I1_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC16_FETCH_NAND, "__tsan_atomic16_fetch_nand",
+                      BT_FN_I2_VPTR_I2_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC32_FETCH_NAND, "__tsan_atomic32_fetch_nand",
+                      BT_FN_I4_VPTR_I4_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC64_FETCH_NAND, "__tsan_atomic64_fetch_nand",
+                      BT_FN_I8_VPTR_I8_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC128_FETCH_NAND, "__tsan_atomic128_fetch_nand",
+                      BT_FN_I16_VPTR_I16_INT, ATTR_NOTHROW_LEAF_LIST)
+
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG, "__tsan_atomic8_compare_exchange_strong",
+                      BT_FN_BOOL_VPTR_PTR_I1_INT_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG, "__tsan_atomic16_compare_exchange_strong",
+                      BT_FN_BOOL_VPTR_PTR_I2_INT_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG, "__tsan_atomic32_compare_exchange_strong",
+                      BT_FN_BOOL_VPTR_PTR_I4_INT_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG, "__tsan_atomic64_compare_exchange_strong",
+                      BT_FN_BOOL_VPTR_PTR_I8_INT_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG, "__tsan_atomic128_compare_exchange_strong",
+                      BT_FN_BOOL_VPTR_PTR_I16_INT_INT, ATTR_NOTHROW_LEAF_LIST)
+
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK, "__tsan_atomic8_compare_exchange_weak",
+                      BT_FN_BOOL_VPTR_PTR_I1_INT_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK, "__tsan_atomic16_compare_exchange_weak",
+                      BT_FN_BOOL_VPTR_PTR_I2_INT_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK, "__tsan_atomic32_compare_exchange_weak",
+                      BT_FN_BOOL_VPTR_PTR_I4_INT_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK, "__tsan_atomic64_compare_exchange_weak",
+                      BT_FN_BOOL_VPTR_PTR_I8_INT_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK, "__tsan_atomic128_compare_exchange_weak",
+                      BT_FN_BOOL_VPTR_PTR_I16_INT_INT, ATTR_NOTHROW_LEAF_LIST)
+
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC_THREAD_FENCE, "__tsan_atomic_thread_fence",
+                      BT_FN_VOID_INT, ATTR_NOTHROW_LEAF_LIST)
+DEF_SANITIZER_BUILTIN(BUILT_IN_TSAN_ATOMIC_SIGNAL_FENCE, "__tsan_atomic_signal_fence",
+                      BT_FN_VOID_INT, ATTR_NOTHROW_LEAF_LIST)
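Note (illustration, not part of the commit): the trailing BT_INT parameters carry the memory-model argument of the original builtin. tsan.c below checks them against the MEMMODEL_* constants, which mirror the user-visible __ATOMIC_* macros; assuming GCC's usual enum memmodel numbering:

    /* Values forwarded in the memory-model arguments (sketch).
       MEMMODEL_RELAXED = 0  (__ATOMIC_RELAXED)
       MEMMODEL_CONSUME = 1  (__ATOMIC_CONSUME)
       MEMMODEL_ACQUIRE = 2  (__ATOMIC_ACQUIRE)
       MEMMODEL_RELEASE = 3  (__ATOMIC_RELEASE)
       MEMMODEL_ACQ_REL = 4  (__ATOMIC_ACQ_REL)
       MEMMODEL_SEQ_CST = 5  (__ATOMIC_SEQ_CST)  */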
gcc/tsan.c (415 changed lines)
@@ -37,6 +37,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "target.h"
 #include "cgraph.h"
 #include "diagnostic.h"
+#include "tree-ssa-propagate.h"
 #include "tsan.h"
 #include "asan.h"
 
@@ -180,33 +181,435 @@ instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
   return true;
 }
 
+/* Actions for sync/atomic builtin transformations.  */
+enum tsan_atomic_action
+{
+  check_last, add_seq_cst, add_acquire, weak_cas, strong_cas,
+  bool_cas, val_cas, lock_release, fetch_op, fetch_op_seq_cst
+};
+
+/* Table how to map sync/atomic builtins to their corresponding
+   tsan equivalents.  */
+static struct tsan_map_atomic
+{
+  enum built_in_function fcode, tsan_fcode;
+  enum tsan_atomic_action action;
+  enum tree_code code;
+} tsan_atomic_table[] =
+{
+#define TRANSFORM(fcode, tsan_fcode, action, code) \
+  { BUILT_IN_##fcode, BUILT_IN_##tsan_fcode, action, code }
+#define CHECK_LAST(fcode, tsan_fcode) \
+  TRANSFORM (fcode, tsan_fcode, check_last, ERROR_MARK)
+#define ADD_SEQ_CST(fcode, tsan_fcode) \
+  TRANSFORM (fcode, tsan_fcode, add_seq_cst, ERROR_MARK)
+#define ADD_ACQUIRE(fcode, tsan_fcode) \
+  TRANSFORM (fcode, tsan_fcode, add_acquire, ERROR_MARK)
+#define WEAK_CAS(fcode, tsan_fcode) \
+  TRANSFORM (fcode, tsan_fcode, weak_cas, ERROR_MARK)
+#define STRONG_CAS(fcode, tsan_fcode) \
+  TRANSFORM (fcode, tsan_fcode, strong_cas, ERROR_MARK)
+#define BOOL_CAS(fcode, tsan_fcode) \
+  TRANSFORM (fcode, tsan_fcode, bool_cas, ERROR_MARK)
+#define VAL_CAS(fcode, tsan_fcode) \
+  TRANSFORM (fcode, tsan_fcode, val_cas, ERROR_MARK)
+#define LOCK_RELEASE(fcode, tsan_fcode) \
+  TRANSFORM (fcode, tsan_fcode, lock_release, ERROR_MARK)
+#define FETCH_OP(fcode, tsan_fcode, code) \
+  TRANSFORM (fcode, tsan_fcode, fetch_op, code)
+#define FETCH_OPS(fcode, tsan_fcode, code) \
+  TRANSFORM (fcode, tsan_fcode, fetch_op_seq_cst, code)
+
+  CHECK_LAST (ATOMIC_LOAD_1, TSAN_ATOMIC8_LOAD),
+  CHECK_LAST (ATOMIC_LOAD_2, TSAN_ATOMIC16_LOAD),
+  CHECK_LAST (ATOMIC_LOAD_4, TSAN_ATOMIC32_LOAD),
+  CHECK_LAST (ATOMIC_LOAD_8, TSAN_ATOMIC64_LOAD),
+  CHECK_LAST (ATOMIC_LOAD_16, TSAN_ATOMIC128_LOAD),
+  CHECK_LAST (ATOMIC_STORE_1, TSAN_ATOMIC8_STORE),
+  CHECK_LAST (ATOMIC_STORE_2, TSAN_ATOMIC16_STORE),
+  CHECK_LAST (ATOMIC_STORE_4, TSAN_ATOMIC32_STORE),
+  CHECK_LAST (ATOMIC_STORE_8, TSAN_ATOMIC64_STORE),
+  CHECK_LAST (ATOMIC_STORE_16, TSAN_ATOMIC128_STORE),
+  CHECK_LAST (ATOMIC_EXCHANGE_1, TSAN_ATOMIC8_EXCHANGE),
+  CHECK_LAST (ATOMIC_EXCHANGE_2, TSAN_ATOMIC16_EXCHANGE),
+  CHECK_LAST (ATOMIC_EXCHANGE_4, TSAN_ATOMIC32_EXCHANGE),
+  CHECK_LAST (ATOMIC_EXCHANGE_8, TSAN_ATOMIC64_EXCHANGE),
+  CHECK_LAST (ATOMIC_EXCHANGE_16, TSAN_ATOMIC128_EXCHANGE),
+  CHECK_LAST (ATOMIC_FETCH_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
+  CHECK_LAST (ATOMIC_FETCH_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
+  CHECK_LAST (ATOMIC_FETCH_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
+  CHECK_LAST (ATOMIC_FETCH_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
+  CHECK_LAST (ATOMIC_FETCH_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
+  CHECK_LAST (ATOMIC_FETCH_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
+  CHECK_LAST (ATOMIC_FETCH_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
+  CHECK_LAST (ATOMIC_FETCH_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
+  CHECK_LAST (ATOMIC_FETCH_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
+  CHECK_LAST (ATOMIC_FETCH_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
+  CHECK_LAST (ATOMIC_FETCH_AND_1, TSAN_ATOMIC8_FETCH_AND),
+  CHECK_LAST (ATOMIC_FETCH_AND_2, TSAN_ATOMIC16_FETCH_AND),
+  CHECK_LAST (ATOMIC_FETCH_AND_4, TSAN_ATOMIC32_FETCH_AND),
+  CHECK_LAST (ATOMIC_FETCH_AND_8, TSAN_ATOMIC64_FETCH_AND),
+  CHECK_LAST (ATOMIC_FETCH_AND_16, TSAN_ATOMIC128_FETCH_AND),
+  CHECK_LAST (ATOMIC_FETCH_OR_1, TSAN_ATOMIC8_FETCH_OR),
+  CHECK_LAST (ATOMIC_FETCH_OR_2, TSAN_ATOMIC16_FETCH_OR),
+  CHECK_LAST (ATOMIC_FETCH_OR_4, TSAN_ATOMIC32_FETCH_OR),
+  CHECK_LAST (ATOMIC_FETCH_OR_8, TSAN_ATOMIC64_FETCH_OR),
+  CHECK_LAST (ATOMIC_FETCH_OR_16, TSAN_ATOMIC128_FETCH_OR),
+  CHECK_LAST (ATOMIC_FETCH_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
+  CHECK_LAST (ATOMIC_FETCH_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
+  CHECK_LAST (ATOMIC_FETCH_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
+  CHECK_LAST (ATOMIC_FETCH_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
+  CHECK_LAST (ATOMIC_FETCH_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
+  CHECK_LAST (ATOMIC_FETCH_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
+  CHECK_LAST (ATOMIC_FETCH_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
+  CHECK_LAST (ATOMIC_FETCH_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
+  CHECK_LAST (ATOMIC_FETCH_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
+  CHECK_LAST (ATOMIC_FETCH_NAND_16, TSAN_ATOMIC128_FETCH_NAND),
+
+  CHECK_LAST (ATOMIC_THREAD_FENCE, TSAN_ATOMIC_THREAD_FENCE),
+  CHECK_LAST (ATOMIC_SIGNAL_FENCE, TSAN_ATOMIC_SIGNAL_FENCE),
+
+  FETCH_OP (ATOMIC_ADD_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
+  FETCH_OP (ATOMIC_ADD_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
+  FETCH_OP (ATOMIC_ADD_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
+  FETCH_OP (ATOMIC_ADD_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
+  FETCH_OP (ATOMIC_ADD_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
+  FETCH_OP (ATOMIC_SUB_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
+  FETCH_OP (ATOMIC_SUB_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
+  FETCH_OP (ATOMIC_SUB_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
+  FETCH_OP (ATOMIC_SUB_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
+  FETCH_OP (ATOMIC_SUB_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
+  FETCH_OP (ATOMIC_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
+  FETCH_OP (ATOMIC_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
+  FETCH_OP (ATOMIC_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
+  FETCH_OP (ATOMIC_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
+  FETCH_OP (ATOMIC_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
+  FETCH_OP (ATOMIC_OR_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
+  FETCH_OP (ATOMIC_OR_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
+  FETCH_OP (ATOMIC_OR_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
+  FETCH_OP (ATOMIC_OR_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
+  FETCH_OP (ATOMIC_OR_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
+  FETCH_OP (ATOMIC_XOR_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
+  FETCH_OP (ATOMIC_XOR_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
+  FETCH_OP (ATOMIC_XOR_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
+  FETCH_OP (ATOMIC_XOR_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
+  FETCH_OP (ATOMIC_XOR_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
+  FETCH_OP (ATOMIC_NAND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
+  FETCH_OP (ATOMIC_NAND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
+  FETCH_OP (ATOMIC_NAND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
+  FETCH_OP (ATOMIC_NAND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
+  FETCH_OP (ATOMIC_NAND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),
+
+  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_1, TSAN_ATOMIC8_EXCHANGE),
+  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_2, TSAN_ATOMIC16_EXCHANGE),
+  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_4, TSAN_ATOMIC32_EXCHANGE),
+  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_8, TSAN_ATOMIC64_EXCHANGE),
+  ADD_ACQUIRE (SYNC_LOCK_TEST_AND_SET_16, TSAN_ATOMIC128_EXCHANGE),
+
+  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_1, TSAN_ATOMIC8_FETCH_ADD),
+  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_2, TSAN_ATOMIC16_FETCH_ADD),
+  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_4, TSAN_ATOMIC32_FETCH_ADD),
+  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_8, TSAN_ATOMIC64_FETCH_ADD),
+  ADD_SEQ_CST (SYNC_FETCH_AND_ADD_16, TSAN_ATOMIC128_FETCH_ADD),
+  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_1, TSAN_ATOMIC8_FETCH_SUB),
+  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_2, TSAN_ATOMIC16_FETCH_SUB),
+  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_4, TSAN_ATOMIC32_FETCH_SUB),
+  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_8, TSAN_ATOMIC64_FETCH_SUB),
+  ADD_SEQ_CST (SYNC_FETCH_AND_SUB_16, TSAN_ATOMIC128_FETCH_SUB),
+  ADD_SEQ_CST (SYNC_FETCH_AND_AND_1, TSAN_ATOMIC8_FETCH_AND),
+  ADD_SEQ_CST (SYNC_FETCH_AND_AND_2, TSAN_ATOMIC16_FETCH_AND),
+  ADD_SEQ_CST (SYNC_FETCH_AND_AND_4, TSAN_ATOMIC32_FETCH_AND),
+  ADD_SEQ_CST (SYNC_FETCH_AND_AND_8, TSAN_ATOMIC64_FETCH_AND),
+  ADD_SEQ_CST (SYNC_FETCH_AND_AND_16, TSAN_ATOMIC128_FETCH_AND),
+  ADD_SEQ_CST (SYNC_FETCH_AND_OR_1, TSAN_ATOMIC8_FETCH_OR),
+  ADD_SEQ_CST (SYNC_FETCH_AND_OR_2, TSAN_ATOMIC16_FETCH_OR),
+  ADD_SEQ_CST (SYNC_FETCH_AND_OR_4, TSAN_ATOMIC32_FETCH_OR),
+  ADD_SEQ_CST (SYNC_FETCH_AND_OR_8, TSAN_ATOMIC64_FETCH_OR),
+  ADD_SEQ_CST (SYNC_FETCH_AND_OR_16, TSAN_ATOMIC128_FETCH_OR),
+  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_1, TSAN_ATOMIC8_FETCH_XOR),
+  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_2, TSAN_ATOMIC16_FETCH_XOR),
+  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_4, TSAN_ATOMIC32_FETCH_XOR),
+  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_8, TSAN_ATOMIC64_FETCH_XOR),
+  ADD_SEQ_CST (SYNC_FETCH_AND_XOR_16, TSAN_ATOMIC128_FETCH_XOR),
+  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_1, TSAN_ATOMIC8_FETCH_NAND),
+  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_2, TSAN_ATOMIC16_FETCH_NAND),
+  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_4, TSAN_ATOMIC32_FETCH_NAND),
+  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_8, TSAN_ATOMIC64_FETCH_NAND),
+  ADD_SEQ_CST (SYNC_FETCH_AND_NAND_16, TSAN_ATOMIC128_FETCH_NAND),
+
+  ADD_SEQ_CST (SYNC_SYNCHRONIZE, TSAN_ATOMIC_THREAD_FENCE),
+
+  FETCH_OPS (SYNC_ADD_AND_FETCH_1, TSAN_ATOMIC8_FETCH_ADD, PLUS_EXPR),
+  FETCH_OPS (SYNC_ADD_AND_FETCH_2, TSAN_ATOMIC16_FETCH_ADD, PLUS_EXPR),
+  FETCH_OPS (SYNC_ADD_AND_FETCH_4, TSAN_ATOMIC32_FETCH_ADD, PLUS_EXPR),
+  FETCH_OPS (SYNC_ADD_AND_FETCH_8, TSAN_ATOMIC64_FETCH_ADD, PLUS_EXPR),
+  FETCH_OPS (SYNC_ADD_AND_FETCH_16, TSAN_ATOMIC128_FETCH_ADD, PLUS_EXPR),
+  FETCH_OPS (SYNC_SUB_AND_FETCH_1, TSAN_ATOMIC8_FETCH_SUB, MINUS_EXPR),
+  FETCH_OPS (SYNC_SUB_AND_FETCH_2, TSAN_ATOMIC16_FETCH_SUB, MINUS_EXPR),
+  FETCH_OPS (SYNC_SUB_AND_FETCH_4, TSAN_ATOMIC32_FETCH_SUB, MINUS_EXPR),
+  FETCH_OPS (SYNC_SUB_AND_FETCH_8, TSAN_ATOMIC64_FETCH_SUB, MINUS_EXPR),
+  FETCH_OPS (SYNC_SUB_AND_FETCH_16, TSAN_ATOMIC128_FETCH_SUB, MINUS_EXPR),
+  FETCH_OPS (SYNC_AND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_AND, BIT_AND_EXPR),
+  FETCH_OPS (SYNC_AND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_AND, BIT_AND_EXPR),
+  FETCH_OPS (SYNC_AND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_AND, BIT_AND_EXPR),
+  FETCH_OPS (SYNC_AND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_AND, BIT_AND_EXPR),
+  FETCH_OPS (SYNC_AND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_AND, BIT_AND_EXPR),
+  FETCH_OPS (SYNC_OR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_OR, BIT_IOR_EXPR),
+  FETCH_OPS (SYNC_OR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_OR, BIT_IOR_EXPR),
+  FETCH_OPS (SYNC_OR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_OR, BIT_IOR_EXPR),
+  FETCH_OPS (SYNC_OR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_OR, BIT_IOR_EXPR),
+  FETCH_OPS (SYNC_OR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_OR, BIT_IOR_EXPR),
+  FETCH_OPS (SYNC_XOR_AND_FETCH_1, TSAN_ATOMIC8_FETCH_XOR, BIT_XOR_EXPR),
+  FETCH_OPS (SYNC_XOR_AND_FETCH_2, TSAN_ATOMIC16_FETCH_XOR, BIT_XOR_EXPR),
+  FETCH_OPS (SYNC_XOR_AND_FETCH_4, TSAN_ATOMIC32_FETCH_XOR, BIT_XOR_EXPR),
+  FETCH_OPS (SYNC_XOR_AND_FETCH_8, TSAN_ATOMIC64_FETCH_XOR, BIT_XOR_EXPR),
+  FETCH_OPS (SYNC_XOR_AND_FETCH_16, TSAN_ATOMIC128_FETCH_XOR, BIT_XOR_EXPR),
+  FETCH_OPS (SYNC_NAND_AND_FETCH_1, TSAN_ATOMIC8_FETCH_NAND, BIT_NOT_EXPR),
+  FETCH_OPS (SYNC_NAND_AND_FETCH_2, TSAN_ATOMIC16_FETCH_NAND, BIT_NOT_EXPR),
+  FETCH_OPS (SYNC_NAND_AND_FETCH_4, TSAN_ATOMIC32_FETCH_NAND, BIT_NOT_EXPR),
+  FETCH_OPS (SYNC_NAND_AND_FETCH_8, TSAN_ATOMIC64_FETCH_NAND, BIT_NOT_EXPR),
+  FETCH_OPS (SYNC_NAND_AND_FETCH_16, TSAN_ATOMIC128_FETCH_NAND, BIT_NOT_EXPR),
+
+  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_WEAK),
+  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_WEAK),
+  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_WEAK),
+  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_WEAK),
+  WEAK_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_WEAK),
+
+  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
+  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
+  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
+  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
+  STRONG_CAS (ATOMIC_COMPARE_EXCHANGE_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),
+
+  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
+  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
+  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
+  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
+  BOOL_CAS (SYNC_BOOL_COMPARE_AND_SWAP_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),
+
+  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_1, TSAN_ATOMIC8_COMPARE_EXCHANGE_STRONG),
+  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_2, TSAN_ATOMIC16_COMPARE_EXCHANGE_STRONG),
+  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_4, TSAN_ATOMIC32_COMPARE_EXCHANGE_STRONG),
+  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_8, TSAN_ATOMIC64_COMPARE_EXCHANGE_STRONG),
+  VAL_CAS (SYNC_VAL_COMPARE_AND_SWAP_16, TSAN_ATOMIC128_COMPARE_EXCHANGE_STRONG),
+
+  LOCK_RELEASE (SYNC_LOCK_RELEASE_1, TSAN_ATOMIC8_STORE),
+  LOCK_RELEASE (SYNC_LOCK_RELEASE_2, TSAN_ATOMIC16_STORE),
+  LOCK_RELEASE (SYNC_LOCK_RELEASE_4, TSAN_ATOMIC32_STORE),
+  LOCK_RELEASE (SYNC_LOCK_RELEASE_8, TSAN_ATOMIC64_STORE),
+  LOCK_RELEASE (SYNC_LOCK_RELEASE_16, TSAN_ATOMIC128_STORE)
+};
+
+/* Instrument an atomic builtin.  */
+
+static void
+instrument_builtin_call (gimple_stmt_iterator *gsi)
+{
+  gimple stmt = gsi_stmt (*gsi), g;
+  tree callee = gimple_call_fndecl (stmt), last_arg, args[6], t, lhs;
+  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
+  unsigned int i, num = gimple_call_num_args (stmt), j;
+  for (j = 0; j < 6 && j < num; j++)
+    args[j] = gimple_call_arg (stmt, j);
+  for (i = 0; i < ARRAY_SIZE (tsan_atomic_table); i++)
+    if (fcode != tsan_atomic_table[i].fcode)
+      continue;
+    else
+      {
+        tree decl = builtin_decl_implicit (tsan_atomic_table[i].tsan_fcode);
+        if (decl == NULL_TREE)
+          return;
+        switch (tsan_atomic_table[i].action)
+          {
+          case check_last:
+          case fetch_op:
+            last_arg = gimple_call_arg (stmt, num - 1);
+            if (!host_integerp (last_arg, 1)
+                || (unsigned HOST_WIDE_INT) tree_low_cst (last_arg, 1)
+                   > MEMMODEL_SEQ_CST)
+              return;
+            gimple_call_set_fndecl (stmt, decl);
+            update_stmt (stmt);
+            if (tsan_atomic_table[i].action == fetch_op)
+              {
+                args[1] = gimple_call_arg (stmt, 1);
+                goto adjust_result;
+              }
+            return;
+          case add_seq_cst:
+          case add_acquire:
+          case fetch_op_seq_cst:
+            gcc_assert (num <= 2);
+            for (j = 0; j < num; j++)
+              args[j] = gimple_call_arg (stmt, j);
+            for (; j < 2; j++)
+              args[j] = NULL_TREE;
+            args[num] = build_int_cst (NULL_TREE,
+                                       tsan_atomic_table[i].action
+                                       != add_acquire
+                                       ? MEMMODEL_SEQ_CST
+                                       : MEMMODEL_ACQUIRE);
+            update_gimple_call (gsi, decl, num + 1, args[0], args[1], args[2]);
+            stmt = gsi_stmt (*gsi);
+            if (tsan_atomic_table[i].action == fetch_op_seq_cst)
+              {
+              adjust_result:
+                lhs = gimple_call_lhs (stmt);
+                if (lhs == NULL_TREE)
+                  return;
+                if (!useless_type_conversion_p (TREE_TYPE (lhs),
+                                                TREE_TYPE (args[1])))
+                  {
+                    tree var = make_ssa_name (TREE_TYPE (lhs), NULL);
+                    g = gimple_build_assign_with_ops (NOP_EXPR, var,
+                                                      args[1], NULL_TREE);
+                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
+                    args[1] = var;
+                  }
+                gimple_call_set_lhs (stmt,
+                                     make_ssa_name (TREE_TYPE (lhs), NULL));
+                /* BIT_NOT_EXPR stands for NAND.  */
+                if (tsan_atomic_table[i].code == BIT_NOT_EXPR)
+                  {
+                    tree var = make_ssa_name (TREE_TYPE (lhs), NULL);
+                    g = gimple_build_assign_with_ops (BIT_AND_EXPR, var,
+                                                      gimple_call_lhs (stmt),
+                                                      args[1]);
+                    gsi_insert_after (gsi, g, GSI_NEW_STMT);
+                    g = gimple_build_assign_with_ops (BIT_NOT_EXPR, lhs, var,
+                                                      NULL_TREE);
+                  }
+                else
+                  g = gimple_build_assign_with_ops (tsan_atomic_table[i].code,
+                                                    lhs,
+                                                    gimple_call_lhs (stmt),
+                                                    args[1]);
+                update_stmt (stmt);
+                gsi_insert_after (gsi, g, GSI_NEW_STMT);
+              }
+            return;
+          case weak_cas:
+            if (!integer_nonzerop (gimple_call_arg (stmt, 3)))
+              continue;
+            /* FALLTHRU */
+          case strong_cas:
+            gcc_assert (num == 6);
+            for (j = 0; j < 6; j++)
+              args[j] = gimple_call_arg (stmt, j);
+            if (!host_integerp (args[4], 1)
+                || (unsigned HOST_WIDE_INT) tree_low_cst (args[4], 1)
+                   > MEMMODEL_SEQ_CST)
+              return;
+            if (!host_integerp (args[5], 1)
+                || (unsigned HOST_WIDE_INT) tree_low_cst (args[5], 1)
+                   > MEMMODEL_SEQ_CST)
+              return;
+            update_gimple_call (gsi, decl, 5, args[0], args[1], args[2],
+                                args[4], args[5]);
+            return;
+          case bool_cas:
+          case val_cas:
+            gcc_assert (num == 3);
+            for (j = 0; j < 3; j++)
+              args[j] = gimple_call_arg (stmt, j);
+            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
+            t = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (t)));
+            t = create_tmp_var (t, NULL);
+            mark_addressable (t);
+            if (!useless_type_conversion_p (TREE_TYPE (t),
+                                            TREE_TYPE (args[1])))
+              {
+                g = gimple_build_assign_with_ops (NOP_EXPR,
+                                                  make_ssa_name (TREE_TYPE (t),
+                                                                 NULL),
+                                                  args[1], NULL_TREE);
+                gsi_insert_before (gsi, g, GSI_SAME_STMT);
+                args[1] = gimple_assign_lhs (g);
+              }
+            g = gimple_build_assign (t, args[1]);
+            gsi_insert_before (gsi, g, GSI_SAME_STMT);
+            lhs = gimple_call_lhs (stmt);
+            update_gimple_call (gsi, decl, 5, args[0],
+                                build_fold_addr_expr (t), args[2],
+                                build_int_cst (NULL_TREE,
+                                               MEMMODEL_SEQ_CST),
+                                build_int_cst (NULL_TREE,
+                                               MEMMODEL_SEQ_CST));
+            if (tsan_atomic_table[i].action == val_cas && lhs)
+              {
+                tree cond;
+                stmt = gsi_stmt (*gsi);
+                g = gimple_build_assign (make_ssa_name (TREE_TYPE (t), NULL),
+                                         t);
+                gsi_insert_after (gsi, g, GSI_NEW_STMT);
+                t = make_ssa_name (TREE_TYPE (TREE_TYPE (decl)), stmt);
+                cond = build2 (NE_EXPR, boolean_type_node, t,
+                               build_int_cst (TREE_TYPE (t), 0));
+                g = gimple_build_assign_with_ops (COND_EXPR, lhs, cond,
+                                                  args[1],
+                                                  gimple_assign_lhs (g));
+                gimple_call_set_lhs (stmt, t);
+                update_stmt (stmt);
+                gsi_insert_after (gsi, g, GSI_NEW_STMT);
+              }
+            return;
+          case lock_release:
+            gcc_assert (num == 1);
+            t = TYPE_ARG_TYPES (TREE_TYPE (decl));
+            t = TREE_VALUE (TREE_CHAIN (t));
+            update_gimple_call (gsi, decl, 3, gimple_call_arg (stmt, 0),
+                                build_int_cst (t, 0),
+                                build_int_cst (NULL_TREE,
+                                               MEMMODEL_RELEASE));
+            return;
+          default:
+            continue;
+          }
+      }
+}
+
 /* Instruments the gimple pointed to by GSI.  Return
    true if func entry/exit should be instrumented.  */
 
 static bool
-instrument_gimple (gimple_stmt_iterator gsi)
+instrument_gimple (gimple_stmt_iterator *gsi)
 {
   gimple stmt;
   tree rhs, lhs;
   bool instrumented = false;
 
-  stmt = gsi_stmt (gsi);
+  stmt = gsi_stmt (*gsi);
   if (is_gimple_call (stmt)
       && (gimple_call_fndecl (stmt)
           != builtin_decl_implicit (BUILT_IN_TSAN_INIT)))
-    return true;
+    {
+      if (is_gimple_builtin_call (stmt))
+        instrument_builtin_call (gsi);
+      return true;
+    }
   else if (is_gimple_assign (stmt)
           && !gimple_clobber_p (stmt))
     {
       if (gimple_store_p (stmt))
        {
          lhs = gimple_assign_lhs (stmt);
-         instrumented = instrument_expr (gsi, lhs, true);
+         instrumented = instrument_expr (*gsi, lhs, true);
        }
       if (gimple_assign_load_p (stmt))
        {
          rhs = gimple_assign_rhs1 (stmt);
-         instrumented = instrument_expr (gsi, rhs, false);
+         instrumented = instrument_expr (*gsi, rhs, false);
        }
     }
   return instrumented;

@@ -224,7 +627,7 @@ instrument_memory_accesses (void)
 
   FOR_EACH_BB (bb)
     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
-      fentry_exit_instrument |= instrument_gimple (gsi);
+      fentry_exit_instrument |= instrument_gimple (&gsi);
   return fentry_exit_instrument;
 }
 
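Illustration (not part of the commit): two of the trickier rewrites performed by instrument_builtin_call above, shown as pseudo-C for a hypothetical 4-byte operand.

    fetch_op / fetch_op_seq_cst (op-and-fetch builtins): call the
    fetch-and-op runtime function, then recompute the result.
        before:  res = __atomic_add_fetch (ptr, val, mo);
        after:   tmp = __tsan_atomic32_fetch_add (ptr, val, mo);
                 res = tmp + val;
    For the NAND variants the table stores BIT_NOT_EXPR and the result is
    rebuilt as res = ~(tmp & val) instead.

    bool_cas / val_cas (__sync compare-and-swap builtins): adapted to the
    pointer-to-expected runtime interface.
        before:  res = __sync_val_compare_and_swap (ptr, oldval, newval);
        after:   tmp = oldval;
                 ok  = __tsan_atomic32_compare_exchange_strong (ptr, &tmp, newval,
                                                                __ATOMIC_SEQ_CST,
                                                                __ATOMIC_SEQ_CST);
                 res = ok ? oldval : tmp;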