/* AddressSanitizer, a fast memory error detector.
   Copyright (C) 2012-2016 Free Software Foundation, Inc.
   Contributed by Kostya Serebryany <kcc@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "langhooks.h"
#include "gimple-builder.h"
/* AddressSanitizer finds out-of-bounds and use-after-free bugs
   with <2x slowdown on average.

   The tool consists of two parts:
   instrumentation module (this file) and a run-time library.
   The instrumentation module adds a run-time check before every memory insn.
     For an 8- or 16-byte load accessing address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
       if (ShadowValue)
         __asan_report_load8(X);
     For a load of N bytes (N=1, 2 or 4) from address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;
       if (ShadowValue)
         if ((X & 7) + N - 1 >= ShadowValue)
           __asan_report_loadN(X);
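
     As a worked example (assuming the x86_64 GNU/Linux shadow offset
     Offset = 0x7fff8000): a 4-byte load from X = 0x602014 computes
       ShadowAddr = (0x602014 >> 3) + 0x7fff8000 = 0x800b8402
       ShadowValue = *(char*)0x800b8402;
     If ShadowValue is 5, only the first 5 bytes of that 8-byte granule
     are addressable, so (0x602014 & 7) + 4 - 1 = 7 >= 5 holds and
     __asan_report_load4(X) is called.
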
   Stores are instrumented similarly, but using __asan_report_storeN functions.
   A call to __asan_init_vN() is inserted to the list of module CTORs.
   N is the version number of the AddressSanitizer API.  The changes between
   the API versions are listed in libsanitizer/asan/asan_interface_internal.h.

   The run-time library redefines malloc (so that redzones are inserted around
   the allocated memory) and free (so that reuse of freed memory is delayed),
   provides __asan_report* and __asan_init_vN functions.
   http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm

   The current implementation supports detection of out-of-bounds and
   use-after-free in the heap, on the stack and for global variables.
   [Protection of stack variables]

   To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
   stack grows downward:
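
     A sketch of such a function, consistent with the slot sizes
     described below (the exact declarations are an assumption here):

       int
       foo ()
       {
         char a[24] = {0};
         char b[8] = {0};

         a[5] = 1;
         b[1] = 2;

         return a[5] + b[1];
       }
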
   For this function, the stack protected by asan will be organized as
   follows, from the top of the stack to the bottom:

   Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']

   Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
            the next slot be 32 bytes aligned; this one is called Partial
            Redzone; this 32-byte alignment is an asan constraint]

   Slot 3/ [24 bytes for variable 'a']

   Slot 4/ [red zone of 32 bytes called 'Middle RedZone']

   Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]

   Slot 6/ [8 bytes for variable 'b']

   Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
            'LEFT RedZone']
   The 32 bytes of LEFT red zone at the bottom of the stack can be
   decomposed as follows:

   1/ The first 8 bytes contain a magical asan number that is always
   the same magic constant (ASAN_STACK_FRAME_MAGIC).

   2/ The following 8 bytes contain a pointer to a string (to be
   parsed at runtime by the runtime asan library), whose format is
   the following:

    "<function-name> <space> <num-of-variables-on-the-stack>
    (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
    <length-of-var-in-bytes> ){n} "

   where '(...){n}' means the content inside the parenthesis occurs 'n'
   times, with 'n' being the number of variables on the stack.
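
   For the frame above, with 'b' (8 bytes at offset 32 from the bottom)
   and 'a' (24 bytes at offset 96), the string could thus look like
   "foo 2 32 8 96 24 " (the function name 'foo' is assumed here).
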
   3/ The following 8 bytes contain the PC of the current function which
   will be used by the run-time library to print an error message.

   4/ The following 8 bytes are reserved for internal use by the run-time.
   The shadow memory for that stack layout is going to look like this:

     - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
       The F1 byte pattern is a magic number called
       ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
       the memory for that shadow byte is part of the LEFT red zone
       intended to sit at the bottom of the variables on the stack.

     - content of shadow memory 8 bytes for slots 6 and 5:
       0xF4F4F400.  The F4 byte pattern is a magic number
       called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
       memory region for this shadow byte is a PARTIAL red zone
       intended to pad a variable A, so that the slot following
       {A,padding} is 32 bytes aligned.

       Note that the fact that the least significant byte of this
       shadow memory content is 00 means that 8 bytes of its
       corresponding memory (which corresponds to the memory of
       variable 'b') are addressable.

     - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
       The F2 byte pattern is a magic number called
       ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
       region for this shadow byte is a MIDDLE red zone intended to
       sit between two 32-byte aligned slots of {variable,padding}.

     - content of shadow memory 8 bytes for slots 3 and 2:
       0xF4000000.  This represents the concatenation of
       variable 'a' and the partial red zone following it, like what we
       had for variable 'b'.  The least significant 3 bytes being 00
       means that the 24 bytes of variable 'a' are addressable.

     - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
       The F3 byte pattern is a magic number called
       ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
       region for this shadow byte is a RIGHT red zone intended to sit
       at the top of the variables of the stack.
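
     Putting it together, the 20 shadow bytes covering the 160-byte
     frame, from bottom (slot 7) to top (slot 1), are:
       F1 F1 F1 F1  00 F4 F4 F4  F2 F2 F2 F2  00 00 00 F4  F3 F3 F3 F3
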
   Note that the real variable layout is done in expand_used_vars in
   cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
   stack variables as well as the different red zones, emits some
   prologue code to populate the shadow memory so as to poison (mark as
   non-accessible) the regions of the red zones and mark the regions of
   stack variables as accessible, and emits some epilogue code to
   un-poison (mark as accessible) the regions of red zones right before
   the function exits.
   [Protection of global variables]

   The basic idea is to insert a red zone between two global variables
   and install a constructor function that calls the asan runtime to
   populate the relevant shadow memory regions at load time.

   So the global variables are laid out as to insert a red zone between
   them.  The size of the red zones is so that each variable starts on a
   32-byte boundary.

   Then a constructor function is installed so that, for each global
   variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:
     struct __asan_global
     {
       // Address of the beginning of the global variable.
       const void *__beg;

       // Initial size of the global variable.
       uptr __size;

       // Size of the global variable + size of the red zone.  This
       // size is 32 bytes aligned.
       uptr __size_with_redzone;

       // Name of the global variable.
       const void *__name;

       // Name of the module where the global variable is declared.
       const void *__module_name;

       // 1 if it has dynamic initialization, 0 otherwise.
       uptr __has_dynamic_init;

       // A pointer to a struct that contains source location; may be NULL.
       __asan_global_source_location *__location;
     }

   A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */
static unsigned HOST_WIDE_INT asan_shadow_offset_value;
static bool asan_shadow_offset_computed;
static vec<char *> sanitized_sections;

/* Set of variable declarations that are going to be guarded by
   use-after-scope sanitizer.  */
static hash_set<tree> *asan_handled_variables = NULL;

hash_set <tree> *asan_used_labels = NULL;
/* Sets shadow offset to value in string VAL.  */

set_asan_shadow_offset (const char *val)

#ifdef HAVE_LONG_LONG
  asan_shadow_offset_value = strtoull (val, &endp, 0);
#else
  asan_shadow_offset_value = strtoul (val, &endp, 0);
#endif
  if (!(*val != '\0' && *endp == '\0' && errno == 0))
    return false;

  asan_shadow_offset_computed = true;
/* Set list of user-defined sections that need to be sanitized.  */

set_sanitized_sections (const char *sections)

  /* Drop any previously recorded patterns.  */
  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    free (pat);
  sanitized_sections.truncate (0);

  for (const char *s = sections; *s; )
    {
      const char *end;
      /* Find the end of the current comma-separated pattern.  */
      for (end = s; *end && *end != ','; ++end);
      size_t len = end - s;
      sanitized_sections.safe_push (xstrndup (s, len));
      s = *end ? end + 1 : end;
    }
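
/* For example, -fsanitize-sections=.mysec*,.other records the two
   patterns ".mysec*" and ".other"; section_sanitized_p below matches
   section names against them with fnmatch.  */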
/* Return true if STMT is an IFN_ASAN_MARK internal call whose first
   argument is FLAG.  */

asan_mark_p (gimple *stmt, enum asan_mark_flags flag)

  return (gimple_call_internal_p (stmt, IFN_ASAN_MARK)
	  && tree_to_uhwi (gimple_call_arg (stmt, 0)) == flag);

/* Return true if the stack should be protected by asan.  */

asan_sanitize_stack_p (void)

  return ((flag_sanitize & SANITIZE_ADDRESS)
	  && !asan_no_sanitize_address_p ());
/* Checks whether section SEC should be sanitized.  */

section_sanitized_p (const char *sec)

  FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
    if (fnmatch (pat, sec, FNM_PERIOD) == 0)
      return true;
/* Returns Asan shadow offset.  */

static unsigned HOST_WIDE_INT
asan_shadow_offset ()

  if (!asan_shadow_offset_computed)
    {
      asan_shadow_offset_computed = true;
      asan_shadow_offset_value = targetm.asan_shadow_offset ();
    }
  return asan_shadow_offset_value;
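
/* The value comes from the target hook; for example, the x86_64
   GNU/Linux LP64 target returns 0x7fff8000, matching the run-time
   library's shadow mapping.  */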
alias_set_type asan_shadow_set = -1;

/* Pointer types to 1, 2 or 4 byte integers in shadow memory.  A separate
   alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[3];

/* Decl for __asan_option_detect_stack_use_after_return.  */
static GTY(()) tree asan_detect_stack_use_after_return;

/* Hashtable support for memory references used by gimple
   statements.  */

/* This type represents a reference to a memory region.  */

  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access.  */
  HOST_WIDE_INT access_size;
object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");

/* Initializes an instance of asan_mem_ref.  */

asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)

  ref->start = start;
  ref->access_size = access_size;

/* Allocates memory for an instance of asan_mem_ref into the memory
   pool returned by asan_mem_ref_get_alloc_pool and initializes it.
   START is the address of (or the expression pointing to) the
   beginning of memory reference.  ACCESS_SIZE is the size of the
   access to the referenced memory.  */

asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)

  asan_mem_ref *ref = asan_mem_ref_pool.allocate ();

  asan_mem_ref_init (ref, start, access_size);
  return ref;
/* This builds and returns a pointer to the end of the memory region
   that starts at START and of length LEN.  */

asan_mem_ref_get_end (tree start, tree len)

  if (len == NULL_TREE || integer_zerop (len))
    return start;

  if (!ptrofftype_p (len))
    len = convert_to_ptrofftype (len);

  return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);

/* Return a tree expression that represents the end of the referenced
   memory region.  Beware that this function can actually build a new
   tree expression.  */

asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)

  return asan_mem_ref_get_end (ref->start, len);
struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>

  static inline hashval_t hash (const asan_mem_ref *);
  static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);

/* Hash a memory reference.  */

asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)

  return iterative_hash_expr (mem_ref->start, 0);
/* Compare two memory references.  We accept the length of either
   memory reference to be NULL_TREE.  */

asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
			    const asan_mem_ref *m2)

  return operand_equal_p (m1->start, m2->start, 0);
static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;

/* Returns a reference to the hash table containing memory references.
   This function ensures that the hash table is created.  Note that
   this hash table is updated by the function
   update_mem_ref_hash_table.  */

static hash_table<asan_mem_ref_hasher> *
get_mem_ref_hash_table ()

  if (!asan_mem_ref_ht)
    asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);

  return asan_mem_ref_ht;

/* Clear all entries from the memory references hash table.  */

empty_mem_ref_hash_table ()

    asan_mem_ref_ht->empty ();

/* Free the memory references hash table.  */

free_mem_ref_resources ()

  delete asan_mem_ref_ht;
  asan_mem_ref_ht = NULL;

  asan_mem_ref_pool.release ();
/* Return true iff the memory reference REF has been instrumented.  */

has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)

  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
  return saved_ref && saved_ref->access_size >= access_size;

/* Return true iff the memory reference REF has been instrumented.  */

has_mem_ref_been_instrumented (const asan_mem_ref *ref)

  return has_mem_ref_been_instrumented (ref->start, ref->access_size);

/* Return true iff access to memory region starting at REF and of
   length LEN has been instrumented.  */

has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)

  HOST_WIDE_INT size_in_bytes
    = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  return size_in_bytes != -1
    && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
/* Set REF to the memory reference present in a gimple assignment
   ASSIGNMENT.  Return true upon successful completion, false
   otherwise.  */

get_mem_ref_of_assignment (const gassign *assignment,
			   asan_mem_ref *ref, bool *ref_is_store)

  gcc_assert (gimple_assign_single_p (assignment));

  if (gimple_store_p (assignment)
      && !gimple_clobber_p (assignment))
    {
      ref->start = gimple_assign_lhs (assignment);
      *ref_is_store = true;
    }
  else if (gimple_assign_load_p (assignment))
    {
      ref->start = gimple_assign_rhs1 (assignment);
      *ref_is_store = false;
    }

  ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
/* Return the memory references contained in a gimple statement
   representing a builtin call that has to do with memory access.  */

get_mem_refs_of_builtin_call (const gcall *call,
			      asan_mem_ref *src0, tree *src0_len,
			      bool *src0_is_store,
			      asan_mem_ref *src1, tree *src1_len,
			      bool *src1_is_store,
			      asan_mem_ref *dst, tree *dst_len,
			      bool *dst_is_store, bool *dest_is_deref,
			      bool *intercepted_p)

  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true, got_reference_p = false;
  HOST_WIDE_INT access_size = 1;

  *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));

  switch (DECL_FUNCTION_CODE (callee))
    {
    /* (s, s, n) style memops.  */
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

    /* (src, dest, n) style memops.  */
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

    /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

    /* (dest, n) style memops.  */
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

    /* (dest, x, n) style memops.  */
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      source0 = gimple_call_arg (call, 0);
      len = gimple_call_lhs (call);
      break;
    /* And now the __atomic* and __sync builtins.
       These are handled differently from the classical memory
       access builtins above.  */
    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:

    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      dest = gimple_call_arg (call, 0);
      /* DEST represents the address of a memory location.
	 instrument_derefs wants the memory location, so let's
	 dereference the address DEST before handing it to
	 instrument_derefs.  */
      if (TREE_CODE (dest) == ADDR_EXPR)
	dest = TREE_OPERAND (dest, 0);
      else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
	dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
		       dest, build_int_cst (TREE_TYPE (dest), 0));

      access_size = int_size_in_bytes (TREE_TYPE (dest));

      /* The other builtins' memory accesses are not instrumented in this
	 function because they either don't have any length parameter,
	 or their length parameter is just a limit.  */
  if (len != NULL_TREE)
    {
      if (source0 != NULL_TREE)
	{
	  src0->start = source0;
	  src0->access_size = access_size;
	  *src0_len = len;
	  *src0_is_store = false;
	}

      if (source1 != NULL_TREE)
	{
	  src1->start = source1;
	  src1->access_size = access_size;
	  *src1_len = len;
	  *src1_is_store = false;
	}

      if (dest != NULL_TREE)
	{
	  dst->start = dest;
	  dst->access_size = access_size;
	  *dst_len = len;
	  *dst_is_store = true;
	}

      got_reference_p = true;
    }
  else if (dest)
    {
      dst->start = dest;
      dst->access_size = access_size;
      *dst_len = NULL_TREE;
      *dst_is_store = is_store;
      *dest_is_deref = true;
      got_reference_p = true;
    }

  return got_reference_p;
/* Return true iff a given gimple statement has been instrumented.
   Note that the statement is "defined" by the memory references it
   contains.  */

has_stmt_been_instrumented_p (gimple *stmt)

  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
				     &r_is_store))
	return has_mem_ref_been_instrumented (&r);
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
	dest_is_store = false, dest_is_deref = false, intercepted_p = true;
      if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
					&src0, &src0_len, &src0_is_store,
					&src1, &src1_len, &src1_is_store,
					&dest, &dest_len, &dest_is_store,
					&dest_is_deref, &intercepted_p))
	{
	  if (src0.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src0, src0_len))
	    return false;

	  if (src1.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src1, src1_len))
	    return false;

	  if (dest.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&dest, dest_len))
	    return false;
	}
    }
  else if (is_gimple_call (stmt) && gimple_store_p (stmt))
    {
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      r.start = gimple_call_lhs (stmt);
      r.access_size = int_size_in_bytes (TREE_TYPE (r.start));
      return has_mem_ref_been_instrumented (&r);
    }
/* Insert a memory reference into the hash table.  */

update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)

  hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();

  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref **slot = ht->find_slot (&r, INSERT);
  if (*slot == NULL || (*slot)->access_size < access_size)
    *slot = asan_mem_ref_new (ref, access_size);
/* Initialize shadow_ptr_types array.  */

asan_init_shadow_ptr_types (void)

  asan_shadow_set = new_alias_set ();
  tree types[3] = { signed_char_type_node, short_integer_type_node,
		    integer_type_node };

  for (unsigned i = 0; i < 3; i++)
    {
      shadow_ptr_types[i] = build_distinct_type_copy (types[i]);
      TYPE_ALIAS_SET (shadow_ptr_types[i]) = asan_shadow_set;
      shadow_ptr_types[i] = build_pointer_type (shadow_ptr_types[i]);
    }

  initialize_sanitizer_builtins ();
/* Create ADDR_EXPR of STRING_CST with the PP pretty printer text.  */

asan_pp_string (pretty_printer *pp)

  const char *buf = pp_formatted_text (pp);
  size_t len = strlen (buf);
  tree ret = build_string (len + 1, buf);
  TREE_TYPE (ret)
    = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
			build_index_type (size_int (len)));
  TREE_READONLY (ret) = 1;
  TREE_STATIC (ret) = 1;
  return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
/* Return a CONST_INT representing 4 subsequent shadow memory bytes.  */

asan_shadow_cst (unsigned char shadow_bytes[4])

  int i;
  unsigned HOST_WIDE_INT val = 0;
  gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
  for (i = 0; i < 4; i++)
    val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
	   << (BITS_PER_UNIT * i);
  return gen_int_mode (val, SImode);
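
/* For instance, on a little-endian target the shadow bytes
   { 0x00, 0x00, 0x00, 0xF4 } produce the SImode constant 0xF4000000,
   the slots-3-and-2 pattern from the overview comment above.  */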
/* Clear shadow memory at SHADOW_MEM, LEN bytes.  Can't use a library
   call here: if clear_storage would emit one, fall back to an inline
   4-byte store loop.  */

asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)

  rtx_insn *insn, *insns, *jump;
  rtx_code_label *top_label;
  rtx end, addr, tmp;

  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;
  if (insn == NULL_RTX)
    {
      emit_insn (insns);
      return;
    }

  gcc_assert ((len & 3) == 0);
  top_label = gen_label_rtx ();
  addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
/* Emit the LASANPC label at the start of the current function; the
   frame description built in asan_emit_stack_protection refers to
   it.  */

asan_function_start (void)

  section *fnsec = function_section (current_function_decl);
  switch_to_section (fnsec);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
			  current_function_funcdef_no);
/* Return number of shadow bytes that are occupied by a local variable
   of SIZE bytes.  */

static unsigned HOST_WIDE_INT
shadow_mem_size (unsigned HOST_WIDE_INT size)

  return ROUND_UP (size, ASAN_SHADOW_GRANULARITY) / ASAN_SHADOW_GRANULARITY;
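
/* With the default 8-byte granularity, a 13-byte variable thus
   occupies ROUND_UP (13, 8) / 8 = 2 shadow bytes.  */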
/* Insert code to protect stack vars.  The prologue sequence should be emitted
   directly, epilogue sequence returned.  BASE is the register holding the
   stack base, against which the OFFSETS array offsets are relative.  The
   OFFSETS array contains pairs of offsets in reverse order, always the end
   offset of some gap that needs protection followed by starting offset,
   and DECLS is an array of representative decls for each var partition.
   LENGTH is the length of the OFFSETS array, the DECLS array is
   LENGTH / 2 - 1 elements long (OFFSETS include gap before the first
   variable as well as gaps after each stack variable).  PBASE is, if
   non-NULL, some pseudo register which stack vars DECL_RTLs are based on.
   Either BASE should be assigned to PBASE, when not doing use after return
   protection, or corresponding address based on __asan_stack_malloc*
   return value.  */
asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
			    HOST_WIDE_INT *offsets, tree *decls, int length)

  rtx shadow_base, shadow_mem, ret, mem, orig_base;
  rtx_code_label *lab;
  unsigned char shadow_bytes[4];
  HOST_WIDE_INT base_offset = offsets[length - 1];
  HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
  HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
  HOST_WIDE_INT last_offset;
  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
  tree str_cst, decl, id;
  int use_after_return_class = -1;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  /* First of all, prepare the description string.  */
  pretty_printer asan_pp;

  pp_decimal_int (&asan_pp, length / 2 - 1);
  pp_space (&asan_pp);
  for (l = length - 2; l; l -= 2)
    {
      tree decl = decls[l / 2 - 1];
      pp_wide_integer (&asan_pp, offsets[l] - base_offset);
      pp_space (&asan_pp);
      pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
      pp_space (&asan_pp);
      if (DECL_P (decl) && DECL_NAME (decl))
	{
	  pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
	  pp_space (&asan_pp);
	  pp_tree_identifier (&asan_pp, DECL_NAME (decl));
	}
      else
	pp_string (&asan_pp, "9 <unknown>");
      pp_space (&asan_pp);
    }
  str_cst = asan_pp_string (&asan_pp);
  /* Emit the prologue sequence.  */
  if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
      && ASAN_USE_AFTER_RETURN)
    {
      use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
      /* __asan_stack_malloc_N guarantees alignment
	 N < 6 ? (64 << N) : 4096 bytes.  */
      if (alignb > (use_after_return_class < 6
		    ? (64U << use_after_return_class) : 4096U))
	use_after_return_class = -1;
      else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
	base_align_bias = ((asan_frame_size + alignb - 1)
			   & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
    }
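
  /* For example, a 1024-byte frame gives use_after_return_class
     = floor_log2 (1023) - 5 = 4, and __asan_stack_malloc_4 then
     guarantees 64 << 4 = 1024 byte alignment.  */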
  /* Align base if target is STRICT_ALIGNMENT.  */
  if (STRICT_ALIGNMENT)
    base = expand_binop (Pmode, and_optab, base,
			 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
					  << ASAN_SHADOW_SHIFT)
					 / BITS_PER_UNIT), Pmode), NULL_RTX,
			 1, OPTAB_DIRECT);

  if (use_after_return_class == -1 && pbase)
    emit_move_insn (pbase, base);

  base = expand_binop (Pmode, add_optab, base,
		       gen_int_mode (base_offset - base_align_bias, Pmode),
		       NULL_RTX, 1, OPTAB_DIRECT);
  orig_base = NULL_RTX;
1135 orig_base = NULL_RTX;
1136 if (use_after_return_class != -1)
1138 if (asan_detect_stack_use_after_return == NULL_TREE)
1140 id = get_identifier ("__asan_option_detect_stack_use_after_return");
1141 decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
1143 SET_DECL_ASSEMBLER_NAME (decl, id);
1144 TREE_ADDRESSABLE (decl) = 1;
1145 DECL_ARTIFICIAL (decl) = 1;
1146 DECL_IGNORED_P (decl) = 1;
1147 DECL_EXTERNAL (decl) = 1;
1148 TREE_STATIC (decl) = 1;
1149 TREE_PUBLIC (decl) = 1;
1150 TREE_USED (decl) = 1;
1151 asan_detect_stack_use_after_return = decl;
1153 orig_base = gen_reg_rtx (Pmode);
1154 emit_move_insn (orig_base, base);
1155 ret = expand_normal (asan_detect_stack_use_after_return);
1156 lab = gen_label_rtx ();
1157 int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1158 emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1159 VOIDmode, 0, lab, very_likely);
1160 snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
1161 use_after_return_class);
1162 ret = init_one_libfunc (buf);
1163 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 1,
1164 GEN_INT (asan_frame_size
1166 TYPE_MODE (pointer_sized_int_node));
      /* __asan_stack_malloc_[n] returns a pointer to fake stack if it
	 succeeds and NULL otherwise.  Check RET value is NULL here and
	 jump over the BASE reassignment in this case.  Otherwise,
	 reassign BASE to RET.  */
      int very_unlikely = REG_BR_PROB_BASE / 2000 - 1;
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
			       VOIDmode, 0, lab, very_unlikely);
      ret = convert_memory_address (Pmode, ret);
      emit_move_insn (base, ret);
      emit_label (lab);
      emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
					   gen_int_mode (base_align_bias
							 - base_offset, Pmode),
					   NULL_RTX, 1, OPTAB_DIRECT));
    }
  mem = gen_rtx_MEM (ptr_mode, base);
  mem = adjust_address (mem, VOIDmode, base_align_bias);
  emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  emit_move_insn (mem, expand_normal (str_cst));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
  id = get_identifier (buf);
  decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		     VAR_DECL, id, char_type_node);
  SET_DECL_ASSEMBLER_NAME (decl, id);
  TREE_ADDRESSABLE (decl) = 1;
  TREE_READONLY (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  TREE_USED (decl) = 1;
  DECL_INITIAL (decl) = decl;
  TREE_ASM_WRITTEN (decl) = 1;
  TREE_ASM_WRITTEN (id) = 1;
  emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
  shadow_base = expand_binop (Pmode, lshr_optab, base,
			      GEN_INT (ASAN_SHADOW_SHIFT),
			      NULL_RTX, 1, OPTAB_DIRECT);
  shadow_base
    = plus_constant (Pmode, shadow_base,
		     asan_shadow_offset ()
		     + (base_align_bias >> ASAN_SHADOW_SHIFT));
  gcc_assert (asan_shadow_set != -1
	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
  shadow_mem = gen_rtx_MEM (SImode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
  prev_offset = base_offset;
  for (l = length; l; l -= 2)
    {
      if (l == 2)
	cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
      offset = offsets[l - 1];
      if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
	{
	  HOST_WIDE_INT aoff
	    = base_offset + ((offset - base_offset)
			     & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (aoff - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = aoff;
	  for (i = 0; i < 4; i++, aoff += ASAN_SHADOW_GRANULARITY)
	    if (aoff < offset)
	      {
		if (aoff < offset - (HOST_WIDE_INT) ASAN_SHADOW_GRANULARITY + 1)
		  shadow_bytes[i] = 0;
		else
		  shadow_bytes[i] = offset - aoff;
	      }
	    else
	      shadow_bytes[i] = ASAN_STACK_MAGIC_MIDDLE;
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset = aoff;
	}
      while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = offset;
	  memset (shadow_bytes, cur_shadow_byte, 4);
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset += ASAN_RED_ZONE_SIZE;
	}
      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
    }
  do_pending_stack_adjust ();

  /* Construct epilogue sequence.  */
  start_sequence ();

  lab = NULL;
  if (use_after_return_class != -1)
    {
      rtx_code_label *lab2 = gen_label_rtx ();
      char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
			       VOIDmode, 0, lab2, very_likely);
      shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
      set_mem_alias_set (shadow_mem, asan_shadow_set);
      mem = gen_rtx_MEM (ptr_mode, base);
      mem = adjust_address (mem, VOIDmode, base_align_bias);
      emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
      unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
      if (use_after_return_class < 5
	  && can_store_by_pieces (sz, builtin_memset_read_str, &c,
				  BITS_PER_UNIT, true))
	store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
			 BITS_PER_UNIT, true, 0);
      else if (use_after_return_class >= 5
	       || !set_storage_via_setmem (shadow_mem,
					   GEN_INT (sz),
					   gen_int_mode (c, QImode),
					   BITS_PER_UNIT, BITS_PER_UNIT,
					   -1, sz))
	{
	  snprintf (buf, sizeof buf, "__asan_stack_free_%d",
		    use_after_return_class);
	  ret = init_one_libfunc (buf);
	  rtx addr = convert_memory_address (ptr_mode, base);
	  rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
	  emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
			     GEN_INT (asan_frame_size + base_align_bias),
			     TYPE_MODE (pointer_sized_int_node),
			     orig_addr, ptr_mode);
	}
      lab = gen_label_rtx ();
      emit_jump (lab);
      emit_label (lab2);
    }
  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);

  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
  /* Unpoison shadow memory of a stack at the very end of a function.
     As we're poisoning stack variables at the end of their scope,
     shadow memory must be properly unpoisoned here.  The easiest approach
     would be to collect all variables that should not be unpoisoned and
     unpoison the shadow memory of the whole stack except for the ranges
     occupied by these variables.  */
  last_offset = base_offset;
  HOST_WIDE_INT current_offset = last_offset;
  if (length)
    {
      HOST_WIDE_INT var_end_offset = 0;
      HOST_WIDE_INT stack_start = offsets[length - 1];
      gcc_assert (last_offset == stack_start);
      for (int l = length - 2; l > 0; l -= 2)
	{
	  tree decl = decls[l / 2 - 1];
	  HOST_WIDE_INT var_offset = offsets[l];
	  current_offset = var_offset;
	  var_end_offset = offsets[l - 1];
	  HOST_WIDE_INT rounded_size = ROUND_UP (var_end_offset - var_offset,
						 ASAN_SHADOW_GRANULARITY);

	  /* Should we unpoison the variable?  */
	  if (asan_handled_variables != NULL
	      && asan_handled_variables->contains (decl))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  const char *n = (DECL_NAME (decl)
				   ? IDENTIFIER_POINTER (DECL_NAME (decl))
				   : "<unknown>");
		  fprintf (dump_file, "Unpoisoning shadow stack for variable: "
			   "%s (%" PRId64 "B)\n", n,
			   var_end_offset - var_offset);
		}

	      unsigned HOST_WIDE_INT s
		= shadow_mem_size (current_offset - last_offset);
	      asan_clear_shadow (shadow_mem, s);
	      HOST_WIDE_INT shift
		= shadow_mem_size (current_offset - last_offset + rounded_size);
	      shadow_mem = adjust_address (shadow_mem, VOIDmode, shift);
	      last_offset = var_offset + rounded_size;
	      current_offset = last_offset;
	    }
	}
      /* Handle last redzone.  */
      current_offset = offsets[0];
      asan_clear_shadow (shadow_mem,
			 shadow_mem_size (current_offset - last_offset));
    }

  /* Clean up the set of instrumented stack variables.  */
  delete asan_handled_variables;
  asan_handled_variables = NULL;
  delete asan_used_labels;
  asan_used_labels = NULL;

  do_pending_stack_adjust ();
  if (lab)
    emit_label (lab);

  insns = get_insns ();
  end_sequence ();
  return insns;
/* Return true if DECL, a global var, might be overridden and therefore
   needs a local alias.  */

asan_needs_local_alias (tree decl)

  return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
/* Return true if DECL is a VAR_DECL that should be protected
   by Address Sanitizer, by appending a red zone with protected
   shadow memory after it and aligning it to at least
   ASAN_RED_ZONE_SIZE bytes.  */

asan_protect_global (tree decl, bool ignore_decl_rtl_set_p)

  if (TREE_CODE (decl) == STRING_CST)
    {
      /* Instrument all STRING_CSTs except those created
	 by asan_pp_string here.  */
      if (shadow_ptr_types[0] != NULL_TREE
	  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
	  && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
	return false;
      return true;
    }
  if (TREE_CODE (decl) != VAR_DECL
      /* TLS vars aren't statically protectable.  */
      || DECL_THREAD_LOCAL_P (decl)
      /* Externs will be protected elsewhere.  */
      || DECL_EXTERNAL (decl)
      /* PR sanitizer/81697: For architectures that use section anchors, the
	 first call to asan_protect_global may occur before DECL_RTL (decl)
	 is set.  We should ignore DECL_RTL_SET_P then, because otherwise the
	 first call to asan_protect_global will return FALSE and the following
	 calls on the same decl after setting DECL_RTL (decl) will return TRUE
	 and we'll end up with inconsistency at runtime.  */
      || (!DECL_RTL_SET_P (decl) && !ignore_decl_rtl_set_p)
      /* Comdat vars pose an ABI problem, we can't know if
	 the var that is selected by the linker will have
	 padding or not.  */
      || DECL_ONE_ONLY (decl)
      /* Similarly for common vars.  People can use -fno-common.
	 Note: Linux kernel is built with -fno-common, so we do instrument
	 globals there even if it is C.  */
      || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
      /* Don't protect if using user section, often vars placed
	 into user section from multiple TUs are then assumed
	 to be an array of such vars, putting padding in there
	 breaks this assumption.  */
      || (DECL_SECTION_NAME (decl) != NULL
	  && !symtab_node::get (decl)->implicit_section
	  && !section_sanitized_p (DECL_SECTION_NAME (decl)))
      || DECL_SIZE (decl) == 0
      || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
      || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
      || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
      || TREE_TYPE (decl) == ubsan_get_source_location_type ())
    return false;

  if (!ignore_decl_rtl_set_p || DECL_RTL_SET_P (decl))
    {
      rtl = DECL_RTL (decl);
      if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
	return false;
      symbol = XEXP (rtl, 0);

      if (CONSTANT_POOL_ADDRESS_P (symbol)
	  || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
	return false;
    }

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return false;

#ifndef ASM_OUTPUT_DEF
  if (asan_needs_local_alias (decl))
    return false;
#endif

  return true;
/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
		   int *nargs)

  static enum built_in_function report[2][2][6]
    = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
	    BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
	    BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
	  { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
	    BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
	    BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
	{ { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
	  { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (report[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
/* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
	    int *nargs)

  static enum built_in_function check[2][2][6]
    = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
	    BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
	    BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
	  { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
	    BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
	    BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
	{ { BUILT_IN_ASAN_LOAD1_NOABORT,
	    BUILT_IN_ASAN_LOAD2_NOABORT,
	    BUILT_IN_ASAN_LOAD4_NOABORT,
	    BUILT_IN_ASAN_LOAD8_NOABORT,
	    BUILT_IN_ASAN_LOAD16_NOABORT,
	    BUILT_IN_ASAN_LOADN_NOABORT },
	  { BUILT_IN_ASAN_STORE1_NOABORT,
	    BUILT_IN_ASAN_STORE2_NOABORT,
	    BUILT_IN_ASAN_STORE4_NOABORT,
	    BUILT_IN_ASAN_STORE8_NOABORT,
	    BUILT_IN_ASAN_STORE16_NOABORT,
	    BUILT_IN_ASAN_STOREN_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (check[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
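
/* For instance, an 8-byte non-recovering load maps to
   check[0][0][exact_log2 (8)] = check[0][0][3] = BUILT_IN_ASAN_LOAD8,
   while a variable-length access uses the *N variant (index 5).  */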
/* Split the current basic block and create a condition statement
   insertion point right before or after the statement pointed to by
   ITER.  Return an iterator to the point at which the caller might
   safely insert the condition statement.

   THEN_BLOCK must be set to the address of an uninitialized instance
   of basic_block.  The function will then set *THEN_BLOCK to the
   'then block' of the condition statement to be inserted by the
   caller.

   If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
   *THEN_BLOCK to *FALLTHROUGH_BLOCK.

   Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.

   Note that *FALLTHROUGH_BLOCK is a new block that contains the
   statements starting from *ITER, and *THEN_BLOCK is a new empty
   block.

   *ITER is adjusted to always point to the first statement
   of the basic block *FALLTHROUGH_BLOCK.  That statement is the
   same as what ITER was pointing to prior to calling this function,
   if BEFORE_P is true; otherwise, it is its following statement.  */
gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
			  bool before_p,
			  bool then_more_likely_p,
			  bool create_then_fallthru_edge,
			  basic_block *then_block,
			  basic_block *fallthrough_block)

  gimple_stmt_iterator gsi = *iter;

  if (!gsi_end_p (gsi) && before_p)
    gsi_prev (&gsi);

  basic_block cur_bb = gsi_bb (*iter);

  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  if (current_loops)
    {
      add_bb_to_loop (then_bb, cond_bb->loop_father);
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  int fallthrough_probability
    = then_more_likely_p
    ? PROB_VERY_UNLIKELY
    : PROB_ALWAYS - PROB_VERY_UNLIKELY;
  e->probability = PROB_ALWAYS - fallthrough_probability;
  if (create_then_fallthru_edge)
    make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  return gsi_last_bb (cond_bb);
/* Insert an if condition followed by a 'then block' right before the
   statement pointed to by ITER.  The fallthrough block -- which is the
   else block of the condition as well as the destination of the
   outgoing edge of the 'then block' -- starts with the statement
   pointed to by ITER.

   COND is the condition of the if.

   If THEN_MORE_LIKELY_P is true, the probability of the edge to the
   'then block' is higher than the probability of the edge to the
   fallthrough block.

   Upon completion of the function, *THEN_BB is set to the newly
   inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
   fallthrough block.

   *ITER is adjusted to still point to the same statement it was
   pointing to initially.  */
insert_if_then_before_iter (gcond *cond,
			    gimple_stmt_iterator *iter,
			    bool then_more_likely_p,
			    basic_block *then_bb,
			    basic_block *fallthrough_bb)

  gimple_stmt_iterator cond_insert_point =
    create_cond_insert_point (iter,
			      /*before_p=*/true,
			      then_more_likely_p,
			      /*create_then_fallthru_edge=*/true,
			      then_bb,
			      fallthrough_bb);
  gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
/* Build (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().
   If RETURN_ADDRESS is set to true, return the memory location instead
   of a value in the shadow memory.  */

build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
			 tree base_addr, tree shadow_ptr_type,
			 bool return_address = false)

  tree t, uintptr_type = TREE_TYPE (base_addr);
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  gimple *g;

  t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
  g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
			   base_addr, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build_int_cst (uintptr_type, asan_shadow_offset ());
  g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
			   gimple_assign_lhs (g), t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
			   gimple_assign_lhs (g));
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  if (!return_address)
    {
      t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
		  build_int_cst (shadow_ptr_type, 0));
      g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
      gimple_set_location (g, location);
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }

  return gimple_assign_lhs (g);
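
/* A sketch of the gimple emitted above for a base address B (with the
   x86_64 shift of 3):
     _1 = B >> 3;
     _2 = _1 + <asan_shadow_offset>;
     _3 = (shadow_ptr_type) _2;
     _4 = *_3;   // skipped when RETURN_ADDRESS is true
   The SSA names are illustrative only.  */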
/* BASE can already be an SSA_NAME; in that case, do not create a
   new SSA_NAME for it.  */

maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,

  if (TREE_CODE (base) == SSA_NAME)
    return base;
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
				   TREE_CODE (base), base);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);

/* LEN can already have necessary size and precision;
   in that case, do not create a new variable.  */

maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,

  if (ptrofftype_p (len))
    return len;
  gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   NOP_EXPR, len);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
/* Instrument the memory access instruction BASE.  Insert new
   statements before or after ITER.

   Note that the memory access represented by BASE can be either an
   SSA_NAME, or a non-SSA expression.  LOCATION is the source code
   location.  IS_STORE is TRUE for a store, FALSE for a load.
   BEFORE_P is TRUE for inserting the instrumentation code before
   ITER, FALSE for inserting it after ITER.  IS_SCALAR_ACCESS is TRUE
   for a scalar memory access and FALSE for memory region access.
   NON_ZERO_P is TRUE if memory region is guaranteed to have non-zero
   length.  ALIGN tells alignment of accessed memory object.

   START_INSTRUMENTED and END_INSTRUMENTED are TRUE if start/end of
   memory region have already been instrumented.

   If BEFORE_P is TRUE, *ITER is arranged to still point to the
   statement it was pointing to prior to calling this function,
   otherwise, it points to the statement logically following it.  */
build_check_stmt (location_t loc, tree base, tree len,
		  HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
		  bool is_non_zero_len, bool before_p, bool is_store,
		  bool is_scalar_access, unsigned int align = 0)

  gimple_stmt_iterator gsi = *iter;
  gimple *g;

  gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));

  gsi = *iter;

  base = unshare_expr (base);
  base = maybe_create_ssa_name (loc, base, &gsi, before_p);

  if (len)
    {
      len = unshare_expr (len);
      len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
    }
  else
    {
      gcc_assert (size_in_bytes != -1);
      len = build_int_cst (pointer_sized_int_node, size_in_bytes);
    }

  if (size_in_bytes > 1)
    {
      if ((size_in_bytes & (size_in_bytes - 1)) != 0
	  || size_in_bytes > 16)
	is_scalar_access = false;
      else if (align && align < size_in_bytes * BITS_PER_UNIT)
	{
	  /* On non-strict alignment targets, if
	     16-byte access is just 8-byte aligned,
	     this will result in misaligned shadow
	     memory 2 byte load, but otherwise can
	     be handled using one read.  */
	  if (size_in_bytes != 16
	      || STRICT_ALIGNMENT
	      || align < 8 * BITS_PER_UNIT)
	    is_scalar_access = false;
	}
    }

  HOST_WIDE_INT flags = 0;
  if (is_store)
    flags |= ASAN_CHECK_STORE;
  if (is_non_zero_len)
    flags |= ASAN_CHECK_NON_ZERO_LEN;
  if (is_scalar_access)
    flags |= ASAN_CHECK_SCALAR_ACCESS;

  g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
				  build_int_cst (integer_type_node, flags),
				  base, len,
				  build_int_cst (integer_type_node,
						 align / BITS_PER_UNIT));
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (&gsi, g, GSI_SAME_STMT);
  else
    {
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
      gsi_next (&gsi);
      *iter = gsi;
    }
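
/* The statement built above is an internal call of the form
     .ASAN_CHECK (flags, base, len, align_in_bytes);
   e.g. a 4-byte scalar store yields flags
   ASAN_CHECK_STORE | ASAN_CHECK_NON_ZERO_LEN | ASAN_CHECK_SCALAR_ACCESS;
   the sanopt pass later expands it into the real shadow-memory test.  */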
/* If T represents a memory access, add instrumentation code before ITER.
   LOCATION is source code location.
   IS_STORE is either TRUE (for a store) or FALSE (for a load).  */
instrument_derefs (gimple_stmt_iterator *iter, tree t,
		   location_t location, bool is_store)

  if (is_store && !ASAN_INSTRUMENT_WRITES)
    return;
  if (!is_store && !ASAN_INSTRUMENT_READS)
    return;

  tree type, base;
  HOST_WIDE_INT size_in_bytes;
  if (location == UNKNOWN_LOCATION)
    location = EXPR_LOCATION (t);

  type = TREE_TYPE (t);
  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
    case INDIRECT_REF:
    case MEM_REF:
    case VAR_DECL:
    case BIT_FIELD_REF:
      break;
    default:
      return;
    }

  size_in_bytes = int_size_in_bytes (type);
  if (size_in_bytes <= 0)
    return;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep = 0;
  tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
				    &unsignedp, &reversep, &volatilep, false);

  if (TREE_CODE (t) == COMPONENT_REF
      && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
    {
      tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
      instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
				       TREE_OPERAND (t, 0), repr,
				       NULL_TREE), location, is_store);
      return;
    }

  if (bitpos % BITS_PER_UNIT
      || bitsize != size_in_bytes * BITS_PER_UNIT)
    return;

  if (TREE_CODE (inner) == VAR_DECL
      && offset == NULL_TREE
      && bitpos >= 0
      && DECL_SIZE (inner)
      && tree_fits_shwi_p (DECL_SIZE (inner))
      && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
    {
      if (DECL_THREAD_LOCAL_P (inner))
	return;
      if (!ASAN_GLOBALS && is_global_var (inner))
	return;
      if (!TREE_STATIC (inner))
	{
	  /* Automatic vars in the current function will be always
	     accessible.  */
	  if (decl_function_context (inner) == current_function_decl
	      && (!asan_sanitize_use_after_scope ()
		  || !TREE_ADDRESSABLE (inner)))
	    return;
	}
      /* Always instrument external vars, they might be dynamically
	 initialized.  */
      else if (!DECL_EXTERNAL (inner))
	{
	  /* For static vars if they are known not to be dynamically
	     initialized, they will be always accessible.  */
	  varpool_node *vnode = varpool_node::get (inner);
	  if (vnode && !vnode->dynamically_initialized)
	    return;
	}
    }

  base = build_fold_addr_expr (t);
  if (!has_mem_ref_been_instrumented (base, size_in_bytes))
    {
      unsigned int align = get_object_alignment (t);
      build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
			/*is_non_zero_len*/ size_in_bytes > 0,
			/*before_p=*/ true,
			is_store, /*is_scalar_access*/ true, align);
      update_mem_ref_hash_table (base, size_in_bytes);
      update_mem_ref_hash_table (t, size_in_bytes);
    }
/* Insert a memory reference into the hash table if access length
   can be determined at compile time.  */

maybe_update_mem_ref_hash_table (tree base, tree len)

  if (!POINTER_TYPE_P (TREE_TYPE (base))
      || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
    return;

  HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if (size_in_bytes != -1)
    update_mem_ref_hash_table (base, size_in_bytes);
/* Instrument an access to a contiguous memory region that starts at
   the address pointed to by BASE, over a length of LEN (expressed in
   sizeof (*BASE) bytes).  ITER points to the instruction before
   which the instrumentation instructions must be inserted.  LOCATION
   is the source location that the instrumentation instructions must
   have.  If IS_STORE is true, then the memory access is a store;
   otherwise, it's a load.  */

static void
instrument_mem_region_access (tree base, tree len,
			      gimple_stmt_iterator *iter,
			      location_t location, bool is_store)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base))
      || !INTEGRAL_TYPE_P (TREE_TYPE (len))
      || integer_zerop (len))
    return;

  HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if ((size_in_bytes == -1)
      || !has_mem_ref_been_instrumented (base, size_in_bytes))
    build_check_stmt (location, base, len, size_in_bytes, iter,
		      /*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
		      is_store, /*is_scalar_access*/false, /*align*/0);

  maybe_update_mem_ref_hash_table (base, len);
  *iter = gsi_for_stmt (gsi_stmt (*iter));
}
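
/* Illustrative example (not part of the pass): for a call such as

     memset (p, 0, n);

   with non-constant N, a single non-scalar ASAN_CHECK over the region
   [p, p + n) is emitted with the ASAN_CHECK_NON_ZERO_LEN flag clear,
   so the later expansion guards the whole check with "if (n != 0)"
   and then tests the shadow of both the first and the last byte of
   the region.  With a compile-time constant N, the region is also
   recorded in the mem-ref hash table, so a duplicate check in the
   same extended basic block can be dropped.  */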
/* Instrument the call to a built-in memory access function that is
   pointed to by the iterator ITER.

   Upon completion, return TRUE iff *ITER has been advanced to the
   statement following the one it was originally pointing to.  */

static bool
instrument_builtin_call (gimple_stmt_iterator *iter)
{
  if (!ASAN_MEMINTRIN)
    return false;

  bool iter_advanced_p = false;
  gcall *call = as_a <gcall *> (gsi_stmt (*iter));

  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  location_t loc = gimple_location (call);

  asan_mem_ref src0, src1, dest;
  asan_mem_ref_init (&src0, NULL, 1);
  asan_mem_ref_init (&src1, NULL, 1);
  asan_mem_ref_init (&dest, NULL, 1);

  tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
  bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
    dest_is_deref = false, intercepted_p = true;

  if (get_mem_refs_of_builtin_call (call,
				    &src0, &src0_len, &src0_is_store,
				    &src1, &src1_len, &src1_is_store,
				    &dest, &dest_len, &dest_is_store,
				    &dest_is_deref, &intercepted_p))
    {
      if (dest_is_deref)
	{
	  instrument_derefs (iter, dest.start, loc, dest_is_store);
	  gsi_next (iter);
	  iter_advanced_p = true;
	}
      else if (!intercepted_p
	       && (src0_len || src1_len || dest_len))
	{
	  if (src0.start != NULL_TREE)
	    instrument_mem_region_access (src0.start, src0_len,
					  iter, loc, /*is_store=*/false);
	  if (src1.start != NULL_TREE)
	    instrument_mem_region_access (src1.start, src1_len,
					  iter, loc, /*is_store=*/false);
	  if (dest.start != NULL_TREE)
	    instrument_mem_region_access (dest.start, dest_len,
					  iter, loc, /*is_store=*/true);

	  *iter = gsi_for_stmt (call);
	  gsi_next (iter);
	  iter_advanced_p = true;
	}
      else
	{
	  if (src0.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (src0.start, src0_len);
	  if (src1.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (src1.start, src1_len);
	  if (dest.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (dest.start, dest_len);
	}
    }
  return iter_advanced_p;
}
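
/* A sketch of the decomposition done above, for illustration: a call

     memcpy (d, s, n);

   is described by get_mem_refs_of_builtin_call as one read region
   (SRC0 = s, SRC0_LEN = n) and one write region (DEST = d,
   DEST_LEN = n).  If the callee is one the runtime library already
   intercepts, INTERCEPTED_P comes back true and only the hash table
   is updated here; otherwise explicit region checks are emitted for
   each recorded region.  */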
/* Instrument the assignment statement ITER if it is subject to
   instrumentation.  Return TRUE iff instrumentation actually
   happened.  In that case, the iterator ITER is advanced to the next
   logical expression following the one initially pointed to by ITER,
   and the relevant memory reference whose access has been
   instrumented is added to the memory references hash table.  */

static bool
maybe_instrument_assignment (gimple_stmt_iterator *iter)
{
  gimple *s = gsi_stmt (*iter);

  gcc_assert (gimple_assign_single_p (s));

  tree ref_expr = NULL_TREE;
  bool is_store, is_instrumented = false;

  if (gimple_store_p (s))
    {
      ref_expr = gimple_assign_lhs (s);
      is_store = true;
      instrument_derefs (iter, ref_expr,
			 gimple_location (s),
			 is_store);
      is_instrumented = true;
    }

  if (gimple_assign_load_p (s))
    {
      ref_expr = gimple_assign_rhs1 (s);
      is_store = false;
      instrument_derefs (iter, ref_expr,
			 gimple_location (s),
			 is_store);
      is_instrumented = true;
    }

  if (is_instrumented)
    gsi_next (iter);

  return is_instrumented;
}
/* Instrument the function call pointed to by the iterator ITER, if it
   is subject to instrumentation.  At the moment, the only function
   calls that are instrumented are some built-in functions that access
   memory.  Look at instrument_builtin_call to learn more.

   Upon completion return TRUE iff *ITER was advanced to the statement
   following the one it was originally pointing to.  */

static bool
maybe_instrument_call (gimple_stmt_iterator *iter)
{
  gimple *stmt = gsi_stmt (*iter);
  bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);

  if (is_builtin && instrument_builtin_call (iter))
    return true;

  if (gimple_call_noreturn_p (stmt))
    {
      if (is_builtin)
	{
	  tree callee = gimple_call_fndecl (stmt);
	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_UNREACHABLE:
	    case BUILT_IN_TRAP:
	      /* Don't instrument these.  */
	      return false;
	    default:
	      break;
	    }
	}
      tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
      gimple *g = gimple_build_call (decl, 0);
      gimple_set_location (g, gimple_location (stmt));
      gsi_insert_before (iter, g, GSI_SAME_STMT);
    }

  bool instrumented = false;
  if (gimple_store_p (stmt))
    {
      tree ref_expr = gimple_call_lhs (stmt);
      instrument_derefs (iter, ref_expr,
			 gimple_location (stmt),
			 /*is_store=*/true);
      instrumented = true;
    }

  /* Walk through gimple_call arguments and check them if needed.  */
  unsigned args_num = gimple_call_num_args (stmt);
  for (unsigned i = 0; i < args_num; ++i)
    {
      tree arg = gimple_call_arg (stmt, i);
      /* If ARG is not a non-aggregate register variable, the compiler
	 in general creates a temporary for it and passes the temporary
	 as the argument to the GIMPLE call.  But in some cases, e.g.
	 when we pass by value a small structure that fits in a
	 register, the compiler can avoid the extra overhead by pulling
	 out these temporaries.  In this case, we should check the
	 argument.  */
      if (!is_gimple_reg (arg) && !is_gimple_min_invariant (arg))
	{
	  instrument_derefs (iter, arg,
			     gimple_location (stmt),
			     /*is_store=*/false);
	  instrumented = true;
	}
    }
  if (instrumented)
    gsi_next (iter);
  return instrumented;
}
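
/* Example of the by-value case handled in the loop above (purely
   illustrative):

     struct pair { int a, b; };
     extern int take (struct pair);
     int
     use (struct pair *p)
     {
       return take (*p);
     }

   Here *P may be passed to TAKE without an intermediate temporary, so
   the argument is itself a memory dereference that must be checked as
   a load.  */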
/* Walk each instruction of all basic blocks and instrument those that
   represent memory references: loads, stores, or function calls.
   In a given basic block, this function avoids instrumenting memory
   references that have already been instrumented.  */

static void
transform_statements (void)
{
  basic_block bb, last_bb = NULL;
  gimple_stmt_iterator i;
  int saved_last_basic_block = last_basic_block_for_fn (cfun);

  FOR_EACH_BB_FN (bb, cfun)
    {
      basic_block prev_bb = bb;

      if (bb->index >= saved_last_basic_block) continue;

      /* Flush the mem ref hash table, if current bb doesn't have
	 exactly one predecessor, or if that predecessor (skipping
	 over asan created basic blocks) isn't the last processed
	 basic block.  Thus we effectively flush on extended basic
	 block boundaries.  */
      while (single_pred_p (prev_bb))
	{
	  prev_bb = single_pred (prev_bb);
	  if (prev_bb->index < saved_last_basic_block)
	    break;
	}

      if (prev_bb != last_bb)
	empty_mem_ref_hash_table ();
      last_bb = bb;

      for (i = gsi_start_bb (bb); !gsi_end_p (i);)
	{
	  gimple *s = gsi_stmt (i);

	  if (has_stmt_been_instrumented_p (s))
	    gsi_next (&i);
	  else if (gimple_assign_single_p (s)
		   && !gimple_clobber_p (s)
		   && maybe_instrument_assignment (&i))
	    /* Nothing to do as maybe_instrument_assignment advanced
	       the iterator I.  */;
	  else if (is_gimple_call (s) && maybe_instrument_call (&i))
	    /* Nothing to do as maybe_instrument_call
	       advanced the iterator I.  */;
	  else
	    {
	      /* No instrumentation happened.

		 If the current instruction is a function call that
		 might free something, let's forget about the memory
		 references that got instrumented.  Otherwise we might
		 miss some instrumentation opportunities.  Do the same
		 for an ASAN_MARK poisoning internal function.  */
	      if (is_gimple_call (s)
		  && (!nonfreeing_call_p (s)
		      || asan_mark_p (s, ASAN_MARK_POISON)))
		empty_mem_ref_hash_table ();

	      gsi_next (&i);
	    }
	}
    }
  free_mem_ref_resources ();
}
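
/* Illustration of the extended-basic-block caching above (not part of
   the pass).  Given straight-line code such as

     p->x = 1;
     p->x = 2;
     free (q);
     p->x = 3;

   the first store is instrumented and its memory reference recorded;
   the second store finds that record, so no second check is emitted;
   the call to free flushes the table (free is not a nonfreeing call);
   and the third store is therefore instrumented again.  */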
/* Build
   __asan_before_dynamic_init (module_name)
   or
   __asan_after_dynamic_init ()
   call.  */

tree
asan_dynamic_init_call (bool after_p)
{
  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  tree fn = builtin_decl_implicit (after_p
				   ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
				   : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
  tree module_name_cst = NULL_TREE;
  if (!after_p)
    {
      pretty_printer module_name_pp;
      pp_string (&module_name_pp, main_input_filename);

      module_name_cst = asan_pp_string (&module_name_pp);
      module_name_cst = fold_convert (const_ptr_type_node,
				      module_name_cst);
    }

  return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
}
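
/* Sketch of how a frontend uses the two calls built here: for a
   translation unit foo.cc with a dynamically initialized global, the
   static constructor conceptually becomes

     __asan_before_dynamic_init ("foo.cc");
     global = compute_initial_value ();
     __asan_after_dynamic_init ();

   which lets the runtime flag initialization-order bugs where one
   module's initializer reads another module's not-yet-initialized
   global.  The function and file names are illustrative only.  */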
/* Build
   struct __asan_global
   {
     const void *__beg;
     uptr __size;
     uptr __size_with_redzone;
     const void *__name;
     const void *__module_name;
     uptr __has_dynamic_init;
     __asan_global_source_location *__location;
   } type.  */

static tree
asan_global_struct (void)
{
  static const char *field_names[]
    = { "__beg", "__size", "__size_with_redzone",
	"__name", "__module_name", "__has_dynamic_init", "__location" };
  tree fields[ARRAY_SIZE (field_names)], ret;
  unsigned i;

  ret = make_node (RECORD_TYPE);
  for (i = 0; i < ARRAY_SIZE (field_names); i++)
    {
      fields[i]
	= build_decl (UNKNOWN_LOCATION, FIELD_DECL,
		      get_identifier (field_names[i]),
		      (i == 0 || i == 3) ? const_ptr_type_node
		      : pointer_sized_int_node);
      DECL_CONTEXT (fields[i]) = ret;
      if (i)
	DECL_CHAIN (fields[i - 1]) = fields[i];
    }
  tree type_decl = build_decl (input_location, TYPE_DECL,
			       get_identifier ("__asan_global"), ret);
  DECL_IGNORED_P (type_decl) = 1;
  DECL_ARTIFICIAL (type_decl) = 1;
  TYPE_FIELDS (ret) = fields[0];
  TYPE_NAME (ret) = type_decl;
  TYPE_STUB_DECL (ret) = type_decl;
  layout_type (ret);
  return ret;
}
/* Append description of a single global DECL into vector V.
   TYPE is the __asan_global struct type as returned by
   asan_global_struct.  */

static void
asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
{
  tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  unsigned HOST_WIDE_INT size;
  tree str_cst, module_name_cst, refdecl = decl;
  vec<constructor_elt, va_gc> *vinner = NULL;

  pretty_printer asan_pp, module_name_pp;

  if (DECL_NAME (decl))
    pp_tree_identifier (&asan_pp, DECL_NAME (decl));
  else
    pp_string (&asan_pp, "<unknown>");
  str_cst = asan_pp_string (&asan_pp);

  pp_string (&module_name_pp, main_input_filename);
  module_name_cst = asan_pp_string (&module_name_pp);

  if (asan_needs_local_alias (decl))
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
      refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
			    VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
      TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (refdecl) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
      DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
      DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
      DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
      TREE_STATIC (refdecl) = 1;
      TREE_PUBLIC (refdecl) = 0;
      TREE_USED (refdecl) = 1;
      assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
    }

  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node,
					build_fold_addr_expr (refdecl)));
  size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  size += asan_red_zone_size (size);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, str_cst));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, module_name_cst));
  varpool_node *vnode = varpool_node::get (decl);
  int has_dynamic_init = 0;
  /* FIXME: Enable initialization order fiasco detection in LTO mode once
     a proper fix for PR 79061 is applied.  */
  if (!in_lto_p)
    has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  build_int_cst (uptr, has_dynamic_init));
  tree locptr = NULL_TREE;
  location_t loc = DECL_SOURCE_LOCATION (decl);
  expanded_location xloc = expand_location (loc);
  if (xloc.file != NULL)
    {
      static int lasanloccnt = 0;
      char buf[25];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
      tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
			     ubsan_get_source_location_type ());
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      pretty_printer filename_pp;
      pp_string (&filename_pp, xloc.file);
      tree str = asan_pp_string (&filename_pp);
      tree ctor = build_constructor_va (TREE_TYPE (var), 3,
					NULL_TREE, str, NULL_TREE,
					build_int_cst (unsigned_type_node,
						       xloc.line), NULL_TREE,
					build_int_cst (unsigned_type_node,
						       xloc.column));
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);
      locptr = fold_convert (uptr, build_fold_addr_expr (var));
    }
  else
    locptr = build_int_cst (uptr, 0);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
  init = build_constructor (type, vinner);
  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
}
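
/* For illustration: for a file-scope "int g;" defined in foo.c, the
   record appended above conceptually contains

     __beg		  = &g (or its local .LASAN alias)
     __size		  = 4
     __size_with_redzone  = 4 plus the global's redzone padding
     __name		  = "g"
     __module_name	  = "foo.c"
     __has_dynamic_init	  = 0
     __location		  = address of a .LASANLOC record holding
			    "foo.c" and the declaration's line/column

   The concrete redzone size comes from asan_red_zone_size; the names
   above are only an example.  */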
/* Initialize sanitizer.def builtins if the FE hasn't initialized them.  */

void
initialize_sanitizer_builtins (void)
{
  tree decl;

  if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
    return;

  tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR
    = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_CONST_PTR
    = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTRMODE
    = build_function_type_list (void_type_node, ptr_type_node,
				pointer_sized_int_node, NULL_TREE);
  tree BT_FN_VOID_INT
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
  tree BT_FN_SIZE_CONST_PTR_INT
    = build_function_type_list (size_type_node, const_ptr_type_node,
				integer_type_node, NULL_TREE);
  tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
  tree BT_FN_IX_CONST_VPTR_INT[5];
  tree BT_FN_IX_VPTR_IX_INT[5];
  tree BT_FN_VOID_VPTR_IX_INT[5];
  tree vptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE));
  tree cvptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE
						| TYPE_QUAL_CONST));
  tree boolt
    = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
  int i;
  for (i = 0; i < 5; i++)
    {
      tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
      BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
	= build_function_type_list (boolt, vptr, ptr_type_node, ix,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
      BT_FN_IX_CONST_VPTR_INT[i]
	= build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
      BT_FN_IX_VPTR_IX_INT[i]
	= build_function_type_list (ix, vptr, ix, integer_type_node,
				    NULL_TREE);
      BT_FN_VOID_VPTR_IX_INT[i]
	= build_function_type_list (void_type_node, vptr, ix,
				    integer_type_node, NULL_TREE);
    }
#define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
#define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
#define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
#define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
#define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
#define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
#define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
#define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
#define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
#define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
#define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
#define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
#define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
#define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
#define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
#define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
#define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
#define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
#define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
#define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
#undef ATTR_NOTHROW_LEAF_LIST
#define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
#undef ATTR_TMPURE_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
  ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
  ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NOTHROW_LEAF_LIST
#define ATTR_COLD_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_PURE_NOTHROW_LEAF_LIST
#define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
#undef DEF_BUILTIN_STUB
#define DEF_BUILTIN_STUB(ENUM, NAME)
#undef DEF_SANITIZER_BUILTIN
#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
  decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM,		\
			       BUILT_IN_NORMAL, NAME, NULL_TREE);	\
  set_call_expr_flags (decl, ATTRS);					\
  set_builtin_decl (ENUM, decl, true);

#include "sanitizer.def"

  /* -fsanitize=object-size uses __builtin_object_size, but that might
     not be available for e.g. Fortran at this point.  We use
     DEF_SANITIZER_BUILTIN here only as a convenience macro.  */
  if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
      && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
    DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
			   BT_FN_SIZE_CONST_PTR_INT,
			   ATTR_PURE_NOTHROW_LEAF_LIST)

#undef DEF_SANITIZER_BUILTIN
#undef DEF_BUILTIN_STUB
}
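
/* For example, a sanitizer.def entry of the shape

     DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_LOAD4, "__asan_load4",
			    BT_FN_VOID_PTR, ATTR_NOTHROW_LEAF_LIST)

   expands through the macro above into roughly (a sketch; the exact
   attribute list used for each entry lives in sanitizer.def):

     decl = add_builtin_function ("__builtin___asan_load4",
				  BT_FN_VOID_PTR, BUILT_IN_ASAN_LOAD4,
				  BUILT_IN_NORMAL, "__asan_load4",
				  NULL_TREE);
     set_call_expr_flags (decl, ATTR_NOTHROW_LEAF_LIST);
     set_builtin_decl (BUILT_IN_ASAN_LOAD4, decl, true);  */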
/* Called via htab_traverse.  Count number of emitted
   STRING_CSTs in the constant hash table.  */

static int
count_string_csts (constant_descriptor_tree **slot,
		   unsigned HOST_WIDE_INT *data)
{
  struct constant_descriptor_tree *desc = *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    ++*data;
  return 1;
}

/* Helper structure to pass two parameters to
   add_string_csts.  */

struct asan_add_string_csts_data
{
  tree type;
  vec<constructor_elt, va_gc> *v;
};

/* Called via hash_table::traverse.  Call asan_add_global
   on emitted STRING_CSTs from the constant hash table.  */

static int
add_string_csts (constant_descriptor_tree **slot,
		 asan_add_string_csts_data *aascd)
{
  struct constant_descriptor_tree *desc = *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    {
      asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
		       aascd->type, aascd->v);
    }
  return 1;
}
/* Needs to be GTY(()), because cgraph_build_static_cdtor may
   invoke ggc_collect.  */
static GTY(()) tree asan_ctor_statements;

/* Module-level instrumentation.
   - Insert __asan_init_vN() into the list of CTORs.
   - TODO: insert redzones around globals.
 */

void
asan_finish_file (void)
{
  varpool_node *vnode;
  unsigned HOST_WIDE_INT gcount = 0;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  /* Avoid instrumenting code in the asan ctors/dtors.
     We don't need to insert padding after the description strings,
     nor after the .LASAN* array.  */
  flag_sanitize &= ~SANITIZE_ADDRESS;

  /* For user-space we want asan constructors to run first.
     The Linux kernel does not support priorities other than the default
     one, and the only other user of constructors is coverage.  So we run
     with the default priority.  */
  int priority = flag_sanitize & SANITIZE_USER_ADDRESS
		 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;

  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
      append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
      fn = builtin_decl_implicit (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK);
      append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
    }
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (TREE_ASM_WRITTEN (vnode->decl)
	&& asan_protect_global (vnode->decl))
      ++gcount;
  hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
  const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
    (&gcount);
  if (gcount)
    {
      tree type = asan_global_struct (), var, ctor;
      tree dtor_statements = NULL_TREE;
      vec<constructor_elt, va_gc> *v;
      char buf[20];

      type = build_array_type_nelts (type, gcount);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
      var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
			type);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      vec_alloc (v, gcount);
      FOR_EACH_DEFINED_VARIABLE (vnode)
	if (TREE_ASM_WRITTEN (vnode->decl)
	    && asan_protect_global (vnode->decl))
	  asan_add_global (vnode->decl, TREE_TYPE (type), v);
      struct asan_add_string_csts_data aascd;
      aascd.type = TREE_TYPE (type);
      aascd.v = v;
      const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
	(&aascd);
      ctor = build_constructor (type, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);

      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
      tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&asan_ctor_statements);

      fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&dtor_statements);
      cgraph_build_static_cdtor ('D', dtor_statements, priority);
    }
  if (asan_ctor_statements)
    cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
  flag_sanitize |= SANITIZE_ADDRESS;
}
/* Poison or unpoison (depending on IS_CLOBBER) shadow memory based
   on the SHADOW address.  Newly added statements will be added to
   ITER with the given location LOC.  We mark SIZE bytes in shadow
   memory, where LAST_CHUNK_SIZE is greater than zero when we are at
   the end of a variable.  */

static void
asan_store_shadow_bytes (gimple_stmt_iterator *iter, location_t loc,
			 tree shadow,
			 unsigned HOST_WIDE_INT base_addr_offset,
			 bool is_clobber, unsigned size,
			 unsigned last_chunk_size)
{
  tree shadow_ptr_type;

  switch (size)
    {
    case 1:
      shadow_ptr_type = shadow_ptr_types[0];
      break;
    case 2:
      shadow_ptr_type = shadow_ptr_types[1];
      break;
    case 4:
      shadow_ptr_type = shadow_ptr_types[2];
      break;
    default:
      gcc_unreachable ();
    }

  unsigned char c = (char) is_clobber ? ASAN_STACK_MAGIC_USE_AFTER_SCOPE : 0;
  unsigned HOST_WIDE_INT val = 0;
  for (unsigned i = 0; i < size; ++i)
    {
      unsigned char shadow_c = c;
      /* Handle the last chunk in unpoisoning.  */
      if (i == size - 1 && last_chunk_size && !is_clobber)
	shadow_c = last_chunk_size;
      val |= (unsigned HOST_WIDE_INT) shadow_c << (BITS_PER_UNIT * i);
    }

  tree magic = build_int_cst (TREE_TYPE (shadow_ptr_type), val);

  tree dest = build2 (MEM_REF, TREE_TYPE (shadow_ptr_type), shadow,
		      build_int_cst (shadow_ptr_type, base_addr_offset));

  gimple *g = gimple_build_assign (dest, magic);
  gimple_set_location (g, loc);
  gsi_insert_after (iter, g, GSI_NEW_STMT);
}
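
/* Worked example (shadow granularity 8): un-poisoning a 9-byte
   variable needs two shadow bytes.  The first covers 8 fully
   addressable bytes and stays 0; the second covers only one byte, so
   LAST_CHUNK_SIZE is 1 and the packed constant stored above is

     val = 0x00 | (0x01 << 8) = 0x0100

   written with a single 2-byte store.  When poisoning, every shadow
   byte instead receives ASAN_STACK_MAGIC_USE_AFTER_SCOPE.  */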
/* Expand the ASAN_MARK builtins.  */

bool
asan_expand_mark_ifn (gimple_stmt_iterator *iter)
{
  gimple *g = gsi_stmt (*iter);
  location_t loc = gimple_location (g);
  HOST_WIDE_INT flag = tree_to_shwi (gimple_call_arg (g, 0));
  bool is_poison = ((asan_mark_flags)flag) == ASAN_MARK_POISON;

  tree base = gimple_call_arg (g, 1);
  gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
  tree decl = TREE_OPERAND (base, 0);
  gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
  if (asan_handled_variables == NULL)
    asan_handled_variables = new hash_set<tree> (16);
  asan_handled_variables->add (decl);
  tree len = gimple_call_arg (g, 2);

  gcc_assert (tree_fits_shwi_p (len));
  unsigned HOST_WIDE_INT size_in_bytes = tree_to_shwi (len);
  gcc_assert (size_in_bytes);

  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
			   NOP_EXPR, base);
  gimple_set_location (g, loc);
  gsi_replace (iter, g, false);
  tree base_addr = gimple_assign_lhs (g);

  /* Generate direct emission if size_in_bytes is small.  */
  if (size_in_bytes <= ASAN_PARAM_USE_AFTER_SCOPE_DIRECT_EMISSION_THRESHOLD)
    {
      unsigned HOST_WIDE_INT shadow_size = shadow_mem_size (size_in_bytes);

      tree shadow = build_shadow_mem_access (iter, loc, base_addr,
					     shadow_ptr_types[0], true);

      for (unsigned HOST_WIDE_INT offset = 0; offset < shadow_size;)
	{
	  unsigned size = 1;
	  if (shadow_size - offset >= 4)
	    size = 4;
	  else if (shadow_size - offset >= 2)
	    size = 2;

	  unsigned HOST_WIDE_INT last_chunk_size = 0;
	  unsigned HOST_WIDE_INT s = (offset + size) * ASAN_SHADOW_GRANULARITY;
	  if (s > size_in_bytes)
	    last_chunk_size = ASAN_SHADOW_GRANULARITY - (s - size_in_bytes);

	  asan_store_shadow_bytes (iter, loc, shadow, offset, is_poison,
				   size, last_chunk_size);
	  offset += size;
	}
    }
  else
    {
      g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
			       NOP_EXPR, len);
      gimple_set_location (g, loc);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      tree sz_arg = gimple_assign_lhs (g);

      tree fun = builtin_decl_implicit (is_poison ? BUILT_IN_ASAN_CLOBBER_N
					: BUILT_IN_ASAN_UNCLOBBER_N);
      g = gimple_build_call (fun, 2, base_addr, sz_arg);
      gimple_set_location (g, loc);
      gsi_insert_after (iter, g, GSI_NEW_STMT);
    }

  return false;
}
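
/* Sketch of the two expansion strategies above, for an
   ASAN_MARK unpoisoning a small 9-byte variable X: below the
   direct-emission threshold the call becomes inline shadow stores,
   conceptually

     base = (uptr) &x;
     *(short *) (shadow_of (base)) = 0x0100;

   while a large variable instead turns into a runtime call through
   BUILT_IN_ASAN_UNCLOBBER_N (or BUILT_IN_ASAN_CLOBBER_N when
   poisoning), passing base and length.  The shadow_of () notation is
   shorthand for the expression produced by build_shadow_mem_access,
   not a real helper.  */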
/* Expand the ASAN_{LOAD,STORE} builtins.  */

bool
asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
{
  gimple *g = gsi_stmt (*iter);
  location_t loc = gimple_location (g);
  bool recover_p;
  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
  else
    recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;

  HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
  gcc_assert (flags < ASAN_CHECK_LAST);
  bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
  bool is_store = (flags & ASAN_CHECK_STORE) != 0;
  bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;

  tree base = gimple_call_arg (g, 1);
  tree len = gimple_call_arg (g, 2);
  HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));

  HOST_WIDE_INT size_in_bytes
    = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if (use_calls)
    {
      /* Instrument using callbacks.  */
      gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				       NOP_EXPR, base);
      gimple_set_location (g, loc);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      tree base_addr = gimple_assign_lhs (g);

      int nargs;
      tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
      if (nargs == 1)
	g = gimple_build_call (fun, 1, base_addr);
      else
	{
	  gcc_assert (nargs == 2);
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   NOP_EXPR, len);
	  gimple_set_location (g, loc);
	  gsi_insert_before (iter, g, GSI_SAME_STMT);
	  tree sz_arg = gimple_assign_lhs (g);
	  g = gimple_build_call (fun, nargs, base_addr, sz_arg);
	}
      gimple_set_location (g, loc);
      gsi_replace (iter, g, false);
      return false;
    }

  HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;

  tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
  tree shadow_type = TREE_TYPE (shadow_ptr_type);

  gimple_stmt_iterator gsi = *iter;

  if (!is_non_zero_len)
    {
      /* So, the length of the memory area to asan-protect is
	 non-constant.  Let's guard the generated instrumentation code
	 like:

	 if (len != 0)
	   {
	     // asan instrumentation code goes here.
	   }
	 // fallthrough instructions, starting with *ITER.  */

      g = gimple_build_cond (NE_EXPR,
			     len,
			     build_int_cst (TREE_TYPE (len), 0),
			     NULL_TREE, NULL_TREE);
      gimple_set_location (g, loc);

      basic_block then_bb, fallthrough_bb;
      insert_if_then_before_iter (as_a <gcond *> (g), iter,
				  /*then_more_likely_p=*/true,
				  &then_bb, &fallthrough_bb);
      /* Note that fallthrough_bb starts with the statement that was
	 pointed to by ITER.  */

      /* The 'then block' of the 'if (len != 0)' condition is where
	 we'll generate the asan instrumentation code now.  */
      gsi = gsi_last_bb (then_bb);
    }

  /* Get an iterator on the point where we can add the condition
     statement for the instrumentation.  */
  basic_block then_bb, else_bb;
  gsi = create_cond_insert_point (&gsi, /*before_p*/false,
				  /*then_more_likely_p=*/false,
				  /*create_then_fallthru_edge*/recover_p,
				  &then_bb,
				  &else_bb);

  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
			   NOP_EXPR, base);
  gimple_set_location (g, loc);
  gsi_insert_before (&gsi, g, GSI_NEW_STMT);
  tree base_addr = gimple_assign_lhs (g);

  tree t = NULL_TREE;
  if (real_size_in_bytes >= 8)
    {
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
					     shadow_ptr_type);
      t = shadow;
    }
  else
    {
      /* Slow path for 1, 2 and 4 byte accesses.  */
      /* Test (shadow != 0)
	 & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow).  */
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
					     shadow_ptr_type);
      gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
      gimple_seq seq = NULL;
      gimple_seq_add_stmt (&seq, shadow_test);
      /* Aligned (>= 8 bytes) accesses can test just
	 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
	 to be 0.  */
      if (align < 8)
	{
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
						   base_addr, 7));
	  gimple_seq_add_stmt (&seq,
			       build_type_cast (shadow_type,
						gimple_seq_last (seq)));
	  if (real_size_in_bytes > 1)
	    gimple_seq_add_stmt (&seq,
				 build_assign (PLUS_EXPR,
					       gimple_seq_last (seq),
					       real_size_in_bytes - 1));
	  t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
	}
      else
	t = build_int_cst (shadow_type, real_size_in_bytes - 1);
      gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
      gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
					       gimple_seq_last (seq)));
      t = gimple_assign_lhs (gimple_seq_last (seq));
      gimple_seq_set_location (seq, loc);
      gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

      /* For non-constant, misaligned or otherwise weird access sizes,
	 check the first and the last byte.  */
      if (size_in_bytes == -1)
	{
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   MINUS_EXPR, len,
				   build_int_cst (pointer_sized_int_node, 1));
	  gimple_set_location (g, loc);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	  tree last = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   PLUS_EXPR, base_addr, last);
	  gimple_set_location (g, loc);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	  tree base_end_addr = gimple_assign_lhs (g);

	  tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
						 shadow_ptr_type);
	  gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
	  gimple_seq seq = NULL;
	  gimple_seq_add_stmt (&seq, shadow_test);
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
						   base_end_addr, 7));
	  gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
						      gimple_seq_last (seq)));
	  gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
						   gimple_seq_last (seq),
						   shadow));
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
						   gimple_seq_last (seq)));
	  gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
						   gimple_seq_last (seq)));
	  t = gimple_assign_lhs (gimple_seq_last (seq));
	  gimple_seq_set_location (seq, loc);
	  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
	}
    }

  g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
			 NULL_TREE, NULL_TREE);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  /* Generate a call to the run-time library (e.g. __asan_report_load8).  */
  gsi = gsi_start_bb (then_bb);
  int nargs;
  tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
  g = gimple_build_call (fun, nargs, base_addr, len);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  gsi_remove (iter, true);
  *iter = gsi_start_bb (else_bb);

  return true;
}
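
/* Sketch of the USE_CALLS path above: when the number of checks in a
   function exceeds --param asan-instrumentation-with-call-threshold,
   a 4-byte non-recovering load check is lowered to roughly

     uptr addr = (uptr) base;
     __asan_load4 (addr);

   instead of the inline shadow test built by the fast/slow paths;
   check_func picks the matching __asan_load*/__asan_store* entry
   point, or the *N variant plus a size argument when the size is not
   one of 1/2/4/8/16.  */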
/* Instrument the current function.  */

static unsigned int
asan_instrument (void)
{
  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  transform_statements ();
  return 0;
}

static bool
gate_asan (void)
{
  return (flag_sanitize & SANITIZE_ADDRESS) != 0
	  && !lookup_attribute ("no_sanitize_address",
				DECL_ATTRIBUTES (current_function_decl));
}

namespace {
const pass_data pass_data_asan =
{
  GIMPLE_PASS, /* type */
  "asan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan : public gimple_opt_pass
{
public:
  pass_asan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_asan (m_ctxt); }
  virtual bool gate (function *) { return gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan

} // anon namespace

gimple_opt_pass *
make_pass_asan (gcc::context *ctxt)
{
  return new pass_asan (ctxt);
}

namespace {
const pass_data pass_data_asan_O0 =
{
  GIMPLE_PASS, /* type */
  "asan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan_O0 : public gimple_opt_pass
{
public:
  pass_asan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return !optimize && gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan_O0

} // anon namespace

gimple_opt_pass *
make_pass_asan_O0 (gcc::context *ctxt)
{
  return new pass_asan_O0 (ctxt);
}

#include "gt-asan.h"