xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/asan.c (revision 23f5f46327e37e7811da3520f4bb933f9489322f)
1 /* AddressSanitizer, a fast memory error detector.
2    Copyright (C) 2012-2020 Free Software Foundation, Inc.
3    Contributed by Kostya Serebryany <kcc@google.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "target.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "cfghooks.h"
31 #include "alloc-pool.h"
32 #include "tree-pass.h"
33 #include "memmodel.h"
34 #include "tm_p.h"
35 #include "ssa.h"
36 #include "stringpool.h"
37 #include "tree-ssanames.h"
38 #include "optabs.h"
39 #include "emit-rtl.h"
40 #include "cgraph.h"
41 #include "gimple-pretty-print.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "cfganal.h"
45 #include "gimplify.h"
46 #include "gimple-iterator.h"
47 #include "varasm.h"
48 #include "stor-layout.h"
49 #include "tree-iterator.h"
50 #include "stringpool.h"
51 #include "attribs.h"
52 #include "asan.h"
53 #include "dojump.h"
54 #include "explow.h"
55 #include "expr.h"
56 #include "output.h"
57 #include "langhooks.h"
58 #include "cfgloop.h"
59 #include "gimple-builder.h"
60 #include "gimple-fold.h"
61 #include "ubsan.h"
62 #include "builtins.h"
63 #include "fnmatch.h"
64 #include "tree-inline.h"
65 #include "tree-ssa.h"
66 #include "tree-eh.h"
67 #include "diagnostic-core.h"
68 
69 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
70    with <2x slowdown on average.
71 
72    The tool consists of two parts:
73    instrumentation module (this file) and a run-time library.
74    The instrumentation module adds a run-time check before every memory insn.
75      For an 8- or 16-byte load accessing address X:
76        ShadowAddr = (X >> 3) + Offset
77        ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
78        if (ShadowValue)
79 	 __asan_report_load8(X);
80      For a load of N bytes (N=1, 2 or 4) from address X:
81        ShadowAddr = (X >> 3) + Offset
82        ShadowValue = *(char*)ShadowAddr;
83        if (ShadowValue)
84 	 if ((X & 7) + N - 1 >= ShadowValue)
85 	   __asan_report_loadN(X);
86    Stores are instrumented similarly, but using __asan_report_storeN functions.
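
   As a concrete instance of the checks above (purely illustrative; the value
   0x7fff8000, the usual x86_64 shadow offset, is only assumed here):
     for a 4-byte load from X = 0x602010,
       ShadowAddr  = (0x602010 >> 3) + 0x7fff8000 = 0x800b8402
       ShadowValue = *(char*)ShadowAddr;
     If ShadowValue is 0, the whole 8-byte granule is addressable and nothing
     is reported; if ShadowValue is, say, 2, only the first 2 bytes are
     addressable, so (X & 7) + 4 - 1 = 3 >= 2 holds and __asan_report_load4(X)
     is called.
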
87    A call to __asan_init_vN() is inserted into the list of module CTORs.
88    N is the version number of the AddressSanitizer API. The changes between the
89    API versions are listed in libsanitizer/asan/asan_interface_internal.h.
90 
91    The run-time library redefines malloc (so that redzones are inserted around
92    the allocated memory) and free (so that reuse of freed memory is delayed),
93    provides __asan_report* and __asan_init_vN functions.
94 
95    Read more:
96    http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
97 
98    The current implementation supports detection of out-of-bounds and
99    use-after-free in the heap, on the stack and for global variables.
100 
101    [Protection of stack variables]
102 
103    To understand how detection of out-of-bounds and use-after-free works
104    for stack variables, let's look at this example on x86_64 where the
105    stack grows downward:
106 
107      int
108      foo ()
109      {
110        char a[23] = {0};
111        int b[2] = {0};
112 
113        a[5] = 1;
114        b[1] = 2;
115 
116        return a[5] + b[1];
117      }
118 
119    For this function, the stack protected by asan will be organized as
120    follows, from the top of the stack to the bottom:
121 
122    Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
123 
124    Slot 2/ [8 bytes of red zone, added to the space of 'a' so that the
125 	   next slot is 32-byte aligned; this one is called a Partial
126 	   Redzone; the 32-byte alignment is an asan constraint]
127 
128    Slot 3/ [24 bytes for variable 'a']
129 
130    Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
131 
132    Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
133 
134    Slot 6/ [8 bytes for variable 'b']
135 
136    Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
137 	    'LEFT RedZone']
138 
139    The 32 bytes of LEFT red zone at the bottom of the stack can be
140    decomposed as follows:
141 
142      1/ The first 8 bytes contain a magical asan number that is always
143      0x41B58AB3.
144 
145      2/ The following 8 bytes contain a pointer to a string (to be
146      parsed at run time by the asan runtime library), whose format is
147      the following:
148 
149       "<function-name> <space> <num-of-variables-on-the-stack>
150       (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
151       <length-of-var-in-bytes> ){n} "
152 
153 	where '(...){n}' means the content inside the parentheses occurs 'n'
154 	times, with 'n' being the number of variables on the stack.
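
	As one illustrative instance of this format for the foo example above
	(the actual offsets are chosen by the layout code in cfgexpand.c and
	are merely assumed here), the string could read:

	  "foo 2 96 23 32 8 "

	i.e. two variables: 'a' at 32-byte aligned offset 96 with length 23,
	and 'b' at offset 32 with length 8.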
155 
156      3/ The following 8 bytes contain the PC of the current function which
157      will be used by the run-time library to print an error message.
158 
159      4/ The following 8 bytes are reserved for internal use by the run-time.
160 
161    The shadow memory for that stack layout is going to look like this:
162 
163      - content of the shadow memory (4 bytes) for slot 7: 0xF1F1F1F1.
164        The F1 byte pattern is a magic number called
165        ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
166        the memory for that shadow byte is part of the LEFT red zone
167        intended to sit at the bottom of the variables on the stack.
168 
169      - content of the shadow memory (4 bytes) for slots 6 and 5:
170        0xF4F4F400.  The F4 byte pattern is a magic number
171        called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
172        memory region for this shadow byte is a PARTIAL red zone
173        intended to pad a variable A, so that the slot following
174        {A,padding} is 32-byte aligned.
175 
176        Note that the least significant byte of this shadow memory
177        content being 00 means that 8 bytes of its corresponding
178        memory (which is the memory of variable 'b') are
179        addressable.
180 
181      - content of the shadow memory (4 bytes) for slot 4: 0xF2F2F2F2.
182        The F2 byte pattern is a magic number called
183        ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
184        region for this shadow byte is a MIDDLE red zone intended to
185        sit between two 32-byte aligned slots of {variable,padding}.
186 
187      - content of the shadow memory (4 bytes) for slots 3 and 2:
188        0xF4000000.  This represents the concatenation of
189        variable 'a' and the partial red zone following it, like what we
190        had for variable 'b'.  The least significant 3 bytes being 00
191        means that the 24 bytes allocated for variable 'a' are addressable.
192 
193      - content of the shadow memory (4 bytes) for slot 1: 0xF3F3F3F3.
194        The F3 byte pattern is a magic number called
195        ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
196        region for this shadow byte is a RIGHT red zone intended to sit
197        at the top of the variables of the stack.
198 
199    Note that the real variable layout is done in expand_used_vars in
200    cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
201    stack variables as well as the different red zones, emits some
202    prologue code to populate the shadow memory so as to poison (mark as
203    non-accessible) the regions of the red zones and mark the regions of
204    stack variables as accessible, and emits some epilogue code to
205    un-poison (mark as accessible) the regions of red zones right before
206    the function exits.
207 
208    [Protection of global variables]
209 
210    The basic idea is to insert a red zone between two global variables
211    and install a constructor function that calls the asan runtime to do
212    the populating of the relevant shadow memory regions at load time.
213 
214    So the global variables are laid out so as to insert a red zone between
215    them.  The size of the red zones is chosen so that each variable starts
216    on a 32-byte boundary.
217 
218    Then a constructor function is installed so that, for each global
219    variable, it calls the runtime asan library function
220    __asan_register_globals with an instance of this type:
221 
222      struct __asan_global
223      {
224        // Address of the beginning of the global variable.
225        const void *__beg;
226 
227        // Initial size of the global variable.
228        uptr __size;
229 
230        // Size of the global variable + size of the red zone.  This
231        //   size is 32-byte aligned.
232        uptr __size_with_redzone;
233 
234        // Name of the global variable.
235        const void *__name;
236 
237        // Name of the module where the global variable is declared.
238        const void *__module_name;
239 
240        // 1 if it has dynamic initialization, 0 otherwise.
241        uptr __has_dynamic_init;
242 
243        // A pointer to a struct that contains the source location; may be NULL.
244        __asan_global_source_location *__location;
245      }
246 
247    A destructor function that calls the runtime asan library function
248    __asan_unregister_globals is also installed (see the sketch below).  */
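
/* As a purely illustrative sketch (not the exact trees the compiler builds),
   registering a single global 'int g;' defined in module 'm.c' conceptually
   amounts to the runtime seeing something like:

     static struct __asan_global __asan_globals_of_m[1] = {
       { &g, sizeof (g), 64, "g", "m.c", 0, NULL }
     };
     static void ctor (void) { __asan_register_globals (__asan_globals_of_m, 1); }
     static void dtor (void) { __asan_unregister_globals (__asan_globals_of_m, 1); }

   Here 64 stands for the 32-byte aligned size-with-redzone of a 4-byte global,
   and the names __asan_globals_of_m, ctor and dtor are made up for this
   sketch; __asan_register_globals and __asan_unregister_globals take a pointer
   to an array of __asan_global records and the record count.  */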
249 
250 static unsigned HOST_WIDE_INT asan_shadow_offset_value;
251 static bool asan_shadow_offset_computed;
252 static vec<char *> sanitized_sections;
253 static tree last_alloca_addr;
254 
255 /* Set of variable declarations that are going to be guarded by
256    use-after-scope sanitizer.  */
257 
258 hash_set<tree> *asan_handled_variables = NULL;
259 
260 hash_set <tree> *asan_used_labels = NULL;
261 
262 /* Sets shadow offset to value in string VAL.  */
263 
264 bool
265 set_asan_shadow_offset (const char *val)
266 {
267   char *endp;
268 
269   errno = 0;
270 #ifdef HAVE_LONG_LONG
271   asan_shadow_offset_value = strtoull (val, &endp, 0);
272 #else
273   asan_shadow_offset_value = strtoul (val, &endp, 0);
274 #endif
275   if (!(*val != '\0' && *endp == '\0' && errno == 0))
276     return false;
277 
278   asan_shadow_offset_computed = true;
279 
280   return true;
281 }
282 
283 /* Set list of user-defined sections that need to be sanitized.  */
284 
285 void
286 set_sanitized_sections (const char *sections)
287 {
288   char *pat;
289   unsigned i;
290   FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
291     free (pat);
292   sanitized_sections.truncate (0);
293 
294   for (const char *s = sections; *s; )
295     {
296       const char *end;
297       for (end = s; *end && *end != ','; ++end);
298       size_t len = end - s;
299       sanitized_sections.safe_push (xstrndup (s, len));
300       s = *end ? end + 1 : end;
301     }
302 }
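
/* For example (assuming the -fsanitize-sections= option spelling), passing
   -fsanitize-sections=.mysec*,.preserved requests instrumentation of globals
   placed in sections matching either glob; section_sanitized_p below matches
   the patterns with fnmatch.  */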
303 
304 bool
305 asan_mark_p (gimple *stmt, enum asan_mark_flags flag)
306 {
307   return (gimple_call_internal_p (stmt, IFN_ASAN_MARK)
308 	  && tree_to_uhwi (gimple_call_arg (stmt, 0)) == flag);
309 }
310 
311 bool
312 asan_sanitize_stack_p (void)
313 {
314   return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_stack);
315 }
316 
317 bool
318 asan_sanitize_allocas_p (void)
319 {
320   return (asan_sanitize_stack_p () && param_asan_protect_allocas);
321 }
322 
323 /* Checks whether section SEC should be sanitized.  */
324 
325 static bool
326 section_sanitized_p (const char *sec)
327 {
328   char *pat;
329   unsigned i;
330   FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
331     if (fnmatch (pat, sec, FNM_PERIOD) == 0)
332       return true;
333   return false;
334 }
335 
336 /* Returns Asan shadow offset.  */
337 
338 static unsigned HOST_WIDE_INT
339 asan_shadow_offset ()
340 {
341   if (!asan_shadow_offset_computed)
342     {
343       asan_shadow_offset_computed = true;
344       asan_shadow_offset_value = targetm.asan_shadow_offset ();
345     }
346   return asan_shadow_offset_value;
347 }
348 
349 /* Returns whether the Asan shadow offset has been set.  */
350 bool
351 asan_shadow_offset_set_p ()
352 {
353   return asan_shadow_offset_computed;
354 }
355 
356 alias_set_type asan_shadow_set = -1;
357 
358 /* Pointer types to 1, 2 or 4 byte integers in shadow memory.  A separate
359    alias set is used for all shadow memory accesses.  */
360 static GTY(()) tree shadow_ptr_types[3];
361 
362 /* Decl for __asan_option_detect_stack_use_after_return.  */
363 static GTY(()) tree asan_detect_stack_use_after_return;
364 
365 /* Hashtable support for memory references used by gimple
366    statements.  */
367 
368 /* This type represents a reference to a memory region.  */
369 struct asan_mem_ref
370 {
371   /* The expression of the beginning of the memory region.  */
372   tree start;
373 
374   /* The size of the access.  */
375   HOST_WIDE_INT access_size;
376 };
377 
378 object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");
379 
380 /* Initializes an instance of asan_mem_ref.  */
381 
382 static void
383 asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
384 {
385   ref->start = start;
386   ref->access_size = access_size;
387 }
388 
389 /* Allocates memory for an instance of asan_mem_ref from the
390    asan_mem_ref_pool memory pool and initializes it.
391    START is the address of (or the expression pointing to) the
392    beginning of memory reference.  ACCESS_SIZE is the size of the
393    access to the referenced memory.  */
394 
395 static asan_mem_ref*
396 asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
397 {
398   asan_mem_ref *ref = asan_mem_ref_pool.allocate ();
399 
400   asan_mem_ref_init (ref, start, access_size);
401   return ref;
402 }
403 
404 /* This builds and returns a pointer to the end of the memory region
405    that starts at START and has length LEN.  */
406 
407 tree
408 asan_mem_ref_get_end (tree start, tree len)
409 {
410   if (len == NULL_TREE || integer_zerop (len))
411     return start;
412 
413   if (!ptrofftype_p (len))
414     len = convert_to_ptrofftype (len);
415 
416   return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
417 }
418 
419 /*  Return a tree expression that represents the end of the referenced
420     memory region.  Beware that this function can actually build a new
421     tree expression.  */
422 
423 tree
424 asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
425 {
426   return asan_mem_ref_get_end (ref->start, len);
427 }
428 
429 struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
430 {
431   static inline hashval_t hash (const asan_mem_ref *);
432   static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
433 };
434 
435 /* Hash a memory reference.  */
436 
437 inline hashval_t
438 asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
439 {
440   return iterative_hash_expr (mem_ref->start, 0);
441 }
442 
443 /* Compare two memory references.  We accept the length of either
444    memory reference to be NULL_TREE.  */
445 
446 inline bool
447 asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
448 			    const asan_mem_ref *m2)
449 {
450   return operand_equal_p (m1->start, m2->start, 0);
451 }
452 
453 static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
454 
455 /* Returns a reference to the hash table containing memory references.
456    This function ensures that the hash table is created.  Note that
457    this hash table is updated by the function
458    update_mem_ref_hash_table.  */
459 
460 static hash_table<asan_mem_ref_hasher> *
461 get_mem_ref_hash_table ()
462 {
463   if (!asan_mem_ref_ht)
464     asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);
465 
466   return asan_mem_ref_ht;
467 }
468 
469 /* Clear all entries from the memory references hash table.  */
470 
471 static void
472 empty_mem_ref_hash_table ()
473 {
474   if (asan_mem_ref_ht)
475     asan_mem_ref_ht->empty ();
476 }
477 
478 /* Free the memory references hash table.  */
479 
480 static void
481 free_mem_ref_resources ()
482 {
483   delete asan_mem_ref_ht;
484   asan_mem_ref_ht = NULL;
485 
486   asan_mem_ref_pool.release ();
487 }
488 
489 /* Return true iff the memory reference REF has been instrumented.  */
490 
491 static bool
492 has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
493 {
494   asan_mem_ref r;
495   asan_mem_ref_init (&r, ref, access_size);
496 
497   asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
498   return saved_ref && saved_ref->access_size >= access_size;
499 }
500 
501 /* Return true iff the memory reference REF has been instrumented.  */
502 
503 static bool
504 has_mem_ref_been_instrumented (const asan_mem_ref *ref)
505 {
506   return has_mem_ref_been_instrumented (ref->start, ref->access_size);
507 }
508 
509 /* Return true iff access to memory region starting at REF and of
510    length LEN has been instrumented.  */
511 
512 static bool
513 has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
514 {
515   HOST_WIDE_INT size_in_bytes
516     = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
517 
518   return size_in_bytes != -1
519     && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
520 }
521 
522 /* Set REF to the memory reference present in a gimple assignment
523    ASSIGNMENT.  Return true upon successful completion, false
524    otherwise.  */
525 
526 static bool
527 get_mem_ref_of_assignment (const gassign *assignment,
528 			   asan_mem_ref *ref,
529 			   bool *ref_is_store)
530 {
531   gcc_assert (gimple_assign_single_p (assignment));
532 
533   if (gimple_store_p (assignment)
534       && !gimple_clobber_p (assignment))
535     {
536       ref->start = gimple_assign_lhs (assignment);
537       *ref_is_store = true;
538     }
539   else if (gimple_assign_load_p (assignment))
540     {
541       ref->start = gimple_assign_rhs1 (assignment);
542       *ref_is_store = false;
543     }
544   else
545     return false;
546 
547   ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
548   return true;
549 }
550 
551 /* Return address of last allocated dynamic alloca.  */
552 
553 static tree
554 get_last_alloca_addr ()
555 {
556   if (last_alloca_addr)
557     return last_alloca_addr;
558 
559   last_alloca_addr = create_tmp_reg (ptr_type_node, "last_alloca_addr");
560   gassign *g = gimple_build_assign (last_alloca_addr, null_pointer_node);
561   edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
562   gsi_insert_on_edge_immediate (e, g);
563   return last_alloca_addr;
564 }
565 
566 /* Insert __asan_allocas_unpoison (top, bottom) call before
567    __builtin_stack_restore (new_sp) call.
568    The pseudocode of this routine should look like this:
569      top = last_alloca_addr;
570      bot = new_sp;
571      __asan_allocas_unpoison (top, bot);
572      last_alloca_addr = new_sp;
573      __builtin_stack_restore (new_sp);
574    In general, we can't use new_sp as bot parameter because on some
575    architectures SP has a non-zero offset from the dynamic stack area.  Moreover, on
576    some architectures this offset (STACK_DYNAMIC_OFFSET) becomes known for each
577    particular function only after all callees were expanded to rtl.
578    The most noticeable example is PowerPC{,64}, see
579    http://refspecs.linuxfoundation.org/ELF/ppc64/PPC-elf64abi.html#DYNAM-STACK.
580    To overcome the issue we use the following trick: pass new_sp as a second
581    parameter to __asan_allocas_unpoison and rewrite it during expansion with
582    new_sp + (virtual_dynamic_stack_rtx - sp) later in
583    expand_asan_emit_allocas_unpoison function.  */
584 
585 static void
586 handle_builtin_stack_restore (gcall *call, gimple_stmt_iterator *iter)
587 {
588   if (!iter || !asan_sanitize_allocas_p ())
589     return;
590 
591   tree last_alloca = get_last_alloca_addr ();
592   tree restored_stack = gimple_call_arg (call, 0);
593   tree fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCAS_UNPOISON);
594   gimple *g = gimple_build_call (fn, 2, last_alloca, restored_stack);
595   gsi_insert_before (iter, g, GSI_SAME_STMT);
596   g = gimple_build_assign (last_alloca, restored_stack);
597   gsi_insert_before (iter, g, GSI_SAME_STMT);
598 }
599 
600 /* Deploy and poison redzones around __builtin_alloca call.  To do this, we
601    should replace this call with another one with changed parameters and
602    replace all its uses with the new address, so
603        addr = __builtin_alloca (old_size, align);
604    is replaced by
605        left_redzone_size = max (align, ASAN_RED_ZONE_SIZE);
606    The following two statements are optimized out if we know that
607    old_size & (ASAN_RED_ZONE_SIZE - 1) == 0, i.e. the alloca doesn't need a partial
608    redzone.
609        misalign = old_size & (ASAN_RED_ZONE_SIZE - 1);
610        partial_redzone_size = ASAN_RED_ZONE_SIZE - misalign;
611        right_redzone_size = ASAN_RED_ZONE_SIZE;
612        additional_size = left_redzone_size + partial_redzone_size +
613                          right_redzone_size;
614        new_size = old_size + additional_size;
615        new_alloca = __builtin_alloca (new_size, max (align, 32))
616        __asan_alloca_poison (new_alloca, old_size)
617        addr = new_alloca + max (align, ASAN_RED_ZONE_SIZE);
618        last_alloca_addr = new_alloca;
619    ADDITIONAL_SIZE is added to make the new memory allocation contain not only
620    the requested memory, but also the left, partial and right redzones, as
621    well as some additional space required by alignment.  */
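
/* As an illustrative walk-through of the rewrite above (the numbers are made
   up for the example): for addr = __builtin_alloca (17), with no explicit
   alignment and ASAN_RED_ZONE_SIZE == 32, we get
     align           = max (0, 32) = 32 bytes,
     misalign        = 17 & 31 = 17,
     partial_size    = 32 - 17 = 15,
     additional_size = 32 + 32 + 15 = 79,
     new_size        = 17 + 79 = 96,
   so the replacement is roughly
     new_alloca_with_rz = __builtin_alloca_with_align (96, 32 bytes);
     addr = new_alloca_with_rz + 32;
     __asan_alloca_poison (addr, 17);
     last_alloca_addr = new_alloca_with_rz;  */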
622 
623 static void
624 handle_builtin_alloca (gcall *call, gimple_stmt_iterator *iter)
625 {
626   if (!iter || !asan_sanitize_allocas_p ())
627     return;
628 
629   gassign *g;
630   gcall *gg;
631   const HOST_WIDE_INT redzone_mask = ASAN_RED_ZONE_SIZE - 1;
632 
633   tree last_alloca = get_last_alloca_addr ();
634   tree callee = gimple_call_fndecl (call);
635   tree lhs = gimple_call_lhs (call);
636   tree old_size = gimple_call_arg (call, 0);
637   tree ptr_type = lhs ? TREE_TYPE (lhs) : ptr_type_node;
638   tree partial_size = NULL_TREE;
639   unsigned int align
640     = DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
641       ? 0 : tree_to_uhwi (gimple_call_arg (call, 1));
642 
643   bool throws = false;
644   edge e = NULL;
645   if (stmt_can_throw_internal (cfun, call))
646     {
647       if (!lhs)
648 	return;
649       throws = true;
650       e = find_fallthru_edge (gsi_bb (*iter)->succs);
651     }
652 
653   /* If ALIGN > ASAN_RED_ZONE_SIZE, we embed left redzone into first ALIGN
654      bytes of allocated space.  Otherwise, align alloca to ASAN_RED_ZONE_SIZE
655      manually.  */
656   align = MAX (align, ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
657 
658   tree alloca_rz_mask = build_int_cst (size_type_node, redzone_mask);
659   tree redzone_size = build_int_cst (size_type_node, ASAN_RED_ZONE_SIZE);
660 
661   /* Extract lower bits from old_size.  */
662   wide_int size_nonzero_bits = get_nonzero_bits (old_size);
663   wide_int rz_mask
664     = wi::uhwi (redzone_mask, wi::get_precision (size_nonzero_bits));
665   wide_int old_size_lower_bits = wi::bit_and (size_nonzero_bits, rz_mask);
666 
667   /* If alloca size is aligned to ASAN_RED_ZONE_SIZE, we don't need partial
668      redzone.  Otherwise, compute its size here.  */
669   if (wi::ne_p (old_size_lower_bits, 0))
670     {
671       /* misalign = size & (ASAN_RED_ZONE_SIZE - 1)
672          partial_size = ASAN_RED_ZONE_SIZE - misalign.  */
673       g = gimple_build_assign (make_ssa_name (size_type_node, NULL),
674 			       BIT_AND_EXPR, old_size, alloca_rz_mask);
675       gsi_insert_before (iter, g, GSI_SAME_STMT);
676       tree misalign = gimple_assign_lhs (g);
677       g = gimple_build_assign (make_ssa_name (size_type_node, NULL), MINUS_EXPR,
678 			       redzone_size, misalign);
679       gsi_insert_before (iter, g, GSI_SAME_STMT);
680       partial_size = gimple_assign_lhs (g);
681     }
682 
683   /* additional_size = align + ASAN_RED_ZONE_SIZE.  */
684   tree additional_size = build_int_cst (size_type_node, align / BITS_PER_UNIT
685 							+ ASAN_RED_ZONE_SIZE);
686   /* If alloca has partial redzone, include it to additional_size too.  */
687   if (partial_size)
688     {
689       /* additional_size += partial_size.  */
690       g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR,
691 			       partial_size, additional_size);
692       gsi_insert_before (iter, g, GSI_SAME_STMT);
693       additional_size = gimple_assign_lhs (g);
694     }
695 
696   /* new_size = old_size + additional_size.  */
697   g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR, old_size,
698 			   additional_size);
699   gsi_insert_before (iter, g, GSI_SAME_STMT);
700   tree new_size = gimple_assign_lhs (g);
701 
702   /* Build new __builtin_alloca call:
703        new_alloca_with_rz = __builtin_alloca (new_size, align).  */
704   tree fn = builtin_decl_implicit (BUILT_IN_ALLOCA_WITH_ALIGN);
705   gg = gimple_build_call (fn, 2, new_size,
706 			  build_int_cst (size_type_node, align));
707   tree new_alloca_with_rz = make_ssa_name (ptr_type, gg);
708   gimple_call_set_lhs (gg, new_alloca_with_rz);
709   if (throws)
710     {
711       gimple_call_set_lhs (call, NULL);
712       gsi_replace (iter, gg, true);
713     }
714   else
715     gsi_insert_before (iter, gg, GSI_SAME_STMT);
716 
717   /* new_alloca = new_alloca_with_rz + align.  */
718   g = gimple_build_assign (make_ssa_name (ptr_type), POINTER_PLUS_EXPR,
719 			   new_alloca_with_rz,
720 			   build_int_cst (size_type_node,
721 					  align / BITS_PER_UNIT));
722   gimple_stmt_iterator gsi = gsi_none ();
723   if (throws)
724     {
725       gsi_insert_on_edge_immediate (e, g);
726       gsi = gsi_for_stmt (g);
727     }
728   else
729     gsi_insert_before (iter, g, GSI_SAME_STMT);
730   tree new_alloca = gimple_assign_lhs (g);
731 
732   /* Poison newly created alloca redzones:
733       __asan_alloca_poison (new_alloca, old_size).  */
734   fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCA_POISON);
735   gg = gimple_build_call (fn, 2, new_alloca, old_size);
736   if (throws)
737     gsi_insert_after (&gsi, gg, GSI_NEW_STMT);
738   else
739     gsi_insert_before (iter, gg, GSI_SAME_STMT);
740 
741   /* Save new_alloca_with_rz value into last_alloca to use it during
742      allocas unpoisoning.  */
743   g = gimple_build_assign (last_alloca, new_alloca_with_rz);
744   if (throws)
745     gsi_insert_after (&gsi, g, GSI_NEW_STMT);
746   else
747     gsi_insert_before (iter, g, GSI_SAME_STMT);
748 
749   /* Finally, replace old alloca ptr with NEW_ALLOCA.  */
750   if (throws)
751     {
752       g = gimple_build_assign (lhs, new_alloca);
753       gsi_insert_after (&gsi, g, GSI_NEW_STMT);
754     }
755   else
756     replace_call_with_value (iter, new_alloca);
757 }
758 
759 /* Return the memory references contained in a gimple statement
760    representing a builtin call that has to do with memory access.  */
761 
762 static bool
763 get_mem_refs_of_builtin_call (gcall *call,
764 			      asan_mem_ref *src0,
765 			      tree *src0_len,
766 			      bool *src0_is_store,
767 			      asan_mem_ref *src1,
768 			      tree *src1_len,
769 			      bool *src1_is_store,
770 			      asan_mem_ref *dst,
771 			      tree *dst_len,
772 			      bool *dst_is_store,
773 			      bool *dest_is_deref,
774 			      bool *intercepted_p,
775 			      gimple_stmt_iterator *iter = NULL)
776 {
777   gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
778 
779   tree callee = gimple_call_fndecl (call);
780   tree source0 = NULL_TREE, source1 = NULL_TREE,
781     dest = NULL_TREE, len = NULL_TREE;
782   bool is_store = true, got_reference_p = false;
783   HOST_WIDE_INT access_size = 1;
784 
785   *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));
786 
787   switch (DECL_FUNCTION_CODE (callee))
788     {
789       /* (s, s, n) style memops.  */
790     case BUILT_IN_BCMP:
791     case BUILT_IN_MEMCMP:
792       source0 = gimple_call_arg (call, 0);
793       source1 = gimple_call_arg (call, 1);
794       len = gimple_call_arg (call, 2);
795       break;
796 
797       /* (src, dest, n) style memops.  */
798     case BUILT_IN_BCOPY:
799       source0 = gimple_call_arg (call, 0);
800       dest = gimple_call_arg (call, 1);
801       len = gimple_call_arg (call, 2);
802       break;
803 
804       /* (dest, src, n) style memops.  */
805     case BUILT_IN_MEMCPY:
806     case BUILT_IN_MEMCPY_CHK:
807     case BUILT_IN_MEMMOVE:
808     case BUILT_IN_MEMMOVE_CHK:
809     case BUILT_IN_MEMPCPY:
810     case BUILT_IN_MEMPCPY_CHK:
811       dest = gimple_call_arg (call, 0);
812       source0 = gimple_call_arg (call, 1);
813       len = gimple_call_arg (call, 2);
814       break;
815 
816       /* (dest, n) style memops.  */
817     case BUILT_IN_BZERO:
818       dest = gimple_call_arg (call, 0);
819       len = gimple_call_arg (call, 1);
820       break;
821 
822       /* (dest, x, n) style memops.  */
823     case BUILT_IN_MEMSET:
824     case BUILT_IN_MEMSET_CHK:
825       dest = gimple_call_arg (call, 0);
826       len = gimple_call_arg (call, 2);
827       break;
828 
829     case BUILT_IN_STRLEN:
830       source0 = gimple_call_arg (call, 0);
831       len = gimple_call_lhs (call);
832       break;
833 
834     case BUILT_IN_STACK_RESTORE:
835       handle_builtin_stack_restore (call, iter);
836       break;
837 
838     CASE_BUILT_IN_ALLOCA:
839       handle_builtin_alloca (call, iter);
840       break;
841     /* And now the __atomic* and __sync builtins.
842        These are handled differently from the classical memory
843        access builtins above.  */
844 
845     case BUILT_IN_ATOMIC_LOAD_1:
846       is_store = false;
847       /* FALLTHRU */
848     case BUILT_IN_SYNC_FETCH_AND_ADD_1:
849     case BUILT_IN_SYNC_FETCH_AND_SUB_1:
850     case BUILT_IN_SYNC_FETCH_AND_OR_1:
851     case BUILT_IN_SYNC_FETCH_AND_AND_1:
852     case BUILT_IN_SYNC_FETCH_AND_XOR_1:
853     case BUILT_IN_SYNC_FETCH_AND_NAND_1:
854     case BUILT_IN_SYNC_ADD_AND_FETCH_1:
855     case BUILT_IN_SYNC_SUB_AND_FETCH_1:
856     case BUILT_IN_SYNC_OR_AND_FETCH_1:
857     case BUILT_IN_SYNC_AND_AND_FETCH_1:
858     case BUILT_IN_SYNC_XOR_AND_FETCH_1:
859     case BUILT_IN_SYNC_NAND_AND_FETCH_1:
860     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
861     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
862     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
863     case BUILT_IN_SYNC_LOCK_RELEASE_1:
864     case BUILT_IN_ATOMIC_EXCHANGE_1:
865     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
866     case BUILT_IN_ATOMIC_STORE_1:
867     case BUILT_IN_ATOMIC_ADD_FETCH_1:
868     case BUILT_IN_ATOMIC_SUB_FETCH_1:
869     case BUILT_IN_ATOMIC_AND_FETCH_1:
870     case BUILT_IN_ATOMIC_NAND_FETCH_1:
871     case BUILT_IN_ATOMIC_XOR_FETCH_1:
872     case BUILT_IN_ATOMIC_OR_FETCH_1:
873     case BUILT_IN_ATOMIC_FETCH_ADD_1:
874     case BUILT_IN_ATOMIC_FETCH_SUB_1:
875     case BUILT_IN_ATOMIC_FETCH_AND_1:
876     case BUILT_IN_ATOMIC_FETCH_NAND_1:
877     case BUILT_IN_ATOMIC_FETCH_XOR_1:
878     case BUILT_IN_ATOMIC_FETCH_OR_1:
879       access_size = 1;
880       goto do_atomic;
881 
882     case BUILT_IN_ATOMIC_LOAD_2:
883       is_store = false;
884       /* FALLTHRU */
885     case BUILT_IN_SYNC_FETCH_AND_ADD_2:
886     case BUILT_IN_SYNC_FETCH_AND_SUB_2:
887     case BUILT_IN_SYNC_FETCH_AND_OR_2:
888     case BUILT_IN_SYNC_FETCH_AND_AND_2:
889     case BUILT_IN_SYNC_FETCH_AND_XOR_2:
890     case BUILT_IN_SYNC_FETCH_AND_NAND_2:
891     case BUILT_IN_SYNC_ADD_AND_FETCH_2:
892     case BUILT_IN_SYNC_SUB_AND_FETCH_2:
893     case BUILT_IN_SYNC_OR_AND_FETCH_2:
894     case BUILT_IN_SYNC_AND_AND_FETCH_2:
895     case BUILT_IN_SYNC_XOR_AND_FETCH_2:
896     case BUILT_IN_SYNC_NAND_AND_FETCH_2:
897     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
898     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
899     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
900     case BUILT_IN_SYNC_LOCK_RELEASE_2:
901     case BUILT_IN_ATOMIC_EXCHANGE_2:
902     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
903     case BUILT_IN_ATOMIC_STORE_2:
904     case BUILT_IN_ATOMIC_ADD_FETCH_2:
905     case BUILT_IN_ATOMIC_SUB_FETCH_2:
906     case BUILT_IN_ATOMIC_AND_FETCH_2:
907     case BUILT_IN_ATOMIC_NAND_FETCH_2:
908     case BUILT_IN_ATOMIC_XOR_FETCH_2:
909     case BUILT_IN_ATOMIC_OR_FETCH_2:
910     case BUILT_IN_ATOMIC_FETCH_ADD_2:
911     case BUILT_IN_ATOMIC_FETCH_SUB_2:
912     case BUILT_IN_ATOMIC_FETCH_AND_2:
913     case BUILT_IN_ATOMIC_FETCH_NAND_2:
914     case BUILT_IN_ATOMIC_FETCH_XOR_2:
915     case BUILT_IN_ATOMIC_FETCH_OR_2:
916       access_size = 2;
917       goto do_atomic;
918 
919     case BUILT_IN_ATOMIC_LOAD_4:
920       is_store = false;
921       /* FALLTHRU */
922     case BUILT_IN_SYNC_FETCH_AND_ADD_4:
923     case BUILT_IN_SYNC_FETCH_AND_SUB_4:
924     case BUILT_IN_SYNC_FETCH_AND_OR_4:
925     case BUILT_IN_SYNC_FETCH_AND_AND_4:
926     case BUILT_IN_SYNC_FETCH_AND_XOR_4:
927     case BUILT_IN_SYNC_FETCH_AND_NAND_4:
928     case BUILT_IN_SYNC_ADD_AND_FETCH_4:
929     case BUILT_IN_SYNC_SUB_AND_FETCH_4:
930     case BUILT_IN_SYNC_OR_AND_FETCH_4:
931     case BUILT_IN_SYNC_AND_AND_FETCH_4:
932     case BUILT_IN_SYNC_XOR_AND_FETCH_4:
933     case BUILT_IN_SYNC_NAND_AND_FETCH_4:
934     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
935     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
936     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
937     case BUILT_IN_SYNC_LOCK_RELEASE_4:
938     case BUILT_IN_ATOMIC_EXCHANGE_4:
939     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
940     case BUILT_IN_ATOMIC_STORE_4:
941     case BUILT_IN_ATOMIC_ADD_FETCH_4:
942     case BUILT_IN_ATOMIC_SUB_FETCH_4:
943     case BUILT_IN_ATOMIC_AND_FETCH_4:
944     case BUILT_IN_ATOMIC_NAND_FETCH_4:
945     case BUILT_IN_ATOMIC_XOR_FETCH_4:
946     case BUILT_IN_ATOMIC_OR_FETCH_4:
947     case BUILT_IN_ATOMIC_FETCH_ADD_4:
948     case BUILT_IN_ATOMIC_FETCH_SUB_4:
949     case BUILT_IN_ATOMIC_FETCH_AND_4:
950     case BUILT_IN_ATOMIC_FETCH_NAND_4:
951     case BUILT_IN_ATOMIC_FETCH_XOR_4:
952     case BUILT_IN_ATOMIC_FETCH_OR_4:
953       access_size = 4;
954       goto do_atomic;
955 
956     case BUILT_IN_ATOMIC_LOAD_8:
957       is_store = false;
958       /* FALLTHRU */
959     case BUILT_IN_SYNC_FETCH_AND_ADD_8:
960     case BUILT_IN_SYNC_FETCH_AND_SUB_8:
961     case BUILT_IN_SYNC_FETCH_AND_OR_8:
962     case BUILT_IN_SYNC_FETCH_AND_AND_8:
963     case BUILT_IN_SYNC_FETCH_AND_XOR_8:
964     case BUILT_IN_SYNC_FETCH_AND_NAND_8:
965     case BUILT_IN_SYNC_ADD_AND_FETCH_8:
966     case BUILT_IN_SYNC_SUB_AND_FETCH_8:
967     case BUILT_IN_SYNC_OR_AND_FETCH_8:
968     case BUILT_IN_SYNC_AND_AND_FETCH_8:
969     case BUILT_IN_SYNC_XOR_AND_FETCH_8:
970     case BUILT_IN_SYNC_NAND_AND_FETCH_8:
971     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
972     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
973     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
974     case BUILT_IN_SYNC_LOCK_RELEASE_8:
975     case BUILT_IN_ATOMIC_EXCHANGE_8:
976     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
977     case BUILT_IN_ATOMIC_STORE_8:
978     case BUILT_IN_ATOMIC_ADD_FETCH_8:
979     case BUILT_IN_ATOMIC_SUB_FETCH_8:
980     case BUILT_IN_ATOMIC_AND_FETCH_8:
981     case BUILT_IN_ATOMIC_NAND_FETCH_8:
982     case BUILT_IN_ATOMIC_XOR_FETCH_8:
983     case BUILT_IN_ATOMIC_OR_FETCH_8:
984     case BUILT_IN_ATOMIC_FETCH_ADD_8:
985     case BUILT_IN_ATOMIC_FETCH_SUB_8:
986     case BUILT_IN_ATOMIC_FETCH_AND_8:
987     case BUILT_IN_ATOMIC_FETCH_NAND_8:
988     case BUILT_IN_ATOMIC_FETCH_XOR_8:
989     case BUILT_IN_ATOMIC_FETCH_OR_8:
990       access_size = 8;
991       goto do_atomic;
992 
993     case BUILT_IN_ATOMIC_LOAD_16:
994       is_store = false;
995       /* FALLTHRU */
996     case BUILT_IN_SYNC_FETCH_AND_ADD_16:
997     case BUILT_IN_SYNC_FETCH_AND_SUB_16:
998     case BUILT_IN_SYNC_FETCH_AND_OR_16:
999     case BUILT_IN_SYNC_FETCH_AND_AND_16:
1000     case BUILT_IN_SYNC_FETCH_AND_XOR_16:
1001     case BUILT_IN_SYNC_FETCH_AND_NAND_16:
1002     case BUILT_IN_SYNC_ADD_AND_FETCH_16:
1003     case BUILT_IN_SYNC_SUB_AND_FETCH_16:
1004     case BUILT_IN_SYNC_OR_AND_FETCH_16:
1005     case BUILT_IN_SYNC_AND_AND_FETCH_16:
1006     case BUILT_IN_SYNC_XOR_AND_FETCH_16:
1007     case BUILT_IN_SYNC_NAND_AND_FETCH_16:
1008     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
1009     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
1010     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
1011     case BUILT_IN_SYNC_LOCK_RELEASE_16:
1012     case BUILT_IN_ATOMIC_EXCHANGE_16:
1013     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
1014     case BUILT_IN_ATOMIC_STORE_16:
1015     case BUILT_IN_ATOMIC_ADD_FETCH_16:
1016     case BUILT_IN_ATOMIC_SUB_FETCH_16:
1017     case BUILT_IN_ATOMIC_AND_FETCH_16:
1018     case BUILT_IN_ATOMIC_NAND_FETCH_16:
1019     case BUILT_IN_ATOMIC_XOR_FETCH_16:
1020     case BUILT_IN_ATOMIC_OR_FETCH_16:
1021     case BUILT_IN_ATOMIC_FETCH_ADD_16:
1022     case BUILT_IN_ATOMIC_FETCH_SUB_16:
1023     case BUILT_IN_ATOMIC_FETCH_AND_16:
1024     case BUILT_IN_ATOMIC_FETCH_NAND_16:
1025     case BUILT_IN_ATOMIC_FETCH_XOR_16:
1026     case BUILT_IN_ATOMIC_FETCH_OR_16:
1027       access_size = 16;
1028       /* FALLTHRU */
1029     do_atomic:
1030       {
1031 	dest = gimple_call_arg (call, 0);
1032 	/* DEST represents the address of a memory location.
1033 	   instrument_derefs wants the memory location, so lets
1034 	   dereference the address DEST before handing it to
1035 	   instrument_derefs.  */
1036 	tree type = build_nonstandard_integer_type (access_size
1037 						    * BITS_PER_UNIT, 1);
1038 	dest = build2 (MEM_REF, type, dest,
1039 		       build_int_cst (build_pointer_type (char_type_node), 0));
1040 	break;
1041       }
1042 
1043     default:
1044       /* The other builtins' memory accesses are not instrumented in this
1045 	 function because they either don't have any length parameter,
1046 	 or their length parameter is just a limit.  */
1047       break;
1048     }
1049 
1050   if (len != NULL_TREE)
1051     {
1052       if (source0 != NULL_TREE)
1053 	{
1054 	  src0->start = source0;
1055 	  src0->access_size = access_size;
1056 	  *src0_len = len;
1057 	  *src0_is_store = false;
1058 	}
1059 
1060       if (source1 != NULL_TREE)
1061 	{
1062 	  src1->start = source1;
1063 	  src1->access_size = access_size;
1064 	  *src1_len = len;
1065 	  *src1_is_store = false;
1066 	}
1067 
1068       if (dest != NULL_TREE)
1069 	{
1070 	  dst->start = dest;
1071 	  dst->access_size = access_size;
1072 	  *dst_len = len;
1073 	  *dst_is_store = true;
1074 	}
1075 
1076       got_reference_p = true;
1077     }
1078   else if (dest)
1079     {
1080       dst->start = dest;
1081       dst->access_size = access_size;
1082       *dst_len = NULL_TREE;
1083       *dst_is_store = is_store;
1084       *dest_is_deref = true;
1085       got_reference_p = true;
1086     }
1087 
1088   return got_reference_p;
1089 }
1090 
1091 /* Return true iff a given gimple statement has been instrumented.
1092    Note that the statement is "defined" by the memory references it
1093    contains.  */
1094 
1095 static bool
1096 has_stmt_been_instrumented_p (gimple *stmt)
1097 {
1098   if (gimple_assign_single_p (stmt))
1099     {
1100       bool r_is_store;
1101       asan_mem_ref r;
1102       asan_mem_ref_init (&r, NULL, 1);
1103 
1104       if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
1105 				     &r_is_store))
1106 	return has_mem_ref_been_instrumented (&r);
1107     }
1108   else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
1109     {
1110       asan_mem_ref src0, src1, dest;
1111       asan_mem_ref_init (&src0, NULL, 1);
1112       asan_mem_ref_init (&src1, NULL, 1);
1113       asan_mem_ref_init (&dest, NULL, 1);
1114 
1115       tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1116       bool src0_is_store = false, src1_is_store = false,
1117 	dest_is_store = false, dest_is_deref = false, intercepted_p = true;
1118       if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
1119 					&src0, &src0_len, &src0_is_store,
1120 					&src1, &src1_len, &src1_is_store,
1121 					&dest, &dest_len, &dest_is_store,
1122 					&dest_is_deref, &intercepted_p))
1123 	{
1124 	  if (src0.start != NULL_TREE
1125 	      && !has_mem_ref_been_instrumented (&src0, src0_len))
1126 	    return false;
1127 
1128 	  if (src1.start != NULL_TREE
1129 	      && !has_mem_ref_been_instrumented (&src1, src1_len))
1130 	    return false;
1131 
1132 	  if (dest.start != NULL_TREE
1133 	      && !has_mem_ref_been_instrumented (&dest, dest_len))
1134 	    return false;
1135 
1136 	  return true;
1137 	}
1138     }
1139   else if (is_gimple_call (stmt) && gimple_store_p (stmt))
1140     {
1141       asan_mem_ref r;
1142       asan_mem_ref_init (&r, NULL, 1);
1143 
1144       r.start = gimple_call_lhs (stmt);
1145       r.access_size = int_size_in_bytes (TREE_TYPE (r.start));
1146       return has_mem_ref_been_instrumented (&r);
1147     }
1148 
1149   return false;
1150 }
1151 
1152 /*  Insert a memory reference into the hash table.  */
1153 
1154 static void
1155 update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
1156 {
1157   hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();
1158 
1159   asan_mem_ref r;
1160   asan_mem_ref_init (&r, ref, access_size);
1161 
1162   asan_mem_ref **slot = ht->find_slot (&r, INSERT);
1163   if (*slot == NULL || (*slot)->access_size < access_size)
1164     *slot = asan_mem_ref_new (ref, access_size);
1165 }
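
/* For example, once update_mem_ref_hash_table (ref, 4) has recorded a 4-byte
   access for REF, a later has_mem_ref_been_instrumented (ref, 2) query returns
   true because the recorded access size already covers the smaller access,
   while an 8-byte query would still return false.  */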
1166 
1167 /* Initialize shadow_ptr_types array.  */
1168 
1169 static void
1170 asan_init_shadow_ptr_types (void)
1171 {
1172   asan_shadow_set = new_alias_set ();
1173   tree types[3] = { signed_char_type_node, short_integer_type_node,
1174 		    integer_type_node };
1175 
1176   for (unsigned i = 0; i < 3; i++)
1177     {
1178       shadow_ptr_types[i] = build_distinct_type_copy (types[i]);
1179       TYPE_ALIAS_SET (shadow_ptr_types[i]) = asan_shadow_set;
1180       shadow_ptr_types[i] = build_pointer_type (shadow_ptr_types[i]);
1181     }
1182 
1183   initialize_sanitizer_builtins ();
1184 }
1185 
1186 /* Create an ADDR_EXPR of a STRING_CST holding the PP pretty printer text.  */
1187 
1188 static tree
1189 asan_pp_string (pretty_printer *pp)
1190 {
1191   const char *buf = pp_formatted_text (pp);
1192   size_t len = strlen (buf);
1193   tree ret = build_string (len + 1, buf);
1194   TREE_TYPE (ret)
1195     = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
1196 			build_index_type (size_int (len)));
1197   TREE_READONLY (ret) = 1;
1198   TREE_STATIC (ret) = 1;
1199   return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
1200 }
1201 
1202 /* Clear shadow memory at SHADOW_MEM, LEN bytes.  Can't emit a library call here
1203    though.  */
1204 
1205 static void
1206 asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
1207 {
1208   rtx_insn *insn, *insns, *jump;
1209   rtx_code_label *top_label;
1210   rtx end, addr, tmp;
1211 
1212   gcc_assert ((len & 3) == 0);
1213   start_sequence ();
1214   clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
1215   insns = get_insns ();
1216   end_sequence ();
1217   for (insn = insns; insn; insn = NEXT_INSN (insn))
1218     if (CALL_P (insn))
1219       break;
1220   if (insn == NULL_RTX)
1221     {
1222       emit_insn (insns);
1223       return;
1224     }
1225 
1226   top_label = gen_label_rtx ();
1227   addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
1228   shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
1229   end = force_reg (Pmode, plus_constant (Pmode, addr, len));
1230   emit_label (top_label);
1231 
1232   emit_move_insn (shadow_mem, const0_rtx);
1233   tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
1234 			     true, OPTAB_LIB_WIDEN);
1235   if (tmp != addr)
1236     emit_move_insn (addr, tmp);
1237   emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
1238   jump = get_last_insn ();
1239   gcc_assert (JUMP_P (jump));
1240   add_reg_br_prob_note (jump,
1241 			profile_probability::guessed_always ()
1242 			   .apply_scale (80, 100));
1243 }
1244 
1245 void
1246 asan_function_start (void)
1247 {
1248   section *fnsec = function_section (current_function_decl);
1249   switch_to_section (fnsec);
1250   ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
1251 			 current_function_funcdef_no);
1252 }
1253 
1254 /* Return number of shadow bytes that are occupied by a local variable
1255    of SIZE bytes.  */
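/* For example, with the usual ASAN_SHADOW_GRANULARITY of 8, a 23-byte
   variable occupies ROUND_UP (23, 8) / 8 == 3 shadow bytes.  */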
1256 
1257 static unsigned HOST_WIDE_INT
1258 shadow_mem_size (unsigned HOST_WIDE_INT size)
1259 {
1260   /* It must be possible to align stack variables to granularity
1261      of shadow memory.  */
1262   gcc_assert (BITS_PER_UNIT
1263 	      * ASAN_SHADOW_GRANULARITY <= MAX_SUPPORTED_STACK_ALIGNMENT);
1264 
1265   return ROUND_UP (size, ASAN_SHADOW_GRANULARITY) / ASAN_SHADOW_GRANULARITY;
1266 }
1267 
1268 /* Always emit 4 bytes at a time.  */
1269 #define RZ_BUFFER_SIZE 4
1270 
1271 /* ASAN redzone buffer container that handles emission of shadow bytes.  */
1272 class asan_redzone_buffer
1273 {
1274 public:
1275   /* Constructor.  */
1276   asan_redzone_buffer (rtx shadow_mem, HOST_WIDE_INT prev_offset):
1277     m_shadow_mem (shadow_mem), m_prev_offset (prev_offset),
1278     m_original_offset (prev_offset), m_shadow_bytes (RZ_BUFFER_SIZE)
1279   {}
1280 
1281   /* Emit VALUE shadow byte at a given OFFSET.  */
1282   void emit_redzone_byte (HOST_WIDE_INT offset, unsigned char value);
1283 
1284   /* Emit the content of the buffer as RTL.  */
1285   void flush_redzone_payload (void);
1286 
1287 private:
1288   /* Flush if the content of the buffer is full
1289      (equal to RZ_BUFFER_SIZE).  */
1290   void flush_if_full (void);
1291 
1292   /* Memory where we last emitted a redzone payload.  */
1293   rtx m_shadow_mem;
1294 
1295   /* Relative offset where we last emitted a redzone payload.  */
1296   HOST_WIDE_INT m_prev_offset;
1297 
1298   /* Relative original offset.  Used for checking only.  */
1299   HOST_WIDE_INT m_original_offset;
1300 
1301 public:
1302   /* Buffer with redzone payload.  */
1303   auto_vec<unsigned char> m_shadow_bytes;
1304 };
1305 
1306 /* Emit VALUE shadow byte at a given OFFSET.  */
1307 
1308 void
1309 asan_redzone_buffer::emit_redzone_byte (HOST_WIDE_INT offset,
1310 					unsigned char value)
1311 {
1312   gcc_assert ((offset & (ASAN_SHADOW_GRANULARITY - 1)) == 0);
1313   gcc_assert (offset >= m_prev_offset);
1314 
1315   HOST_WIDE_INT off
1316     = m_prev_offset + ASAN_SHADOW_GRANULARITY * m_shadow_bytes.length ();
1317   if (off == offset)
1318     /* Consecutive shadow memory byte.  */;
1319   else if (offset < m_prev_offset + (HOST_WIDE_INT) (ASAN_SHADOW_GRANULARITY
1320 						     * RZ_BUFFER_SIZE)
1321 	   && !m_shadow_bytes.is_empty ())
1322     {
1323       /* Shadow memory byte with a small gap.  */
1324       for (; off < offset; off += ASAN_SHADOW_GRANULARITY)
1325 	m_shadow_bytes.safe_push (0);
1326     }
1327   else
1328     {
1329       if (!m_shadow_bytes.is_empty ())
1330 	flush_redzone_payload ();
1331 
1332       /* Maybe start earlier in order to use aligned store.  */
1333       HOST_WIDE_INT align = (offset - m_prev_offset) % ASAN_RED_ZONE_SIZE;
1334       if (align)
1335 	{
1336 	  offset -= align;
1337 	  for (unsigned i = 0; i < align / BITS_PER_UNIT; i++)
1338 	    m_shadow_bytes.safe_push (0);
1339 	}
1340 
1341       /* Adjust m_prev_offset and m_shadow_mem.  */
1342       HOST_WIDE_INT diff = offset - m_prev_offset;
1343       m_shadow_mem = adjust_address (m_shadow_mem, VOIDmode,
1344 				     diff >> ASAN_SHADOW_SHIFT);
1345       m_prev_offset = offset;
1346     }
1347   m_shadow_bytes.safe_push (value);
1348   flush_if_full ();
1349 }
1350 
1351 /* Emit the content of the buffer as RTL.  */
1352 
1353 void
1354 asan_redzone_buffer::flush_redzone_payload (void)
1355 {
1356   gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
1357 
1358   if (m_shadow_bytes.is_empty ())
1359     return;
1360 
1361   /* Be sure we always emit to an aligned address.  */
1362   gcc_assert (((m_prev_offset - m_original_offset)
1363 	      & (ASAN_RED_ZONE_SIZE - 1)) == 0);
1364 
1365   /* Fill it to RZ_BUFFER_SIZE bytes with zeros if needed.  */
1366   unsigned l = m_shadow_bytes.length ();
1367   for (unsigned i = 0; i <= RZ_BUFFER_SIZE - l; i++)
1368     m_shadow_bytes.safe_push (0);
1369 
1370   if (dump_file && (dump_flags & TDF_DETAILS))
1371     fprintf (dump_file,
1372 	     "Flushing rzbuffer at offset %" PRId64 " with: ", m_prev_offset);
1373 
1374   unsigned HOST_WIDE_INT val = 0;
1375   for (unsigned i = 0; i < RZ_BUFFER_SIZE; i++)
1376     {
1377       unsigned char v
1378 	= m_shadow_bytes[BYTES_BIG_ENDIAN ? RZ_BUFFER_SIZE - i - 1 : i];
1379       val |= (unsigned HOST_WIDE_INT)v << (BITS_PER_UNIT * i);
1380       if (dump_file && (dump_flags & TDF_DETAILS))
1381 	fprintf (dump_file, "%02x ", v);
1382     }
1383 
1384   if (dump_file && (dump_flags & TDF_DETAILS))
1385     fprintf (dump_file, "\n");
1386 
1387   rtx c = gen_int_mode (val, SImode);
1388   m_shadow_mem = adjust_address (m_shadow_mem, SImode, 0);
1389   emit_move_insn (m_shadow_mem, c);
1390   m_shadow_bytes.truncate (0);
1391 }
1392 
1393 /* Flush if the content of the buffer is full
1394    (equal to RZ_BUFFER_SIZE).  */
1395 
1396 void
1397 asan_redzone_buffer::flush_if_full (void)
1398 {
1399   if (m_shadow_bytes.length () == RZ_BUFFER_SIZE)
1400     flush_redzone_payload ();
1401 }
1402 
1403 /* Insert code to protect stack vars.  The prologue sequence should be emitted
1404    directly, epilogue sequence returned.  BASE is the register holding the
1405    stack base, to which the offsets in the OFFSETS array are relative.  The
1406    OFFSETS array contains pairs of offsets in reverse order, always the end
1407    offset of some gap that needs protection followed by its starting offset,
1408    and DECLS is an array of representative decls for each var partition.
1409    LENGTH is the length of the OFFSETS array, the DECLS array is LENGTH / 2 - 1
1410    elements long (OFFSETS includes the gap before the first variable as well
1411    as gaps after each stack variable).  PBASE is, if non-NULL, some pseudo
1412    register on which the stack vars' DECL_RTLs are based.  Either BASE should
1413    be assigned to PBASE, when not doing use-after-return protection, or the
1414    corresponding address based on the __asan_stack_malloc* return value.  */
1415 
1416 rtx_insn *
1417 asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
1418 			    HOST_WIDE_INT *offsets, tree *decls, int length)
1419 {
1420   rtx shadow_base, shadow_mem, ret, mem, orig_base;
1421   rtx_code_label *lab;
1422   rtx_insn *insns;
1423   char buf[32];
1424   HOST_WIDE_INT base_offset = offsets[length - 1];
1425   HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
1426   HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
1427   HOST_WIDE_INT last_offset, last_size, last_size_aligned;
1428   int l;
1429   unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
1430   tree str_cst, decl, id;
1431   int use_after_return_class = -1;
1432 
1433   /* Don't emit anything when doing error recovery, the assertions
1434      might fail e.g. if a function had a frame offset overflow.  */
1435   if (seen_error ())
1436     return NULL;
1437 
1438   if (shadow_ptr_types[0] == NULL_TREE)
1439     asan_init_shadow_ptr_types ();
1440 
1441   expanded_location cfun_xloc
1442     = expand_location (DECL_SOURCE_LOCATION (current_function_decl));
1443 
1444   /* First of all, prepare the description string.  */
1445   pretty_printer asan_pp;
1446 
1447   pp_decimal_int (&asan_pp, length / 2 - 1);
1448   pp_space (&asan_pp);
1449   for (l = length - 2; l; l -= 2)
1450     {
1451       tree decl = decls[l / 2 - 1];
1452       pp_wide_integer (&asan_pp, offsets[l] - base_offset);
1453       pp_space (&asan_pp);
1454       pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
1455       pp_space (&asan_pp);
1456 
1457       expanded_location xloc
1458 	= expand_location (DECL_SOURCE_LOCATION (decl));
1459       char location[32];
1460 
1461       if (xloc.file == cfun_xloc.file)
1462 	sprintf (location, ":%d", xloc.line);
1463       else
1464 	location[0] = '\0';
1465 
1466       if (DECL_P (decl) && DECL_NAME (decl))
1467 	{
1468 	  unsigned idlen
1469 	    = IDENTIFIER_LENGTH (DECL_NAME (decl)) + strlen (location);
1470 	  pp_decimal_int (&asan_pp, idlen);
1471 	  pp_space (&asan_pp);
1472 	  pp_tree_identifier (&asan_pp, DECL_NAME (decl));
1473 	  pp_string (&asan_pp, location);
1474 	}
1475       else
1476 	pp_string (&asan_pp, "9 <unknown>");
1477 
1478       if (l > 2)
1479 	pp_space (&asan_pp);
1480     }
1481   str_cst = asan_pp_string (&asan_pp);
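
  /* Purely as an illustration (real offsets, sizes and ordering come from the
     OFFSETS/DECLS arrays computed in cfgexpand.c), the string assembled above
     might read "1 32 24 4 a:10" for a single 24-byte variable 'a' at frame
     offset 32, declared on line 10 of the current file; the 4 is the length
     of "a:10".  */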
1482 
1483   /* Emit the prologue sequence.  */
1484   if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
1485       && param_asan_use_after_return)
1486     {
1487       use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
1488       /* __asan_stack_malloc_N guarantees alignment
1489 	 N < 6 ? (64 << N) : 4096 bytes.  */
1490       if (alignb > (use_after_return_class < 6
1491 		    ? (64U << use_after_return_class) : 4096U))
1492 	use_after_return_class = -1;
1493       else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
1494 	base_align_bias = ((asan_frame_size + alignb - 1)
1495 			   & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
1496     }
1497 
1498   /* Align base if target is STRICT_ALIGNMENT.  */
1499   if (STRICT_ALIGNMENT)
1500     {
1501       const HOST_WIDE_INT align
1502 	= (GET_MODE_ALIGNMENT (SImode) / BITS_PER_UNIT) << ASAN_SHADOW_SHIFT;
1503       base = expand_binop (Pmode, and_optab, base, gen_int_mode (-align, Pmode),
1504 			   NULL_RTX, 1, OPTAB_DIRECT);
1505     }
1506 
1507   if (use_after_return_class == -1 && pbase)
1508     emit_move_insn (pbase, base);
1509 
1510   base = expand_binop (Pmode, add_optab, base,
1511 		       gen_int_mode (base_offset - base_align_bias, Pmode),
1512 		       NULL_RTX, 1, OPTAB_DIRECT);
1513   orig_base = NULL_RTX;
1514   if (use_after_return_class != -1)
1515     {
1516       if (asan_detect_stack_use_after_return == NULL_TREE)
1517 	{
1518 	  id = get_identifier ("__asan_option_detect_stack_use_after_return");
1519 	  decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
1520 			     integer_type_node);
1521 	  SET_DECL_ASSEMBLER_NAME (decl, id);
1522 	  TREE_ADDRESSABLE (decl) = 1;
1523 	  DECL_ARTIFICIAL (decl) = 1;
1524 	  DECL_IGNORED_P (decl) = 1;
1525 	  DECL_EXTERNAL (decl) = 1;
1526 	  TREE_STATIC (decl) = 1;
1527 	  TREE_PUBLIC (decl) = 1;
1528 	  TREE_USED (decl) = 1;
1529 	  asan_detect_stack_use_after_return = decl;
1530 	}
1531       orig_base = gen_reg_rtx (Pmode);
1532       emit_move_insn (orig_base, base);
1533       ret = expand_normal (asan_detect_stack_use_after_return);
1534       lab = gen_label_rtx ();
1535       emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1536 			       VOIDmode, 0, lab,
1537 			       profile_probability::very_likely ());
1538       snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
1539 		use_after_return_class);
1540       ret = init_one_libfunc (buf);
1541       ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
1542 				     GEN_INT (asan_frame_size
1543 					      + base_align_bias),
1544 				     TYPE_MODE (pointer_sized_int_node));
1545       /* __asan_stack_malloc_[n] returns a pointer to the fake stack on
1546	 success and NULL otherwise.  If RET is NULL, jump over the BASE
1547	 reassignment; otherwise, reassign BASE to RET.  */
1548       emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1549 			       VOIDmode, 0, lab,
1550 			       profile_probability::very_unlikely ());
1551       ret = convert_memory_address (Pmode, ret);
1552       emit_move_insn (base, ret);
1553       emit_label (lab);
1554       emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
1555 					   gen_int_mode (base_align_bias
1556 							 - base_offset, Pmode),
1557 					   NULL_RTX, 1, OPTAB_DIRECT));
1558     }
1559   mem = gen_rtx_MEM (ptr_mode, base);
1560   mem = adjust_address (mem, VOIDmode, base_align_bias);
1561   emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
1562   mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1563   emit_move_insn (mem, expand_normal (str_cst));
1564   mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1565   ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
1566   id = get_identifier (buf);
1567   decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1568 		    VAR_DECL, id, char_type_node);
1569   SET_DECL_ASSEMBLER_NAME (decl, id);
1570   TREE_ADDRESSABLE (decl) = 1;
1571   TREE_READONLY (decl) = 1;
1572   DECL_ARTIFICIAL (decl) = 1;
1573   DECL_IGNORED_P (decl) = 1;
1574   TREE_STATIC (decl) = 1;
1575   TREE_PUBLIC (decl) = 0;
1576   TREE_USED (decl) = 1;
1577   DECL_INITIAL (decl) = decl;
1578   TREE_ASM_WRITTEN (decl) = 1;
1579   TREE_ASM_WRITTEN (id) = 1;
1580   emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
1581   shadow_base = expand_binop (Pmode, lshr_optab, base,
1582 			      gen_int_shift_amount (Pmode, ASAN_SHADOW_SHIFT),
1583 			      NULL_RTX, 1, OPTAB_DIRECT);
1584   shadow_base
1585     = plus_constant (Pmode, shadow_base,
1586 		     asan_shadow_offset ()
1587 		     + (base_align_bias >> ASAN_SHADOW_SHIFT));
1588   gcc_assert (asan_shadow_set != -1
1589 	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1590   shadow_mem = gen_rtx_MEM (SImode, shadow_base);
1591   set_mem_alias_set (shadow_mem, asan_shadow_set);
1592   if (STRICT_ALIGNMENT)
1593     set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1594   prev_offset = base_offset;
1595 
1596   asan_redzone_buffer rz_buffer (shadow_mem, prev_offset);
1597   for (l = length; l; l -= 2)
1598     {
1599       if (l == 2)
1600 	cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1601       offset = offsets[l - 1];
1602 
1603       bool extra_byte = (offset - base_offset) & (ASAN_SHADOW_GRANULARITY - 1);
1604       /* If a red-zone is not aligned to ASAN_SHADOW_GRANULARITY then
1605 	 the previous stack variable has size % ASAN_SHADOW_GRANULARITY != 0.
1606 	 In that case we have to emit one extra byte that will describe
1607 	 how many bytes (out of ASAN_SHADOW_GRANULARITY) can be accessed.  */
1608       if (extra_byte)
1609 	{
1610 	  HOST_WIDE_INT aoff
1611 	    = base_offset + ((offset - base_offset)
1612 			     & ~(ASAN_SHADOW_GRANULARITY - HOST_WIDE_INT_1));
1613 	  rz_buffer.emit_redzone_byte (aoff, offset - aoff);
1614 	  offset = aoff + ASAN_SHADOW_GRANULARITY;
1615 	}
1616 
1617       /* Calculate size of red zone payload.  */
1618       while (offset < offsets[l - 2])
1619 	{
1620 	  rz_buffer.emit_redzone_byte (offset, cur_shadow_byte);
1621 	  offset += ASAN_SHADOW_GRANULARITY;
1622 	}
1623 
1624       cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1625     }
1626 
1627   /* As the automatic variables are aligned to
1628      ASAN_RED_ZONE_SIZE / ASAN_SHADOW_GRANULARITY, the buffer should be
1629      flushed here.  */
1630   gcc_assert (rz_buffer.m_shadow_bytes.is_empty ());
1631 
1632   do_pending_stack_adjust ();
1633 
1634   /* Construct epilogue sequence.  */
1635   start_sequence ();
1636 
1637   lab = NULL;
1638   if (use_after_return_class != -1)
1639     {
1640       rtx_code_label *lab2 = gen_label_rtx ();
1641       char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
1642       emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
1643 			       VOIDmode, 0, lab2,
1644 			       profile_probability::very_likely ());
1645       shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1646       set_mem_alias_set (shadow_mem, asan_shadow_set);
1647       mem = gen_rtx_MEM (ptr_mode, base);
1648       mem = adjust_address (mem, VOIDmode, base_align_bias);
1649       emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
1650       unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
1651       if (use_after_return_class < 5
1652 	  && can_store_by_pieces (sz, builtin_memset_read_str, &c,
1653 				  BITS_PER_UNIT, true))
1654 	{
1655 	  /* Emit:
1656 	       memset(ShadowBase, kAsanStackAfterReturnMagic, ShadowSize);
1657 	       **SavedFlagPtr(FakeStack, class_id) = 0
1658 	  */
1659 	  store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
1660 			   BITS_PER_UNIT, true, RETURN_BEGIN);
1661 
1662 	  unsigned HOST_WIDE_INT offset
1663 	    = (1 << (use_after_return_class + 6));
1664 	  offset -= GET_MODE_SIZE (ptr_mode);
1665 	  mem = gen_rtx_MEM (ptr_mode, base);
1666 	  mem = adjust_address (mem, ptr_mode, offset);
1667 	  rtx addr = gen_reg_rtx (ptr_mode);
1668 	  emit_move_insn (addr, mem);
1669 	  addr = convert_memory_address (Pmode, addr);
1670 	  mem = gen_rtx_MEM (QImode, addr);
1671 	  emit_move_insn (mem, const0_rtx);
1672 	}
1673       else if (use_after_return_class >= 5
1674 	       || !set_storage_via_setmem (shadow_mem,
1675 					   GEN_INT (sz),
1676 					   gen_int_mode (c, QImode),
1677 					   BITS_PER_UNIT, BITS_PER_UNIT,
1678 					   -1, sz, sz, sz))
1679 	{
1680 	  snprintf (buf, sizeof buf, "__asan_stack_free_%d",
1681 		    use_after_return_class);
1682 	  ret = init_one_libfunc (buf);
1683 	  rtx addr = convert_memory_address (ptr_mode, base);
1684 	  rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
1685 	  emit_library_call (ret, LCT_NORMAL, ptr_mode, addr, ptr_mode,
1686 			     GEN_INT (asan_frame_size + base_align_bias),
1687 			     TYPE_MODE (pointer_sized_int_node),
1688 			     orig_addr, ptr_mode);
1689 	}
1690       lab = gen_label_rtx ();
1691       emit_jump (lab);
1692       emit_label (lab2);
1693     }
1694 
1695   shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1696   set_mem_alias_set (shadow_mem, asan_shadow_set);
1697 
1698   if (STRICT_ALIGNMENT)
1699     set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1700 
1701   prev_offset = base_offset;
1702   last_offset = base_offset;
1703   last_size = 0;
1704   last_size_aligned = 0;
1705   for (l = length; l; l -= 2)
1706     {
1707       offset = base_offset + ((offsets[l - 1] - base_offset)
1708 			      & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1709       if (last_offset + last_size_aligned < offset)
1710 	{
1711 	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
1712 				       (last_offset - prev_offset)
1713 				       >> ASAN_SHADOW_SHIFT);
1714 	  prev_offset = last_offset;
1715 	  asan_clear_shadow (shadow_mem, last_size_aligned >> ASAN_SHADOW_SHIFT);
1716 	  last_offset = offset;
1717 	  last_size = 0;
1718 	}
1719       else
1720 	last_size = offset - last_offset;
1721       last_size += base_offset + ((offsets[l - 2] - base_offset)
1722 				  & ~(ASAN_MIN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1723 		   - offset;
1724 
1725       /* Unpoison shadow memory that corresponds to a variable that is
1726	 subject to use-after-return sanitization.  */
1727       if (l > 2)
1728 	{
1729 	  decl = decls[l / 2 - 2];
1730 	  if (asan_handled_variables != NULL
1731 	      && asan_handled_variables->contains (decl))
1732 	    {
1733 	      HOST_WIDE_INT size = offsets[l - 3] - offsets[l - 2];
1734 	      if (dump_file && (dump_flags & TDF_DETAILS))
1735 		{
1736 		  const char *n = (DECL_NAME (decl)
1737 				   ? IDENTIFIER_POINTER (DECL_NAME (decl))
1738 				   : "<unknown>");
1739 		  fprintf (dump_file, "Unpoisoning shadow stack for variable: "
1740 			   "%s (%" PRId64 " B)\n", n, size);
1741 		}
1742 
1743 		last_size += size & ~(ASAN_MIN_RED_ZONE_SIZE - HOST_WIDE_INT_1);
1744 	    }
1745 	}
1746       last_size_aligned
1747 	= ((last_size + (ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1748 	   & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1749     }
1750   if (last_size_aligned)
1751     {
1752       shadow_mem = adjust_address (shadow_mem, VOIDmode,
1753 				   (last_offset - prev_offset)
1754 				   >> ASAN_SHADOW_SHIFT);
1755       asan_clear_shadow (shadow_mem, last_size_aligned >> ASAN_SHADOW_SHIFT);
1756     }
1757 
1758   /* Clean-up set with instrumented stack variables.  */
1759   delete asan_handled_variables;
1760   asan_handled_variables = NULL;
1761   delete asan_used_labels;
1762   asan_used_labels = NULL;
1763 
1764   do_pending_stack_adjust ();
1765   if (lab)
1766     emit_label (lab);
1767 
1768   insns = get_insns ();
1769   end_sequence ();
1770   return insns;
1771 }
1772 
1773 /* Emit an __asan_allocas_unpoison (top, bot) call.  TOP and BOT are the call
1774    arguments; the caller passes virtual_stack_dynamic_rtx for TOP.  If BEFORE
1775    is non-null, append to the sequence containing it, else start a new one.  */
1776 
1777 rtx_insn *
1778 asan_emit_allocas_unpoison (rtx top, rtx bot, rtx_insn *before)
1779 {
1780   if (before)
1781     push_to_sequence (before);
1782   else
1783     start_sequence ();
1784   rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
1785   top = convert_memory_address (ptr_mode, top);
1786   bot = convert_memory_address (ptr_mode, bot);
1787   emit_library_call (ret, LCT_NORMAL, ptr_mode,
1788 		     top, ptr_mode, bot, ptr_mode);
1789 
1790   do_pending_stack_adjust ();
1791   rtx_insn *insns = get_insns ();
1792   end_sequence ();
1793   return insns;
1794 }
1795 
1796 /* Return true if DECL, a global var, might be overridden and therefore
1797    needs a local alias.  */
1798 
1799 static bool
1800 asan_needs_local_alias (tree decl)
1801 {
1802   return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1803 }
1804 
1805 /* Return true if DECL, a global var, is an artificial ODR indicator symbol
1806    and therefore doesn't need protection.  */
1807 
1808 static bool
1809 is_odr_indicator (tree decl)
1810 {
1811   return (DECL_ARTIFICIAL (decl)
1812 	  && lookup_attribute ("asan odr indicator", DECL_ATTRIBUTES (decl)));
1813 }
1814 
1815 /* Return true if DECL is a VAR_DECL that should be protected
1816    by Address Sanitizer, by appending a red zone with protected
1817    shadow memory after it and aligning it to at least
1818    ASAN_RED_ZONE_SIZE bytes.  */
1819 
1820 bool
1821 asan_protect_global (tree decl, bool ignore_decl_rtl_set_p)
1822 {
1823   if (!param_asan_globals)
1824     return false;
1825 
1826   rtx rtl, symbol;
1827 
1828   if (TREE_CODE (decl) == STRING_CST)
1829     {
1830       /* Instrument all STRING_CSTs except those created
1831 	 by asan_pp_string here.  */
1832       if (shadow_ptr_types[0] != NULL_TREE
1833 	  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1834 	  && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1835 	return false;
1836       return true;
1837     }
1838   if (!VAR_P (decl)
1839       /* TLS vars aren't statically protectable.  */
1840       || DECL_THREAD_LOCAL_P (decl)
1841       /* Externs will be protected elsewhere.  */
1842       || DECL_EXTERNAL (decl)
1843       /* PR sanitizer/81697: For architectures that use section anchors first
1844 	 call to asan_protect_global may occur before DECL_RTL (decl) is set.
1845 	 We should ignore DECL_RTL_SET_P then, because otherwise the first call
1846 	 to asan_protect_global will return FALSE and the following calls on the
1847 	 same decl after setting DECL_RTL (decl) will return TRUE and we'll end
1848 	 up with inconsistency at runtime.  */
1849       || (!DECL_RTL_SET_P (decl) && !ignore_decl_rtl_set_p)
1850       /* Comdat vars pose an ABI problem, we can't know if
1851 	 the var that is selected by the linker will have
1852 	 padding or not.  */
1853       || DECL_ONE_ONLY (decl)
1854       /* Similarly for common vars.  People can use -fno-common.
1855 	 Note: Linux kernel is built with -fno-common, so we do instrument
1856 	 globals there even if it is C.  */
1857       || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
1858       /* Don't protect if using a user section; vars placed into a user
1859	 section from multiple TUs are often assumed to form an array of
1860	 such vars, and putting padding in there breaks this
1861	 assumption.  */
1862       || (DECL_SECTION_NAME (decl) != NULL
1863 	  && !symtab_node::get (decl)->implicit_section
1864 	  && !section_sanitized_p (DECL_SECTION_NAME (decl)))
1865       || DECL_SIZE (decl) == 0
1866       || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1867       || TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1868       || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
1869       || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
1870       || TREE_TYPE (decl) == ubsan_get_source_location_type ()
1871       || is_odr_indicator (decl))
1872     return false;
1873 
1874   if (!ignore_decl_rtl_set_p || DECL_RTL_SET_P (decl))
1875     {
1876 
1877       rtl = DECL_RTL (decl);
1878       if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1879 	return false;
1880       symbol = XEXP (rtl, 0);
1881 
1882       if (CONSTANT_POOL_ADDRESS_P (symbol)
1883 	  || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1884 	return false;
1885     }
1886 
1887   if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1888     return false;
1889 
1890   if (!TARGET_SUPPORTS_ALIASES && asan_needs_local_alias (decl))
1891     return false;
1892 
1893   return true;
1894 }
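
/* Illustrative only: under the rules above a plain file-scope definition
   such as
	int buf[16];
   is normally protected with a trailing red zone, whereas e.g. a
   thread-local variable, a public common symbol, or a variable placed in
   a user-specified section is left alone.  */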
1895 
1896 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1897    IS_STORE is either 1 (for a store) or 0 (for a load).  */
1898 
1899 static tree
1900 report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1901 		   int *nargs)
1902 {
1903   static enum built_in_function report[2][2][6]
1904     = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1905 	    BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1906 	    BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
1907 	  { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1908 	    BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1909 	    BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
1910 	{ { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
1911 	    BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
1912 	    BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
1913 	    BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
1914 	    BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
1915 	    BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
1916 	  { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
1917 	    BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
1918 	    BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
1919 	    BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
1920 	    BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
1921 	    BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
1922   if (size_in_bytes == -1)
1923     {
1924       *nargs = 2;
1925       return builtin_decl_implicit (report[recover_p][is_store][5]);
1926     }
1927   *nargs = 1;
1928   int size_log2 = exact_log2 (size_in_bytes);
1929   return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
1930 }
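
/* Illustrative only: report_error_func (is_store=true, recover_p=false,
   size_in_bytes=4, &nargs) yields BUILT_IN_ASAN_REPORT_STORE4 with
   nargs == 1, while size_in_bytes == -1 selects the _N variant, which
   also takes the access length and hence sets nargs == 2.  */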
1931 
1932 /* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
1933    IS_STORE is either 1 (for a store) or 0 (for a load).  */
1934 
1935 static tree
1936 check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1937 	    int *nargs)
1938 {
1939   static enum built_in_function check[2][2][6]
1940     = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
1941 	    BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
1942 	    BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
1943 	  { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
1944 	    BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
1945 	    BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
1946 	{ { BUILT_IN_ASAN_LOAD1_NOABORT,
1947 	    BUILT_IN_ASAN_LOAD2_NOABORT,
1948 	    BUILT_IN_ASAN_LOAD4_NOABORT,
1949 	    BUILT_IN_ASAN_LOAD8_NOABORT,
1950 	    BUILT_IN_ASAN_LOAD16_NOABORT,
1951 	    BUILT_IN_ASAN_LOADN_NOABORT },
1952 	  { BUILT_IN_ASAN_STORE1_NOABORT,
1953 	    BUILT_IN_ASAN_STORE2_NOABORT,
1954 	    BUILT_IN_ASAN_STORE4_NOABORT,
1955 	    BUILT_IN_ASAN_STORE8_NOABORT,
1956 	    BUILT_IN_ASAN_STORE16_NOABORT,
1957 	    BUILT_IN_ASAN_STOREN_NOABORT } } };
1958   if (size_in_bytes == -1)
1959     {
1960       *nargs = 2;
1961       return builtin_decl_implicit (check[recover_p][is_store][5]);
1962     }
1963   *nargs = 1;
1964   int size_log2 = exact_log2 (size_in_bytes);
1965   return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
1966 }
1967 
1968 /* Split the current basic block and create a condition statement
1969    insertion point right before or after the statement pointed to by
1970    ITER.  Return an iterator to the point at which the caller might
1971    safely insert the condition statement.
1972 
1973    THEN_BLOCK must be set to the address of an uninitialized instance
1974    of basic_block.  The function will then set *THEN_BLOCK to the
1975    'then block' of the condition statement to be inserted by the
1976    caller.
1977 
1978    If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1979    *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1980 
1981    Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
1982    block' of the condition statement to be inserted by the caller.
1983 
1984    Note that *FALLTHROUGH_BLOCK is a new block that contains the
1985    statements starting from *ITER, and *THEN_BLOCK is a new empty
1986    block.
1987 
1988    *ITER is adjusted to always point to the first statement
1989     of the basic block *FALLTHROUGH_BLOCK.  That statement is the
1990     same as what ITER was pointing to prior to calling this function,
1991     if BEFORE_P is true; otherwise, it is its following statement.  */
1992 
1993 gimple_stmt_iterator
1994 create_cond_insert_point (gimple_stmt_iterator *iter,
1995 			  bool before_p,
1996 			  bool then_more_likely_p,
1997 			  bool create_then_fallthru_edge,
1998 			  basic_block *then_block,
1999 			  basic_block *fallthrough_block)
2000 {
2001   gimple_stmt_iterator gsi = *iter;
2002 
2003   if (!gsi_end_p (gsi) && before_p)
2004     gsi_prev (&gsi);
2005 
2006   basic_block cur_bb = gsi_bb (*iter);
2007 
2008   edge e = split_block (cur_bb, gsi_stmt (gsi));
2009 
2010   /* Get a hold on the 'condition block', the 'then block' and the
2011      'else block'.  */
2012   basic_block cond_bb = e->src;
2013   basic_block fallthru_bb = e->dest;
2014   basic_block then_bb = create_empty_bb (cond_bb);
2015   if (current_loops)
2016     {
2017       add_bb_to_loop (then_bb, cond_bb->loop_father);
2018       loops_state_set (LOOPS_NEED_FIXUP);
2019     }
2020 
2021   /* Set up the newly created 'then block'.  */
2022   e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
2023   profile_probability fallthrough_probability
2024     = then_more_likely_p
2025     ? profile_probability::very_unlikely ()
2026     : profile_probability::very_likely ();
2027   e->probability = fallthrough_probability.invert ();
2028   then_bb->count = e->count ();
2029   if (create_then_fallthru_edge)
2030     make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
2031 
2032   /* Set up the fallthrough basic block.  */
2033   e = find_edge (cond_bb, fallthru_bb);
2034   e->flags = EDGE_FALSE_VALUE;
2035   e->probability = fallthrough_probability;
2036 
2037   /* Update dominance info for the newly created then_bb; note that
2038      fallthru_bb's dominance info has already been updated by
2039      split_block.  */
2040   if (dom_info_available_p (CDI_DOMINATORS))
2041     set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
2042 
2043   *then_block = then_bb;
2044   *fallthrough_block = fallthru_bb;
2045   *iter = gsi_start_bb (fallthru_bb);
2046 
2047   return gsi_last_bb (cond_bb);
2048 }
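
/* A rough sketch of the CFG produced above (assuming
   CREATE_THEN_FALLTHRU_EDGE is true):

	      cond_bb
	     /       \
	 TRUE         FALSE
	   v             v
	then_bb ---> fallthru_bb

   The returned iterator points at the end of cond_bb, which is where the
   caller inserts the GIMPLE_COND.  */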
2049 
2050 /* Insert an if condition followed by a 'then block' right before the
2051    statement pointed to by ITER.  The fallthrough block -- which is the
2052    else block of the condition as well as the destination of the
2053    outgoing edge of the 'then block' -- starts with the statement
2054    pointed to by ITER.
2055 
2056    COND is the condition of the if.
2057 
2058    If THEN_MORE_LIKELY_P is true, the probability of the edge to the
2059    'then block' is higher than the probability of the edge to the
2060    fallthrough block.
2061 
2062    Upon completion of the function, *THEN_BB is set to the newly
2063    inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
2064    fallthrough block.
2065 
2066    *ITER is adjusted to still point to the same statement it was
2067    pointing to initially.  */
2068 
2069 static void
2070 insert_if_then_before_iter (gcond *cond,
2071 			    gimple_stmt_iterator *iter,
2072 			    bool then_more_likely_p,
2073 			    basic_block *then_bb,
2074 			    basic_block *fallthrough_bb)
2075 {
2076   gimple_stmt_iterator cond_insert_point =
2077     create_cond_insert_point (iter,
2078 			      /*before_p=*/true,
2079 			      then_more_likely_p,
2080 			      /*create_then_fallthru_edge=*/true,
2081 			      then_bb,
2082 			      fallthrough_bb);
2083   gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
2084 }
2085 
2086 /* Build (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().
2087    If RETURN_ADDRESS is set to true, return the shadow memory location
2088    instead of the value loaded from it.  */
2089 
2090 static tree
2091 build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
2092 			 tree base_addr, tree shadow_ptr_type,
2093 			 bool return_address = false)
2094 {
2095   tree t, uintptr_type = TREE_TYPE (base_addr);
2096   tree shadow_type = TREE_TYPE (shadow_ptr_type);
2097   gimple *g;
2098 
2099   t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
2100   g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
2101 			   base_addr, t);
2102   gimple_set_location (g, location);
2103   gsi_insert_after (gsi, g, GSI_NEW_STMT);
2104 
2105   t = build_int_cst (uintptr_type, asan_shadow_offset ());
2106   g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
2107 			   gimple_assign_lhs (g), t);
2108   gimple_set_location (g, location);
2109   gsi_insert_after (gsi, g, GSI_NEW_STMT);
2110 
2111   g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
2112 			   gimple_assign_lhs (g));
2113   gimple_set_location (g, location);
2114   gsi_insert_after (gsi, g, GSI_NEW_STMT);
2115 
2116   if (!return_address)
2117     {
2118       t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
2119 		  build_int_cst (shadow_ptr_type, 0));
2120       g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
2121       gimple_set_location (g, location);
2122       gsi_insert_after (gsi, g, GSI_NEW_STMT);
2123     }
2124 
2125   return gimple_assign_lhs (g);
2126 }
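
/* Illustrative only (assuming ASAN_SHADOW_SHIFT == 3 and the typical
   x86_64 shadow offset 0x7fff8000): the statements inserted above are
   roughly

     _1 = base_addr >> 3;
     _2 = _1 + 0x7fff8000;
     _3 = (shadow_ptr_type) _2;
     _4 = *_3;      <-- omitted when RETURN_ADDRESS is true

   and the SSA name defined by the last statement is returned.  */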
2127 
2128 /* BASE can already be an SSA_NAME; in that case, do not create a
2129    new SSA_NAME for it.  */
2130 
2131 static tree
2132 maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
2133 		       bool before_p)
2134 {
2135   STRIP_USELESS_TYPE_CONVERSION (base);
2136   if (TREE_CODE (base) == SSA_NAME)
2137     return base;
2138   gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)), base);
2139   gimple_set_location (g, loc);
2140   if (before_p)
2141     gsi_insert_before (iter, g, GSI_SAME_STMT);
2142   else
2143     gsi_insert_after (iter, g, GSI_NEW_STMT);
2144   return gimple_assign_lhs (g);
2145 }
2146 
2147 /* LEN can already have necessary size and precision;
2148    in that case, do not create a new variable.  */
2149 
2150 tree
2151 maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
2152 		       bool before_p)
2153 {
2154   if (ptrofftype_p (len))
2155     return len;
2156   gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2157 				  NOP_EXPR, len);
2158   gimple_set_location (g, loc);
2159   if (before_p)
2160     gsi_insert_before (iter, g, GSI_SAME_STMT);
2161   else
2162     gsi_insert_after (iter, g, GSI_NEW_STMT);
2163   return gimple_assign_lhs (g);
2164 }
2165 
2166 /* Instrument the memory access instruction BASE.  Insert new
2167    statements before or after ITER.
2168 
2169    Note that the memory access represented by BASE can be either an
2170    SSA_NAME, or a non-SSA expression.  LOCATION is the source code
2171    location.  IS_STORE is TRUE for a store, FALSE for a load.
2172    BEFORE_P is TRUE for inserting the instrumentation code before
2173    ITER, FALSE for inserting it after ITER.  IS_SCALAR_ACCESS is TRUE
2174    for a scalar memory access and FALSE for memory region access.
2175    NON_ZERO_P is TRUE if memory region is guaranteed to have non-zero
2176    length.  ALIGN tells alignment of accessed memory object.
2177 
2178    START_INSTRUMENTED and END_INSTRUMENTED are TRUE if start/end of
2179    memory region have already been instrumented.
2180 
2181    If BEFORE_P is TRUE, *ITER is arranged to still point to the
2182    statement it was pointing to prior to calling this function,
2183    otherwise, it points to the statement logically following it.  */
2184 
2185 static void
2186 build_check_stmt (location_t loc, tree base, tree len,
2187 		  HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
2188 		  bool is_non_zero_len, bool before_p, bool is_store,
2189 		  bool is_scalar_access, unsigned int align = 0)
2190 {
2191   gimple_stmt_iterator gsi = *iter;
2192   gimple *g;
2193 
2194   gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
2195 
2196   gsi = *iter;
2197 
2198   base = unshare_expr (base);
2199   base = maybe_create_ssa_name (loc, base, &gsi, before_p);
2200 
2201   if (len)
2202     {
2203       len = unshare_expr (len);
2204       len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
2205     }
2206   else
2207     {
2208       gcc_assert (size_in_bytes != -1);
2209       len = build_int_cst (pointer_sized_int_node, size_in_bytes);
2210     }
2211 
2212   if (size_in_bytes > 1)
2213     {
2214       if ((size_in_bytes & (size_in_bytes - 1)) != 0
2215 	  || size_in_bytes > 16)
2216 	is_scalar_access = false;
2217       else if (align && align < size_in_bytes * BITS_PER_UNIT)
2218 	{
2219 	  /* On non-strict alignment targets, a 16-byte access
2220 	     that is only 8-byte aligned results in a misaligned
2221 	     2-byte shadow memory load, but it can otherwise be
2222 	     handled using one read.  */
2224 	  if (size_in_bytes != 16
2225 	      || STRICT_ALIGNMENT
2226 	      || align < 8 * BITS_PER_UNIT)
2227 	    is_scalar_access = false;
2228 	}
2229     }
2230 
2231   HOST_WIDE_INT flags = 0;
2232   if (is_store)
2233     flags |= ASAN_CHECK_STORE;
2234   if (is_non_zero_len)
2235     flags |= ASAN_CHECK_NON_ZERO_LEN;
2236   if (is_scalar_access)
2237     flags |= ASAN_CHECK_SCALAR_ACCESS;
2238 
2239   g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
2240 				  build_int_cst (integer_type_node, flags),
2241 				  base, len,
2242 				  build_int_cst (integer_type_node,
2243 						 align / BITS_PER_UNIT));
2244   gimple_set_location (g, loc);
2245   if (before_p)
2246     gsi_insert_before (&gsi, g, GSI_SAME_STMT);
2247   else
2248     {
2249       gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2250       gsi_next (&gsi);
2251       *iter = gsi;
2252     }
2253 }
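
/* Illustrative only: for an aligned 4-byte store through pointer p the
   call built above is dumped roughly as

     .ASAN_CHECK (7, p_1, 4, 4);

   where 7 is ASAN_CHECK_STORE | ASAN_CHECK_NON_ZERO_LEN
   | ASAN_CHECK_SCALAR_ACCESS; the internal call is later expanded into
   the actual shadow-memory test by the sanopt pass.  */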
2254 
2255 /* If T represents a memory access, add instrumentation code before ITER.
2256    LOCATION is source code location.
2257    IS_STORE is either TRUE (for a store) or FALSE (for a load).  */
2258 
2259 static void
2260 instrument_derefs (gimple_stmt_iterator *iter, tree t,
2261 		   location_t location, bool is_store)
2262 {
2263   if (is_store && !param_asan_instrument_writes)
2264     return;
2265   if (!is_store && !param_asan_instrument_reads)
2266     return;
2267 
2268   tree type, base;
2269   HOST_WIDE_INT size_in_bytes;
2270   if (location == UNKNOWN_LOCATION)
2271     location = EXPR_LOCATION (t);
2272 
2273   type = TREE_TYPE (t);
2274   switch (TREE_CODE (t))
2275     {
2276     case ARRAY_REF:
2277     case COMPONENT_REF:
2278     case INDIRECT_REF:
2279     case MEM_REF:
2280     case VAR_DECL:
2281     case BIT_FIELD_REF:
2282       break;
2283       /* FALLTHRU */
2284     default:
2285       return;
2286     }
2287 
2288   size_in_bytes = int_size_in_bytes (type);
2289   if (size_in_bytes <= 0)
2290     return;
2291 
2292   poly_int64 bitsize, bitpos;
2293   tree offset;
2294   machine_mode mode;
2295   int unsignedp, reversep, volatilep = 0;
2296   tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
2297 				    &unsignedp, &reversep, &volatilep);
2298 
2299   if (TREE_CODE (t) == COMPONENT_REF
2300       && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
2301     {
2302       tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
2303       instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
2304 				       TREE_OPERAND (t, 0), repr,
2305 				       TREE_OPERAND (t, 2)),
2306 			 location, is_store);
2307       return;
2308     }
2309 
2310   if (!multiple_p (bitpos, BITS_PER_UNIT)
2311       || maybe_ne (bitsize, size_in_bytes * BITS_PER_UNIT))
2312     return;
2313 
2314   if (VAR_P (inner) && DECL_HARD_REGISTER (inner))
2315     return;
2316 
2317   poly_int64 decl_size;
2318   if ((VAR_P (inner) || TREE_CODE (inner) == RESULT_DECL)
2319       && offset == NULL_TREE
2320       && DECL_SIZE (inner)
2321       && poly_int_tree_p (DECL_SIZE (inner), &decl_size)
2322       && known_subrange_p (bitpos, bitsize, 0, decl_size))
2323     {
2324       if (VAR_P (inner) && DECL_THREAD_LOCAL_P (inner))
2325 	return;
2326       if (!param_asan_globals && is_global_var (inner))
2327         return;
2328       if (!TREE_STATIC (inner))
2329 	{
2330 	  /* Automatic vars in the current function will always be
2331 	     accessible.  */
2332 	  if (decl_function_context (inner) == current_function_decl
2333 	      && (!asan_sanitize_use_after_scope ()
2334 		  || !TREE_ADDRESSABLE (inner)))
2335 	    return;
2336 	}
2337       /* Always instrument external vars, they might be dynamically
2338 	 initialized.  */
2339       else if (!DECL_EXTERNAL (inner))
2340 	{
2341 	  /* Static vars that are known not to be dynamically
2342 	     initialized will always be accessible.  */
2343 	  varpool_node *vnode = varpool_node::get (inner);
2344 	  if (vnode && !vnode->dynamically_initialized)
2345 	    return;
2346 	}
2347     }
2348 
2349   if (DECL_P (inner)
2350       && decl_function_context (inner) == current_function_decl
2351       && !TREE_ADDRESSABLE (inner))
2352     mark_addressable (inner);
2353 
2354   base = build_fold_addr_expr (t);
2355   if (!has_mem_ref_been_instrumented (base, size_in_bytes))
2356     {
2357       unsigned int align = get_object_alignment (t);
2358       build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
2359 			/*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
2360 			is_store, /*is_scalar_access*/true, align);
2361       update_mem_ref_hash_table (base, size_in_bytes);
2362       update_mem_ref_hash_table (t, size_in_bytes);
2363     }
2364 
2365 }
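
/* Illustrative only: for a statement such as
	a[i] = x;
   instrument_derefs computes &a[i] and the access size of a[i] and hands
   them to build_check_stmt, unless the same <base, size> pair has already
   been instrumented earlier in the current extended basic block.  */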
2366 
2367 /*  Insert a memory reference into the hash table if access length
2368     can be determined in compile time.  */
2369 
2370 static void
2371 maybe_update_mem_ref_hash_table (tree base, tree len)
2372 {
2373   if (!POINTER_TYPE_P (TREE_TYPE (base))
2374       || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
2375     return;
2376 
2377   HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2378 
2379   if (size_in_bytes != -1)
2380     update_mem_ref_hash_table (base, size_in_bytes);
2381 }
2382 
2383 /* Instrument an access to a contiguous memory region that starts at
2384    the address pointed to by BASE, over a length of LEN (expressed in
2385    the sizeof (*BASE) bytes).  ITER points to the instruction before
2386    which the instrumentation instructions must be inserted.  LOCATION
2387    is the source location that the instrumentation instructions must
2388    have.  If IS_STORE is true, then the memory access is a store;
2389    otherwise, it's a load.  */
2390 
2391 static void
2392 instrument_mem_region_access (tree base, tree len,
2393 			      gimple_stmt_iterator *iter,
2394 			      location_t location, bool is_store)
2395 {
2396   if (!POINTER_TYPE_P (TREE_TYPE (base))
2397       || !INTEGRAL_TYPE_P (TREE_TYPE (len))
2398       || integer_zerop (len))
2399     return;
2400 
2401   HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2402 
2403   if ((size_in_bytes == -1)
2404       || !has_mem_ref_been_instrumented (base, size_in_bytes))
2405     {
2406       build_check_stmt (location, base, len, size_in_bytes, iter,
2407 			/*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
2408 			is_store, /*is_scalar_access*/false, /*align*/0);
2409     }
2410 
2411   maybe_update_mem_ref_hash_table (base, len);
2412   *iter = gsi_for_stmt (gsi_stmt (*iter));
2413 }
2414 
2415 /* Instrument the call to a built-in memory access function that is
2416    pointed to by the iterator ITER.
2417 
2418    Upon completion, return TRUE iff *ITER has been advanced to the
2419    statement following the one it was originally pointing to.  */
2420 
2421 static bool
2422 instrument_builtin_call (gimple_stmt_iterator *iter)
2423 {
2424   if (!param_asan_memintrin)
2425     return false;
2426 
2427   bool iter_advanced_p = false;
2428   gcall *call = as_a <gcall *> (gsi_stmt (*iter));
2429 
2430   gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
2431 
2432   location_t loc = gimple_location (call);
2433 
2434   asan_mem_ref src0, src1, dest;
2435   asan_mem_ref_init (&src0, NULL, 1);
2436   asan_mem_ref_init (&src1, NULL, 1);
2437   asan_mem_ref_init (&dest, NULL, 1);
2438 
2439   tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
2440   bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
2441     dest_is_deref = false, intercepted_p = true;
2442 
2443   if (get_mem_refs_of_builtin_call (call,
2444 				    &src0, &src0_len, &src0_is_store,
2445 				    &src1, &src1_len, &src1_is_store,
2446 				    &dest, &dest_len, &dest_is_store,
2447 				    &dest_is_deref, &intercepted_p, iter))
2448     {
2449       if (dest_is_deref)
2450 	{
2451 	  instrument_derefs (iter, dest.start, loc, dest_is_store);
2452 	  gsi_next (iter);
2453 	  iter_advanced_p = true;
2454 	}
2455       else if (!intercepted_p
2456 	       && (src0_len || src1_len || dest_len))
2457 	{
2458 	  if (src0.start != NULL_TREE)
2459 	    instrument_mem_region_access (src0.start, src0_len,
2460 					  iter, loc, /*is_store=*/false);
2461 	  if (src1.start != NULL_TREE)
2462 	    instrument_mem_region_access (src1.start, src1_len,
2463 					  iter, loc, /*is_store=*/false);
2464 	  if (dest.start != NULL_TREE)
2465 	    instrument_mem_region_access (dest.start, dest_len,
2466 					  iter, loc, /*is_store=*/true);
2467 
2468 	  *iter = gsi_for_stmt (call);
2469 	  gsi_next (iter);
2470 	  iter_advanced_p = true;
2471 	}
2472       else
2473 	{
2474 	  if (src0.start != NULL_TREE)
2475 	    maybe_update_mem_ref_hash_table (src0.start, src0_len);
2476 	  if (src1.start != NULL_TREE)
2477 	    maybe_update_mem_ref_hash_table (src1.start, src1_len);
2478 	  if (dest.start != NULL_TREE)
2479 	    maybe_update_mem_ref_hash_table (dest.start, dest_len);
2480 	}
2481     }
2482   return iter_advanced_p;
2483 }
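
/* Illustrative only: for a builtin such as memcpy (d, s, n) the helper
   above collects a read of N bytes at S and a write of N bytes at D.
   When the builtin is already intercepted by the ASan run-time library
   only the mem-ref hash table is updated; otherwise the two regions are
   instrumented via instrument_mem_region_access.  */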
2484 
2485 /*  Instrument the assignment statement ITER if it is subject to
2486     instrumentation.  Return TRUE iff instrumentation actually
2487     happened.  In that case, the iterator ITER is advanced to the next
2488     logical expression following the one initially pointed to by ITER,
2489     and the relevant memory reference whose access has been
2490     instrumented is added to the memory references hash table.  */
2491 
2492 static bool
2493 maybe_instrument_assignment (gimple_stmt_iterator *iter)
2494 {
2495   gimple *s = gsi_stmt (*iter);
2496 
2497   gcc_assert (gimple_assign_single_p (s));
2498 
2499   tree ref_expr = NULL_TREE;
2500   bool is_store, is_instrumented = false;
2501 
2502   if (gimple_store_p (s))
2503     {
2504       ref_expr = gimple_assign_lhs (s);
2505       is_store = true;
2506       instrument_derefs (iter, ref_expr,
2507 			 gimple_location (s),
2508 			 is_store);
2509       is_instrumented = true;
2510     }
2511 
2512   if (gimple_assign_load_p (s))
2513     {
2514       ref_expr = gimple_assign_rhs1 (s);
2515       is_store = false;
2516       instrument_derefs (iter, ref_expr,
2517 			 gimple_location (s),
2518 			 is_store);
2519       is_instrumented = true;
2520     }
2521 
2522   if (is_instrumented)
2523     gsi_next (iter);
2524 
2525   return is_instrumented;
2526 }
2527 
2528 /* Instrument the function call pointed to by the iterator ITER, if it
2529    is subject to instrumentation.  At the moment, the only function
2530    calls that are instrumented are some built-in functions that access
2531    memory.  Look at instrument_builtin_call to learn more.
2532 
2533    Upon completion return TRUE iff *ITER was advanced to the statement
2534    following the one it was originally pointing to.  */
2535 
2536 static bool
2537 maybe_instrument_call (gimple_stmt_iterator *iter)
2538 {
2539   gimple *stmt = gsi_stmt (*iter);
2540   bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2541 
2542   if (is_builtin && instrument_builtin_call (iter))
2543     return true;
2544 
2545   if (gimple_call_noreturn_p (stmt))
2546     {
2547       if (is_builtin)
2548 	{
2549 	  tree callee = gimple_call_fndecl (stmt);
2550 	  switch (DECL_FUNCTION_CODE (callee))
2551 	    {
2552 	    case BUILT_IN_UNREACHABLE:
2553 	    case BUILT_IN_TRAP:
2554 	      /* Don't instrument these.  */
2555 	      return false;
2556 	    default:
2557 	      break;
2558 	    }
2559 	}
2560       tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2561       gimple *g = gimple_build_call (decl, 0);
2562       gimple_set_location (g, gimple_location (stmt));
2563       gsi_insert_before (iter, g, GSI_SAME_STMT);
2564     }
2565 
2566   bool instrumented = false;
2567   if (gimple_store_p (stmt))
2568     {
2569       tree ref_expr = gimple_call_lhs (stmt);
2570       instrument_derefs (iter, ref_expr,
2571 			 gimple_location (stmt),
2572 			 /*is_store=*/true);
2573 
2574       instrumented = true;
2575     }
2576 
2577   /* Walk through the gimple_call arguments and check them if needed.  */
2578   unsigned args_num = gimple_call_num_args (stmt);
2579   for (unsigned i = 0; i < args_num; ++i)
2580     {
2581       tree arg = gimple_call_arg (stmt, i);
2582       /* If ARG is not a non-aggregate register variable, the compiler
2583 	 generally creates a temporary for it and passes that to the gimple
2584 	 call.  But sometimes, e.g. when a small structure passed by value
2585 	 fits in a register, the compiler avoids that overhead by pulling
2586 	 out these temporaries, and then we should check the argument.  */
2587       if (!is_gimple_reg (arg) && !is_gimple_min_invariant (arg))
2588 	{
2589 	  instrument_derefs (iter, arg,
2590 			     gimple_location (stmt),
2591 			     /*is_store=*/false);
2592 	  instrumented = true;
2593 	}
2594     }
2595   if (instrumented)
2596     gsi_next (iter);
2597   return instrumented;
2598 }
2599 
2600 /* Walk each instruction of all basic block and instrument those that
2601    represent memory references: loads, stores, or function calls.
2602    In a given basic block, this function avoids instrumenting memory
2603    references that have already been instrumented.  */
2604 
2605 static void
2606 transform_statements (void)
2607 {
2608   basic_block bb, last_bb = NULL;
2609   gimple_stmt_iterator i;
2610   int saved_last_basic_block = last_basic_block_for_fn (cfun);
2611 
2612   FOR_EACH_BB_FN (bb, cfun)
2613     {
2614       basic_block prev_bb = bb;
2615 
2616       if (bb->index >= saved_last_basic_block) continue;
2617 
2618       /* Flush the mem ref hash table if the current bb doesn't have
2619 	 exactly one predecessor, or if that predecessor (skipping
2620 	 over asan created basic blocks) isn't the last processed
2621 	 basic block.  Thus we effectively flush on extended basic
2622 	 block boundaries.  */
2623       while (single_pred_p (prev_bb))
2624 	{
2625 	  prev_bb = single_pred (prev_bb);
2626 	  if (prev_bb->index < saved_last_basic_block)
2627 	    break;
2628 	}
2629       if (prev_bb != last_bb)
2630 	empty_mem_ref_hash_table ();
2631       last_bb = bb;
2632 
2633       for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2634 	{
2635 	  gimple *s = gsi_stmt (i);
2636 
2637 	  if (has_stmt_been_instrumented_p (s))
2638 	    gsi_next (&i);
2639 	  else if (gimple_assign_single_p (s)
2640 		   && !gimple_clobber_p (s)
2641 		   && maybe_instrument_assignment (&i))
2642 	    /*  Nothing to do as maybe_instrument_assignment advanced
2643 		the iterator I.  */;
2644 	  else if (is_gimple_call (s) && maybe_instrument_call (&i))
2645 	    /*  Nothing to do as maybe_instrument_call
2646 		advanced the iterator I.  */;
2647 	  else
2648 	    {
2649 	      /* No instrumentation happened.
2650 
2651 		 If the current instruction is a function call that
2652 		 might free something, let's forget about the memory
2653 		 references that got instrumented.  Otherwise we might
2654 		 miss some instrumentation opportunities.  Do the same
2655 		 for a ASAN_MARK poisoning internal function.  */
2656 	      if (is_gimple_call (s)
2657 		  && (!nonfreeing_call_p (s)
2658 		      || asan_mark_p (s, ASAN_MARK_POISON)))
2659 		empty_mem_ref_hash_table ();
2660 
2661 	      gsi_next (&i);
2662 	    }
2663 	}
2664     }
2665   free_mem_ref_resources ();
2666 }
2667 
2668 /* Build
2669    __asan_before_dynamic_init (module_name)
2670    or
2671    __asan_after_dynamic_init ()
2672    call.  */
2673 
2674 tree
2675 asan_dynamic_init_call (bool after_p)
2676 {
2677   if (shadow_ptr_types[0] == NULL_TREE)
2678     asan_init_shadow_ptr_types ();
2679 
2680   tree fn = builtin_decl_implicit (after_p
2681 				   ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2682 				   : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2683   tree module_name_cst = NULL_TREE;
2684   if (!after_p)
2685     {
2686       pretty_printer module_name_pp;
2687       pp_string (&module_name_pp, main_input_filename);
2688 
2689       module_name_cst = asan_pp_string (&module_name_pp);
2690       module_name_cst = fold_convert (const_ptr_type_node,
2691 				      module_name_cst);
2692     }
2693 
2694   return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2695 }
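
/* Illustrative only: a front end (e.g. the C++ one) brackets the dynamic
   initialization code of a translation unit roughly as

     __asan_before_dynamic_init ("<main input filename>");
     ... run the dynamic initializers ...
     __asan_after_dynamic_init ();

   which lets the run time detect initialization-order bugs.  */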
2696 
2697 /* Build
2698    struct __asan_global
2699    {
2700      const void *__beg;
2701      uptr __size;
2702      uptr __size_with_redzone;
2703      const void *__name;
2704      const void *__module_name;
2705      uptr __has_dynamic_init;
2706      __asan_global_source_location *__location;
2707      char *__odr_indicator;
2708    } type.  */
2709 
2710 static tree
2711 asan_global_struct (void)
2712 {
2713   static const char *field_names[]
2714     = { "__beg", "__size", "__size_with_redzone",
2715 	"__name", "__module_name", "__has_dynamic_init", "__location",
2716 	"__odr_indicator" };
2717   tree fields[ARRAY_SIZE (field_names)], ret;
2718   unsigned i;
2719 
2720   ret = make_node (RECORD_TYPE);
2721   for (i = 0; i < ARRAY_SIZE (field_names); i++)
2722     {
2723       fields[i]
2724 	= build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2725 		      get_identifier (field_names[i]),
2726 		      (i == 0 || i == 3) ? const_ptr_type_node
2727 		      : pointer_sized_int_node);
2728       DECL_CONTEXT (fields[i]) = ret;
2729       if (i)
2730 	DECL_CHAIN (fields[i - 1]) = fields[i];
2731     }
2732   tree type_decl = build_decl (input_location, TYPE_DECL,
2733 			       get_identifier ("__asan_global"), ret);
2734   DECL_IGNORED_P (type_decl) = 1;
2735   DECL_ARTIFICIAL (type_decl) = 1;
2736   TYPE_FIELDS (ret) = fields[0];
2737   TYPE_NAME (ret) = type_decl;
2738   TYPE_STUB_DECL (ret) = type_decl;
2739   TYPE_ARTIFICIAL (ret) = 1;
2740   layout_type (ret);
2741   return ret;
2742 }
2743 
2744 /* Create and return odr indicator symbol for DECL.
2745    TYPE is __asan_global struct type as returned by asan_global_struct.  */
2746 
2747 static tree
2748 create_odr_indicator (tree decl, tree type)
2749 {
2750   char *name;
2751   tree uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2752   tree decl_name
2753     = (HAS_DECL_ASSEMBLER_NAME_P (decl) ? DECL_ASSEMBLER_NAME (decl)
2754 					: DECL_NAME (decl));
2755   /* DECL_NAME theoretically might be NULL.  Bail out with 0 in this case.  */
2756   if (decl_name == NULL_TREE)
2757     return build_int_cst (uptr, 0);
2758   const char *dname = IDENTIFIER_POINTER (decl_name);
2759   if (HAS_DECL_ASSEMBLER_NAME_P (decl))
2760     dname = targetm.strip_name_encoding (dname);
2761   size_t len = strlen (dname) + sizeof ("__odr_asan_");
2762   name = XALLOCAVEC (char, len);
2763   snprintf (name, len, "__odr_asan_%s", dname);
2764 #ifndef NO_DOT_IN_LABEL
2765   name[sizeof ("__odr_asan") - 1] = '.';
2766 #elif !defined(NO_DOLLAR_IN_LABEL)
2767   name[sizeof ("__odr_asan") - 1] = '$';
2768 #endif
2769   tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (name),
2770 			 char_type_node);
2771   TREE_ADDRESSABLE (var) = 1;
2772   TREE_READONLY (var) = 0;
2773   TREE_THIS_VOLATILE (var) = 1;
2774   DECL_GIMPLE_REG_P (var) = 0;
2775   DECL_ARTIFICIAL (var) = 1;
2776   DECL_IGNORED_P (var) = 1;
2777   TREE_STATIC (var) = 1;
2778   TREE_PUBLIC (var) = 1;
2779   DECL_VISIBILITY (var) = DECL_VISIBILITY (decl);
2780   DECL_VISIBILITY_SPECIFIED (var) = DECL_VISIBILITY_SPECIFIED (decl);
2781 
2782   TREE_USED (var) = 1;
2783   tree ctor = build_constructor_va (TREE_TYPE (var), 1, NULL_TREE,
2784 				    build_int_cst (unsigned_type_node, 0));
2785   TREE_CONSTANT (ctor) = 1;
2786   TREE_STATIC (ctor) = 1;
2787   DECL_INITIAL (var) = ctor;
2788   DECL_ATTRIBUTES (var) = tree_cons (get_identifier ("asan odr indicator"),
2789 				     NULL, DECL_ATTRIBUTES (var));
2790   make_decl_rtl (var);
2791   varpool_node::finalize_decl (var);
2792   return fold_convert (uptr, build_fold_addr_expr (var));
2793 }
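
/* Illustrative only: for a public variable "foo" the symbol created above
   is named "__odr_asan.foo" (or "__odr_asan$foo" / "__odr_asan_foo",
   depending on which characters the target allows in labels).  It is a
   single volatile char initialized to zero whose address ends up in the
   __odr_indicator field of foo's __asan_global descriptor.  */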
2794 
2795 /* Return true if DECL, a global var, might be overridden and needs
2796    an additional odr indicator symbol.  */
2797 
2798 static bool
2799 asan_needs_odr_indicator_p (tree decl)
2800 {
2801   /* Don't emit ODR indicators for kernel because:
2802      a) The kernel is written in C and thus doesn't need ODR indicators.
2803      b) Some kernel code may have assumptions about symbols containing specific
2804         patterns in their names.  Since ODR indicators contain original names
2805         of symbols they are emitted for, these assumptions would be broken for
2806         ODR indicator symbols.  */
2807   return (!(flag_sanitize & SANITIZE_KERNEL_ADDRESS)
2808 	  && !DECL_ARTIFICIAL (decl)
2809 	  && !DECL_WEAK (decl)
2810 	  && TREE_PUBLIC (decl));
2811 }
2812 
2813 /* Append description of a single global DECL into vector V.
2814    TYPE is __asan_global struct type as returned by asan_global_struct.  */
2815 
2816 static void
2817 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2818 {
2819   tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2820   unsigned HOST_WIDE_INT size;
2821   tree str_cst, module_name_cst, refdecl = decl;
2822   vec<constructor_elt, va_gc> *vinner = NULL;
2823 
2824   pretty_printer asan_pp, module_name_pp;
2825 
2826   if (DECL_NAME (decl))
2827     pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2828   else
2829     pp_string (&asan_pp, "<unknown>");
2830   str_cst = asan_pp_string (&asan_pp);
2831 
2832   pp_string (&module_name_pp, main_input_filename);
2833   module_name_cst = asan_pp_string (&module_name_pp);
2834 
2835   if (asan_needs_local_alias (decl))
2836     {
2837       char buf[20];
2838       ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2839       refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2840 			    VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2841       TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2842       TREE_READONLY (refdecl) = TREE_READONLY (decl);
2843       TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2844       DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2845       DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2846       DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2847       TREE_STATIC (refdecl) = 1;
2848       TREE_PUBLIC (refdecl) = 0;
2849       TREE_USED (refdecl) = 1;
2850       assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2851     }
2852 
2853   tree odr_indicator_ptr
2854     = (asan_needs_odr_indicator_p (decl) ? create_odr_indicator (decl, type)
2855 					 : build_int_cst (uptr, 0));
2856   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2857 			  fold_convert (const_ptr_type_node,
2858 					build_fold_addr_expr (refdecl)));
2859   size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2860   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2861   size += asan_red_zone_size (size);
2862   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2863   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2864 			  fold_convert (const_ptr_type_node, str_cst));
2865   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2866 			  fold_convert (const_ptr_type_node, module_name_cst));
2867   varpool_node *vnode = varpool_node::get (decl);
2868   int has_dynamic_init = 0;
2869   /* FIXME: Enable initialization order fiasco detection in LTO mode once
2870      proper fix for PR 79061 will be applied.  */
2871   if (!in_lto_p)
2872     has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2873   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2874 			  build_int_cst (uptr, has_dynamic_init));
2875   tree locptr = NULL_TREE;
2876   location_t loc = DECL_SOURCE_LOCATION (decl);
2877   expanded_location xloc = expand_location (loc);
2878   if (xloc.file != NULL)
2879     {
2880       static int lasanloccnt = 0;
2881       char buf[25];
2882       ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
2883       tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2884 			     ubsan_get_source_location_type ());
2885       TREE_STATIC (var) = 1;
2886       TREE_PUBLIC (var) = 0;
2887       DECL_ARTIFICIAL (var) = 1;
2888       DECL_IGNORED_P (var) = 1;
2889       pretty_printer filename_pp;
2890       pp_string (&filename_pp, xloc.file);
2891       tree str = asan_pp_string (&filename_pp);
2892       tree ctor = build_constructor_va (TREE_TYPE (var), 3,
2893 					NULL_TREE, str, NULL_TREE,
2894 					build_int_cst (unsigned_type_node,
2895 						       xloc.line), NULL_TREE,
2896 					build_int_cst (unsigned_type_node,
2897 						       xloc.column));
2898       TREE_CONSTANT (ctor) = 1;
2899       TREE_STATIC (ctor) = 1;
2900       DECL_INITIAL (var) = ctor;
2901       varpool_node::finalize_decl (var);
2902       locptr = fold_convert (uptr, build_fold_addr_expr (var));
2903     }
2904   else
2905     locptr = build_int_cst (uptr, 0);
2906   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
2907   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, odr_indicator_ptr);
2908   init = build_constructor (type, vinner);
2909   CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
2910 }
2911 
2912 /* Initialize sanitizer.def builtins if the FE hasn't initialized them.  */
2913 void
2914 initialize_sanitizer_builtins (void)
2915 {
2916   tree decl;
2917 
2918   if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2919     return;
2920 
2921   tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2922   tree BT_FN_VOID_PTR
2923     = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2924   tree BT_FN_VOID_CONST_PTR
2925     = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
2926   tree BT_FN_VOID_PTR_PTR
2927     = build_function_type_list (void_type_node, ptr_type_node,
2928 				ptr_type_node, NULL_TREE);
2929   tree BT_FN_VOID_PTR_PTR_PTR
2930     = build_function_type_list (void_type_node, ptr_type_node,
2931 				ptr_type_node, ptr_type_node, NULL_TREE);
2932   tree BT_FN_VOID_PTR_PTRMODE
2933     = build_function_type_list (void_type_node, ptr_type_node,
2934 				pointer_sized_int_node, NULL_TREE);
2935   tree BT_FN_VOID_INT
2936     = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2937   tree BT_FN_SIZE_CONST_PTR_INT
2938     = build_function_type_list (size_type_node, const_ptr_type_node,
2939 				integer_type_node, NULL_TREE);
2940 
2941   tree BT_FN_VOID_UINT8_UINT8
2942     = build_function_type_list (void_type_node, unsigned_char_type_node,
2943 				unsigned_char_type_node, NULL_TREE);
2944   tree BT_FN_VOID_UINT16_UINT16
2945     = build_function_type_list (void_type_node, uint16_type_node,
2946 				uint16_type_node, NULL_TREE);
2947   tree BT_FN_VOID_UINT32_UINT32
2948     = build_function_type_list (void_type_node, uint32_type_node,
2949 				uint32_type_node, NULL_TREE);
2950   tree BT_FN_VOID_UINT64_UINT64
2951     = build_function_type_list (void_type_node, uint64_type_node,
2952 				uint64_type_node, NULL_TREE);
2953   tree BT_FN_VOID_FLOAT_FLOAT
2954     = build_function_type_list (void_type_node, float_type_node,
2955 				float_type_node, NULL_TREE);
2956   tree BT_FN_VOID_DOUBLE_DOUBLE
2957     = build_function_type_list (void_type_node, double_type_node,
2958 				double_type_node, NULL_TREE);
2959   tree BT_FN_VOID_UINT64_PTR
2960     = build_function_type_list (void_type_node, uint64_type_node,
2961 				ptr_type_node, NULL_TREE);
2962 
2963   tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2964   tree BT_FN_IX_CONST_VPTR_INT[5];
2965   tree BT_FN_IX_VPTR_IX_INT[5];
2966   tree BT_FN_VOID_VPTR_IX_INT[5];
2967   tree vptr
2968     = build_pointer_type (build_qualified_type (void_type_node,
2969 						TYPE_QUAL_VOLATILE));
2970   tree cvptr
2971     = build_pointer_type (build_qualified_type (void_type_node,
2972 						TYPE_QUAL_VOLATILE
2973 						|TYPE_QUAL_CONST));
2974   tree boolt
2975     = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2976   int i;
2977   for (i = 0; i < 5; i++)
2978     {
2979       tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2980       BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2981 	= build_function_type_list (boolt, vptr, ptr_type_node, ix,
2982 				    integer_type_node, integer_type_node,
2983 				    NULL_TREE);
2984       BT_FN_IX_CONST_VPTR_INT[i]
2985 	= build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2986       BT_FN_IX_VPTR_IX_INT[i]
2987 	= build_function_type_list (ix, vptr, ix, integer_type_node,
2988 				    NULL_TREE);
2989       BT_FN_VOID_VPTR_IX_INT[i]
2990 	= build_function_type_list (void_type_node, vptr, ix,
2991 				    integer_type_node, NULL_TREE);
2992     }
2993 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2994 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2995 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2996 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2997 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2998 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2999 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
3000 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
3001 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
3002 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
3003 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
3004 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
3005 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
3006 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
3007 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
3008 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
3009 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
3010 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
3011 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
3012 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
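/* The ATTR_* names below mirror the ones used in sanitizer.def, but are
   remapped here to ECF_* flag masks, because DEF_SANITIZER_BUILTIN_1 hands
   them to set_call_expr_flags rather than building attribute lists.  */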
3013 #undef ATTR_NOTHROW_LEAF_LIST
3014 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
3015 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
3016 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
3017 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
3018 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
3019 #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
3020 #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
3021   ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
3022 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
3023 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
3024   ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
3025 #undef ATTR_COLD_NOTHROW_LEAF_LIST
3026 #define ATTR_COLD_NOTHROW_LEAF_LIST \
3027   /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
3028 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
3029 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
3030   /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
3031 #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
3032 #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
3033   /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
3034 #undef ATTR_PURE_NOTHROW_LEAF_LIST
3035 #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
3036 #undef DEF_BUILTIN_STUB
3037 #define DEF_BUILTIN_STUB(ENUM, NAME)
3038 #undef DEF_SANITIZER_BUILTIN_1
3039 #define DEF_SANITIZER_BUILTIN_1(ENUM, NAME, TYPE, ATTRS)		\
3040   do {									\
3041     decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM,		\
3042 				 BUILT_IN_NORMAL, NAME, NULL_TREE);	\
3043     set_call_expr_flags (decl, ATTRS);					\
3044     set_builtin_decl (ENUM, decl, true);				\
3045   } while (0)
3046 #undef DEF_SANITIZER_BUILTIN
3047 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS)	\
3048   DEF_SANITIZER_BUILTIN_1 (ENUM, NAME, TYPE, ATTRS);
3049 
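/* Each DEF_SANITIZER_BUILTIN entry in sanitizer.def now expands through
   DEF_SANITIZER_BUILTIN_1 above; e.g. an entry along the lines of
     DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_INIT, "__asan_init",
			    BT_FN_VOID, ATTR_NOTHROW_LEAF_LIST)
   registers a __builtin___asan_init decl with that type and those flags.  */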
3050 #include "sanitizer.def"
3051 
3052   /* -fsanitize=object-size uses __builtin_object_size, but that might
3053      not be available for e.g. Fortran at this point.  We use
3054      DEF_SANITIZER_BUILTIN_1 here only as a convenience macro.  */
3055   if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
3056       && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
3057     DEF_SANITIZER_BUILTIN_1 (BUILT_IN_OBJECT_SIZE, "object_size",
3058 			     BT_FN_SIZE_CONST_PTR_INT,
3059 			     ATTR_PURE_NOTHROW_LEAF_LIST);
3060 
3061 #undef DEF_SANITIZER_BUILTIN_1
3062 #undef DEF_SANITIZER_BUILTIN
3063 #undef DEF_BUILTIN_STUB
3064 }
3065 
3066 /* Called via hash_table::traverse.  Count the number of emitted
3067    STRING_CSTs in the constant hash table.  */
3068 
3069 int
3070 count_string_csts (constant_descriptor_tree **slot,
3071 		   unsigned HOST_WIDE_INT *data)
3072 {
3073   struct constant_descriptor_tree *desc = *slot;
3074   if (TREE_CODE (desc->value) == STRING_CST
3075       && TREE_ASM_WRITTEN (desc->value)
3076       && asan_protect_global (desc->value))
3077     ++*data;
3078   return 1;
3079 }
3080 
3081 /* Helper structure to pass two parameters to
3082    add_string_csts.  */
3083 
3084 struct asan_add_string_csts_data
3085 {
3086   tree type;
3087   vec<constructor_elt, va_gc> *v;
3088 };
3089 
3090 /* Called via hash_table::traverse.  Call asan_add_global
3091    on emitted STRING_CSTs from the constant hash table.  */
3092 
3093 int
3094 add_string_csts (constant_descriptor_tree **slot,
3095 		 asan_add_string_csts_data *aascd)
3096 {
3097   struct constant_descriptor_tree *desc = *slot;
3098   if (TREE_CODE (desc->value) == STRING_CST
3099       && TREE_ASM_WRITTEN (desc->value)
3100       && asan_protect_global (desc->value))
3101     {
3102       asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
3103 		       aascd->type, aascd->v);
3104     }
3105   return 1;
3106 }
3107 
3108 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
3109    invoke ggc_collect.  */
3110 static GTY(()) tree asan_ctor_statements;
3111 
3112 /* Module-level instrumentation.
3113    - Insert __asan_init_vN() into the list of CTORs.
3114    - TODO: insert redzones around globals.
3115  */
3116 
3117 void
3118 asan_finish_file (void)
3119 {
3120   varpool_node *vnode;
3121   unsigned HOST_WIDE_INT gcount = 0;
3122 
3123   if (shadow_ptr_types[0] == NULL_TREE)
3124     asan_init_shadow_ptr_types ();
3125   /* Avoid instrumenting code in the asan ctors/dtors.
3126      We don't need to insert padding after the description strings,
3127      nor after .LASAN* array.  */
3128   flag_sanitize &= ~SANITIZE_ADDRESS;
3129 
3130   /* For user-space we want asan constructors to run first.
3131      Linux kernel does not support priorities other than default, and the only
3132      other user of constructors is coverage. So we run with the default
3133      priority.  */
3134   int priority = flag_sanitize & SANITIZE_USER_ADDRESS
3135                  ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
3136 
3137   if (flag_sanitize & SANITIZE_USER_ADDRESS)
3138     {
3139       tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
3140       append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
3141       fn = builtin_decl_implicit (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK);
3142       append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
3143     }
3144   FOR_EACH_DEFINED_VARIABLE (vnode)
3145     if (TREE_ASM_WRITTEN (vnode->decl)
3146 	&& asan_protect_global (vnode->decl))
3147       ++gcount;
3148   hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
3149   const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
3150     (&gcount);
3151   if (gcount)
3152     {
3153       tree type = asan_global_struct (), var, ctor;
3154       tree dtor_statements = NULL_TREE;
3155       vec<constructor_elt, va_gc> *v;
3156       char buf[20];
3157 
3158       type = build_array_type_nelts (type, gcount);
3159       ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
3160       var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
3161 			type);
3162       TREE_STATIC (var) = 1;
3163       TREE_PUBLIC (var) = 0;
3164       DECL_ARTIFICIAL (var) = 1;
3165       DECL_IGNORED_P (var) = 1;
3166       vec_alloc (v, gcount);
3167       FOR_EACH_DEFINED_VARIABLE (vnode)
3168 	if (TREE_ASM_WRITTEN (vnode->decl)
3169 	    && asan_protect_global (vnode->decl))
3170 	  asan_add_global (vnode->decl, TREE_TYPE (type), v);
3171       struct asan_add_string_csts_data aascd;
3172       aascd.type = TREE_TYPE (type);
3173       aascd.v = v;
3174       const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
3175        	(&aascd);
3176       ctor = build_constructor (type, v);
3177       TREE_CONSTANT (ctor) = 1;
3178       TREE_STATIC (ctor) = 1;
3179       DECL_INITIAL (var) = ctor;
3180       SET_DECL_ALIGN (var, MAX (DECL_ALIGN (var),
3181 				ASAN_SHADOW_GRANULARITY * BITS_PER_UNIT));
3182 
3183       varpool_node::finalize_decl (var);
3184 
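      /* Hand the .LASAN* descriptor array and its element count to
	 __asan_register_globals from a constructor and to
	 __asan_unregister_globals from a destructor, so the run-time library
	 can poison and later unpoison the redzones of these globals.  */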
3185       tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
3186       tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
3187       append_to_statement_list (build_call_expr (fn, 2,
3188 						 build_fold_addr_expr (var),
3189 						 gcount_tree),
3190 				&asan_ctor_statements);
3191 
3192       fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
3193       append_to_statement_list (build_call_expr (fn, 2,
3194 						 build_fold_addr_expr (var),
3195 						 gcount_tree),
3196 				&dtor_statements);
3197       cgraph_build_static_cdtor ('D', dtor_statements, priority);
3198     }
3199   if (asan_ctor_statements)
3200     cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
3201   flag_sanitize |= SANITIZE_ADDRESS;
3202 }
3203 
3204 /* Poison or unpoison (depending on IS_CLOBBER variable) shadow memory based
3205    on SHADOW address.  Newly added statements will be added to ITER with
3206    given location LOC.  We mark SIZE bytes in shadow memory, where
3207    LAST_CHUNK_SIZE is greater than zero when the marked region reaches the
3208    end of a variable.  */
3209 
3210 static void
3211 asan_store_shadow_bytes (gimple_stmt_iterator *iter, location_t loc,
3212 			 tree shadow,
3213 			 unsigned HOST_WIDE_INT base_addr_offset,
3214 			 bool is_clobber, unsigned size,
3215 			 unsigned last_chunk_size)
3216 {
3217   tree shadow_ptr_type;
3218 
3219   switch (size)
3220     {
3221     case 1:
3222       shadow_ptr_type = shadow_ptr_types[0];
3223       break;
3224     case 2:
3225       shadow_ptr_type = shadow_ptr_types[1];
3226       break;
3227     case 4:
3228       shadow_ptr_type = shadow_ptr_types[2];
3229       break;
3230     default:
3231       gcc_unreachable ();
3232     }
3233 
3234   unsigned char c = (char) is_clobber ? ASAN_STACK_MAGIC_USE_AFTER_SCOPE : 0;
3235   unsigned HOST_WIDE_INT val = 0;
3236   unsigned last_pos = size;
3237   if (last_chunk_size && !is_clobber)
3238     last_pos = BYTES_BIG_ENDIAN ? 0 : size - 1;
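  /* E.g. for SIZE == 4, !IS_CLOBBER and LAST_CHUNK_SIZE == 5 on a
     little-endian target this builds VAL == 0x05000000, i.e. three fully
     addressable granules followed by one granule with only its first
     5 bytes addressable.  */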
3239   for (unsigned i = 0; i < size; ++i)
3240     {
3241       unsigned char shadow_c = c;
3242       if (i == last_pos)
3243 	shadow_c = last_chunk_size;
3244       val |= (unsigned HOST_WIDE_INT) shadow_c << (BITS_PER_UNIT * i);
3245     }
3246 
3247   /* Handle last chunk in unpoisoning.  */
3248   tree magic = build_int_cst (TREE_TYPE (shadow_ptr_type), val);
3249 
3250   tree dest = build2 (MEM_REF, TREE_TYPE (shadow_ptr_type), shadow,
3251 		      build_int_cst (shadow_ptr_type, base_addr_offset));
3252 
3253   gimple *g = gimple_build_assign (dest, magic);
3254   gimple_set_location (g, loc);
3255   gsi_insert_after (iter, g, GSI_NEW_STMT);
3256 }
3257 
3258 /* Expand the ASAN_MARK builtins.  */
3259 
3260 bool
3261 asan_expand_mark_ifn (gimple_stmt_iterator *iter)
3262 {
3263   gimple *g = gsi_stmt (*iter);
3264   location_t loc = gimple_location (g);
3265   HOST_WIDE_INT flag = tree_to_shwi (gimple_call_arg (g, 0));
3266   bool is_poison = ((asan_mark_flags)flag) == ASAN_MARK_POISON;
3267 
3268   tree base = gimple_call_arg (g, 1);
3269   gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
3270   tree decl = TREE_OPERAND (base, 0);
3271 
3272   /* For a nested function, we can have: ASAN_MARK (2, &FRAME.2.fp_input, 4) */
3273   if (TREE_CODE (decl) == COMPONENT_REF
3274       && DECL_NONLOCAL_FRAME (TREE_OPERAND (decl, 0)))
3275     decl = TREE_OPERAND (decl, 0);
3276 
3277   gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
3278 
3279   if (is_poison)
3280     {
3281       if (asan_handled_variables == NULL)
3282 	asan_handled_variables = new hash_set<tree> (16);
3283       asan_handled_variables->add (decl);
3284     }
3285   tree len = gimple_call_arg (g, 2);
3286 
3287   gcc_assert (tree_fits_shwi_p (len));
3288   unsigned HOST_WIDE_INT size_in_bytes = tree_to_shwi (len);
3289   gcc_assert (size_in_bytes);
3290 
3291   g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3292 			   NOP_EXPR, base);
3293   gimple_set_location (g, loc);
3294   gsi_replace (iter, g, false);
3295   tree base_addr = gimple_assign_lhs (g);
3296 
3297   /* Emit the shadow memory stores directly if size_in_bytes is small.  */
3298   if (size_in_bytes
3299       <= (unsigned)param_use_after_scope_direct_emission_threshold)
3300     {
3301       const unsigned HOST_WIDE_INT shadow_size
3302 	= shadow_mem_size (size_in_bytes);
3303       const unsigned int shadow_align
3304 	= (get_pointer_alignment (base) / BITS_PER_UNIT) >> ASAN_SHADOW_SHIFT;
3305 
3306       tree shadow = build_shadow_mem_access (iter, loc, base_addr,
3307 					     shadow_ptr_types[0], true);
3308 
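      /* For instance, size_in_bytes == 19 with ASAN_SHADOW_GRANULARITY == 8
	 gives shadow_size == 3; when unpoisoning, the first two shadow bytes
	 become 0 and the third becomes 3, marking only the first 3 bytes of
	 the last granule as addressable.  */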
3309       for (unsigned HOST_WIDE_INT offset = 0; offset < shadow_size;)
3310 	{
3311 	  unsigned size = 1;
3312 	  if (shadow_size - offset >= 4
3313 	      && (!STRICT_ALIGNMENT || shadow_align >= 4))
3314 	    size = 4;
3315 	  else if (shadow_size - offset >= 2
3316 		   && (!STRICT_ALIGNMENT || shadow_align >= 2))
3317 	    size = 2;
3318 
3319 	  unsigned HOST_WIDE_INT last_chunk_size = 0;
3320 	  unsigned HOST_WIDE_INT s = (offset + size) * ASAN_SHADOW_GRANULARITY;
3321 	  if (s > size_in_bytes)
3322 	    last_chunk_size = ASAN_SHADOW_GRANULARITY - (s - size_in_bytes);
3323 
3324 	  asan_store_shadow_bytes (iter, loc, shadow, offset, is_poison,
3325 				   size, last_chunk_size);
3326 	  offset += size;
3327 	}
3328     }
3329   else
3330     {
3331       g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3332 			       NOP_EXPR, len);
3333       gimple_set_location (g, loc);
3334       gsi_insert_before (iter, g, GSI_SAME_STMT);
3335       tree sz_arg = gimple_assign_lhs (g);
3336 
3337       tree fun
3338 	= builtin_decl_implicit (is_poison ? BUILT_IN_ASAN_POISON_STACK_MEMORY
3339 				 : BUILT_IN_ASAN_UNPOISON_STACK_MEMORY);
3340       g = gimple_build_call (fun, 2, base_addr, sz_arg);
3341       gimple_set_location (g, loc);
3342       gsi_insert_after (iter, g, GSI_NEW_STMT);
3343     }
3344 
3345   return false;
3346 }
3347 
3348 /* Expand the ASAN_{LOAD,STORE} builtins.  */
3349 
3350 bool
3351 asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
3352 {
3353   gimple *g = gsi_stmt (*iter);
3354   location_t loc = gimple_location (g);
3355   bool recover_p;
3356   if (flag_sanitize & SANITIZE_USER_ADDRESS)
3357     recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
3358   else
3359     recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
3360 
3361   HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
3362   gcc_assert (flags < ASAN_CHECK_LAST);
3363   bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
3364   bool is_store = (flags & ASAN_CHECK_STORE) != 0;
3365   bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
3366 
3367   tree base = gimple_call_arg (g, 1);
3368   tree len = gimple_call_arg (g, 2);
3369   HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
3370 
3371   HOST_WIDE_INT size_in_bytes
3372     = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
3373 
3374   if (use_calls)
3375     {
3376       /* Instrument using callbacks.  */
3377       gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3378 				      NOP_EXPR, base);
3379       gimple_set_location (g, loc);
3380       gsi_insert_before (iter, g, GSI_SAME_STMT);
3381       tree base_addr = gimple_assign_lhs (g);
3382 
3383       int nargs;
3384       tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
3385       if (nargs == 1)
3386 	g = gimple_build_call (fun, 1, base_addr);
3387       else
3388 	{
3389 	  gcc_assert (nargs == 2);
3390 	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3391 				   NOP_EXPR, len);
3392 	  gimple_set_location (g, loc);
3393 	  gsi_insert_before (iter, g, GSI_SAME_STMT);
3394 	  tree sz_arg = gimple_assign_lhs (g);
3395 	  g = gimple_build_call (fun, nargs, base_addr, sz_arg);
3396 	}
3397       gimple_set_location (g, loc);
3398       gsi_replace (iter, g, false);
3399       return false;
3400     }
3401 
3402   HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
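  /* When the length is not known at compile time, real_size_in_bytes is 1:
     the check built below covers the first byte of the region, and a second
     check for its last byte is added further down.  */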
3403 
3404   tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
3405   tree shadow_type = TREE_TYPE (shadow_ptr_type);
3406 
3407   gimple_stmt_iterator gsi = *iter;
3408 
3409   if (!is_non_zero_len)
3410     {
3411       /* The length of the memory area to asan-protect is not known to be
3412 	 non-zero.  Let's guard the generated instrumentation code
3413 	 like:
3414 
3415 	 if (len != 0)
3416 	   {
3417 	     //asan instrumentation code goes here.
3418 	   }
3419 	 // fallthrough instructions, starting with *ITER.  */
3420 
3421       g = gimple_build_cond (NE_EXPR,
3422 			    len,
3423 			    build_int_cst (TREE_TYPE (len), 0),
3424 			    NULL_TREE, NULL_TREE);
3425       gimple_set_location (g, loc);
3426 
3427       basic_block then_bb, fallthrough_bb;
3428       insert_if_then_before_iter (as_a <gcond *> (g), iter,
3429 				  /*then_more_likely_p=*/true,
3430 				  &then_bb, &fallthrough_bb);
3431       /* Note that fallthrough_bb starts with the statement that was
3432 	pointed to by ITER.  */
3433 
3434       /* The 'then block' of the 'if (len != 0)' condition is where
3435 	we'll generate the asan instrumentation code now.  */
3436       gsi = gsi_last_bb (then_bb);
3437     }
3438 
3439   /* Get an iterator on the point where we can add the condition
3440      statement for the instrumentation.  */
3441   basic_block then_bb, else_bb;
3442   gsi = create_cond_insert_point (&gsi, /*before_p*/false,
3443 				  /*then_more_likely_p=*/false,
3444 				  /*create_then_fallthru_edge*/recover_p,
3445 				  &then_bb,
3446 				  &else_bb);
3447 
3448   g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3449 			   NOP_EXPR, base);
3450   gimple_set_location (g, loc);
3451   gsi_insert_before (&gsi, g, GSI_NEW_STMT);
3452   tree base_addr = gimple_assign_lhs (g);
3453 
3454   tree t = NULL_TREE;
3455   if (real_size_in_bytes >= 8)
3456     {
3457       tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
3458 					     shadow_ptr_type);
3459       t = shadow;
3460     }
3461   else
3462     {
3463       /* Slow path for 1, 2 and 4 byte accesses.  */
3464       /* Test (shadow != 0)
3465 	 & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow).  */
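      /* E.g. for a 4-byte access with (base_addr & 7) == 4 and a shadow
	 value of 6 (only the first 6 bytes of that granule addressable),
	 4 + 3 >= 6 holds and the access is reported.  */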
3466       tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
3467 					     shadow_ptr_type);
3468       gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
3469       gimple_seq seq = NULL;
3470       gimple_seq_add_stmt (&seq, shadow_test);
3471       /* An aligned (>= 8 bytes) access can test just
3472 	 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
3473 	 to be 0.  */
3474       if (align < 8)
3475 	{
3476 	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
3477 						   base_addr, 7));
3478 	  gimple_seq_add_stmt (&seq,
3479 			       build_type_cast (shadow_type,
3480 						gimple_seq_last (seq)));
3481 	  if (real_size_in_bytes > 1)
3482 	    gimple_seq_add_stmt (&seq,
3483 				 build_assign (PLUS_EXPR,
3484 					       gimple_seq_last (seq),
3485 					       real_size_in_bytes - 1));
3486 	  t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
3487 	}
3488       else
3489 	t = build_int_cst (shadow_type, real_size_in_bytes - 1);
3490       gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
3491       gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
3492 					       gimple_seq_last (seq)));
3493       t = gimple_assign_lhs (gimple_seq_last (seq));
3494       gimple_seq_set_location (seq, loc);
3495       gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
3496 
3497       /* For non-constant, misaligned or otherwise weird access sizes,
3498        check first and last byte.  */
3499       if (size_in_bytes == -1)
3500 	{
3501 	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3502 				   MINUS_EXPR, len,
3503 				   build_int_cst (pointer_sized_int_node, 1));
3504 	  gimple_set_location (g, loc);
3505 	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3506 	  tree last = gimple_assign_lhs (g);
3507 	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3508 				   PLUS_EXPR, base_addr, last);
3509 	  gimple_set_location (g, loc);
3510 	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3511 	  tree base_end_addr = gimple_assign_lhs (g);
3512 
3513 	  tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
3514 						 shadow_ptr_type);
3515 	  gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
3516 	  gimple_seq seq = NULL;
3517 	  gimple_seq_add_stmt (&seq, shadow_test);
3518 	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
3519 						   base_end_addr, 7));
3520 	  gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
3521 						      gimple_seq_last (seq)));
3522 	  gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
3523 						   gimple_seq_last (seq),
3524 						   shadow));
3525 	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
3526 						   gimple_seq_last (seq)));
3527 	  gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
3528 						   gimple_seq_last (seq)));
3529 	  t = gimple_assign_lhs (gimple_seq_last (seq));
3530 	  gimple_seq_set_location (seq, loc);
3531 	  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
3532 	}
3533     }
3534 
3535   g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
3536 			 NULL_TREE, NULL_TREE);
3537   gimple_set_location (g, loc);
3538   gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3539 
3540   /* Generate call to the run-time library (e.g. __asan_report_load8).  */
3541   gsi = gsi_start_bb (then_bb);
3542   int nargs;
3543   tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
3544   g = gimple_build_call (fun, nargs, base_addr, len);
3545   gimple_set_location (g, loc);
3546   gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3547 
3548   gsi_remove (iter, true);
3549   *iter = gsi_start_bb (else_bb);
3550 
3551   return true;
3552 }
3553 
3554 /* Create ASAN shadow variable for a VAR_DECL which has been rewritten
3555    into SSA.  Already seen VAR_DECLs are stored in SHADOW_VARS_MAPPING.  */
3556 
3557 static tree
3558 create_asan_shadow_var (tree var_decl,
3559 			hash_map<tree, tree> &shadow_vars_mapping)
3560 {
3561   tree *slot = shadow_vars_mapping.get (var_decl);
3562   if (slot == NULL)
3563     {
3564       tree shadow_var = copy_node (var_decl);
3565 
3566       copy_body_data id;
3567       memset (&id, 0, sizeof (copy_body_data));
3568       id.src_fn = id.dst_fn = current_function_decl;
3569       copy_decl_for_dup_finish (&id, var_decl, shadow_var);
3570 
3571       DECL_ARTIFICIAL (shadow_var) = 1;
3572       DECL_IGNORED_P (shadow_var) = 1;
3573       DECL_SEEN_IN_BIND_EXPR_P (shadow_var) = 0;
3574       gimple_add_tmp_var (shadow_var);
3575 
3576       shadow_vars_mapping.put (var_decl, shadow_var);
3577       return shadow_var;
3578     }
3579   else
3580     return *slot;
3581 }
3582 
3583 /* Expand ASAN_POISON ifn.  */
3584 
3585 bool
3586 asan_expand_poison_ifn (gimple_stmt_iterator *iter,
3587 			bool *need_commit_edge_insert,
3588 			hash_map<tree, tree> &shadow_vars_mapping)
3589 {
3590   gimple *g = gsi_stmt (*iter);
3591   tree poisoned_var = gimple_call_lhs (g);
3592   if (!poisoned_var || has_zero_uses (poisoned_var))
3593     {
3594       gsi_remove (iter, true);
3595       return true;
3596     }
3597 
3598   if (SSA_NAME_VAR (poisoned_var) == NULL_TREE)
3599     SET_SSA_NAME_VAR_OR_IDENTIFIER (poisoned_var,
3600 				    create_tmp_var (TREE_TYPE (poisoned_var)));
3601 
3602   tree shadow_var = create_asan_shadow_var (SSA_NAME_VAR (poisoned_var),
3603 					    shadow_vars_mapping);
3604 
3605   bool recover_p;
3606   if (flag_sanitize & SANITIZE_USER_ADDRESS)
3607     recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
3608   else
3609     recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
3610   tree size = DECL_SIZE_UNIT (shadow_var);
3611   gimple *poison_call
3612     = gimple_build_call_internal (IFN_ASAN_MARK, 3,
3613 				  build_int_cst (integer_type_node,
3614 						 ASAN_MARK_POISON),
3615 				  build_fold_addr_expr (shadow_var), size);
3616 
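  /* Below, each real (non-debug) use of POISONED_VAR gets a call to the
     matching __asan_report_* routine on the shadow variable: stores are
     replaced by the call, other uses (including PHI edges) have it inserted
     before them, so a use after scope is diagnosed at run time.  */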
3617   gimple *use;
3618   imm_use_iterator imm_iter;
3619   FOR_EACH_IMM_USE_STMT (use, imm_iter, poisoned_var)
3620     {
3621       if (is_gimple_debug (use))
3622 	continue;
3623 
3624       int nargs;
3625       bool store_p = gimple_call_internal_p (use, IFN_ASAN_POISON_USE);
3626       tree fun = report_error_func (store_p, recover_p, tree_to_uhwi (size),
3627 				    &nargs);
3628 
3629       gcall *call = gimple_build_call (fun, 1,
3630 				       build_fold_addr_expr (shadow_var));
3631       gimple_set_location (call, gimple_location (use));
3632       gimple *call_to_insert = call;
3633 
3634       /* The USE can be a gimple PHI node.  If so, insert the call on
3635 	 all edges leading to the PHI node.  */
3636       if (is_a <gphi *> (use))
3637 	{
3638 	  gphi *phi = dyn_cast<gphi *> (use);
3639 	  for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
3640 	    if (gimple_phi_arg_def (phi, i) == poisoned_var)
3641 	      {
3642 		edge e = gimple_phi_arg_edge (phi, i);
3643 
3644 		/* Do not insert on an edge we can't split.  */
3645 		if (e->flags & EDGE_ABNORMAL)
3646 		  continue;
3647 
3648 		if (call_to_insert == NULL)
3649 		  call_to_insert = gimple_copy (call);
3650 
3651 		gsi_insert_seq_on_edge (e, call_to_insert);
3652 		*need_commit_edge_insert = true;
3653 		call_to_insert = NULL;
3654 	      }
3655 	}
3656       else
3657 	{
3658 	  gimple_stmt_iterator gsi = gsi_for_stmt (use);
3659 	  if (store_p)
3660 	    gsi_replace (&gsi, call, true);
3661 	  else
3662 	    gsi_insert_before (&gsi, call, GSI_NEW_STMT);
3663 	}
3664     }
3665 
3666   SSA_NAME_IS_DEFAULT_DEF (poisoned_var) = true;
3667   SSA_NAME_DEF_STMT (poisoned_var) = gimple_build_nop ();
3668   gsi_replace (iter, poison_call, false);
3669 
3670   return true;
3671 }
3672 
3673 /* Instrument the current function.  */
3674 
3675 static unsigned int
3676 asan_instrument (void)
3677 {
3678   if (shadow_ptr_types[0] == NULL_TREE)
3679     asan_init_shadow_ptr_types ();
3680   transform_statements ();
3681   last_alloca_addr = NULL_TREE;
3682   return 0;
3683 }
3684 
3685 static bool
3686 gate_asan (void)
3687 {
3688   return sanitize_flags_p (SANITIZE_ADDRESS);
3689 }
3690 
3691 namespace {
3692 
3693 const pass_data pass_data_asan =
3694 {
3695   GIMPLE_PASS, /* type */
3696   "asan", /* name */
3697   OPTGROUP_NONE, /* optinfo_flags */
3698   TV_NONE, /* tv_id */
3699   ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
3700   0, /* properties_provided */
3701   0, /* properties_destroyed */
3702   0, /* todo_flags_start */
3703   TODO_update_ssa, /* todo_flags_finish */
3704 };
3705 
3706 class pass_asan : public gimple_opt_pass
3707 {
3708 public:
3709   pass_asan (gcc::context *ctxt)
3710     : gimple_opt_pass (pass_data_asan, ctxt)
3711   {}
3712 
3713   /* opt_pass methods: */
3714   opt_pass * clone () { return new pass_asan (m_ctxt); }
3715   virtual bool gate (function *) { return gate_asan (); }
3716   virtual unsigned int execute (function *) { return asan_instrument (); }
3717 
3718 }; // class pass_asan
3719 
3720 } // anon namespace
3721 
3722 gimple_opt_pass *
3723 make_pass_asan (gcc::context *ctxt)
3724 {
3725   return new pass_asan (ctxt);
3726 }
3727 
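/* The "asan0" pass below is the same instrumentation as "asan" but gates on
   !optimize, so functions are still instrumented when compiling without
   optimization.  */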
3728 namespace {
3729 
3730 const pass_data pass_data_asan_O0 =
3731 {
3732   GIMPLE_PASS, /* type */
3733   "asan0", /* name */
3734   OPTGROUP_NONE, /* optinfo_flags */
3735   TV_NONE, /* tv_id */
3736   ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
3737   0, /* properties_provided */
3738   0, /* properties_destroyed */
3739   0, /* todo_flags_start */
3740   TODO_update_ssa, /* todo_flags_finish */
3741 };
3742 
3743 class pass_asan_O0 : public gimple_opt_pass
3744 {
3745 public:
3746   pass_asan_O0 (gcc::context *ctxt)
3747     : gimple_opt_pass (pass_data_asan_O0, ctxt)
3748   {}
3749 
3750   /* opt_pass methods: */
3751   virtual bool gate (function *) { return !optimize && gate_asan (); }
3752   virtual unsigned int execute (function *) { return asan_instrument (); }
3753 
3754 }; // class pass_asan_O0
3755 
3756 } // anon namespace
3757 
3758 gimple_opt_pass *
3759 make_pass_asan_O0 (gcc::context *ctxt)
3760 {
3761   return new pass_asan_O0 (ctxt);
3762 }
3763 
3764 #include "gt-asan.h"
3765