xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/asan.c (revision 7d62b00eb9ad855ffcd7da46b41e23feb5476fac)
1 /* AddressSanitizer, a fast memory error detector.
2    Copyright (C) 2012-2020 Free Software Foundation, Inc.
3    Contributed by Kostya Serebryany <kcc@google.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "target.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "cfghooks.h"
31 #include "alloc-pool.h"
32 #include "tree-pass.h"
33 #include "memmodel.h"
34 #include "tm_p.h"
35 #include "ssa.h"
36 #include "stringpool.h"
37 #include "tree-ssanames.h"
38 #include "optabs.h"
39 #include "emit-rtl.h"
40 #include "cgraph.h"
41 #include "gimple-pretty-print.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "cfganal.h"
45 #include "gimplify.h"
46 #include "gimple-iterator.h"
47 #include "varasm.h"
48 #include "stor-layout.h"
49 #include "tree-iterator.h"
50 #include "stringpool.h"
51 #include "attribs.h"
52 #include "asan.h"
53 #include "dojump.h"
54 #include "explow.h"
55 #include "expr.h"
56 #include "output.h"
57 #include "langhooks.h"
58 #include "cfgloop.h"
59 #include "gimple-builder.h"
60 #include "gimple-fold.h"
61 #include "ubsan.h"
62 #include "builtins.h"
63 #include "fnmatch.h"
64 #include "tree-inline.h"
65 #include "tree-ssa.h"
66 #include "tree-eh.h"
67 
68 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
69    with <2x slowdown on average.
70 
71    The tool consists of two parts:
72    an instrumentation module (this file) and a run-time library.
73    The instrumentation module adds a run-time check before every memory insn.
74      For an 8- or 16-byte load accessing address X:
75        ShadowAddr = (X >> 3) + Offset
76        ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
77        if (ShadowValue)
78 	 __asan_report_load8(X);
79      For a load of N bytes (N=1, 2 or 4) from address X:
80        ShadowAddr = (X >> 3) + Offset
81        ShadowValue = *(char*)ShadowAddr;
82        if (ShadowValue)
83 	 if ((X & 7) + N - 1 > ShadowValue)
84 	   __asan_report_loadN(X);
85    Stores are instrumented similarly, but using __asan_report_storeN functions.
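   As a rough illustration, assuming the x86_64 shadow offset 0x7fff8000 (the
   real value comes from targetm.asan_shadow_offset () or the
   -fasan-shadow-offset= option), a 4-byte load from X = 0x602010 is checked
   roughly as:
       ShadowAddr = (0x602010 >> 3) + 0x7fff8000 = 0x800b8402
       ShadowValue = *(char*)ShadowAddr;
       if (ShadowValue && ((0x602010 & 7) + 4 - 1 > ShadowValue))
	 __asan_report_load4(0x602010);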
86    A call to __asan_init_vN() is inserted into the list of module CTORs.
87    N is the version number of the AddressSanitizer API. The changes between the
88    API versions are listed in libsanitizer/asan/asan_interface_internal.h.
89 
90    The run-time library redefines malloc (so that redzones are inserted around
91    the allocated memory) and free (so that reuse of freed memory is delayed),
92    and provides the __asan_report* and __asan_init_vN functions.
93 
94    Read more:
95    http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
96 
97    The current implementation supports detection of out-of-bounds and
98    use-after-free in the heap, on the stack and for global variables.
99 
100    [Protection of stack variables]
101 
102    To understand how detection of out-of-bounds and use-after-free works
103    for stack variables, let's look at this example on x86_64 where the
104    stack grows downward:
105 
106      int
107      foo ()
108      {
109        char a[23] = {0};
110        int b[2] = {0};
111 
112        a[5] = 1;
113        b[1] = 2;
114 
115        return a[5] + b[1];
116      }
117 
118    For this function, the stack protected by asan will be organized as
119    follows, from the top of the stack to the bottom:
120 
121    Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
122 
123    Slot 2/ [8 bytes of red zone, added to the space of 'a' to make
124 	   the next slot 32-byte aligned; this one is called the Partial
125 	   Redzone; this 32-byte alignment is an asan constraint]
126 
127    Slot 3/ [24 bytes for variable 'a']
128 
129    Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
130 
131    Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
132 
133    Slot 6/ [8 bytes for variable 'b']
134 
135    Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
136 	    'LEFT RedZone']
137 
138    The 32 bytes of LEFT red zone at the bottom of the stack can be
139    decomposed as follows:
140 
141      1/ The first 8 bytes contain a magical asan number that is always
142      0x41B58AB3.
143 
144      2/ The following 8 bytes contain a pointer to a string (to be
145      parsed at runtime by the asan run-time library), whose format is
146      the following:
147 
148       "<num-of-variables-on-the-stack>
149       (<offset-in-bytes-of-variable> <space> <length-of-var-in-bytes> <space>
150       <length-of-var-name> <space> <var-name[:line]> ){n} "
151 
152 	where '(...){n}' means the content inside the parentheses occurs 'n'
153 	times, with 'n' being the number of variables on the stack.
154 
155      3/ The following 8 bytes contain the PC of the current function which
156      will be used by the run-time library to print an error message.
157 
158      4/ The following 8 bytes are reserved for internal use by the run-time.
159 
160    The shadow memory for that stack layout is going to look like this:
161 
162      - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
163        The F1 byte pattern is a magic number called
164        ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
165        the memory for that shadow byte is part of the LEFT red zone
166        intended to sit at the bottom of the variables on the stack.
167 
168      - content of shadow memory 8 bytes for slots 6 and 5:
169        0xF4F4F400.  The F4 byte pattern is a magic number
170        called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
171        memory region for this shadow byte is a PARTIAL red zone
172        intended to pad a variable A, so that the slot following
173        {A,padding} is 32-byte aligned.
174 
175        Note that the fact that the least significant byte of this
176        shadow memory content is 00 means that the corresponding
177        8 bytes of memory (which is the memory of variable 'b')
178        are addressable.
179 
180      - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
181        The F2 byte pattern is a magic number called
182        ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
183        region for this shadow byte is a MIDDLE red zone intended to
184        sit between two 32-byte aligned slots of {variable,padding}.
185 
186      - content of shadow memory 8 bytes for slot 3 and 2:
187        0xF4000000.  This represents the concatenation of
188        variable 'a' and the partial red zone following it, like what we
189        had for variable 'b'.  The least significant 3 bytes being 00
190        means that the 24 bytes of variable 'a' are addressable.
191 
192      - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
193        The F3 byte pattern is a magic number called
194        ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
195        region for this shadow byte is a RIGHT red zone intended to sit
196        at the top of the variables on the stack.
197 
198    Note that the real variable layout is done in expand_used_vars in
199    cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
200    stack variables as well as the different red zones, emits some
201    prologue code to populate the shadow memory so as to poison (mark as
202    non-accessible) the regions of the red zones and mark the regions of
203    stack variables as accessible, and emits some epilogue code to
204    un-poison (mark as accessible) the regions of red zones right before
205    the function exits.
206 
207    [Protection of global variables]
208 
209    The basic idea is to insert a red zone between two global variables
210    and install a constructor function that calls the asan runtime to do
211    the populating of the relevant shadow memory regions at load time.
212 
213    So the global variables are laid out so as to insert a red zone between
214    them.  The size of the red zones is chosen so that each variable starts on a
215    32-byte boundary.
216 
217    Then a constructor function is installed so that, for each global
218    variable, it calls the runtime asan library function
219    __asan_register_globals with an instance of this type:
220 
221      struct __asan_global
222      {
223        // Address of the beginning of the global variable.
224        const void *__beg;
225 
226        // Initial size of the global variable.
227        uptr __size;
228 
229        // Size of the global variable + size of the red zone.  This
230        //   size is 32-byte aligned.
231        uptr __size_with_redzone;
232 
233        // Name of the global variable.
234        const void *__name;
235 
236        // Name of the module where the global variable is declared.
237        const void *__module_name;
238 
239        // 1 if it has dynamic initialization, 0 otherwise.
240        uptr __has_dynamic_init;
241 
242        // A pointer to struct that contains source location, could be NULL.
243        __asan_global_source_location *__location;
244      }
245 
246    A destructor function that calls the runtime asan library function
247    __asan_unregister_globals is also installed.  */
248 
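/* As a schematic sketch (names and values here are purely illustrative), for
   a global like

       int g;

   the effect is roughly as if the compiler had emitted

       int g;      // followed by enough red zone for a 32-byte boundary
       static struct __asan_global g_desc
	 = { &g, sizeof (g), 32, "g", "file.c", 0, NULL };
       // a module constructor calls __asan_register_globals (&g_desc, 1)
       // and a module destructor calls __asan_unregister_globals (&g_desc, 1).

   The real metadata is built elsewhere in this file by the global
   registration code.  */
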
249 static unsigned HOST_WIDE_INT asan_shadow_offset_value;
250 static bool asan_shadow_offset_computed;
251 static vec<char *> sanitized_sections;
252 static tree last_alloca_addr;
253 
254 /* Set of variable declarations that are going to be guarded by
255    use-after-scope sanitizer.  */
256 
257 hash_set<tree> *asan_handled_variables = NULL;
258 
259 hash_set <tree> *asan_used_labels = NULL;
260 
261 /* Sets shadow offset to value in string VAL.  */
262 
263 bool
264 set_asan_shadow_offset (const char *val)
265 {
266   char *endp;
267 
268   errno = 0;
269 #ifdef HAVE_LONG_LONG
270   asan_shadow_offset_value = strtoull (val, &endp, 0);
271 #else
272   asan_shadow_offset_value = strtoul (val, &endp, 0);
273 #endif
274   if (!(*val != '\0' && *endp == '\0' && errno == 0))
275     return false;
276 
277   asan_shadow_offset_computed = true;
278 
279   return true;
280 }
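
/* For example, a kernel build typically passes something like
   -fsanitize=kernel-address -fasan-shadow-offset=0xdffffc0000000000, in which
   case the option machinery hands the string "0xdffffc0000000000" to this
   function and the strtoull call above parses it; the particular offset value
   here is just an illustration.  */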
281 
282 /* Set list of user-defined sections that need to be sanitized.  */
283 
284 void
285 set_sanitized_sections (const char *sections)
286 {
287   char *pat;
288   unsigned i;
289   FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
290     free (pat);
291   sanitized_sections.truncate (0);
292 
293   for (const char *s = sections; *s; )
294     {
295       const char *end;
296       for (end = s; *end && *end != ','; ++end);
297       size_t len = end - s;
298       sanitized_sections.safe_push (xstrndup (s, len));
299       s = *end ? end + 1 : end;
300     }
301 }
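
/* For example, a command line containing something like
   -fsanitize-sections=.mysec,.data.special* reaches this function with
   SECTIONS == ".mysec,.data.special*"; the string is split on commas and each
   piece is later matched against section names with fnmatch in
   section_sanitized_p.  (The section names above are purely illustrative.)  */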
302 
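/* Return true iff STMT is a call to the internal function IFN_ASAN_MARK whose
   first argument equals FLAG.  */
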
303 bool
304 asan_mark_p (gimple *stmt, enum asan_mark_flags flag)
305 {
306   return (gimple_call_internal_p (stmt, IFN_ASAN_MARK)
307 	  && tree_to_uhwi (gimple_call_arg (stmt, 0)) == flag);
308 }
309 
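/* Return true if stack variables should be instrumented, i.e. when address
   sanitization is enabled and the asan-stack parameter is nonzero.  */
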
310 bool
311 asan_sanitize_stack_p (void)
312 {
313   return (sanitize_flags_p (SANITIZE_ADDRESS) && param_asan_stack);
314 }
315 
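/* Return true if alloca/VLA allocations should be protected with redzones,
   i.e. when stack sanitization is enabled and the allocas protection
   parameter (param_asan_protect_allocas) is nonzero.  */
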
316 bool
317 asan_sanitize_allocas_p (void)
318 {
319   return (asan_sanitize_stack_p () && param_asan_protect_allocas);
320 }
321 
322 /* Checks whether section SEC should be sanitized.  */
323 
324 static bool
325 section_sanitized_p (const char *sec)
326 {
327   char *pat;
328   unsigned i;
329   FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
330     if (fnmatch (pat, sec, FNM_PERIOD) == 0)
331       return true;
332   return false;
333 }
334 
335 /* Returns Asan shadow offset.  */
336 
337 static unsigned HOST_WIDE_INT
338 asan_shadow_offset ()
339 {
340   if (!asan_shadow_offset_computed)
341     {
342       asan_shadow_offset_computed = true;
343       asan_shadow_offset_value = targetm.asan_shadow_offset ();
344     }
345   return asan_shadow_offset_value;
346 }
347 
348 /* Returns whether the Asan shadow offset has been set.  */
349 bool
350 asan_shadow_offset_set_p ()
351 {
352   return asan_shadow_offset_computed;
353 }
354 
355 alias_set_type asan_shadow_set = -1;
356 
357 /* Pointer types to 1, 2 or 4 byte integers in shadow memory.  A separate
358    alias set is used for all shadow memory accesses.  */
359 static GTY(()) tree shadow_ptr_types[3];
360 
361 /* Decl for __asan_option_detect_stack_use_after_return.  */
362 static GTY(()) tree asan_detect_stack_use_after_return;
363 
364 /* Hashtable support for memory references used by gimple
365    statements.  */
366 
367 /* This type represents a reference to a memory region.  */
368 struct asan_mem_ref
369 {
370   /* The expression of the beginning of the memory region.  */
371   tree start;
372 
373   /* The size of the access.  */
374   HOST_WIDE_INT access_size;
375 };
376 
377 object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");
378 
379 /* Initializes an instance of asan_mem_ref.  */
380 
381 static void
382 asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
383 {
384   ref->start = start;
385   ref->access_size = access_size;
386 }
387 
388 /* Allocates memory for an instance of asan_mem_ref from the memory
389    pool asan_mem_ref_pool and initializes it.
390    START is the address of (or the expression pointing to) the
391    beginning of memory reference.  ACCESS_SIZE is the size of the
392    access to the referenced memory.  */
393 
394 static asan_mem_ref*
395 asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
396 {
397   asan_mem_ref *ref = asan_mem_ref_pool.allocate ();
398 
399   asan_mem_ref_init (ref, start, access_size);
400   return ref;
401 }
402 
403 /* This builds and returns a pointer to the end of the memory region
404    that starts at START and is of length LEN.  */
405 
406 tree
407 asan_mem_ref_get_end (tree start, tree len)
408 {
409   if (len == NULL_TREE || integer_zerop (len))
410     return start;
411 
412   if (!ptrofftype_p (len))
413     len = convert_to_ptrofftype (len);
414 
415   return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
416 }
417 
418 /*  Return a tree expression that represents the end of the referenced
419     memory region.  Beware that this function can actually build a new
420     tree expression.  */
421 
422 tree
423 asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
424 {
425   return asan_mem_ref_get_end (ref->start, len);
426 }
427 
428 struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
429 {
430   static inline hashval_t hash (const asan_mem_ref *);
431   static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
432 };
433 
434 /* Hash a memory reference.  */
435 
436 inline hashval_t
437 asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
438 {
439   return iterative_hash_expr (mem_ref->start, 0);
440 }
441 
442 /* Compare two memory references.  We accept the length of either
443    memory reference to be NULL_TREE.  */
444 
445 inline bool
446 asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
447 			    const asan_mem_ref *m2)
448 {
449   return operand_equal_p (m1->start, m2->start, 0);
450 }
451 
452 static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
453 
454 /* Returns a reference to the hash table containing memory references.
455    This function ensures that the hash table is created.  Note that
456    this hash table is updated by the function
457    update_mem_ref_hash_table.  */
458 
459 static hash_table<asan_mem_ref_hasher> *
460 get_mem_ref_hash_table ()
461 {
462   if (!asan_mem_ref_ht)
463     asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);
464 
465   return asan_mem_ref_ht;
466 }
467 
468 /* Clear all entries from the memory references hash table.  */
469 
470 static void
471 empty_mem_ref_hash_table ()
472 {
473   if (asan_mem_ref_ht)
474     asan_mem_ref_ht->empty ();
475 }
476 
477 /* Free the memory references hash table.  */
478 
479 static void
480 free_mem_ref_resources ()
481 {
482   delete asan_mem_ref_ht;
483   asan_mem_ref_ht = NULL;
484 
485   asan_mem_ref_pool.release ();
486 }
487 
488 /* Return true iff the memory reference REF has been instrumented.  */
489 
490 static bool
491 has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
492 {
493   asan_mem_ref r;
494   asan_mem_ref_init (&r, ref, access_size);
495 
496   asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
497   return saved_ref && saved_ref->access_size >= access_size;
498 }
499 
500 /* Return true iff the memory reference REF has been instrumented.  */
501 
502 static bool
503 has_mem_ref_been_instrumented (const asan_mem_ref *ref)
504 {
505   return has_mem_ref_been_instrumented (ref->start, ref->access_size);
506 }
507 
508 /* Return true iff access to memory region starting at REF and of
509    length LEN has been instrumented.  */
510 
511 static bool
512 has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
513 {
514   HOST_WIDE_INT size_in_bytes
515     = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
516 
517   return size_in_bytes != -1
518     && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
519 }
520 
521 /* Set REF to the memory reference present in a gimple assignment
522    ASSIGNMENT.  Return true upon successful completion, false
523    otherwise.  */
524 
525 static bool
526 get_mem_ref_of_assignment (const gassign *assignment,
527 			   asan_mem_ref *ref,
528 			   bool *ref_is_store)
529 {
530   gcc_assert (gimple_assign_single_p (assignment));
531 
532   if (gimple_store_p (assignment)
533       && !gimple_clobber_p (assignment))
534     {
535       ref->start = gimple_assign_lhs (assignment);
536       *ref_is_store = true;
537     }
538   else if (gimple_assign_load_p (assignment))
539     {
540       ref->start = gimple_assign_rhs1 (assignment);
541       *ref_is_store = false;
542     }
543   else
544     return false;
545 
546   ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
547   return true;
548 }
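
/* For instance, for a gimple assignment like  x_1 = *p_2(D);  the code above
   sets REF->start to the load *p_2(D), REF->access_size to the size of the
   loaded type in bytes, and *REF_IS_STORE to false; for  *p_2(D) = x_1;  the
   store destination is recorded instead and *REF_IS_STORE is true.  */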
549 
550 /* Return address of last allocated dynamic alloca.  */
551 
552 static tree
553 get_last_alloca_addr ()
554 {
555   if (last_alloca_addr)
556     return last_alloca_addr;
557 
558   last_alloca_addr = create_tmp_reg (ptr_type_node, "last_alloca_addr");
559   gassign *g = gimple_build_assign (last_alloca_addr, null_pointer_node);
560   edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun));
561   gsi_insert_on_edge_immediate (e, g);
562   return last_alloca_addr;
563 }
564 
565 /* Insert __asan_allocas_unpoison (top, bottom) call before
566    __builtin_stack_restore (new_sp) call.
567    The pseudocode of this routine should look like this:
568      top = last_alloca_addr;
569      bot = new_sp;
570      __asan_allocas_unpoison (top, bot);
571      last_alloca_addr = new_sp;
572      __builtin_stack_restore (new_sp);
573    In general, we can't use new_sp as bot parameter because on some
574    architectures SP has a non-zero offset from the dynamic stack area.  Moreover, on
575    some architectures this offset (STACK_DYNAMIC_OFFSET) becomes known for each
576    particular function only after all callees have been expanded to RTL.
577    The most noticeable example is PowerPC{,64}, see
578    http://refspecs.linuxfoundation.org/ELF/ppc64/PPC-elf64abi.html#DYNAM-STACK.
579    To overcome the issue we use the following trick: pass new_sp as a second
580    parameter to __asan_allocas_unpoison and rewrite it during expansion with
581    new_sp + (virtual_dynamic_stack_rtx - sp) later in
582    expand_asan_emit_allocas_unpoison function.  */
583 
584 static void
585 handle_builtin_stack_restore (gcall *call, gimple_stmt_iterator *iter)
586 {
587   if (!iter || !asan_sanitize_allocas_p ())
588     return;
589 
590   tree last_alloca = get_last_alloca_addr ();
591   tree restored_stack = gimple_call_arg (call, 0);
592   tree fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCAS_UNPOISON);
593   gimple *g = gimple_build_call (fn, 2, last_alloca, restored_stack);
594   gsi_insert_before (iter, g, GSI_SAME_STMT);
595   g = gimple_build_assign (last_alloca, restored_stack);
596   gsi_insert_before (iter, g, GSI_SAME_STMT);
597 }
598 
599 /* Deploy and poison redzones around a __builtin_alloca call.  To do this, we
600    replace the call with another one with changed parameters and
601    replace all its uses with the new address, so
602        addr = __builtin_alloca (old_size, align);
603    is replaced by
604        left_redzone_size = max (align, ASAN_RED_ZONE_SIZE);
605    The following two statements are optimized out if we know that
606    old_size & (ASAN_RED_ZONE_SIZE - 1) == 0, i.e. the alloca doesn't need a
607    partial redzone.
608        misalign = old_size & (ASAN_RED_ZONE_SIZE - 1);
609        partial_redzone_size = ASAN_RED_ZONE_SIZE - misalign;
610        right_redzone_size = ASAN_RED_ZONE_SIZE;
611        additional_size = left_redzone_size + partial_redzone_size +
612                          right_redzone_size;
613        new_size = old_size + additional_size;
614        new_alloca = __builtin_alloca (new_size, max (align, 32))
615        __asan_alloca_poison (new_alloca, old_size)
616        addr = new_alloca + max (align, ASAN_RED_ZONE_SIZE);
617        last_alloca_addr = new_alloca;
618    ADDITIONAL_SIZE is added to make the new memory allocation contain not only
619    the requested memory, but also the left, partial and right redzones, as well
620    as some additional space required by alignment.  */
621 
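/* As a worked (hypothetical) example, assuming ASAN_RED_ZONE_SIZE == 32 and
   the default alignment,  addr = __builtin_alloca (41)  gives
       left_redzone_size    = 32
       misalign             = 41 & 31 = 9
       partial_redzone_size = 32 - 9 = 23
       right_redzone_size   = 32
       new_size             = 41 + 32 + 23 + 32 = 128
   so the replacement call allocates 128 bytes, the caller's pointer becomes
   new_alloca + 32, and the redzones around the 41 usable bytes are
   poisoned.  */
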
622 static void
623 handle_builtin_alloca (gcall *call, gimple_stmt_iterator *iter)
624 {
625   if (!iter || !asan_sanitize_allocas_p ())
626     return;
627 
628   gassign *g;
629   gcall *gg;
630   const HOST_WIDE_INT redzone_mask = ASAN_RED_ZONE_SIZE - 1;
631 
632   tree last_alloca = get_last_alloca_addr ();
633   tree callee = gimple_call_fndecl (call);
634   tree lhs = gimple_call_lhs (call);
635   tree old_size = gimple_call_arg (call, 0);
636   tree ptr_type = lhs ? TREE_TYPE (lhs) : ptr_type_node;
637   tree partial_size = NULL_TREE;
638   unsigned int align
639     = DECL_FUNCTION_CODE (callee) == BUILT_IN_ALLOCA
640       ? 0 : tree_to_uhwi (gimple_call_arg (call, 1));
641 
642   bool throws = false;
643   edge e = NULL;
644   if (stmt_can_throw_internal (cfun, call))
645     {
646       if (!lhs)
647 	return;
648       throws = true;
649       e = find_fallthru_edge (gsi_bb (*iter)->succs);
650     }
651 
652   /* If ALIGN > ASAN_RED_ZONE_SIZE, we embed left redzone into first ALIGN
653      bytes of allocated space.  Otherwise, align alloca to ASAN_RED_ZONE_SIZE
654      manually.  */
655   align = MAX (align, ASAN_RED_ZONE_SIZE * BITS_PER_UNIT);
656 
657   tree alloca_rz_mask = build_int_cst (size_type_node, redzone_mask);
658   tree redzone_size = build_int_cst (size_type_node, ASAN_RED_ZONE_SIZE);
659 
660   /* Extract lower bits from old_size.  */
661   wide_int size_nonzero_bits = get_nonzero_bits (old_size);
662   wide_int rz_mask
663     = wi::uhwi (redzone_mask, wi::get_precision (size_nonzero_bits));
664   wide_int old_size_lower_bits = wi::bit_and (size_nonzero_bits, rz_mask);
665 
666   /* If alloca size is aligned to ASAN_RED_ZONE_SIZE, we don't need partial
667      redzone.  Otherwise, compute its size here.  */
668   if (wi::ne_p (old_size_lower_bits, 0))
669     {
670       /* misalign = size & (ASAN_RED_ZONE_SIZE - 1)
671          partial_size = ASAN_RED_ZONE_SIZE - misalign.  */
672       g = gimple_build_assign (make_ssa_name (size_type_node, NULL),
673 			       BIT_AND_EXPR, old_size, alloca_rz_mask);
674       gsi_insert_before (iter, g, GSI_SAME_STMT);
675       tree misalign = gimple_assign_lhs (g);
676       g = gimple_build_assign (make_ssa_name (size_type_node, NULL), MINUS_EXPR,
677 			       redzone_size, misalign);
678       gsi_insert_before (iter, g, GSI_SAME_STMT);
679       partial_size = gimple_assign_lhs (g);
680     }
681 
682   /* additional_size = align + ASAN_RED_ZONE_SIZE.  */
683   tree additional_size = build_int_cst (size_type_node, align / BITS_PER_UNIT
684 							+ ASAN_RED_ZONE_SIZE);
685   /* If the alloca has a partial redzone, include it in additional_size too.  */
686   if (partial_size)
687     {
688       /* additional_size += partial_size.  */
689       g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR,
690 			       partial_size, additional_size);
691       gsi_insert_before (iter, g, GSI_SAME_STMT);
692       additional_size = gimple_assign_lhs (g);
693     }
694 
695   /* new_size = old_size + additional_size.  */
696   g = gimple_build_assign (make_ssa_name (size_type_node), PLUS_EXPR, old_size,
697 			   additional_size);
698   gsi_insert_before (iter, g, GSI_SAME_STMT);
699   tree new_size = gimple_assign_lhs (g);
700 
701   /* Build new __builtin_alloca call:
702        new_alloca_with_rz = __builtin_alloca (new_size, align).  */
703   tree fn = builtin_decl_implicit (BUILT_IN_ALLOCA_WITH_ALIGN);
704   gg = gimple_build_call (fn, 2, new_size,
705 			  build_int_cst (size_type_node, align));
706   tree new_alloca_with_rz = make_ssa_name (ptr_type, gg);
707   gimple_call_set_lhs (gg, new_alloca_with_rz);
708   if (throws)
709     {
710       gimple_call_set_lhs (call, NULL);
711       gsi_replace (iter, gg, true);
712     }
713   else
714     gsi_insert_before (iter, gg, GSI_SAME_STMT);
715 
716   /* new_alloca = new_alloca_with_rz + align.  */
717   g = gimple_build_assign (make_ssa_name (ptr_type), POINTER_PLUS_EXPR,
718 			   new_alloca_with_rz,
719 			   build_int_cst (size_type_node,
720 					  align / BITS_PER_UNIT));
721   gimple_stmt_iterator gsi = gsi_none ();
722   if (throws)
723     {
724       gsi_insert_on_edge_immediate (e, g);
725       gsi = gsi_for_stmt (g);
726     }
727   else
728     gsi_insert_before (iter, g, GSI_SAME_STMT);
729   tree new_alloca = gimple_assign_lhs (g);
730 
731   /* Poison newly created alloca redzones:
732       __asan_alloca_poison (new_alloca, old_size).  */
733   fn = builtin_decl_implicit (BUILT_IN_ASAN_ALLOCA_POISON);
734   gg = gimple_build_call (fn, 2, new_alloca, old_size);
735   if (throws)
736     gsi_insert_after (&gsi, gg, GSI_NEW_STMT);
737   else
738     gsi_insert_before (iter, gg, GSI_SAME_STMT);
739 
740   /* Save new_alloca_with_rz value into last_alloca to use it during
741      allocas unpoisoning.  */
742   g = gimple_build_assign (last_alloca, new_alloca_with_rz);
743   if (throws)
744     gsi_insert_after (&gsi, g, GSI_NEW_STMT);
745   else
746     gsi_insert_before (iter, g, GSI_SAME_STMT);
747 
748   /* Finally, replace old alloca ptr with NEW_ALLOCA.  */
749   if (throws)
750     {
751       g = gimple_build_assign (lhs, new_alloca);
752       gsi_insert_after (&gsi, g, GSI_NEW_STMT);
753     }
754   else
755     replace_call_with_value (iter, new_alloca);
756 }
757 
758 /* Return the memory references contained in a gimple statement
759    representing a builtin call that has to do with memory access.  */
760 
761 static bool
762 get_mem_refs_of_builtin_call (gcall *call,
763 			      asan_mem_ref *src0,
764 			      tree *src0_len,
765 			      bool *src0_is_store,
766 			      asan_mem_ref *src1,
767 			      tree *src1_len,
768 			      bool *src1_is_store,
769 			      asan_mem_ref *dst,
770 			      tree *dst_len,
771 			      bool *dst_is_store,
772 			      bool *dest_is_deref,
773 			      bool *intercepted_p,
774 			      gimple_stmt_iterator *iter = NULL)
775 {
776   gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
777 
778   tree callee = gimple_call_fndecl (call);
779   tree source0 = NULL_TREE, source1 = NULL_TREE,
780     dest = NULL_TREE, len = NULL_TREE;
781   bool is_store = true, got_reference_p = false;
782   HOST_WIDE_INT access_size = 1;
783 
784   *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));
785 
786   switch (DECL_FUNCTION_CODE (callee))
787     {
788       /* (s, s, n) style memops.  */
789     case BUILT_IN_BCMP:
790     case BUILT_IN_MEMCMP:
791       source0 = gimple_call_arg (call, 0);
792       source1 = gimple_call_arg (call, 1);
793       len = gimple_call_arg (call, 2);
794       break;
795 
796       /* (src, dest, n) style memops.  */
797     case BUILT_IN_BCOPY:
798       source0 = gimple_call_arg (call, 0);
799       dest = gimple_call_arg (call, 1);
800       len = gimple_call_arg (call, 2);
801       break;
802 
803       /* (dest, src, n) style memops.  */
804     case BUILT_IN_MEMCPY:
805     case BUILT_IN_MEMCPY_CHK:
806     case BUILT_IN_MEMMOVE:
807     case BUILT_IN_MEMMOVE_CHK:
808     case BUILT_IN_MEMPCPY:
809     case BUILT_IN_MEMPCPY_CHK:
810       dest = gimple_call_arg (call, 0);
811       source0 = gimple_call_arg (call, 1);
812       len = gimple_call_arg (call, 2);
813       break;
814 
815       /* (dest, n) style memops.  */
816     case BUILT_IN_BZERO:
817       dest = gimple_call_arg (call, 0);
818       len = gimple_call_arg (call, 1);
819       break;
820 
821       /* (dest, x, n) style memops.  */
822     case BUILT_IN_MEMSET:
823     case BUILT_IN_MEMSET_CHK:
824       dest = gimple_call_arg (call, 0);
825       len = gimple_call_arg (call, 2);
826       break;
827 
828     case BUILT_IN_STRLEN:
829       source0 = gimple_call_arg (call, 0);
830       len = gimple_call_lhs (call);
831       break;
832 
833     case BUILT_IN_STACK_RESTORE:
834       handle_builtin_stack_restore (call, iter);
835       break;
836 
837     CASE_BUILT_IN_ALLOCA:
838       handle_builtin_alloca (call, iter);
839       break;
840     /* And now the __atomic* and __sync builtins.
841        These are handled differently from the classical memory
842        access builtins above.  */
843 
844     case BUILT_IN_ATOMIC_LOAD_1:
845       is_store = false;
846       /* FALLTHRU */
847     case BUILT_IN_SYNC_FETCH_AND_ADD_1:
848     case BUILT_IN_SYNC_FETCH_AND_SUB_1:
849     case BUILT_IN_SYNC_FETCH_AND_OR_1:
850     case BUILT_IN_SYNC_FETCH_AND_AND_1:
851     case BUILT_IN_SYNC_FETCH_AND_XOR_1:
852     case BUILT_IN_SYNC_FETCH_AND_NAND_1:
853     case BUILT_IN_SYNC_ADD_AND_FETCH_1:
854     case BUILT_IN_SYNC_SUB_AND_FETCH_1:
855     case BUILT_IN_SYNC_OR_AND_FETCH_1:
856     case BUILT_IN_SYNC_AND_AND_FETCH_1:
857     case BUILT_IN_SYNC_XOR_AND_FETCH_1:
858     case BUILT_IN_SYNC_NAND_AND_FETCH_1:
859     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
860     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
861     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
862     case BUILT_IN_SYNC_LOCK_RELEASE_1:
863     case BUILT_IN_ATOMIC_EXCHANGE_1:
864     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
865     case BUILT_IN_ATOMIC_STORE_1:
866     case BUILT_IN_ATOMIC_ADD_FETCH_1:
867     case BUILT_IN_ATOMIC_SUB_FETCH_1:
868     case BUILT_IN_ATOMIC_AND_FETCH_1:
869     case BUILT_IN_ATOMIC_NAND_FETCH_1:
870     case BUILT_IN_ATOMIC_XOR_FETCH_1:
871     case BUILT_IN_ATOMIC_OR_FETCH_1:
872     case BUILT_IN_ATOMIC_FETCH_ADD_1:
873     case BUILT_IN_ATOMIC_FETCH_SUB_1:
874     case BUILT_IN_ATOMIC_FETCH_AND_1:
875     case BUILT_IN_ATOMIC_FETCH_NAND_1:
876     case BUILT_IN_ATOMIC_FETCH_XOR_1:
877     case BUILT_IN_ATOMIC_FETCH_OR_1:
878       access_size = 1;
879       goto do_atomic;
880 
881     case BUILT_IN_ATOMIC_LOAD_2:
882       is_store = false;
883       /* FALLTHRU */
884     case BUILT_IN_SYNC_FETCH_AND_ADD_2:
885     case BUILT_IN_SYNC_FETCH_AND_SUB_2:
886     case BUILT_IN_SYNC_FETCH_AND_OR_2:
887     case BUILT_IN_SYNC_FETCH_AND_AND_2:
888     case BUILT_IN_SYNC_FETCH_AND_XOR_2:
889     case BUILT_IN_SYNC_FETCH_AND_NAND_2:
890     case BUILT_IN_SYNC_ADD_AND_FETCH_2:
891     case BUILT_IN_SYNC_SUB_AND_FETCH_2:
892     case BUILT_IN_SYNC_OR_AND_FETCH_2:
893     case BUILT_IN_SYNC_AND_AND_FETCH_2:
894     case BUILT_IN_SYNC_XOR_AND_FETCH_2:
895     case BUILT_IN_SYNC_NAND_AND_FETCH_2:
896     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
897     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
898     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
899     case BUILT_IN_SYNC_LOCK_RELEASE_2:
900     case BUILT_IN_ATOMIC_EXCHANGE_2:
901     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
902     case BUILT_IN_ATOMIC_STORE_2:
903     case BUILT_IN_ATOMIC_ADD_FETCH_2:
904     case BUILT_IN_ATOMIC_SUB_FETCH_2:
905     case BUILT_IN_ATOMIC_AND_FETCH_2:
906     case BUILT_IN_ATOMIC_NAND_FETCH_2:
907     case BUILT_IN_ATOMIC_XOR_FETCH_2:
908     case BUILT_IN_ATOMIC_OR_FETCH_2:
909     case BUILT_IN_ATOMIC_FETCH_ADD_2:
910     case BUILT_IN_ATOMIC_FETCH_SUB_2:
911     case BUILT_IN_ATOMIC_FETCH_AND_2:
912     case BUILT_IN_ATOMIC_FETCH_NAND_2:
913     case BUILT_IN_ATOMIC_FETCH_XOR_2:
914     case BUILT_IN_ATOMIC_FETCH_OR_2:
915       access_size = 2;
916       goto do_atomic;
917 
918     case BUILT_IN_ATOMIC_LOAD_4:
919       is_store = false;
920       /* FALLTHRU */
921     case BUILT_IN_SYNC_FETCH_AND_ADD_4:
922     case BUILT_IN_SYNC_FETCH_AND_SUB_4:
923     case BUILT_IN_SYNC_FETCH_AND_OR_4:
924     case BUILT_IN_SYNC_FETCH_AND_AND_4:
925     case BUILT_IN_SYNC_FETCH_AND_XOR_4:
926     case BUILT_IN_SYNC_FETCH_AND_NAND_4:
927     case BUILT_IN_SYNC_ADD_AND_FETCH_4:
928     case BUILT_IN_SYNC_SUB_AND_FETCH_4:
929     case BUILT_IN_SYNC_OR_AND_FETCH_4:
930     case BUILT_IN_SYNC_AND_AND_FETCH_4:
931     case BUILT_IN_SYNC_XOR_AND_FETCH_4:
932     case BUILT_IN_SYNC_NAND_AND_FETCH_4:
933     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
934     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
935     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
936     case BUILT_IN_SYNC_LOCK_RELEASE_4:
937     case BUILT_IN_ATOMIC_EXCHANGE_4:
938     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
939     case BUILT_IN_ATOMIC_STORE_4:
940     case BUILT_IN_ATOMIC_ADD_FETCH_4:
941     case BUILT_IN_ATOMIC_SUB_FETCH_4:
942     case BUILT_IN_ATOMIC_AND_FETCH_4:
943     case BUILT_IN_ATOMIC_NAND_FETCH_4:
944     case BUILT_IN_ATOMIC_XOR_FETCH_4:
945     case BUILT_IN_ATOMIC_OR_FETCH_4:
946     case BUILT_IN_ATOMIC_FETCH_ADD_4:
947     case BUILT_IN_ATOMIC_FETCH_SUB_4:
948     case BUILT_IN_ATOMIC_FETCH_AND_4:
949     case BUILT_IN_ATOMIC_FETCH_NAND_4:
950     case BUILT_IN_ATOMIC_FETCH_XOR_4:
951     case BUILT_IN_ATOMIC_FETCH_OR_4:
952       access_size = 4;
953       goto do_atomic;
954 
955     case BUILT_IN_ATOMIC_LOAD_8:
956       is_store = false;
957       /* FALLTHRU */
958     case BUILT_IN_SYNC_FETCH_AND_ADD_8:
959     case BUILT_IN_SYNC_FETCH_AND_SUB_8:
960     case BUILT_IN_SYNC_FETCH_AND_OR_8:
961     case BUILT_IN_SYNC_FETCH_AND_AND_8:
962     case BUILT_IN_SYNC_FETCH_AND_XOR_8:
963     case BUILT_IN_SYNC_FETCH_AND_NAND_8:
964     case BUILT_IN_SYNC_ADD_AND_FETCH_8:
965     case BUILT_IN_SYNC_SUB_AND_FETCH_8:
966     case BUILT_IN_SYNC_OR_AND_FETCH_8:
967     case BUILT_IN_SYNC_AND_AND_FETCH_8:
968     case BUILT_IN_SYNC_XOR_AND_FETCH_8:
969     case BUILT_IN_SYNC_NAND_AND_FETCH_8:
970     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
971     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
972     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
973     case BUILT_IN_SYNC_LOCK_RELEASE_8:
974     case BUILT_IN_ATOMIC_EXCHANGE_8:
975     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
976     case BUILT_IN_ATOMIC_STORE_8:
977     case BUILT_IN_ATOMIC_ADD_FETCH_8:
978     case BUILT_IN_ATOMIC_SUB_FETCH_8:
979     case BUILT_IN_ATOMIC_AND_FETCH_8:
980     case BUILT_IN_ATOMIC_NAND_FETCH_8:
981     case BUILT_IN_ATOMIC_XOR_FETCH_8:
982     case BUILT_IN_ATOMIC_OR_FETCH_8:
983     case BUILT_IN_ATOMIC_FETCH_ADD_8:
984     case BUILT_IN_ATOMIC_FETCH_SUB_8:
985     case BUILT_IN_ATOMIC_FETCH_AND_8:
986     case BUILT_IN_ATOMIC_FETCH_NAND_8:
987     case BUILT_IN_ATOMIC_FETCH_XOR_8:
988     case BUILT_IN_ATOMIC_FETCH_OR_8:
989       access_size = 8;
990       goto do_atomic;
991 
992     case BUILT_IN_ATOMIC_LOAD_16:
993       is_store = false;
994       /* FALLTHRU */
995     case BUILT_IN_SYNC_FETCH_AND_ADD_16:
996     case BUILT_IN_SYNC_FETCH_AND_SUB_16:
997     case BUILT_IN_SYNC_FETCH_AND_OR_16:
998     case BUILT_IN_SYNC_FETCH_AND_AND_16:
999     case BUILT_IN_SYNC_FETCH_AND_XOR_16:
1000     case BUILT_IN_SYNC_FETCH_AND_NAND_16:
1001     case BUILT_IN_SYNC_ADD_AND_FETCH_16:
1002     case BUILT_IN_SYNC_SUB_AND_FETCH_16:
1003     case BUILT_IN_SYNC_OR_AND_FETCH_16:
1004     case BUILT_IN_SYNC_AND_AND_FETCH_16:
1005     case BUILT_IN_SYNC_XOR_AND_FETCH_16:
1006     case BUILT_IN_SYNC_NAND_AND_FETCH_16:
1007     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
1008     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
1009     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
1010     case BUILT_IN_SYNC_LOCK_RELEASE_16:
1011     case BUILT_IN_ATOMIC_EXCHANGE_16:
1012     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
1013     case BUILT_IN_ATOMIC_STORE_16:
1014     case BUILT_IN_ATOMIC_ADD_FETCH_16:
1015     case BUILT_IN_ATOMIC_SUB_FETCH_16:
1016     case BUILT_IN_ATOMIC_AND_FETCH_16:
1017     case BUILT_IN_ATOMIC_NAND_FETCH_16:
1018     case BUILT_IN_ATOMIC_XOR_FETCH_16:
1019     case BUILT_IN_ATOMIC_OR_FETCH_16:
1020     case BUILT_IN_ATOMIC_FETCH_ADD_16:
1021     case BUILT_IN_ATOMIC_FETCH_SUB_16:
1022     case BUILT_IN_ATOMIC_FETCH_AND_16:
1023     case BUILT_IN_ATOMIC_FETCH_NAND_16:
1024     case BUILT_IN_ATOMIC_FETCH_XOR_16:
1025     case BUILT_IN_ATOMIC_FETCH_OR_16:
1026       access_size = 16;
1027       /* FALLTHRU */
1028     do_atomic:
1029       {
1030 	dest = gimple_call_arg (call, 0);
1031 	/* DEST represents the address of a memory location.
1032 	   instrument_derefs wants the memory location, so let's
1033 	   dereference the address DEST before handing it to
1034 	   instrument_derefs.  */
1035 	tree type = build_nonstandard_integer_type (access_size
1036 						    * BITS_PER_UNIT, 1);
1037 	dest = build2 (MEM_REF, type, dest,
1038 		       build_int_cst (build_pointer_type (char_type_node), 0));
1039 	break;
1040       }
1041 
1042     default:
1043       /* The other memory access builtins are not instrumented in this
1044 	 function because they either don't have any length parameter,
1045 	 or their length parameter is just a limit.  */
1046       break;
1047     }
1048 
1049   if (len != NULL_TREE)
1050     {
1051       if (source0 != NULL_TREE)
1052 	{
1053 	  src0->start = source0;
1054 	  src0->access_size = access_size;
1055 	  *src0_len = len;
1056 	  *src0_is_store = false;
1057 	}
1058 
1059       if (source1 != NULL_TREE)
1060 	{
1061 	  src1->start = source1;
1062 	  src1->access_size = access_size;
1063 	  *src1_len = len;
1064 	  *src1_is_store = false;
1065 	}
1066 
1067       if (dest != NULL_TREE)
1068 	{
1069 	  dst->start = dest;
1070 	  dst->access_size = access_size;
1071 	  *dst_len = len;
1072 	  *dst_is_store = true;
1073 	}
1074 
1075       got_reference_p = true;
1076     }
1077   else if (dest)
1078     {
1079       dst->start = dest;
1080       dst->access_size = access_size;
1081       *dst_len = NULL_TREE;
1082       *dst_is_store = is_store;
1083       *dest_is_deref = true;
1084       got_reference_p = true;
1085     }
1086 
1087   return got_reference_p;
1088 }
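
/* For example, for  __builtin_memcpy (d, s, n)  the code above records
   DST->start = d, SRC0->start = s, *DST_LEN = *SRC0_LEN = n,
   *DST_IS_STORE = true and *SRC0_IS_STORE = false; for  __atomic_load_4 (p, m)
   it instead records a 4-byte dereference of P in DST, with
   *DEST_IS_DEREF = true and *DST_IS_STORE = false.  */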
1089 
1090 /* Return true iff a given gimple statement has been instrumented.
1091    Note that the statement is "defined" by the memory references it
1092    contains.  */
1093 
1094 static bool
1095 has_stmt_been_instrumented_p (gimple *stmt)
1096 {
1097   if (gimple_assign_single_p (stmt))
1098     {
1099       bool r_is_store;
1100       asan_mem_ref r;
1101       asan_mem_ref_init (&r, NULL, 1);
1102 
1103       if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
1104 				     &r_is_store))
1105 	return has_mem_ref_been_instrumented (&r);
1106     }
1107   else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
1108     {
1109       asan_mem_ref src0, src1, dest;
1110       asan_mem_ref_init (&src0, NULL, 1);
1111       asan_mem_ref_init (&src1, NULL, 1);
1112       asan_mem_ref_init (&dest, NULL, 1);
1113 
1114       tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1115       bool src0_is_store = false, src1_is_store = false,
1116 	dest_is_store = false, dest_is_deref = false, intercepted_p = true;
1117       if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
1118 					&src0, &src0_len, &src0_is_store,
1119 					&src1, &src1_len, &src1_is_store,
1120 					&dest, &dest_len, &dest_is_store,
1121 					&dest_is_deref, &intercepted_p))
1122 	{
1123 	  if (src0.start != NULL_TREE
1124 	      && !has_mem_ref_been_instrumented (&src0, src0_len))
1125 	    return false;
1126 
1127 	  if (src1.start != NULL_TREE
1128 	      && !has_mem_ref_been_instrumented (&src1, src1_len))
1129 	    return false;
1130 
1131 	  if (dest.start != NULL_TREE
1132 	      && !has_mem_ref_been_instrumented (&dest, dest_len))
1133 	    return false;
1134 
1135 	  return true;
1136 	}
1137     }
1138   else if (is_gimple_call (stmt) && gimple_store_p (stmt))
1139     {
1140       asan_mem_ref r;
1141       asan_mem_ref_init (&r, NULL, 1);
1142 
1143       r.start = gimple_call_lhs (stmt);
1144       r.access_size = int_size_in_bytes (TREE_TYPE (r.start));
1145       return has_mem_ref_been_instrumented (&r);
1146     }
1147 
1148   return false;
1149 }
1150 
1151 /*  Insert a memory reference into the hash table.  */
1152 
1153 static void
1154 update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
1155 {
1156   hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();
1157 
1158   asan_mem_ref r;
1159   asan_mem_ref_init (&r, ref, access_size);
1160 
1161   asan_mem_ref **slot = ht->find_slot (&r, INSERT);
1162   if (*slot == NULL || (*slot)->access_size < access_size)
1163     *slot = asan_mem_ref_new (ref, access_size);
1164 }
1165 
1166 /* Initialize shadow_ptr_types array.  */
1167 
1168 static void
1169 asan_init_shadow_ptr_types (void)
1170 {
1171   asan_shadow_set = new_alias_set ();
1172   tree types[3] = { signed_char_type_node, short_integer_type_node,
1173 		    integer_type_node };
1174 
1175   for (unsigned i = 0; i < 3; i++)
1176     {
1177       shadow_ptr_types[i] = build_distinct_type_copy (types[i]);
1178       TYPE_ALIAS_SET (shadow_ptr_types[i]) = asan_shadow_set;
1179       shadow_ptr_types[i] = build_pointer_type (shadow_ptr_types[i]);
1180     }
1181 
1182   initialize_sanitizer_builtins ();
1183 }
1184 
1185 /* Create ADDR_EXPR of STRING_CST with the PP pretty printer text.  */
1186 
1187 static tree
1188 asan_pp_string (pretty_printer *pp)
1189 {
1190   const char *buf = pp_formatted_text (pp);
1191   size_t len = strlen (buf);
1192   tree ret = build_string (len + 1, buf);
1193   TREE_TYPE (ret)
1194     = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
1195 			build_index_type (size_int (len)));
1196   TREE_READONLY (ret) = 1;
1197   TREE_STATIC (ret) = 1;
1198   return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
1199 }
1200 
1201 /* Clear LEN bytes of shadow memory at SHADOW_MEM.  We can't emit a library
1202    call here though.  */
1203 
1204 static void
1205 asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
1206 {
1207   rtx_insn *insn, *insns, *jump;
1208   rtx_code_label *top_label;
1209   rtx end, addr, tmp;
1210 
1211   gcc_assert ((len & 3) == 0);
1212   start_sequence ();
1213   clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
1214   insns = get_insns ();
1215   end_sequence ();
1216   for (insn = insns; insn; insn = NEXT_INSN (insn))
1217     if (CALL_P (insn))
1218       break;
1219   if (insn == NULL_RTX)
1220     {
1221       emit_insn (insns);
1222       return;
1223     }
1224 
1225   top_label = gen_label_rtx ();
1226   addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
1227   shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
1228   end = force_reg (Pmode, plus_constant (Pmode, addr, len));
1229   emit_label (top_label);
1230 
1231   emit_move_insn (shadow_mem, const0_rtx);
1232   tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
1233 			     true, OPTAB_LIB_WIDEN);
1234   if (tmp != addr)
1235     emit_move_insn (addr, tmp);
1236   emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
1237   jump = get_last_insn ();
1238   gcc_assert (JUMP_P (jump));
1239   add_reg_br_prob_note (jump,
1240 			profile_probability::guessed_always ()
1241 			   .apply_scale (80, 100));
1242 }
1243 
1244 void
1245 asan_function_start (void)
1246 {
1247   section *fnsec = function_section (current_function_decl);
1248   switch_to_section (fnsec);
1249   ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
1250 			 current_function_funcdef_no);
1251 }
1252 
1253 /* Return number of shadow bytes that are occupied by a local variable
1254    of SIZE bytes.  */
1255 
1256 static unsigned HOST_WIDE_INT
1257 shadow_mem_size (unsigned HOST_WIDE_INT size)
1258 {
1259   /* It must be possible to align stack variables to granularity
1260      of shadow memory.  */
1261   gcc_assert (BITS_PER_UNIT
1262 	      * ASAN_SHADOW_GRANULARITY <= MAX_SUPPORTED_STACK_ALIGNMENT);
1263 
1264   return ROUND_UP (size, ASAN_SHADOW_GRANULARITY) / ASAN_SHADOW_GRANULARITY;
1265 }
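
/* For instance, with ASAN_SHADOW_GRANULARITY == 8 a 17-byte variable needs
   ROUND_UP (17, 8) / 8 == 3 shadow bytes.  */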
1266 
1267 /* Always emit 4 bytes at a time.  */
1268 #define RZ_BUFFER_SIZE 4
1269 
1270 /* ASAN redzone buffer container that handles emission of shadow bytes.  */
1271 class asan_redzone_buffer
1272 {
1273 public:
1274   /* Constructor.  */
1275   asan_redzone_buffer (rtx shadow_mem, HOST_WIDE_INT prev_offset):
1276     m_shadow_mem (shadow_mem), m_prev_offset (prev_offset),
1277     m_original_offset (prev_offset), m_shadow_bytes (RZ_BUFFER_SIZE)
1278   {}
1279 
1280   /* Emit VALUE shadow byte at a given OFFSET.  */
1281   void emit_redzone_byte (HOST_WIDE_INT offset, unsigned char value);
1282 
1283   /* Emit the content of the buffer as RTL.  */
1284   void flush_redzone_payload (void);
1285 
1286 private:
1287   /* Flush if the content of the buffer is full
1288      (equal to RZ_BUFFER_SIZE).  */
1289   void flush_if_full (void);
1290 
1291   /* Memory where we last emitted a redzone payload.  */
1292   rtx m_shadow_mem;
1293 
1294   /* Relative offset where we last emitted a redzone payload.  */
1295   HOST_WIDE_INT m_prev_offset;
1296 
1297   /* Relative original offset.  Used for checking only.  */
1298   HOST_WIDE_INT m_original_offset;
1299 
1300 public:
1301   /* Buffer with redzone payload.  */
1302   auto_vec<unsigned char> m_shadow_bytes;
1303 };
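
/* A typical use looks roughly like this (offsets and values are
   illustrative):

     asan_redzone_buffer rz_buffer (shadow_mem, base_offset);
     rz_buffer.emit_redzone_byte (off, 0xf1);
     rz_buffer.emit_redzone_byte (off + 8, 0xf1);
     rz_buffer.emit_redzone_byte (off + 16, 0xf1);
     rz_buffer.emit_redzone_byte (off + 24, 0xf1);

   With ASAN_SHADOW_GRANULARITY == 8 the four consecutive bytes fill the
   4-byte buffer and flush_if_full emits them as a single SImode store of
   0xf1f1f1f1 to shadow memory (see asan_emit_stack_protection below for the
   real user).  */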
1304 
1305 /* Emit VALUE shadow byte at a given OFFSET.  */
1306 
1307 void
1308 asan_redzone_buffer::emit_redzone_byte (HOST_WIDE_INT offset,
1309 					unsigned char value)
1310 {
1311   gcc_assert ((offset & (ASAN_SHADOW_GRANULARITY - 1)) == 0);
1312   gcc_assert (offset >= m_prev_offset);
1313 
1314   HOST_WIDE_INT off
1315     = m_prev_offset + ASAN_SHADOW_GRANULARITY * m_shadow_bytes.length ();
1316   if (off == offset)
1317     /* Consecutive shadow memory byte.  */;
1318   else if (offset < m_prev_offset + (HOST_WIDE_INT) (ASAN_SHADOW_GRANULARITY
1319 						     * RZ_BUFFER_SIZE)
1320 	   && !m_shadow_bytes.is_empty ())
1321     {
1322       /* Shadow memory byte with a small gap.  */
1323       for (; off < offset; off += ASAN_SHADOW_GRANULARITY)
1324 	m_shadow_bytes.safe_push (0);
1325     }
1326   else
1327     {
1328       if (!m_shadow_bytes.is_empty ())
1329 	flush_redzone_payload ();
1330 
1331       /* Maybe start earlier in order to use an aligned store.  */
1332       HOST_WIDE_INT align = (offset - m_prev_offset) % ASAN_RED_ZONE_SIZE;
1333       if (align)
1334 	{
1335 	  offset -= align;
1336 	  for (unsigned i = 0; i < align / BITS_PER_UNIT; i++)
1337 	    m_shadow_bytes.safe_push (0);
1338 	}
1339 
1340       /* Adjust m_prev_offset and m_shadow_mem.  */
1341       HOST_WIDE_INT diff = offset - m_prev_offset;
1342       m_shadow_mem = adjust_address (m_shadow_mem, VOIDmode,
1343 				     diff >> ASAN_SHADOW_SHIFT);
1344       m_prev_offset = offset;
1345     }
1346   m_shadow_bytes.safe_push (value);
1347   flush_if_full ();
1348 }
1349 
1350 /* Emit the content of the buffer as RTL.  */
1351 
1352 void
1353 asan_redzone_buffer::flush_redzone_payload (void)
1354 {
1355   gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
1356 
1357   if (m_shadow_bytes.is_empty ())
1358     return;
1359 
1360   /* Be sure we always emit to an aligned address.  */
1361   gcc_assert (((m_prev_offset - m_original_offset)
1362 	      & (ASAN_RED_ZONE_SIZE - 1)) == 0);
1363 
1364   /* Fill it to RZ_BUFFER_SIZE bytes with zeros if needed.  */
1365   unsigned l = m_shadow_bytes.length ();
1366   for (unsigned i = 0; i <= RZ_BUFFER_SIZE - l; i++)
1367     m_shadow_bytes.safe_push (0);
1368 
1369   if (dump_file && (dump_flags & TDF_DETAILS))
1370     fprintf (dump_file,
1371 	     "Flushing rzbuffer at offset %" PRId64 " with: ", m_prev_offset);
1372 
1373   unsigned HOST_WIDE_INT val = 0;
1374   for (unsigned i = 0; i < RZ_BUFFER_SIZE; i++)
1375     {
1376       unsigned char v
1377 	= m_shadow_bytes[BYTES_BIG_ENDIAN ? RZ_BUFFER_SIZE - i - 1 : i];
1378       val |= (unsigned HOST_WIDE_INT)v << (BITS_PER_UNIT * i);
1379       if (dump_file && (dump_flags & TDF_DETAILS))
1380 	fprintf (dump_file, "%02x ", v);
1381     }
1382 
1383   if (dump_file && (dump_flags & TDF_DETAILS))
1384     fprintf (dump_file, "\n");
1385 
1386   rtx c = gen_int_mode (val, SImode);
1387   m_shadow_mem = adjust_address (m_shadow_mem, SImode, 0);
1388   emit_move_insn (m_shadow_mem, c);
1389   m_shadow_bytes.truncate (0);
1390 }
1391 
1392 /* Flush if the content of the buffer is full
1393    (equal to RZ_BUFFER_SIZE).  */
1394 
1395 void
1396 asan_redzone_buffer::flush_if_full (void)
1397 {
1398   if (m_shadow_bytes.length () == RZ_BUFFER_SIZE)
1399     flush_redzone_payload ();
1400 }
1401 
1402 /* Insert code to protect stack vars.  The prologue sequence should be emitted
1403    directly, the epilogue sequence is returned.  BASE is the register holding the
1404    stack base, to which the offsets in the OFFSETS array are relative.  The OFFSETS
1405    array contains pairs of offsets in reverse order, always the end offset
1406    of some gap that needs protection followed by starting offset,
1407    and DECLS is an array of representative decls for each var partition.
1408    LENGTH is the length of the OFFSETS array, DECLS array is LENGTH / 2 - 1
1409    elements long (OFFSETS includes the gap before the first variable as well
1410    as gaps after each stack variable).  PBASE is, if non-NULL, some pseudo
1411    register on which the stack vars' DECL_RTLs are based.  Either BASE should be
1412    assigned to PBASE, when not doing use-after-return protection, or a
1413    corresponding address based on the __asan_stack_malloc* return value.  */
1414 
1415 rtx_insn *
1416 asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
1417 			    HOST_WIDE_INT *offsets, tree *decls, int length)
1418 {
1419   rtx shadow_base, shadow_mem, ret, mem, orig_base;
1420   rtx_code_label *lab;
1421   rtx_insn *insns;
1422   char buf[32];
1423   HOST_WIDE_INT base_offset = offsets[length - 1];
1424   HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
1425   HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
1426   HOST_WIDE_INT last_offset, last_size, last_size_aligned;
1427   int l;
1428   unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
1429   tree str_cst, decl, id;
1430   int use_after_return_class = -1;
1431 
1432   if (shadow_ptr_types[0] == NULL_TREE)
1433     asan_init_shadow_ptr_types ();
1434 
1435   expanded_location cfun_xloc
1436     = expand_location (DECL_SOURCE_LOCATION (current_function_decl));
1437 
1438   /* First of all, prepare the description string.  */
1439   pretty_printer asan_pp;
1440 
1441   pp_decimal_int (&asan_pp, length / 2 - 1);
1442   pp_space (&asan_pp);
1443   for (l = length - 2; l; l -= 2)
1444     {
1445       tree decl = decls[l / 2 - 1];
1446       pp_wide_integer (&asan_pp, offsets[l] - base_offset);
1447       pp_space (&asan_pp);
1448       pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
1449       pp_space (&asan_pp);
1450 
1451       expanded_location xloc
1452 	= expand_location (DECL_SOURCE_LOCATION (decl));
1453       char location[32];
1454 
1455       if (xloc.file == cfun_xloc.file)
1456 	sprintf (location, ":%d", xloc.line);
1457       else
1458 	location[0] = '\0';
1459 
1460       if (DECL_P (decl) && DECL_NAME (decl))
1461 	{
1462 	  unsigned idlen
1463 	    = IDENTIFIER_LENGTH (DECL_NAME (decl)) + strlen (location);
1464 	  pp_decimal_int (&asan_pp, idlen);
1465 	  pp_space (&asan_pp);
1466 	  pp_tree_identifier (&asan_pp, DECL_NAME (decl));
1467 	  pp_string (&asan_pp, location);
1468 	}
1469       else
1470 	pp_string (&asan_pp, "9 <unknown>");
1471 
1472       if (l > 2)
1473 	pp_space (&asan_pp);
1474     }
1475   str_cst = asan_pp_string (&asan_pp);
1476 
1477   /* Emit the prologue sequence.  */
1478   if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
1479       && param_asan_use_after_return)
1480     {
1481       use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
1482       /* __asan_stack_malloc_N guarantees alignment
1483 	 N < 6 ? (64 << N) : 4096 bytes.  */
1484       if (alignb > (use_after_return_class < 6
1485 		    ? (64U << use_after_return_class) : 4096U))
1486 	use_after_return_class = -1;
1487       else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
1488 	base_align_bias = ((asan_frame_size + alignb - 1)
1489 			   & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
1490     }
1491 
1492   /* Align base if target is STRICT_ALIGNMENT.  */
1493   if (STRICT_ALIGNMENT)
1494     {
1495       const HOST_WIDE_INT align
1496 	= (GET_MODE_ALIGNMENT (SImode) / BITS_PER_UNIT) << ASAN_SHADOW_SHIFT;
1497       base = expand_binop (Pmode, and_optab, base, gen_int_mode (-align, Pmode),
1498 			   NULL_RTX, 1, OPTAB_DIRECT);
1499     }
1500 
1501   if (use_after_return_class == -1 && pbase)
1502     emit_move_insn (pbase, base);
1503 
1504   base = expand_binop (Pmode, add_optab, base,
1505 		       gen_int_mode (base_offset - base_align_bias, Pmode),
1506 		       NULL_RTX, 1, OPTAB_DIRECT);
1507   orig_base = NULL_RTX;
1508   if (use_after_return_class != -1)
1509     {
1510       if (asan_detect_stack_use_after_return == NULL_TREE)
1511 	{
1512 	  id = get_identifier ("__asan_option_detect_stack_use_after_return");
1513 	  decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
1514 			     integer_type_node);
1515 	  SET_DECL_ASSEMBLER_NAME (decl, id);
1516 	  TREE_ADDRESSABLE (decl) = 1;
1517 	  DECL_ARTIFICIAL (decl) = 1;
1518 	  DECL_IGNORED_P (decl) = 1;
1519 	  DECL_EXTERNAL (decl) = 1;
1520 	  TREE_STATIC (decl) = 1;
1521 	  TREE_PUBLIC (decl) = 1;
1522 	  TREE_USED (decl) = 1;
1523 	  asan_detect_stack_use_after_return = decl;
1524 	}
1525       orig_base = gen_reg_rtx (Pmode);
1526       emit_move_insn (orig_base, base);
1527       ret = expand_normal (asan_detect_stack_use_after_return);
1528       lab = gen_label_rtx ();
1529       emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1530 			       VOIDmode, 0, lab,
1531 			       profile_probability::very_likely ());
1532       snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
1533 		use_after_return_class);
1534       ret = init_one_libfunc (buf);
1535       ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
1536 				     GEN_INT (asan_frame_size
1537 					      + base_align_bias),
1538 				     TYPE_MODE (pointer_sized_int_node));
1539       /* __asan_stack_malloc_[n] returns a pointer to the fake stack if it succeeded
1540 	 and NULL otherwise.  Check whether RET is NULL here and jump over the
1541 	 BASE reassignment in this case.  Otherwise, reassign BASE to RET.  */
1542       emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1543 			       VOIDmode, 0, lab,
1544 			       profile_probability:: very_unlikely ());
1545       ret = convert_memory_address (Pmode, ret);
1546       emit_move_insn (base, ret);
1547       emit_label (lab);
1548       emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
1549 					   gen_int_mode (base_align_bias
1550 							 - base_offset, Pmode),
1551 					   NULL_RTX, 1, OPTAB_DIRECT));
1552     }
1553   mem = gen_rtx_MEM (ptr_mode, base);
1554   mem = adjust_address (mem, VOIDmode, base_align_bias);
1555   emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
1556   mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1557   emit_move_insn (mem, expand_normal (str_cst));
1558   mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1559   ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
1560   id = get_identifier (buf);
1561   decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1562 		    VAR_DECL, id, char_type_node);
1563   SET_DECL_ASSEMBLER_NAME (decl, id);
1564   TREE_ADDRESSABLE (decl) = 1;
1565   TREE_READONLY (decl) = 1;
1566   DECL_ARTIFICIAL (decl) = 1;
1567   DECL_IGNORED_P (decl) = 1;
1568   TREE_STATIC (decl) = 1;
1569   TREE_PUBLIC (decl) = 0;
1570   TREE_USED (decl) = 1;
1571   DECL_INITIAL (decl) = decl;
1572   TREE_ASM_WRITTEN (decl) = 1;
1573   TREE_ASM_WRITTEN (id) = 1;
1574   emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
1575   shadow_base = expand_binop (Pmode, lshr_optab, base,
1576 			      gen_int_shift_amount (Pmode, ASAN_SHADOW_SHIFT),
1577 			      NULL_RTX, 1, OPTAB_DIRECT);
1578   shadow_base
1579     = plus_constant (Pmode, shadow_base,
1580 		     asan_shadow_offset ()
1581 		     + (base_align_bias >> ASAN_SHADOW_SHIFT));
1582   gcc_assert (asan_shadow_set != -1
1583 	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1584   shadow_mem = gen_rtx_MEM (SImode, shadow_base);
1585   set_mem_alias_set (shadow_mem, asan_shadow_set);
1586   if (STRICT_ALIGNMENT)
1587     set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1588   prev_offset = base_offset;
1589 
1590   asan_redzone_buffer rz_buffer (shadow_mem, prev_offset);
1591   for (l = length; l; l -= 2)
1592     {
1593       if (l == 2)
1594 	cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1595       offset = offsets[l - 1];
1596 
1597       bool extra_byte = (offset - base_offset) & (ASAN_SHADOW_GRANULARITY - 1);
1598       /* If a red-zone is not aligned to ASAN_SHADOW_GRANULARITY then
1599 	 the previous stack variable has size % ASAN_SHADOW_GRANULARITY != 0.
1600 	 In that case we have to emit one extra byte that will describe
1601 	 how many bytes (out of ASAN_SHADOW_GRANULARITY) can be accessed.  */
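      /* For example (illustrative): with ASAN_SHADOW_GRANULARITY == 8, a
	 10-byte variable leaves (offset - aoff) == 2, so a shadow byte of 2
	 marks the two addressable bytes of the trailing granule, and the
	 red-zone bytes proper start at the next granule boundary.  */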
1602       if (extra_byte)
1603 	{
1604 	  HOST_WIDE_INT aoff
1605 	    = base_offset + ((offset - base_offset)
1606 			     & ~(ASAN_SHADOW_GRANULARITY - HOST_WIDE_INT_1));
1607 	  rz_buffer.emit_redzone_byte (aoff, offset - aoff);
1608 	  offset = aoff + ASAN_SHADOW_GRANULARITY;
1609 	}
1610 
1611       /* Calculate size of red zone payload.  */
1612       while (offset < offsets[l - 2])
1613 	{
1614 	  rz_buffer.emit_redzone_byte (offset, cur_shadow_byte);
1615 	  offset += ASAN_SHADOW_GRANULARITY;
1616 	}
1617 
1618       cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1619     }
1620 
1621   /* As the automatic variables are aligned to
1622      ASAN_RED_ZONE_SIZE / ASAN_SHADOW_GRANULARITY, the buffer should be
1623      flushed here.  */
1624   gcc_assert (rz_buffer.m_shadow_bytes.is_empty ());
1625 
1626   do_pending_stack_adjust ();
1627 
1628   /* Construct epilogue sequence.  */
1629   start_sequence ();
1630 
1631   lab = NULL;
1632   if (use_after_return_class != -1)
1633     {
1634       rtx_code_label *lab2 = gen_label_rtx ();
1635       char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
1636       emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
1637 			       VOIDmode, 0, lab2,
1638 			       profile_probability::very_likely ());
1639       shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1640       set_mem_alias_set (shadow_mem, asan_shadow_set);
1641       mem = gen_rtx_MEM (ptr_mode, base);
1642       mem = adjust_address (mem, VOIDmode, base_align_bias);
1643       emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
1644       unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
1645       if (use_after_return_class < 5
1646 	  && can_store_by_pieces (sz, builtin_memset_read_str, &c,
1647 				  BITS_PER_UNIT, true))
1648 	{
1649 	  /* Emit:
1650 	       memset(ShadowBase, kAsanStackAfterReturnMagic, ShadowSize);
1651 	       **SavedFlagPtr(FakeStack, class_id) = 0
1652 	  */
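	  /* The saved-flag pointer is read from the last pointer-sized slot
	     of the class's fake frame, i.e. at byte offset
	     (1 << (use_after_return_class + 6)) - GET_MODE_SIZE (ptr_mode);
	     for class 0 on a 64-bit target that is 64 - 8 == 56.
	     (Illustrative note, derived from the code below.)  */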
1653 	  store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
1654 			   BITS_PER_UNIT, true, RETURN_BEGIN);
1655 
1656 	  unsigned HOST_WIDE_INT offset
1657 	    = (1 << (use_after_return_class + 6));
1658 	  offset -= GET_MODE_SIZE (ptr_mode);
1659 	  mem = gen_rtx_MEM (ptr_mode, base);
1660 	  mem = adjust_address (mem, ptr_mode, offset);
1661 	  rtx addr = gen_reg_rtx (ptr_mode);
1662 	  emit_move_insn (addr, mem);
1663 	  addr = convert_memory_address (Pmode, addr);
1664 	  mem = gen_rtx_MEM (QImode, addr);
1665 	  emit_move_insn (mem, const0_rtx);
1666 	}
1667       else if (use_after_return_class >= 5
1668 	       || !set_storage_via_setmem (shadow_mem,
1669 					   GEN_INT (sz),
1670 					   gen_int_mode (c, QImode),
1671 					   BITS_PER_UNIT, BITS_PER_UNIT,
1672 					   -1, sz, sz, sz))
1673 	{
1674 	  snprintf (buf, sizeof buf, "__asan_stack_free_%d",
1675 		    use_after_return_class);
1676 	  ret = init_one_libfunc (buf);
1677 	  rtx addr = convert_memory_address (ptr_mode, base);
1678 	  rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
1679 	  emit_library_call (ret, LCT_NORMAL, ptr_mode, addr, ptr_mode,
1680 			     GEN_INT (asan_frame_size + base_align_bias),
1681 			     TYPE_MODE (pointer_sized_int_node),
1682 			     orig_addr, ptr_mode);
1683 	}
1684       lab = gen_label_rtx ();
1685       emit_jump (lab);
1686       emit_label (lab2);
1687     }
1688 
1689   shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1690   set_mem_alias_set (shadow_mem, asan_shadow_set);
1691 
1692   if (STRICT_ALIGNMENT)
1693     set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1694 
1695   prev_offset = base_offset;
1696   last_offset = base_offset;
1697   last_size = 0;
1698   last_size_aligned = 0;
1699   for (l = length; l; l -= 2)
1700     {
1701       offset = base_offset + ((offsets[l - 1] - base_offset)
1702 			      & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1703       if (last_offset + last_size_aligned < offset)
1704 	{
1705 	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
1706 				       (last_offset - prev_offset)
1707 				       >> ASAN_SHADOW_SHIFT);
1708 	  prev_offset = last_offset;
1709 	  asan_clear_shadow (shadow_mem, last_size_aligned >> ASAN_SHADOW_SHIFT);
1710 	  last_offset = offset;
1711 	  last_size = 0;
1712 	}
1713       else
1714 	last_size = offset - last_offset;
1715       last_size += base_offset + ((offsets[l - 2] - base_offset)
1716 				  & ~(ASAN_MIN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1717 		   - offset;
1718 
1719       /* Unpoison shadow memory that corresponds to a variable that is
1720 	 the subject of use-after-return sanitization.  */
1721       if (l > 2)
1722 	{
1723 	  decl = decls[l / 2 - 2];
1724 	  if (asan_handled_variables != NULL
1725 	      && asan_handled_variables->contains (decl))
1726 	    {
1727 	      HOST_WIDE_INT size = offsets[l - 3] - offsets[l - 2];
1728 	      if (dump_file && (dump_flags & TDF_DETAILS))
1729 		{
1730 		  const char *n = (DECL_NAME (decl)
1731 				   ? IDENTIFIER_POINTER (DECL_NAME (decl))
1732 				   : "<unknown>");
1733 		  fprintf (dump_file, "Unpoisoning shadow stack for variable: "
1734 			   "%s (%" PRId64 " B)\n", n, size);
1735 		}
1736 
1737 		last_size += size & ~(ASAN_MIN_RED_ZONE_SIZE - HOST_WIDE_INT_1);
1738 	    }
1739 	}
1740       last_size_aligned
1741 	= ((last_size + (ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1742 	   & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1743     }
1744   if (last_size_aligned)
1745     {
1746       shadow_mem = adjust_address (shadow_mem, VOIDmode,
1747 				   (last_offset - prev_offset)
1748 				   >> ASAN_SHADOW_SHIFT);
1749       asan_clear_shadow (shadow_mem, last_size_aligned >> ASAN_SHADOW_SHIFT);
1750     }
1751 
1752   /* Clean-up set with instrumented stack variables.  */
1753   delete asan_handled_variables;
1754   asan_handled_variables = NULL;
1755   delete asan_used_labels;
1756   asan_used_labels = NULL;
1757 
1758   do_pending_stack_adjust ();
1759   if (lab)
1760     emit_label (lab);
1761 
1762   insns = get_insns ();
1763   end_sequence ();
1764   return insns;
1765 }
1766 
1767 /* Emit an __asan_allocas_unpoison (top, bot) call.  TOP and BOT are the two
1768    call arguments (callers pass virtual_stack_dynamic_rtx as TOP).  If BEFORE is
1769    non-NULL, append the call to that existing insn sequence, else start a new one.  */
1770 
1771 rtx_insn *
1772 asan_emit_allocas_unpoison (rtx top, rtx bot, rtx_insn *before)
1773 {
1774   if (before)
1775     push_to_sequence (before);
1776   else
1777     start_sequence ();
1778   rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
1779   top = convert_memory_address (ptr_mode, top);
1780   bot = convert_memory_address (ptr_mode, bot);
1781   emit_library_call (ret, LCT_NORMAL, ptr_mode,
1782 		     top, ptr_mode, bot, ptr_mode);
1783 
1784   do_pending_stack_adjust ();
1785   rtx_insn *insns = get_insns ();
1786   end_sequence ();
1787   return insns;
1788 }
1789 
1790 /* Return true if DECL, a global var, might be overridden and therefore
1791    needs a local alias.  */
1792 
1793 static bool
1794 asan_needs_local_alias (tree decl)
1795 {
1796   return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1797 }
1798 
1799 /* Return true if DECL, a global var, is an artificial ODR indicator symbol
1800    and therefore doesn't need protection.  */
1801 
1802 static bool
1803 is_odr_indicator (tree decl)
1804 {
1805   return (DECL_ARTIFICIAL (decl)
1806 	  && lookup_attribute ("asan odr indicator", DECL_ATTRIBUTES (decl)));
1807 }
1808 
1809 /* Return true if DECL is a VAR_DECL that should be protected
1810    by Address Sanitizer, by appending a red zone with protected
1811    shadow memory after it and aligning it to at least
1812    ASAN_RED_ZONE_SIZE bytes.  */
1813 
1814 bool
1815 asan_protect_global (tree decl, bool ignore_decl_rtl_set_p)
1816 {
1817   if (!param_asan_globals)
1818     return false;
1819 
1820   rtx rtl, symbol;
1821 
1822   if (TREE_CODE (decl) == STRING_CST)
1823     {
1824       /* Instrument all STRING_CSTs except those created
1825 	 by asan_pp_string here.  */
1826       if (shadow_ptr_types[0] != NULL_TREE
1827 	  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1828 	  && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1829 	return false;
1830       return true;
1831     }
1832   if (!VAR_P (decl)
1833       /* TLS vars aren't statically protectable.  */
1834       || DECL_THREAD_LOCAL_P (decl)
1835       /* Externs will be protected elsewhere.  */
1836       || DECL_EXTERNAL (decl)
1837       /* PR sanitizer/81697: For architectures that use section anchors, the first
1838 	 call to asan_protect_global may occur before DECL_RTL (decl) is set.
1839 	 We should ignore DECL_RTL_SET_P then, because otherwise the first call
1840 	 to asan_protect_global will return FALSE and the following calls on the
1841 	 same decl after setting DECL_RTL (decl) will return TRUE and we'll end
1842 	 up with inconsistency at runtime.  */
1843       || (!DECL_RTL_SET_P (decl) && !ignore_decl_rtl_set_p)
1844       /* Comdat vars pose an ABI problem, we can't know if
1845 	 the var that is selected by the linker will have
1846 	 padding or not.  */
1847       || DECL_ONE_ONLY (decl)
1848       /* Similarly for common vars.  People can use -fno-common.
1849 	 Note: the Linux kernel is built with -fno-common, so we do instrument
1850 	 globals there even if it is C.  */
1851       || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
1852       /* Don't protect if a user section is used; vars placed
1853 	 into a user section from multiple TUs are often assumed
1854 	 to form an array of such vars, and putting padding in there
1855 	 breaks this assumption.
1856       || (DECL_SECTION_NAME (decl) != NULL
1857 	  && !symtab_node::get (decl)->implicit_section
1858 	  && !section_sanitized_p (DECL_SECTION_NAME (decl)))
1859       || DECL_SIZE (decl) == 0
1860       || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1861       || TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1862       || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
1863       || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
1864       || TREE_TYPE (decl) == ubsan_get_source_location_type ()
1865       || is_odr_indicator (decl))
1866     return false;
1867 
1868   if (!ignore_decl_rtl_set_p || DECL_RTL_SET_P (decl))
1869     {
1870 
1871       rtl = DECL_RTL (decl);
1872       if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1873 	return false;
1874       symbol = XEXP (rtl, 0);
1875 
1876       if (CONSTANT_POOL_ADDRESS_P (symbol)
1877 	  || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1878 	return false;
1879     }
1880 
1881   if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1882     return false;
1883 
1884   if (!TARGET_SUPPORTS_ALIASES && asan_needs_local_alias (decl))
1885     return false;
1886 
1887   return true;
1888 }
1889 
1890 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1891    IS_STORE is either 1 (for a store) or 0 (for a load).  */
1892 
1893 static tree
1894 report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1895 		   int *nargs)
1896 {
1897   static enum built_in_function report[2][2][6]
1898     = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1899 	    BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1900 	    BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
1901 	  { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1902 	    BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1903 	    BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
1904 	{ { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
1905 	    BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
1906 	    BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
1907 	    BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
1908 	    BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
1909 	    BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
1910 	  { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
1911 	    BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
1912 	    BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
1913 	    BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
1914 	    BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
1915 	    BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
1916   if (size_in_bytes == -1)
1917     {
1918       *nargs = 2;
1919       return builtin_decl_implicit (report[recover_p][is_store][5]);
1920     }
1921   *nargs = 1;
1922   int size_log2 = exact_log2 (size_in_bytes);
1923   return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
1924 }
1925 
1926 /* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
1927    IS_STORE is either 1 (for a store) or 0 (for a load).  */
1928 
1929 static tree
1930 check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1931 	    int *nargs)
1932 {
1933   static enum built_in_function check[2][2][6]
1934     = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
1935 	    BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
1936 	    BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
1937 	  { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
1938 	    BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
1939 	    BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
1940 	{ { BUILT_IN_ASAN_LOAD1_NOABORT,
1941 	    BUILT_IN_ASAN_LOAD2_NOABORT,
1942 	    BUILT_IN_ASAN_LOAD4_NOABORT,
1943 	    BUILT_IN_ASAN_LOAD8_NOABORT,
1944 	    BUILT_IN_ASAN_LOAD16_NOABORT,
1945 	    BUILT_IN_ASAN_LOADN_NOABORT },
1946 	  { BUILT_IN_ASAN_STORE1_NOABORT,
1947 	    BUILT_IN_ASAN_STORE2_NOABORT,
1948 	    BUILT_IN_ASAN_STORE4_NOABORT,
1949 	    BUILT_IN_ASAN_STORE8_NOABORT,
1950 	    BUILT_IN_ASAN_STORE16_NOABORT,
1951 	    BUILT_IN_ASAN_STOREN_NOABORT } } };
1952   if (size_in_bytes == -1)
1953     {
1954       *nargs = 2;
1955       return builtin_decl_implicit (check[recover_p][is_store][5]);
1956     }
1957   *nargs = 1;
1958   int size_log2 = exact_log2 (size_in_bytes);
1959   return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
1960 }
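/* For example (illustrative): an 8-byte store with RECOVER_P false selects
   check[0][1][3] == BUILT_IN_ASAN_STORE8 (and report[0][1][3]
   == BUILT_IN_ASAN_REPORT_STORE8 above) with *NARGS == 1, while a
   variable-sized access (SIZE_IN_BYTES == -1) selects the _N variant,
   which takes two arguments: the address and the length.  */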
1961 
1962 /* Split the current basic block and create a condition statement
1963    insertion point right before or after the statement pointed to by
1964    ITER.  Return an iterator to the point at which the caller might
1965    safely insert the condition statement.
1966 
1967    THEN_BLOCK must be set to the address of an uninitialized instance
1968    of basic_block.  The function will then set *THEN_BLOCK to the
1969    'then block' of the condition statement to be inserted by the
1970    caller.
1971 
1972    If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1973    *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1974 
1975    Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
1976    block' of the condition statement to be inserted by the caller.
1977 
1978    Note that *FALLTHROUGH_BLOCK is a new block that contains the
1979    statements starting from *ITER, and *THEN_BLOCK is a new empty
1980    block.
1981 
1982    *ITER is adjusted to always point to the first statement
1983     of the basic block *FALLTHROUGH_BLOCK.  That statement is the
1984     same as what ITER was pointing to prior to calling this function,
1985     if BEFORE_P is true; otherwise, it is its following statement.  */
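/* A sketch of the resulting control flow (illustrative, assuming
   CREATE_THEN_FALLTHRU_EDGE is true):

		 COND_BB
		/       \
	  (true)         (false)
	     |              |
	 *THEN_BLOCK        |
	      \             |
	    *FALLTHROUGH_BLOCK  */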
1986 
1987 gimple_stmt_iterator
1988 create_cond_insert_point (gimple_stmt_iterator *iter,
1989 			  bool before_p,
1990 			  bool then_more_likely_p,
1991 			  bool create_then_fallthru_edge,
1992 			  basic_block *then_block,
1993 			  basic_block *fallthrough_block)
1994 {
1995   gimple_stmt_iterator gsi = *iter;
1996 
1997   if (!gsi_end_p (gsi) && before_p)
1998     gsi_prev (&gsi);
1999 
2000   basic_block cur_bb = gsi_bb (*iter);
2001 
2002   edge e = split_block (cur_bb, gsi_stmt (gsi));
2003 
2004   /* Get a hold on the 'condition block', the 'then block' and the
2005      'else block'.  */
2006   basic_block cond_bb = e->src;
2007   basic_block fallthru_bb = e->dest;
2008   basic_block then_bb = create_empty_bb (cond_bb);
2009   if (current_loops)
2010     {
2011       add_bb_to_loop (then_bb, cond_bb->loop_father);
2012       loops_state_set (LOOPS_NEED_FIXUP);
2013     }
2014 
2015   /* Set up the newly created 'then block'.  */
2016   e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
2017   profile_probability fallthrough_probability
2018     = then_more_likely_p
2019     ? profile_probability::very_unlikely ()
2020     : profile_probability::very_likely ();
2021   e->probability = fallthrough_probability.invert ();
2022   then_bb->count = e->count ();
2023   if (create_then_fallthru_edge)
2024     make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
2025 
2026   /* Set up the fallthrough basic block.  */
2027   e = find_edge (cond_bb, fallthru_bb);
2028   e->flags = EDGE_FALSE_VALUE;
2029   e->probability = fallthrough_probability;
2030 
2031   /* Update dominance info for the newly created then_bb; note that
2032      fallthru_bb's dominance info has already been updated by
2033      split_block.  */
2034   if (dom_info_available_p (CDI_DOMINATORS))
2035     set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
2036 
2037   *then_block = then_bb;
2038   *fallthrough_block = fallthru_bb;
2039   *iter = gsi_start_bb (fallthru_bb);
2040 
2041   return gsi_last_bb (cond_bb);
2042 }
2043 
2044 /* Insert an if condition followed by a 'then block' right before the
2045    statement pointed to by ITER.  The fallthrough block -- which is the
2046    else block of the condition as well as the destination of the
2047    outgoing edge of the 'then block' -- starts with the statement
2048    pointed to by ITER.
2049 
2050    COND is the condition of the if.
2051 
2052    If THEN_MORE_LIKELY_P is true, the probability of the edge to the
2053    'then block' is higher than the probability of the edge to the
2054    fallthrough block.
2055 
2056    Upon completion of the function, *THEN_BB is set to the newly
2057    inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
2058    fallthrough block.
2059 
2060    *ITER is adjusted to still point to the same statement it was
2061    pointing to initially.  */
2062 
2063 static void
2064 insert_if_then_before_iter (gcond *cond,
2065 			    gimple_stmt_iterator *iter,
2066 			    bool then_more_likely_p,
2067 			    basic_block *then_bb,
2068 			    basic_block *fallthrough_bb)
2069 {
2070   gimple_stmt_iterator cond_insert_point =
2071     create_cond_insert_point (iter,
2072 			      /*before_p=*/true,
2073 			      then_more_likely_p,
2074 			      /*create_then_fallthru_edge=*/true,
2075 			      then_bb,
2076 			      fallthrough_bb);
2077   gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
2078 }
2079 
2080 /* Build (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().
2081    If RETURN_ADDRESS is set to true, return the shadow memory address
2082    instead of the value read from shadow memory.  */
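/* A sketch of the GIMPLE emitted here (illustrative; the SSA names are made
   up and the shadow offset is target-dependent, 0x7fff8000 being the usual
   x86-64 value):

     _1 = base_addr >> 3;
     _2 = _1 + 0x7fff8000;
     _3 = (shadow ptr type) _2;
     _4 = MEM[(shadow type *) _3];    <- skipped when RETURN_ADDRESS is true

   The last SSA name created (_4, or _3 for RETURN_ADDRESS) is returned.  */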
2083 
2084 static tree
2085 build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
2086 			 tree base_addr, tree shadow_ptr_type,
2087 			 bool return_address = false)
2088 {
2089   tree t, uintptr_type = TREE_TYPE (base_addr);
2090   tree shadow_type = TREE_TYPE (shadow_ptr_type);
2091   gimple *g;
2092 
2093   t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
2094   g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
2095 			   base_addr, t);
2096   gimple_set_location (g, location);
2097   gsi_insert_after (gsi, g, GSI_NEW_STMT);
2098 
2099   t = build_int_cst (uintptr_type, asan_shadow_offset ());
2100   g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
2101 			   gimple_assign_lhs (g), t);
2102   gimple_set_location (g, location);
2103   gsi_insert_after (gsi, g, GSI_NEW_STMT);
2104 
2105   g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
2106 			   gimple_assign_lhs (g));
2107   gimple_set_location (g, location);
2108   gsi_insert_after (gsi, g, GSI_NEW_STMT);
2109 
2110   if (!return_address)
2111     {
2112       t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
2113 		  build_int_cst (shadow_ptr_type, 0));
2114       g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
2115       gimple_set_location (g, location);
2116       gsi_insert_after (gsi, g, GSI_NEW_STMT);
2117     }
2118 
2119   return gimple_assign_lhs (g);
2120 }
2121 
2122 /* BASE can already be an SSA_NAME; in that case, do not create a
2123    new SSA_NAME for it.  */
2124 
2125 static tree
2126 maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
2127 		       bool before_p)
2128 {
2129   STRIP_USELESS_TYPE_CONVERSION (base);
2130   if (TREE_CODE (base) == SSA_NAME)
2131     return base;
2132   gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)), base);
2133   gimple_set_location (g, loc);
2134   if (before_p)
2135     gsi_insert_before (iter, g, GSI_SAME_STMT);
2136   else
2137     gsi_insert_after (iter, g, GSI_NEW_STMT);
2138   return gimple_assign_lhs (g);
2139 }
2140 
2141 /* LEN can already have the necessary size and precision;
2142    in that case, do not create a new variable.  */
2143 
2144 tree
2145 maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
2146 		       bool before_p)
2147 {
2148   if (ptrofftype_p (len))
2149     return len;
2150   gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2151 				  NOP_EXPR, len);
2152   gimple_set_location (g, loc);
2153   if (before_p)
2154     gsi_insert_before (iter, g, GSI_SAME_STMT);
2155   else
2156     gsi_insert_after (iter, g, GSI_NEW_STMT);
2157   return gimple_assign_lhs (g);
2158 }
2159 
2160 /* Instrument the memory access instruction BASE.  Insert new
2161    statements before or after ITER.
2162 
2163    Note that the memory access represented by BASE can be either an
2164    SSA_NAME, or a non-SSA expression.  LOCATION is the source code
2165    location.  IS_STORE is TRUE for a store, FALSE for a load.
2166    BEFORE_P is TRUE for inserting the instrumentation code before
2167    ITER, FALSE for inserting it after ITER.  IS_SCALAR_ACCESS is TRUE
2168    for a scalar memory access and FALSE for memory region access.
2169    NON_ZERO_P is TRUE if memory region is guaranteed to have non-zero
2170    length.  ALIGN tells alignment of accessed memory object.
2171 
2172    START_INSTRUMENTED and END_INSTRUMENTED are TRUE if start/end of
2173    memory region have already been instrumented.
2174 
2175    If BEFORE_P is TRUE, *ITER is arranged to still point to the
2176    statement it was pointing to prior to calling this function,
2177    otherwise, it points to the statement logically following it.  */
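/* A sketch of what this emits (illustrative): the access is summarized as a
   single internal-function call, roughly

     ASAN_CHECK (7, base_5, 4, 4);

   where the first operand packs the ASAN_CHECK_* flags (7 == store
   + non-zero length + scalar access), followed by the address, the length
   and the alignment in bytes; the call is expanded into the real
   shadow-memory test later, during the sanopt pass.  */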
2178 
2179 static void
2180 build_check_stmt (location_t loc, tree base, tree len,
2181 		  HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
2182 		  bool is_non_zero_len, bool before_p, bool is_store,
2183 		  bool is_scalar_access, unsigned int align = 0)
2184 {
2185   gimple_stmt_iterator gsi = *iter;
2186   gimple *g;
2187 
2188   gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
2189 
2190   gsi = *iter;
2191 
2192   base = unshare_expr (base);
2193   base = maybe_create_ssa_name (loc, base, &gsi, before_p);
2194 
2195   if (len)
2196     {
2197       len = unshare_expr (len);
2198       len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
2199     }
2200   else
2201     {
2202       gcc_assert (size_in_bytes != -1);
2203       len = build_int_cst (pointer_sized_int_node, size_in_bytes);
2204     }
2205 
2206   if (size_in_bytes > 1)
2207     {
2208       if ((size_in_bytes & (size_in_bytes - 1)) != 0
2209 	  || size_in_bytes > 16)
2210 	is_scalar_access = false;
2211       else if (align && align < size_in_bytes * BITS_PER_UNIT)
2212 	{
2213 	  /* On non-strict alignment targets, if a
2214 	     16-byte access is only 8-byte aligned,
2215 	     this will result in a misaligned 2-byte
2216 	     shadow memory load, but it can otherwise
2217 	     be handled using one read.  */
2218 	  if (size_in_bytes != 16
2219 	      || STRICT_ALIGNMENT
2220 	      || align < 8 * BITS_PER_UNIT)
2221 	    is_scalar_access = false;
2222 	}
2223     }
2224 
2225   HOST_WIDE_INT flags = 0;
2226   if (is_store)
2227     flags |= ASAN_CHECK_STORE;
2228   if (is_non_zero_len)
2229     flags |= ASAN_CHECK_NON_ZERO_LEN;
2230   if (is_scalar_access)
2231     flags |= ASAN_CHECK_SCALAR_ACCESS;
2232 
2233   g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
2234 				  build_int_cst (integer_type_node, flags),
2235 				  base, len,
2236 				  build_int_cst (integer_type_node,
2237 						 align / BITS_PER_UNIT));
2238   gimple_set_location (g, loc);
2239   if (before_p)
2240     gsi_insert_before (&gsi, g, GSI_SAME_STMT);
2241   else
2242     {
2243       gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2244       gsi_next (&gsi);
2245       *iter = gsi;
2246     }
2247 }
2248 
2249 /* If T represents a memory access, add instrumentation code before ITER.
2250    LOCATION is source code location.
2251    IS_STORE is either TRUE (for a store) or FALSE (for a load).  */
2252 
2253 static void
2254 instrument_derefs (gimple_stmt_iterator *iter, tree t,
2255 		   location_t location, bool is_store)
2256 {
2257   if (is_store && !param_asan_instrument_writes)
2258     return;
2259   if (!is_store && !param_asan_instrument_reads)
2260     return;
2261 
2262   tree type, base;
2263   HOST_WIDE_INT size_in_bytes;
2264   if (location == UNKNOWN_LOCATION)
2265     location = EXPR_LOCATION (t);
2266 
2267   type = TREE_TYPE (t);
2268   switch (TREE_CODE (t))
2269     {
2270     case ARRAY_REF:
2271     case COMPONENT_REF:
2272     case INDIRECT_REF:
2273     case MEM_REF:
2274     case VAR_DECL:
2275     case BIT_FIELD_REF:
2276       break;
2278     default:
2279       return;
2280     }
2281 
2282   size_in_bytes = int_size_in_bytes (type);
2283   if (size_in_bytes <= 0)
2284     return;
2285 
2286   poly_int64 bitsize, bitpos;
2287   tree offset;
2288   machine_mode mode;
2289   int unsignedp, reversep, volatilep = 0;
2290   tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
2291 				    &unsignedp, &reversep, &volatilep);
2292 
2293   if (TREE_CODE (t) == COMPONENT_REF
2294       && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
2295     {
2296       tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
2297       instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
2298 				       TREE_OPERAND (t, 0), repr,
2299 				       TREE_OPERAND (t, 2)),
2300 			 location, is_store);
2301       return;
2302     }
2303 
2304   if (!multiple_p (bitpos, BITS_PER_UNIT)
2305       || maybe_ne (bitsize, size_in_bytes * BITS_PER_UNIT))
2306     return;
2307 
2308   if (VAR_P (inner) && DECL_HARD_REGISTER (inner))
2309     return;
2310 
2311   poly_int64 decl_size;
2312   if ((VAR_P (inner) || TREE_CODE (inner) == RESULT_DECL)
2313       && offset == NULL_TREE
2314       && DECL_SIZE (inner)
2315       && poly_int_tree_p (DECL_SIZE (inner), &decl_size)
2316       && known_subrange_p (bitpos, bitsize, 0, decl_size))
2317     {
2318       if (VAR_P (inner) && DECL_THREAD_LOCAL_P (inner))
2319 	return;
2320       if (!param_asan_globals && is_global_var (inner))
2321         return;
2322       if (!TREE_STATIC (inner))
2323 	{
2324 	  /* Automatic vars in the current function will always be
2325 	     accessible.  */
2326 	  if (decl_function_context (inner) == current_function_decl
2327 	      && (!asan_sanitize_use_after_scope ()
2328 		  || !TREE_ADDRESSABLE (inner)))
2329 	    return;
2330 	}
2331       /* Always instrument external vars, they might be dynamically
2332 	 initialized.  */
2333       else if (!DECL_EXTERNAL (inner))
2334 	{
2335 	  /* For static vars if they are known not to be dynamically
2336 	     initialized, they will always be accessible.  */
2337 	  varpool_node *vnode = varpool_node::get (inner);
2338 	  if (vnode && !vnode->dynamically_initialized)
2339 	    return;
2340 	}
2341     }
2342 
2343   if (DECL_P (inner)
2344       && decl_function_context (inner) == current_function_decl
2345       && !TREE_ADDRESSABLE (inner))
2346     mark_addressable (inner);
2347 
2348   base = build_fold_addr_expr (t);
2349   if (!has_mem_ref_been_instrumented (base, size_in_bytes))
2350     {
2351       unsigned int align = get_object_alignment (t);
2352       build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
2353 			/*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
2354 			is_store, /*is_scalar_access*/true, align);
2355       update_mem_ref_hash_table (base, size_in_bytes);
2356       update_mem_ref_hash_table (t, size_in_bytes);
2357     }
2358 
2359 }
2360 
2361 /*  Insert a memory reference into the hash table if access length
2362     can be determined at compile time.  */
2363 
2364 static void
2365 maybe_update_mem_ref_hash_table (tree base, tree len)
2366 {
2367   if (!POINTER_TYPE_P (TREE_TYPE (base))
2368       || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
2369     return;
2370 
2371   HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2372 
2373   if (size_in_bytes != -1)
2374     update_mem_ref_hash_table (base, size_in_bytes);
2375 }
2376 
2377 /* Instrument an access to a contiguous memory region that starts at
2378    the address pointed to by BASE, over a length of LEN (expressed in
2379    the sizeof (*BASE) bytes).  ITER points to the instruction before
2380    which the instrumentation instructions must be inserted.  LOCATION
2381    is the source location that the instrumentation instructions must
2382    have.  If IS_STORE is true, then the memory access is a store;
2383    otherwise, it's a load.  */
2384 
2385 static void
2386 instrument_mem_region_access (tree base, tree len,
2387 			      gimple_stmt_iterator *iter,
2388 			      location_t location, bool is_store)
2389 {
2390   if (!POINTER_TYPE_P (TREE_TYPE (base))
2391       || !INTEGRAL_TYPE_P (TREE_TYPE (len))
2392       || integer_zerop (len))
2393     return;
2394 
2395   HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2396 
2397   if ((size_in_bytes == -1)
2398       || !has_mem_ref_been_instrumented (base, size_in_bytes))
2399     {
2400       build_check_stmt (location, base, len, size_in_bytes, iter,
2401 			/*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
2402 			is_store, /*is_scalar_access*/false, /*align*/0);
2403     }
2404 
2405   maybe_update_mem_ref_hash_table (base, len);
2406   *iter = gsi_for_stmt (gsi_stmt (*iter));
2407 }
2408 
2409 /* Instrument the call to a built-in memory access function that is
2410    pointed to by the iterator ITER.
2411 
2412    Upon completion, return TRUE iff *ITER has been advanced to the
2413    statement following the one it was originally pointing to.  */
2414 
2415 static bool
2416 instrument_builtin_call (gimple_stmt_iterator *iter)
2417 {
2418   if (!param_asan_memintrin)
2419     return false;
2420 
2421   bool iter_advanced_p = false;
2422   gcall *call = as_a <gcall *> (gsi_stmt (*iter));
2423 
2424   gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
2425 
2426   location_t loc = gimple_location (call);
2427 
2428   asan_mem_ref src0, src1, dest;
2429   asan_mem_ref_init (&src0, NULL, 1);
2430   asan_mem_ref_init (&src1, NULL, 1);
2431   asan_mem_ref_init (&dest, NULL, 1);
2432 
2433   tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
2434   bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
2435     dest_is_deref = false, intercepted_p = true;
2436 
2437   if (get_mem_refs_of_builtin_call (call,
2438 				    &src0, &src0_len, &src0_is_store,
2439 				    &src1, &src1_len, &src1_is_store,
2440 				    &dest, &dest_len, &dest_is_store,
2441 				    &dest_is_deref, &intercepted_p, iter))
2442     {
2443       if (dest_is_deref)
2444 	{
2445 	  instrument_derefs (iter, dest.start, loc, dest_is_store);
2446 	  gsi_next (iter);
2447 	  iter_advanced_p = true;
2448 	}
2449       else if (!intercepted_p
2450 	       && (src0_len || src1_len || dest_len))
2451 	{
2452 	  if (src0.start != NULL_TREE)
2453 	    instrument_mem_region_access (src0.start, src0_len,
2454 					  iter, loc, /*is_store=*/false);
2455 	  if (src1.start != NULL_TREE)
2456 	    instrument_mem_region_access (src1.start, src1_len,
2457 					  iter, loc, /*is_store=*/false);
2458 	  if (dest.start != NULL_TREE)
2459 	    instrument_mem_region_access (dest.start, dest_len,
2460 					  iter, loc, /*is_store=*/true);
2461 
2462 	  *iter = gsi_for_stmt (call);
2463 	  gsi_next (iter);
2464 	  iter_advanced_p = true;
2465 	}
2466       else
2467 	{
2468 	  if (src0.start != NULL_TREE)
2469 	    maybe_update_mem_ref_hash_table (src0.start, src0_len);
2470 	  if (src1.start != NULL_TREE)
2471 	    maybe_update_mem_ref_hash_table (src1.start, src1_len);
2472 	  if (dest.start != NULL_TREE)
2473 	    maybe_update_mem_ref_hash_table (dest.start, dest_len);
2474 	}
2475     }
2476   return iter_advanced_p;
2477 }
2478 
2479 /*  Instrument the assignment statement ITER if it is subject to
2480     instrumentation.  Return TRUE iff instrumentation actually
2481     happened.  In that case, the iterator ITER is advanced to the next
2482     logical expression following the one initially pointed to by ITER,
2483     and the relevant memory reference whose access has been
2484     instrumented is added to the memory references hash table.  */
2485 
2486 static bool
2487 maybe_instrument_assignment (gimple_stmt_iterator *iter)
2488 {
2489   gimple *s = gsi_stmt (*iter);
2490 
2491   gcc_assert (gimple_assign_single_p (s));
2492 
2493   tree ref_expr = NULL_TREE;
2494   bool is_store, is_instrumented = false;
2495 
2496   if (gimple_store_p (s))
2497     {
2498       ref_expr = gimple_assign_lhs (s);
2499       is_store = true;
2500       instrument_derefs (iter, ref_expr,
2501 			 gimple_location (s),
2502 			 is_store);
2503       is_instrumented = true;
2504     }
2505 
2506   if (gimple_assign_load_p (s))
2507     {
2508       ref_expr = gimple_assign_rhs1 (s);
2509       is_store = false;
2510       instrument_derefs (iter, ref_expr,
2511 			 gimple_location (s),
2512 			 is_store);
2513       is_instrumented = true;
2514     }
2515 
2516   if (is_instrumented)
2517     gsi_next (iter);
2518 
2519   return is_instrumented;
2520 }
2521 
2522 /* Instrument the function call pointed to by the iterator ITER, if it
2523    is subject to instrumentation.  At the moment, the only function
2524    calls that are instrumented are some built-in functions that access
2525    memory.  Look at instrument_builtin_call to learn more.
2526 
2527    Upon completion return TRUE iff *ITER was advanced to the statement
2528    following the one it was originally pointing to.  */
2529 
2530 static bool
2531 maybe_instrument_call (gimple_stmt_iterator *iter)
2532 {
2533   gimple *stmt = gsi_stmt (*iter);
2534   bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2535 
2536   if (is_builtin && instrument_builtin_call (iter))
2537     return true;
2538 
2539   if (gimple_call_noreturn_p (stmt))
2540     {
2541       if (is_builtin)
2542 	{
2543 	  tree callee = gimple_call_fndecl (stmt);
2544 	  switch (DECL_FUNCTION_CODE (callee))
2545 	    {
2546 	    case BUILT_IN_UNREACHABLE:
2547 	    case BUILT_IN_TRAP:
2548 	      /* Don't instrument these.  */
2549 	      return false;
2550 	    default:
2551 	      break;
2552 	    }
2553 	}
2554       tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2555       gimple *g = gimple_build_call (decl, 0);
2556       gimple_set_location (g, gimple_location (stmt));
2557       gsi_insert_before (iter, g, GSI_SAME_STMT);
2558     }
2559 
2560   bool instrumented = false;
2561   if (gimple_store_p (stmt))
2562     {
2563       tree ref_expr = gimple_call_lhs (stmt);
2564       instrument_derefs (iter, ref_expr,
2565 			 gimple_location (stmt),
2566 			 /*is_store=*/true);
2567 
2568       instrumented = true;
2569     }
2570 
2571   /* Walk through gimple_call arguments and check them if needed.  */
2572   unsigned args_num = gimple_call_num_args (stmt);
2573   for (unsigned i = 0; i < args_num; ++i)
2574     {
2575       tree arg = gimple_call_arg (stmt, i);
2576       /* If ARG is not a non-aggregate register variable, the compiler
2577 	 generally creates a temporary for it and passes that temporary as
2578 	 the argument to the gimple call.  But in some cases, e.g. when we
2579 	 pass a small structure by value that fits in a register, the compiler
2580 	 can omit the temporary.  In that case, we should check the argument.  */
2581       if (!is_gimple_reg (arg) && !is_gimple_min_invariant (arg))
2582 	{
2583 	  instrument_derefs (iter, arg,
2584 			     gimple_location (stmt),
2585 			     /*is_store=*/false);
2586 	  instrumented = true;
2587 	}
2588     }
2589   if (instrumented)
2590     gsi_next (iter);
2591   return instrumented;
2592 }
2593 
2594 /* Walk each instruction of all basic blocks and instrument those that
2595    represent memory references: loads, stores, or function calls.
2596    In a given basic block, this function avoids instrumenting memory
2597    references that have already been instrumented.  */
2598 
2599 static void
2600 transform_statements (void)
2601 {
2602   basic_block bb, last_bb = NULL;
2603   gimple_stmt_iterator i;
2604   int saved_last_basic_block = last_basic_block_for_fn (cfun);
2605 
2606   FOR_EACH_BB_FN (bb, cfun)
2607     {
2608       basic_block prev_bb = bb;
2609 
2610       if (bb->index >= saved_last_basic_block) continue;
2611 
2612       /* Flush the mem ref hash table, if current bb doesn't have
2613 	 exactly one predecessor, or if that predecessor (skipping
2614 	 over asan created basic blocks) isn't the last processed
2615 	 basic block.  Thus we effectively flush on extended basic
2616 	 block boundaries.  */
2617       while (single_pred_p (prev_bb))
2618 	{
2619 	  prev_bb = single_pred (prev_bb);
2620 	  if (prev_bb->index < saved_last_basic_block)
2621 	    break;
2622 	}
2623       if (prev_bb != last_bb)
2624 	empty_mem_ref_hash_table ();
2625       last_bb = bb;
2626 
2627       for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2628 	{
2629 	  gimple *s = gsi_stmt (i);
2630 
2631 	  if (has_stmt_been_instrumented_p (s))
2632 	    gsi_next (&i);
2633 	  else if (gimple_assign_single_p (s)
2634 		   && !gimple_clobber_p (s)
2635 		   && maybe_instrument_assignment (&i))
2636 	    /*  Nothing to do as maybe_instrument_assignment advanced
2637 		the iterator I.  */;
2638 	  else if (is_gimple_call (s) && maybe_instrument_call (&i))
2639 	    /*  Nothing to do as maybe_instrument_call
2640 		advanced the iterator I.  */;
2641 	  else
2642 	    {
2643 	      /* No instrumentation happened.
2644 
2645 		 If the current instruction is a function call that
2646 		 might free something, let's forget about the memory
2647 		 references that got instrumented.  Otherwise we might
2648 		 miss some instrumentation opportunities.  Do the same
2649 		 for an ASAN_MARK poisoning internal function.  */
2650 	      if (is_gimple_call (s)
2651 		  && (!nonfreeing_call_p (s)
2652 		      || asan_mark_p (s, ASAN_MARK_POISON)))
2653 		empty_mem_ref_hash_table ();
2654 
2655 	      gsi_next (&i);
2656 	    }
2657 	}
2658     }
2659   free_mem_ref_resources ();
2660 }
2661 
2662 /* Build
2663    __asan_before_dynamic_init (module_name)
2664    or
2665    __asan_after_dynamic_init ()
2666    call.  */
2667 
2668 tree
2669 asan_dynamic_init_call (bool after_p)
2670 {
2671   if (shadow_ptr_types[0] == NULL_TREE)
2672     asan_init_shadow_ptr_types ();
2673 
2674   tree fn = builtin_decl_implicit (after_p
2675 				   ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2676 				   : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2677   tree module_name_cst = NULL_TREE;
2678   if (!after_p)
2679     {
2680       pretty_printer module_name_pp;
2681       pp_string (&module_name_pp, main_input_filename);
2682 
2683       module_name_cst = asan_pp_string (&module_name_pp);
2684       module_name_cst = fold_convert (const_ptr_type_node,
2685 				      module_name_cst);
2686     }
2687 
2688   return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2689 }
2690 
2691 /* Build
2692    struct __asan_global
2693    {
2694      const void *__beg;
2695      uptr __size;
2696      uptr __size_with_redzone;
2697      const void *__name;
2698      const void *__module_name;
2699      uptr __has_dynamic_init;
2700      __asan_global_source_location *__location;
2701      char *__odr_indicator;
2702    } type.  */
2703 
2704 static tree
2705 asan_global_struct (void)
2706 {
2707   static const char *field_names[]
2708     = { "__beg", "__size", "__size_with_redzone",
2709 	"__name", "__module_name", "__has_dynamic_init", "__location",
2710 	"__odr_indicator" };
2711   tree fields[ARRAY_SIZE (field_names)], ret;
2712   unsigned i;
2713 
2714   ret = make_node (RECORD_TYPE);
2715   for (i = 0; i < ARRAY_SIZE (field_names); i++)
2716     {
2717       fields[i]
2718 	= build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2719 		      get_identifier (field_names[i]),
2720 		      (i == 0 || i == 3) ? const_ptr_type_node
2721 		      : pointer_sized_int_node);
2722       DECL_CONTEXT (fields[i]) = ret;
2723       if (i)
2724 	DECL_CHAIN (fields[i - 1]) = fields[i];
2725     }
2726   tree type_decl = build_decl (input_location, TYPE_DECL,
2727 			       get_identifier ("__asan_global"), ret);
2728   DECL_IGNORED_P (type_decl) = 1;
2729   DECL_ARTIFICIAL (type_decl) = 1;
2730   TYPE_FIELDS (ret) = fields[0];
2731   TYPE_NAME (ret) = type_decl;
2732   TYPE_STUB_DECL (ret) = type_decl;
2733   TYPE_ARTIFICIAL (ret) = 1;
2734   layout_type (ret);
2735   return ret;
2736 }
2737 
2738 /* Create and return odr indicator symbol for DECL.
2739    TYPE is __asan_global struct type as returned by asan_global_struct.  */
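/* For example (illustrative): for a public global 'foo' this creates a char
   variable named "__odr_asan.foo" ("__odr_asan$foo" or "__odr_asan_foo"
   when '.' resp. '$' is not allowed in labels), whose address ends up in
   the __odr_indicator field of foo's __asan_global descriptor.  */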
2740 
2741 static tree
2742 create_odr_indicator (tree decl, tree type)
2743 {
2744   char *name;
2745   tree uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2746   tree decl_name
2747     = (HAS_DECL_ASSEMBLER_NAME_P (decl) ? DECL_ASSEMBLER_NAME (decl)
2748 					: DECL_NAME (decl));
2749   /* DECL_NAME theoretically might be NULL.  Bail out with 0 in this case.  */
2750   if (decl_name == NULL_TREE)
2751     return build_int_cst (uptr, 0);
2752   const char *dname = IDENTIFIER_POINTER (decl_name);
2753   if (HAS_DECL_ASSEMBLER_NAME_P (decl))
2754     dname = targetm.strip_name_encoding (dname);
2755   size_t len = strlen (dname) + sizeof ("__odr_asan_");
2756   name = XALLOCAVEC (char, len);
2757   snprintf (name, len, "__odr_asan_%s", dname);
2758 #ifndef NO_DOT_IN_LABEL
2759   name[sizeof ("__odr_asan") - 1] = '.';
2760 #elif !defined(NO_DOLLAR_IN_LABEL)
2761   name[sizeof ("__odr_asan") - 1] = '$';
2762 #endif
2763   tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (name),
2764 			 char_type_node);
2765   TREE_ADDRESSABLE (var) = 1;
2766   TREE_READONLY (var) = 0;
2767   TREE_THIS_VOLATILE (var) = 1;
2768   DECL_GIMPLE_REG_P (var) = 0;
2769   DECL_ARTIFICIAL (var) = 1;
2770   DECL_IGNORED_P (var) = 1;
2771   TREE_STATIC (var) = 1;
2772   TREE_PUBLIC (var) = 1;
2773   DECL_VISIBILITY (var) = DECL_VISIBILITY (decl);
2774   DECL_VISIBILITY_SPECIFIED (var) = DECL_VISIBILITY_SPECIFIED (decl);
2775 
2776   TREE_USED (var) = 1;
2777   tree ctor = build_constructor_va (TREE_TYPE (var), 1, NULL_TREE,
2778 				    build_int_cst (unsigned_type_node, 0));
2779   TREE_CONSTANT (ctor) = 1;
2780   TREE_STATIC (ctor) = 1;
2781   DECL_INITIAL (var) = ctor;
2782   DECL_ATTRIBUTES (var) = tree_cons (get_identifier ("asan odr indicator"),
2783 				     NULL, DECL_ATTRIBUTES (var));
2784   make_decl_rtl (var);
2785   varpool_node::finalize_decl (var);
2786   return fold_convert (uptr, build_fold_addr_expr (var));
2787 }
2788 
2789 /* Return true if DECL, a global var, might be overridden and needs
2790    an additional odr indicator symbol.  */
2791 
2792 static bool
2793 asan_needs_odr_indicator_p (tree decl)
2794 {
2795   /* Don't emit ODR indicators for the kernel because:
2796      a) The kernel is written in C and thus doesn't need ODR indicators.
2797      b) Some kernel code may have assumptions about symbols containing specific
2798         patterns in their names.  Since ODR indicators contain original names
2799         of symbols they are emitted for, these assumptions would be broken for
2800         ODR indicator symbols.  */
2801   return (!(flag_sanitize & SANITIZE_KERNEL_ADDRESS)
2802 	  && !DECL_ARTIFICIAL (decl)
2803 	  && !DECL_WEAK (decl)
2804 	  && TREE_PUBLIC (decl));
2805 }
2806 
2807 /* Append description of a single global DECL into vector V.
2808    TYPE is __asan_global struct type as returned by asan_global_struct.  */
2809 
2810 static void
2811 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2812 {
2813   tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2814   unsigned HOST_WIDE_INT size;
2815   tree str_cst, module_name_cst, refdecl = decl;
2816   vec<constructor_elt, va_gc> *vinner = NULL;
2817 
2818   pretty_printer asan_pp, module_name_pp;
2819 
2820   if (DECL_NAME (decl))
2821     pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2822   else
2823     pp_string (&asan_pp, "<unknown>");
2824   str_cst = asan_pp_string (&asan_pp);
2825 
2826   pp_string (&module_name_pp, main_input_filename);
2827   module_name_cst = asan_pp_string (&module_name_pp);
2828 
2829   if (asan_needs_local_alias (decl))
2830     {
2831       char buf[20];
2832       ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2833       refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2834 			    VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2835       TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2836       TREE_READONLY (refdecl) = TREE_READONLY (decl);
2837       TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2838       DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2839       DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2840       DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2841       TREE_STATIC (refdecl) = 1;
2842       TREE_PUBLIC (refdecl) = 0;
2843       TREE_USED (refdecl) = 1;
2844       assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2845     }
2846 
2847   tree odr_indicator_ptr
2848     = (asan_needs_odr_indicator_p (decl) ? create_odr_indicator (decl, type)
2849 					 : build_int_cst (uptr, 0));
2850   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2851 			  fold_convert (const_ptr_type_node,
2852 					build_fold_addr_expr (refdecl)));
2853   size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2854   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2855   size += asan_red_zone_size (size);
2856   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2857   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2858 			  fold_convert (const_ptr_type_node, str_cst));
2859   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2860 			  fold_convert (const_ptr_type_node, module_name_cst));
2861   varpool_node *vnode = varpool_node::get (decl);
2862   int has_dynamic_init = 0;
2863   /* FIXME: Enable initialization order fiasco detection in LTO mode once
2864      proper fix for PR 79061 will be applied.  */
2865   if (!in_lto_p)
2866     has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2867   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2868 			  build_int_cst (uptr, has_dynamic_init));
2869   tree locptr = NULL_TREE;
2870   location_t loc = DECL_SOURCE_LOCATION (decl);
2871   expanded_location xloc = expand_location (loc);
2872   if (xloc.file != NULL)
2873     {
2874       static int lasanloccnt = 0;
2875       char buf[25];
2876       ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
2877       tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2878 			     ubsan_get_source_location_type ());
2879       TREE_STATIC (var) = 1;
2880       TREE_PUBLIC (var) = 0;
2881       DECL_ARTIFICIAL (var) = 1;
2882       DECL_IGNORED_P (var) = 1;
2883       pretty_printer filename_pp;
2884       pp_string (&filename_pp, xloc.file);
2885       tree str = asan_pp_string (&filename_pp);
2886       tree ctor = build_constructor_va (TREE_TYPE (var), 3,
2887 					NULL_TREE, str, NULL_TREE,
2888 					build_int_cst (unsigned_type_node,
2889 						       xloc.line), NULL_TREE,
2890 					build_int_cst (unsigned_type_node,
2891 						       xloc.column));
2892       TREE_CONSTANT (ctor) = 1;
2893       TREE_STATIC (ctor) = 1;
2894       DECL_INITIAL (var) = ctor;
2895       varpool_node::finalize_decl (var);
2896       locptr = fold_convert (uptr, build_fold_addr_expr (var));
2897     }
2898   else
2899     locptr = build_int_cst (uptr, 0);
2900   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
2901   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, odr_indicator_ptr);
2902   init = build_constructor (type, vinner);
2903   CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
2904 }
2905 
2906 /* Initialize sanitizer.def builtins if the FE hasn't initialized them.  */
2907 void
2908 initialize_sanitizer_builtins (void)
2909 {
2910   tree decl;
2911 
2912   if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2913     return;
2914 
2915   tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2916   tree BT_FN_VOID_PTR
2917     = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2918   tree BT_FN_VOID_CONST_PTR
2919     = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
2920   tree BT_FN_VOID_PTR_PTR
2921     = build_function_type_list (void_type_node, ptr_type_node,
2922 				ptr_type_node, NULL_TREE);
2923   tree BT_FN_VOID_PTR_PTR_PTR
2924     = build_function_type_list (void_type_node, ptr_type_node,
2925 				ptr_type_node, ptr_type_node, NULL_TREE);
2926   tree BT_FN_VOID_PTR_PTRMODE
2927     = build_function_type_list (void_type_node, ptr_type_node,
2928 				pointer_sized_int_node, NULL_TREE);
2929   tree BT_FN_VOID_INT
2930     = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2931   tree BT_FN_SIZE_CONST_PTR_INT
2932     = build_function_type_list (size_type_node, const_ptr_type_node,
2933 				integer_type_node, NULL_TREE);
2934 
2935   tree BT_FN_VOID_UINT8_UINT8
2936     = build_function_type_list (void_type_node, unsigned_char_type_node,
2937 				unsigned_char_type_node, NULL_TREE);
2938   tree BT_FN_VOID_UINT16_UINT16
2939     = build_function_type_list (void_type_node, uint16_type_node,
2940 				uint16_type_node, NULL_TREE);
2941   tree BT_FN_VOID_UINT32_UINT32
2942     = build_function_type_list (void_type_node, uint32_type_node,
2943 				uint32_type_node, NULL_TREE);
2944   tree BT_FN_VOID_UINT64_UINT64
2945     = build_function_type_list (void_type_node, uint64_type_node,
2946 				uint64_type_node, NULL_TREE);
2947   tree BT_FN_VOID_FLOAT_FLOAT
2948     = build_function_type_list (void_type_node, float_type_node,
2949 				float_type_node, NULL_TREE);
2950   tree BT_FN_VOID_DOUBLE_DOUBLE
2951     = build_function_type_list (void_type_node, double_type_node,
2952 				double_type_node, NULL_TREE);
2953   tree BT_FN_VOID_UINT64_PTR
2954     = build_function_type_list (void_type_node, uint64_type_node,
2955 				ptr_type_node, NULL_TREE);
2956 
2957   tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2958   tree BT_FN_IX_CONST_VPTR_INT[5];
2959   tree BT_FN_IX_VPTR_IX_INT[5];
2960   tree BT_FN_VOID_VPTR_IX_INT[5];
2961   tree vptr
2962     = build_pointer_type (build_qualified_type (void_type_node,
2963 						TYPE_QUAL_VOLATILE));
2964   tree cvptr
2965     = build_pointer_type (build_qualified_type (void_type_node,
2966 						TYPE_QUAL_VOLATILE
2967 						|TYPE_QUAL_CONST));
2968   tree boolt
2969     = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2970   int i;
2971   for (i = 0; i < 5; i++)
2972     {
2973       tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2974       BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2975 	= build_function_type_list (boolt, vptr, ptr_type_node, ix,
2976 				    integer_type_node, integer_type_node,
2977 				    NULL_TREE);
2978       BT_FN_IX_CONST_VPTR_INT[i]
2979 	= build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2980       BT_FN_IX_VPTR_IX_INT[i]
2981 	= build_function_type_list (ix, vptr, ix, integer_type_node,
2982 				    NULL_TREE);
2983       BT_FN_VOID_VPTR_IX_INT[i]
2984 	= build_function_type_list (void_type_node, vptr, ix,
2985 				    integer_type_node, NULL_TREE);
2986     }
2987 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2988 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2989 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2990 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2991 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2992 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2993 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2994 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2995 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2996 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2997 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2998 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2999 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
3000 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
3001 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
3002 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
3003 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
3004 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
3005 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
3006 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
3007 #undef ATTR_NOTHROW_LEAF_LIST
3008 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
3009 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
3010 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
3011 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
3012 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
3013 #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
3014 #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
3015   ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
3016 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
3017 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
3018   ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
3019 #undef ATTR_COLD_NOTHROW_LEAF_LIST
3020 #define ATTR_COLD_NOTHROW_LEAF_LIST \
3021   /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
3022 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
3023 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
3024   /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
3025 #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
3026 #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
3027   /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
3028 #undef ATTR_PURE_NOTHROW_LEAF_LIST
3029 #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
3030 #undef DEF_BUILTIN_STUB
3031 #define DEF_BUILTIN_STUB(ENUM, NAME)
3032 #undef DEF_SANITIZER_BUILTIN_1
3033 #define DEF_SANITIZER_BUILTIN_1(ENUM, NAME, TYPE, ATTRS)		\
3034   do {									\
3035     decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM,		\
3036 				 BUILT_IN_NORMAL, NAME, NULL_TREE);	\
3037     set_call_expr_flags (decl, ATTRS);					\
3038     set_builtin_decl (ENUM, decl, true);				\
3039   } while (0)
3040 #undef DEF_SANITIZER_BUILTIN
3041 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS)	\
3042   DEF_SANITIZER_BUILTIN_1 (ENUM, NAME, TYPE, ATTRS);
3043 
3044 #include "sanitizer.def"
3045 
3046   /* -fsanitize=object-size uses __builtin_object_size, but that might
3047      not be available for e.g. Fortran at this point.  We use
3048      DEF_SANITIZER_BUILTIN_1 here only as a convenience macro.  */
3049   if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
3050       && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
3051     DEF_SANITIZER_BUILTIN_1 (BUILT_IN_OBJECT_SIZE, "object_size",
3052 			     BT_FN_SIZE_CONST_PTR_INT,
3053 			     ATTR_PURE_NOTHROW_LEAF_LIST);
3054 
3055 #undef DEF_SANITIZER_BUILTIN_1
3056 #undef DEF_SANITIZER_BUILTIN
3057 #undef DEF_BUILTIN_STUB
3058 }
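
/* Illustrative sketch (editor's note, not part of the GCC sources): for a
   sanitizer.def entry along the lines of

     DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_INIT, "__asan_init",
			    BT_FN_VOID, ATTR_NOTHROW_LEAF_LIST)

   the DEF_SANITIZER_BUILTIN_1 expansion above boils down to

     decl = add_builtin_function ("__builtin___asan_init", BT_FN_VOID,
				  BUILT_IN_ASAN_INIT, BUILT_IN_NORMAL,
				  "__asan_init", NULL_TREE);
     set_call_expr_flags (decl, ECF_NOTHROW | ECF_LEAF);
     set_builtin_decl (BUILT_IN_ASAN_INIT, decl, true);

   so later code can fetch the decl with
   builtin_decl_implicit (BUILT_IN_ASAN_INIT), as asan_finish_file does
   below.  The exact entry shown is an assumption; see sanitizer.def for the
   authoritative list.  */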
3059 
3060 /* Called via hash_table::traverse.  Count the number of emitted
3061    STRING_CSTs in the constant hash table.  */
3062 
3063 int
3064 count_string_csts (constant_descriptor_tree **slot,
3065 		   unsigned HOST_WIDE_INT *data)
3066 {
3067   struct constant_descriptor_tree *desc = *slot;
3068   if (TREE_CODE (desc->value) == STRING_CST
3069       && TREE_ASM_WRITTEN (desc->value)
3070       && asan_protect_global (desc->value))
3071     ++*data;
3072   return 1;
3073 }
3074 
3075 /* Helper structure to pass two parameters to
3076    add_string_csts.  */
3077 
3078 struct asan_add_string_csts_data
3079 {
3080   tree type;
3081   vec<constructor_elt, va_gc> *v;
3082 };
3083 
3084 /* Called via hash_table::traverse.  Call asan_add_global
3085    on emitted STRING_CSTs from the constant hash table.  */
3086 
3087 int
3088 add_string_csts (constant_descriptor_tree **slot,
3089 		 asan_add_string_csts_data *aascd)
3090 {
3091   struct constant_descriptor_tree *desc = *slot;
3092   if (TREE_CODE (desc->value) == STRING_CST
3093       && TREE_ASM_WRITTEN (desc->value)
3094       && asan_protect_global (desc->value))
3095     {
3096       asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
3097 		       aascd->type, aascd->v);
3098     }
3099   return 1;
3100 }
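
/* Editor's note: string literals emitted through the constant pool are not
   varpool variables, so asan_finish_file below cannot find them with
   FOR_EACH_DEFINED_VARIABLE; it instead traverses the constant descriptor
   hash table with the two callbacks above, first to count the protected
   STRING_CSTs and then to append their global descriptors.  */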
3101 
3102 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
3103    invoke ggc_collect.  */
3104 static GTY(()) tree asan_ctor_statements;
3105 
3106 /* Module-level instrumentation.
3107    - Insert __asan_init_vN() into the list of CTORs.
3108    - TODO: insert redzones around globals.
3109  */
3110 
3111 void
3112 asan_finish_file (void)
3113 {
3114   varpool_node *vnode;
3115   unsigned HOST_WIDE_INT gcount = 0;
3116 
3117   if (shadow_ptr_types[0] == NULL_TREE)
3118     asan_init_shadow_ptr_types ();
3119   /* Avoid instrumenting code in the asan ctors/dtors, and avoid adding
3120      padding after the description strings and the .LASAN* array emitted
3121      below.  */
3122   flag_sanitize &= ~SANITIZE_ADDRESS;
3123 
3124   /* For user-space we want the asan constructors to run first.
3125      The Linux kernel does not support priorities other than the default,
3126      and its only other user of constructors is coverage, so for the kernel
3127      we run with the default priority.  */
3128   int priority = flag_sanitize & SANITIZE_USER_ADDRESS
3129                  ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
3130 
3131   if (flag_sanitize & SANITIZE_USER_ADDRESS)
3132     {
3133       tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
3134       append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
3135       fn = builtin_decl_implicit (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK);
3136       append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
3137     }
3138   FOR_EACH_DEFINED_VARIABLE (vnode)
3139     if (TREE_ASM_WRITTEN (vnode->decl)
3140 	&& asan_protect_global (vnode->decl))
3141       ++gcount;
3142   hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
3143   const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
3144     (&gcount);
3145   if (gcount)
3146     {
3147       tree type = asan_global_struct (), var, ctor;
3148       tree dtor_statements = NULL_TREE;
3149       vec<constructor_elt, va_gc> *v;
3150       char buf[20];
3151 
3152       type = build_array_type_nelts (type, gcount);
3153       ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
3154       var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
3155 			type);
3156       TREE_STATIC (var) = 1;
3157       TREE_PUBLIC (var) = 0;
3158       DECL_ARTIFICIAL (var) = 1;
3159       DECL_IGNORED_P (var) = 1;
3160       vec_alloc (v, gcount);
3161       FOR_EACH_DEFINED_VARIABLE (vnode)
3162 	if (TREE_ASM_WRITTEN (vnode->decl)
3163 	    && asan_protect_global (vnode->decl))
3164 	  asan_add_global (vnode->decl, TREE_TYPE (type), v);
3165       struct asan_add_string_csts_data aascd;
3166       aascd.type = TREE_TYPE (type);
3167       aascd.v = v;
3168       const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
3169        	(&aascd);
3170       ctor = build_constructor (type, v);
3171       TREE_CONSTANT (ctor) = 1;
3172       TREE_STATIC (ctor) = 1;
3173       DECL_INITIAL (var) = ctor;
3174       SET_DECL_ALIGN (var, MAX (DECL_ALIGN (var),
3175 				ASAN_SHADOW_GRANULARITY * BITS_PER_UNIT));
3176 
3177       varpool_node::finalize_decl (var);
3178 
3179       tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
3180       tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
3181       append_to_statement_list (build_call_expr (fn, 2,
3182 						 build_fold_addr_expr (var),
3183 						 gcount_tree),
3184 				&asan_ctor_statements);
3185 
3186       fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
3187       append_to_statement_list (build_call_expr (fn, 2,
3188 						 build_fold_addr_expr (var),
3189 						 gcount_tree),
3190 				&dtor_statements);
3191       cgraph_build_static_cdtor ('D', dtor_statements, priority);
3192     }
3193   if (asan_ctor_statements)
3194     cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
3195   flag_sanitize |= SANITIZE_ADDRESS;
3196 }
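
/* Editor's sketch of the net effect of asan_finish_file for a translation
   unit with GCOUNT protected globals (identifiers are illustrative, not the
   exact mangled names):

     static struct __asan_global .LASAN0[GCOUNT] = { ... };

     // static constructor, high priority for user-space ASan
     __asan_init ();
     __asan_version_mismatch_check_v<N> ();
     __asan_register_globals (&.LASAN0, GCOUNT);

     // matching static destructor
     __asan_unregister_globals (&.LASAN0, GCOUNT);  */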
3197 
3198 /* Poison or unpoison (depending on the IS_CLOBBER flag) shadow memory based
3199    on the SHADOW address.  Newly added statements are inserted at ITER with
3200    the given location LOC.  We mark SIZE bytes in shadow memory, where
3201    LAST_CHUNK_SIZE is greater than zero when we are at the end of a
3202    variable.  */
3203 
3204 static void
3205 asan_store_shadow_bytes (gimple_stmt_iterator *iter, location_t loc,
3206 			 tree shadow,
3207 			 unsigned HOST_WIDE_INT base_addr_offset,
3208 			 bool is_clobber, unsigned size,
3209 			 unsigned last_chunk_size)
3210 {
3211   tree shadow_ptr_type;
3212 
3213   switch (size)
3214     {
3215     case 1:
3216       shadow_ptr_type = shadow_ptr_types[0];
3217       break;
3218     case 2:
3219       shadow_ptr_type = shadow_ptr_types[1];
3220       break;
3221     case 4:
3222       shadow_ptr_type = shadow_ptr_types[2];
3223       break;
3224     default:
3225       gcc_unreachable ();
3226     }
3227 
3228   unsigned char c = is_clobber ? ASAN_STACK_MAGIC_USE_AFTER_SCOPE : 0;
3229   unsigned HOST_WIDE_INT val = 0;
3230   unsigned last_pos = size;
3231   if (last_chunk_size && !is_clobber)
3232     last_pos = BYTES_BIG_ENDIAN ? 0 : size - 1;
3233   for (unsigned i = 0; i < size; ++i)
3234     {
3235       unsigned char shadow_c = c;
3236       if (i == last_pos)
3237 	shadow_c = last_chunk_size;
3238       val |= (unsigned HOST_WIDE_INT) shadow_c << (BITS_PER_UNIT * i);
3239     }
3240 
3241   /* Handle last chunk in unpoisoning.  */
3242   tree magic = build_int_cst (TREE_TYPE (shadow_ptr_type), val);
3243 
3244   tree dest = build2 (MEM_REF, TREE_TYPE (shadow_ptr_type), shadow,
3245 		      build_int_cst (shadow_ptr_type, base_addr_offset));
3246 
3247   gimple *g = gimple_build_assign (dest, magic);
3248   gimple_set_location (g, loc);
3249   gsi_insert_after (iter, g, GSI_NEW_STMT);
3250 }
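
/* Worked example (editor's note, assuming ASAN_SHADOW_GRANULARITY == 8 and a
   little-endian target): unpoisoning a 29-byte variable needs 4 shadow
   bytes, so the caller passes SIZE == 4 and LAST_CHUNK_SIZE == 29 % 8 == 5.
   The loop above then builds VAL == 0x05000000 and the single 4-byte store
   writes the shadow bytes 00 00 00 05: three fully addressable 8-byte
   granules followed by one with only its first 5 bytes valid.  When
   poisoning, every shadow byte becomes ASAN_STACK_MAGIC_USE_AFTER_SCOPE
   instead.  */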
3251 
3252 /* Expand the ASAN_MARK builtins.  */
3253 
3254 bool
3255 asan_expand_mark_ifn (gimple_stmt_iterator *iter)
3256 {
3257   gimple *g = gsi_stmt (*iter);
3258   location_t loc = gimple_location (g);
3259   HOST_WIDE_INT flag = tree_to_shwi (gimple_call_arg (g, 0));
3260   bool is_poison = ((asan_mark_flags)flag) == ASAN_MARK_POISON;
3261 
3262   tree base = gimple_call_arg (g, 1);
3263   gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
3264   tree decl = TREE_OPERAND (base, 0);
3265 
3266   /* For a nested function, we can have: ASAN_MARK (2, &FRAME.2.fp_input, 4) */
3267   if (TREE_CODE (decl) == COMPONENT_REF
3268       && DECL_NONLOCAL_FRAME (TREE_OPERAND (decl, 0)))
3269     decl = TREE_OPERAND (decl, 0);
3270 
3271   gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
3272 
3273   if (is_poison)
3274     {
3275       if (asan_handled_variables == NULL)
3276 	asan_handled_variables = new hash_set<tree> (16);
3277       asan_handled_variables->add (decl);
3278     }
3279   tree len = gimple_call_arg (g, 2);
3280 
3281   gcc_assert (tree_fits_shwi_p (len));
3282   unsigned HOST_WIDE_INT size_in_bytes = tree_to_shwi (len);
3283   gcc_assert (size_in_bytes);
3284 
3285   g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3286 			   NOP_EXPR, base);
3287   gimple_set_location (g, loc);
3288   gsi_replace (iter, g, false);
3289   tree base_addr = gimple_assign_lhs (g);
3290 
3291   /* Emit the shadow stores directly if size_in_bytes is small.  */
3292   if (size_in_bytes
3293       <= (unsigned)param_use_after_scope_direct_emission_threshold)
3294     {
3295       const unsigned HOST_WIDE_INT shadow_size
3296 	= shadow_mem_size (size_in_bytes);
3297       const unsigned int shadow_align
3298 	= (get_pointer_alignment (base) / BITS_PER_UNIT) >> ASAN_SHADOW_SHIFT;
3299 
3300       tree shadow = build_shadow_mem_access (iter, loc, base_addr,
3301 					     shadow_ptr_types[0], true);
3302 
3303       for (unsigned HOST_WIDE_INT offset = 0; offset < shadow_size;)
3304 	{
3305 	  unsigned size = 1;
3306 	  if (shadow_size - offset >= 4
3307 	      && (!STRICT_ALIGNMENT || shadow_align >= 4))
3308 	    size = 4;
3309 	  else if (shadow_size - offset >= 2
3310 		   && (!STRICT_ALIGNMENT || shadow_align >= 2))
3311 	    size = 2;
3312 
3313 	  unsigned HOST_WIDE_INT last_chunk_size = 0;
3314 	  unsigned HOST_WIDE_INT s = (offset + size) * ASAN_SHADOW_GRANULARITY;
3315 	  if (s > size_in_bytes)
3316 	    last_chunk_size = ASAN_SHADOW_GRANULARITY - (s - size_in_bytes);
3317 
3318 	  asan_store_shadow_bytes (iter, loc, shadow, offset, is_poison,
3319 				   size, last_chunk_size);
3320 	  offset += size;
3321 	}
3322     }
3323   else
3324     {
3325       g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3326 			       NOP_EXPR, len);
3327       gimple_set_location (g, loc);
3328       gsi_insert_before (iter, g, GSI_SAME_STMT);
3329       tree sz_arg = gimple_assign_lhs (g);
3330 
3331       tree fun
3332 	= builtin_decl_implicit (is_poison ? BUILT_IN_ASAN_POISON_STACK_MEMORY
3333 				 : BUILT_IN_ASAN_UNPOISON_STACK_MEMORY);
3334       g = gimple_build_call (fun, 2, base_addr, sz_arg);
3335       gimple_set_location (g, loc);
3336       gsi_insert_after (iter, g, GSI_NEW_STMT);
3337     }
3338 
3339   return false;
3340 }
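
/* Editor's note: in other words, a small ASAN_MARK (POISON, &x, 29) is
   expanded inline into the shadow stores shown above, while a size above
   the direct-emission threshold checked at the top of the function becomes
   a run-time call, roughly

     __asan_poison_stack_memory ((uptr) &x, 29);

   with ASAN_MARK_UNPOISON using __asan_unpoison_stack_memory instead.  */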
3341 
3342 /* Expand the ASAN_{LOAD,STORE} builtins.  */
3343 
3344 bool
3345 asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
3346 {
3347   gimple *g = gsi_stmt (*iter);
3348   location_t loc = gimple_location (g);
3349   bool recover_p;
3350   if (flag_sanitize & SANITIZE_USER_ADDRESS)
3351     recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
3352   else
3353     recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
3354 
3355   HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
3356   gcc_assert (flags < ASAN_CHECK_LAST);
3357   bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
3358   bool is_store = (flags & ASAN_CHECK_STORE) != 0;
3359   bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
3360 
3361   tree base = gimple_call_arg (g, 1);
3362   tree len = gimple_call_arg (g, 2);
3363   HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
3364 
3365   HOST_WIDE_INT size_in_bytes
3366     = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
3367 
3368   if (use_calls)
3369     {
3370       /* Instrument using callbacks.  */
3371       gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3372 				      NOP_EXPR, base);
3373       gimple_set_location (g, loc);
3374       gsi_insert_before (iter, g, GSI_SAME_STMT);
3375       tree base_addr = gimple_assign_lhs (g);
3376 
3377       int nargs;
3378       tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
3379       if (nargs == 1)
3380 	g = gimple_build_call (fun, 1, base_addr);
3381       else
3382 	{
3383 	  gcc_assert (nargs == 2);
3384 	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3385 				   NOP_EXPR, len);
3386 	  gimple_set_location (g, loc);
3387 	  gsi_insert_before (iter, g, GSI_SAME_STMT);
3388 	  tree sz_arg = gimple_assign_lhs (g);
3389 	  g = gimple_build_call (fun, nargs, base_addr, sz_arg);
3390 	}
3391       gimple_set_location (g, loc);
3392       gsi_replace (iter, g, false);
3393       return false;
3394     }
3395 
3396   HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
3397 
3398   tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
3399   tree shadow_type = TREE_TYPE (shadow_ptr_type);
3400 
3401   gimple_stmt_iterator gsi = *iter;
3402 
3403   if (!is_non_zero_len)
3404     {
3405       /* So the length of the memory area to asan-protect is non-constant
3406 	 and may turn out to be zero at run time.  Guard the generated
3407 	 instrumentation code like:
3408 
3409 	 if (len != 0)
3410 	   {
3411 	     // asan instrumentation code goes here.
3412 	   }
3413 	 // fallthrough instructions, starting with *ITER.  */
3414 
3415       g = gimple_build_cond (NE_EXPR,
3416 			    len,
3417 			    build_int_cst (TREE_TYPE (len), 0),
3418 			    NULL_TREE, NULL_TREE);
3419       gimple_set_location (g, loc);
3420 
3421       basic_block then_bb, fallthrough_bb;
3422       insert_if_then_before_iter (as_a <gcond *> (g), iter,
3423 				  /*then_more_likely_p=*/true,
3424 				  &then_bb, &fallthrough_bb);
3425       /* Note that fallthrough_bb starts with the statement that was
3426 	pointed to by ITER.  */
3427 
3428       /* The 'then block' of the 'if (len != 0)' condition is where
3429 	 we'll generate the asan instrumentation code now.  */
3430       gsi = gsi_last_bb (then_bb);
3431     }
3432 
3433   /* Get an iterator on the point where we can add the condition
3434      statement for the instrumentation.  */
3435   basic_block then_bb, else_bb;
3436   gsi = create_cond_insert_point (&gsi, /*before_p*/false,
3437 				  /*then_more_likely_p=*/false,
3438 				  /*create_then_fallthru_edge*/recover_p,
3439 				  &then_bb,
3440 				  &else_bb);
3441 
3442   g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3443 			   NOP_EXPR, base);
3444   gimple_set_location (g, loc);
3445   gsi_insert_before (&gsi, g, GSI_NEW_STMT);
3446   tree base_addr = gimple_assign_lhs (g);
3447 
3448   tree t = NULL_TREE;
3449   if (real_size_in_bytes >= 8)
3450     {
3451       tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
3452 					     shadow_ptr_type);
3453       t = shadow;
3454     }
3455   else
3456     {
3457       /* Slow path for 1, 2 and 4 byte accesses.  */
3458       /* Test (shadow != 0)
3459 	 & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow).  */
3460       tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
3461 					     shadow_ptr_type);
3462       gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
3463       gimple_seq seq = NULL;
3464       gimple_seq_add_stmt (&seq, shadow_test);
3465       /* Aligned accesses (>= 8 bytes) can test just
3466 	 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
3467 	 to be 0.  */
3468       if (align < 8)
3469 	{
3470 	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
3471 						   base_addr, 7));
3472 	  gimple_seq_add_stmt (&seq,
3473 			       build_type_cast (shadow_type,
3474 						gimple_seq_last (seq)));
3475 	  if (real_size_in_bytes > 1)
3476 	    gimple_seq_add_stmt (&seq,
3477 				 build_assign (PLUS_EXPR,
3478 					       gimple_seq_last (seq),
3479 					       real_size_in_bytes - 1));
3480 	  t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
3481 	}
3482       else
3483 	t = build_int_cst (shadow_type, real_size_in_bytes - 1);
3484       gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
3485       gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
3486 					       gimple_seq_last (seq)));
3487       t = gimple_assign_lhs (gimple_seq_last (seq));
3488       gimple_seq_set_location (seq, loc);
3489       gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
3490 
3491       /* For non-constant, misaligned or otherwise weird access sizes,
3492 	 check the first and last byte.  */
3493       if (size_in_bytes == -1)
3494 	{
3495 	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3496 				   MINUS_EXPR, len,
3497 				   build_int_cst (pointer_sized_int_node, 1));
3498 	  gimple_set_location (g, loc);
3499 	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3500 	  tree last = gimple_assign_lhs (g);
3501 	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
3502 				   PLUS_EXPR, base_addr, last);
3503 	  gimple_set_location (g, loc);
3504 	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3505 	  tree base_end_addr = gimple_assign_lhs (g);
3506 
3507 	  tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
3508 						 shadow_ptr_type);
3509 	  gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
3510 	  gimple_seq seq = NULL;
3511 	  gimple_seq_add_stmt (&seq, shadow_test);
3512 	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
3513 						   base_end_addr, 7));
3514 	  gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
3515 						      gimple_seq_last (seq)));
3516 	  gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
3517 						   gimple_seq_last (seq),
3518 						   shadow));
3519 	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
3520 						   gimple_seq_last (seq)));
3521 	  gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
3522 						   gimple_seq_last (seq)));
3523 	  t = gimple_assign_lhs (gimple_seq_last (seq));
3524 	  gimple_seq_set_location (seq, loc);
3525 	  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
3526 	}
3527     }
3528 
3529   g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
3530 			 NULL_TREE, NULL_TREE);
3531   gimple_set_location (g, loc);
3532   gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3533 
3534   /* Generate call to the run-time library (e.g. __asan_report_load8).  */
3535   gsi = gsi_start_bb (then_bb);
3536   int nargs;
3537   tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
3538   g = gimple_build_call (fun, nargs, base_addr, len);
3539   gimple_set_location (g, loc);
3540   gsi_insert_after (&gsi, g, GSI_NEW_STMT);
3541 
3542   gsi_remove (iter, true);
3543   *iter = gsi_start_bb (else_bb);
3544 
3545   return true;
3546 }
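
/* Usage note (editor's sketch): when USE_CALLS is true (the caller decided
   to instrument with callbacks, typically once a function exceeds the
   call-threshold parameter), the internal call is simply replaced by a
   run-time callback such as __asan_load4 (addr), or __asan_loadN (addr, len)
   when the access size is not one of the known widths.  Otherwise the inline
   shadow test built above branches to a __asan_report_* call in THEN_BB
   (noreturn unless recovery is enabled) and normal execution continues in
   ELSE_BB.  */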
3547 
3548 /* Create ASAN shadow variable for a VAR_DECL which has been rewritten
3549    into SSA.  Already seen VAR_DECLs are stored in SHADOW_VARS_MAPPING.  */
3550 
3551 static tree
3552 create_asan_shadow_var (tree var_decl,
3553 			hash_map<tree, tree> &shadow_vars_mapping)
3554 {
3555   tree *slot = shadow_vars_mapping.get (var_decl);
3556   if (slot == NULL)
3557     {
3558       tree shadow_var = copy_node (var_decl);
3559 
3560       copy_body_data id;
3561       memset (&id, 0, sizeof (copy_body_data));
3562       id.src_fn = id.dst_fn = current_function_decl;
3563       copy_decl_for_dup_finish (&id, var_decl, shadow_var);
3564 
3565       DECL_ARTIFICIAL (shadow_var) = 1;
3566       DECL_IGNORED_P (shadow_var) = 1;
3567       DECL_SEEN_IN_BIND_EXPR_P (shadow_var) = 0;
3568       gimple_add_tmp_var (shadow_var);
3569 
3570       shadow_vars_mapping.put (var_decl, shadow_var);
3571       return shadow_var;
3572     }
3573   else
3574     return *slot;
3575 }
3576 
3577 /* Expand ASAN_POISON ifn.  */
3578 
3579 bool
3580 asan_expand_poison_ifn (gimple_stmt_iterator *iter,
3581 			bool *need_commit_edge_insert,
3582 			hash_map<tree, tree> &shadow_vars_mapping)
3583 {
3584   gimple *g = gsi_stmt (*iter);
3585   tree poisoned_var = gimple_call_lhs (g);
3586   if (!poisoned_var || has_zero_uses (poisoned_var))
3587     {
3588       gsi_remove (iter, true);
3589       return true;
3590     }
3591 
3592   if (SSA_NAME_VAR (poisoned_var) == NULL_TREE)
3593     SET_SSA_NAME_VAR_OR_IDENTIFIER (poisoned_var,
3594 				    create_tmp_var (TREE_TYPE (poisoned_var)));
3595 
3596   tree shadow_var = create_asan_shadow_var (SSA_NAME_VAR (poisoned_var),
3597 					    shadow_vars_mapping);
3598 
3599   bool recover_p;
3600   if (flag_sanitize & SANITIZE_USER_ADDRESS)
3601     recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
3602   else
3603     recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
3604   tree size = DECL_SIZE_UNIT (shadow_var);
3605   gimple *poison_call
3606     = gimple_build_call_internal (IFN_ASAN_MARK, 3,
3607 				  build_int_cst (integer_type_node,
3608 						 ASAN_MARK_POISON),
3609 				  build_fold_addr_expr (shadow_var), size);
3610 
3611   gimple *use;
3612   imm_use_iterator imm_iter;
3613   FOR_EACH_IMM_USE_STMT (use, imm_iter, poisoned_var)
3614     {
3615       if (is_gimple_debug (use))
3616 	continue;
3617 
3618       int nargs;
3619       bool store_p = gimple_call_internal_p (use, IFN_ASAN_POISON_USE);
3620       tree fun = report_error_func (store_p, recover_p, tree_to_uhwi (size),
3621 				    &nargs);
3622 
3623       gcall *call = gimple_build_call (fun, 1,
3624 				       build_fold_addr_expr (shadow_var));
3625       gimple_set_location (call, gimple_location (use));
3626       gimple *call_to_insert = call;
3627 
3628       /* The USE can be a gimple PHI node.  If so, insert the call on
3629 	 all edges leading to the PHI node.  */
3630       if (is_a <gphi *> (use))
3631 	{
3632 	  gphi *phi = dyn_cast<gphi *> (use);
3633 	  for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
3634 	    if (gimple_phi_arg_def (phi, i) == poisoned_var)
3635 	      {
3636 		edge e = gimple_phi_arg_edge (phi, i);
3637 
3638 		/* Do not insert on an edge we can't split.  */
3639 		if (e->flags & EDGE_ABNORMAL)
3640 		  continue;
3641 
3642 		if (call_to_insert == NULL)
3643 		  call_to_insert = gimple_copy (call);
3644 
3645 		gsi_insert_seq_on_edge (e, call_to_insert);
3646 		*need_commit_edge_insert = true;
3647 		call_to_insert = NULL;
3648 	      }
3649 	}
3650       else
3651 	{
3652 	  gimple_stmt_iterator gsi = gsi_for_stmt (use);
3653 	  if (store_p)
3654 	    gsi_replace (&gsi, call, true);
3655 	  else
3656 	    gsi_insert_before (&gsi, call, GSI_NEW_STMT);
3657 	}
3658     }
3659 
3660   SSA_NAME_IS_DEFAULT_DEF (poisoned_var) = true;
3661   SSA_NAME_DEF_STMT (poisoned_var) = gimple_build_nop ();
3662   gsi_replace (iter, poison_call, false);
3663 
3664   return true;
3665 }
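
/* Editor's sketch of the transformation: for GIMPLE like

     x_1 = ASAN_POISON ();
     ...
     tmp_2 = x_1;	// use after the variable's scope ended

   the defining statement is replaced by
   ASAN_MARK (POISON, &x.shadow, sizeof x), and every real use of x_1 gets a
   __asan_report_load<size> (&x.shadow) inserted before it (or replacing it,
   for IFN_ASAN_POISON_USE stores), where x.shadow stands for the artificial
   variable from create_asan_shadow_var; the names here are illustrative.  */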
3666 
3667 /* Instrument the current function.  */
3668 
3669 static unsigned int
3670 asan_instrument (void)
3671 {
3672   if (shadow_ptr_types[0] == NULL_TREE)
3673     asan_init_shadow_ptr_types ();
3674   transform_statements ();
3675   last_alloca_addr = NULL_TREE;
3676   return 0;
3677 }
3678 
3679 static bool
3680 gate_asan (void)
3681 {
3682   return sanitize_flags_p (SANITIZE_ADDRESS);
3683 }
3684 
3685 namespace {
3686 
3687 const pass_data pass_data_asan =
3688 {
3689   GIMPLE_PASS, /* type */
3690   "asan", /* name */
3691   OPTGROUP_NONE, /* optinfo_flags */
3692   TV_NONE, /* tv_id */
3693   ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
3694   0, /* properties_provided */
3695   0, /* properties_destroyed */
3696   0, /* todo_flags_start */
3697   TODO_update_ssa, /* todo_flags_finish */
3698 };
3699 
3700 class pass_asan : public gimple_opt_pass
3701 {
3702 public:
3703   pass_asan (gcc::context *ctxt)
3704     : gimple_opt_pass (pass_data_asan, ctxt)
3705   {}
3706 
3707   /* opt_pass methods: */
3708   opt_pass * clone () { return new pass_asan (m_ctxt); }
3709   virtual bool gate (function *) { return gate_asan (); }
3710   virtual unsigned int execute (function *) { return asan_instrument (); }
3711 
3712 }; // class pass_asan
3713 
3714 } // anon namespace
3715 
3716 gimple_opt_pass *
3717 make_pass_asan (gcc::context *ctxt)
3718 {
3719   return new pass_asan (ctxt);
3720 }
3721 
3722 namespace {
3723 
3724 const pass_data pass_data_asan_O0 =
3725 {
3726   GIMPLE_PASS, /* type */
3727   "asan0", /* name */
3728   OPTGROUP_NONE, /* optinfo_flags */
3729   TV_NONE, /* tv_id */
3730   ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
3731   0, /* properties_provided */
3732   0, /* properties_destroyed */
3733   0, /* todo_flags_start */
3734   TODO_update_ssa, /* todo_flags_finish */
3735 };
3736 
3737 class pass_asan_O0 : public gimple_opt_pass
3738 {
3739 public:
3740   pass_asan_O0 (gcc::context *ctxt)
3741     : gimple_opt_pass (pass_data_asan_O0, ctxt)
3742   {}
3743 
3744   /* opt_pass methods: */
3745   virtual bool gate (function *) { return !optimize && gate_asan (); }
3746   virtual unsigned int execute (function *) { return asan_instrument (); }
3747 
3748 }; // class pass_asan_O0
3749 
3750 } // anon namespace
3751 
3752 gimple_opt_pass *
3753 make_pass_asan_O0 (gcc::context *ctxt)
3754 {
3755   return new pass_asan_O0 (ctxt);
3756 }
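
/* Editor's note: the two pass objects differ only in their gates.  pass_asan
   is meant to run from the optimizing pipeline, while pass_asan_O0 fires only
   when !optimize, so each function is instrumented exactly once; the actual
   placement of both passes is given by passes.def.  */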
3757 
3758 #include "gt-asan.h"
3759