xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/asan.c (revision cef8759bd76c1b621f8eab8faa6f208faabc2e15)
1 /* AddressSanitizer, a fast memory error detector.
2    Copyright (C) 2012-2017 Free Software Foundation, Inc.
3    Contributed by Kostya Serebryany <kcc@google.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "backend.h"
26 #include "target.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "cfghooks.h"
31 #include "alloc-pool.h"
32 #include "tree-pass.h"
33 #include "memmodel.h"
34 #include "tm_p.h"
35 #include "ssa.h"
36 #include "stringpool.h"
37 #include "tree-ssanames.h"
38 #include "optabs.h"
39 #include "emit-rtl.h"
40 #include "cgraph.h"
41 #include "gimple-pretty-print.h"
42 #include "alias.h"
43 #include "fold-const.h"
44 #include "cfganal.h"
45 #include "gimplify.h"
46 #include "gimple-iterator.h"
47 #include "varasm.h"
48 #include "stor-layout.h"
49 #include "tree-iterator.h"
50 #include "asan.h"
51 #include "dojump.h"
52 #include "explow.h"
53 #include "expr.h"
54 #include "output.h"
55 #include "langhooks.h"
56 #include "cfgloop.h"
57 #include "gimple-builder.h"
58 #include "ubsan.h"
59 #include "params.h"
60 #include "builtins.h"
61 #include "fnmatch.h"
62 #include "tree-inline.h"
63 
64 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
65    with <2x slowdown on average.
66 
67    The tool consists of two parts:
68    instrumentation module (this file) and a run-time library.
69    The instrumentation module adds a run-time check before every memory insn.
70      For an 8- or 16-byte load accessing address X:
71        ShadowAddr = (X >> 3) + Offset
72        ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
73        if (ShadowValue)
74 	 __asan_report_load8(X);
75      For a load of N bytes (N=1, 2 or 4) from address X:
76        ShadowAddr = (X >> 3) + Offset
77        ShadowValue = *(char*)ShadowAddr;
78        if (ShadowValue)
79 	 if ((X & 7) + N - 1 > ShadowValue)
80 	   __asan_report_loadN(X);
81    Stores are instrumented similarly, but using __asan_report_storeN functions.
82    A call to __asan_init_vN() is inserted into the list of module CTORs.
83    N is the version number of the AddressSanitizer API. The changes between the
84    API versions are listed in libsanitizer/asan/asan_interface_internal.h.
85 
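   As a worked example (illustrative numbers; Offset is target-specific,
   e.g. 0x7fff8000 on x86_64 GNU/Linux): suppose a 4-byte load at X with
   (X & 7) == 3 finds ShadowValue == 5, i.e. only the first 5 bytes of
   X's 8-byte granule are addressable.  Since (X & 7) + N - 1
   == 3 + 4 - 1 == 6 > 5, __asan_report_load4 (X) is called.  A
   ShadowValue of 0 would mean the whole granule is addressable and the
   access would pass the check.
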
86    The run-time library redefines malloc (so that red zones are inserted around
87    the allocated memory) and free (so that reuse of freed memory is delayed),
88    provides __asan_report* and __asan_init_vN functions.
89 
90    Read more:
91    http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
92 
93    The current implementation supports detection of out-of-bounds and
94    use-after-free in the heap, on the stack and for global variables.
95 
96    [Protection of stack variables]
97 
98    To understand how detection of out-of-bounds and use-after-free works
99    for stack variables, let's look at this example on x86_64 where the
100    stack grows downward:
101 
102      int
103      foo ()
104      {
105        char a[23] = {0};
106        int b[2] = {0};
107 
108        a[5] = 1;
109        b[1] = 2;
110 
111        return a[5] + b[1];
112      }
113 
114    For this function, the stack protected by asan will be organized as
115    follows, from the top of the stack to the bottom:
116 
117    Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
118 
119    Slot 2/ [8 bytes of red zone, added to the space of 'a' to make
120 	   the next slot 32-byte aligned; this one is called a Partial
121 	   Redzone; this 32-byte alignment is an asan constraint]
122 
123    Slot 3/ [24 bytes for variable 'a']
124 
125    Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
126 
127    Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
128 
129    Slot 6/ [8 bytes for variable 'b']
130 
131    Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
132 	    'LEFT RedZone']
133 
134    The 32 bytes of LEFT red zone at the bottom of the stack can be
135    decomposed as such:
136 
137      1/ The first 8 bytes contain a magical asan number that is always
138      0x41B58AB3.
139 
140      2/ The following 8 bytes contain a pointer to a string (to be
141      parsed at run time by the asan runtime library), whose format is
142      the following:
143 
144       "<function-name> <space> <num-of-variables-on-the-stack>
145       (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
146       <length-of-var-in-bytes> ){n} "
147 
148 	where '(...){n}' means the content inside the parentheses occurs 'n'
149 	times, with 'n' being the number of variables on the stack.
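
	As an illustration (a sketch; the exact offsets depend on the
	actual frame layout), for the function foo above the string
	could be:

	  "2 32 8 1 b 96 24 1 a "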
150 
151      3/ The following 8 bytes contain the PC of the current function,
152      which will be used by the run-time library to print an error message.
153 
154      4/ The following 8 bytes are reserved for internal use by the run-time.
155 
156    The shadow memory for that stack layout is going to look like this:
157 
158      - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
159        The F1 byte pattern is a magic number called
160        ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
161        the memory for that shadow byte is part of the LEFT red zone
162        intended to sit at the bottom of the variables on the stack.
163 
164      - content of shadow memory 8 bytes for slots 6 and 5:
165        0xF4F4F400.  The F4 byte pattern is a magic number
166        called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
167        memory region for this shadow byte is a PARTIAL red zone
168        intended to pad a variable A, so that the slot following
169        {A,padding} is 32-byte aligned.
170 
171        Note that the fact that the least significant byte of this
172        shadow memory content is 00 means that 8 bytes of its
173        corresponding memory (which corresponds to the memory of
174        variable 'b') are addressable.
175 
176      - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
177        The F2 byte pattern is a magic number called
178        ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
179        region for this shadow byte is a MIDDLE red zone intended to
180        sit between two 32-byte-aligned slots of {variable,padding}.
181 
182      - content of shadow memory 8 bytes for slot 3 and 2:
183        0xF4000000.  This represents the concatenation of
184        variable 'a' and the partial red zone following it, like what we
185        had for variable 'b'.  The least significant 3 bytes being 00
186        means that the corresponding 24 bytes (variable 'a' plus padding) are addressable.
187 
188      - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
189        The F3 byte pattern is a magic number called
190        ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
191        region for this shadow byte is a RIGHT red zone intended to sit
192        at the top of the variables on the stack.
193 
194    Note that the real variable layout is done in expand_used_vars in
195    cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
196    stack variables as well as the different red zones, emits some
197    prologue code to populate the shadow memory so as to poison (mark as
198    non-accessible) the regions of the red zones and mark the regions of
199    stack variables as accessible, and emits some epilogue code to
200    un-poison (mark as accessible) the regions of red zones right before
201    the function exits.
202 
203    [Protection of global variables]
204 
205    The basic idea is to insert a red zone between two global variables
206    and install a constructor function that calls the asan runtime to do
207    the populating of the relevant shadow memory regions at load time.
208 
209    So the global variables are laid out so as to insert a red zone between
210    them.  The red zones are sized so that each variable starts on a
211    32-byte boundary.
212 
213    Then a constructor function is installed so that, for each global
214    variable, it calls the runtime asan library function
215    __asan_register_globals with an instance of this type:
216 
217      struct __asan_global
218      {
219        // Address of the beginning of the global variable.
220        const void *__beg;
221 
222        // Initial size of the global variable.
223        uptr __size;
224 
225        // Size of the global variable + size of the red zone.  This
226        //   size is 32-byte aligned.
227        uptr __size_with_redzone;
228 
229        // Name of the global variable.
230        const void *__name;
231 
232        // Name of the module where the global variable is declared.
233        const void *__module_name;
234 
235        // 1 if it has dynamic initialization, 0 otherwise.
236        uptr __has_dynamic_init;
237 
238        // A pointer to struct that contains source location, could be NULL.
239        __asan_global_source_location *__location;
240      }
241 
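   As an illustrative sketch (the constructor's name and the global
   count here are hypothetical), the installed constructor behaves
   like:

     static struct __asan_global __gs[2];  // filled in by the compiler
     static void ctor (void)
     {
       __asan_register_globals (__gs, 2);
     }
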
242    A destructor function that calls the runtime asan library function
243    __asan_unregister_globals is also installed.  */
244 
245 static unsigned HOST_WIDE_INT asan_shadow_offset_value;
246 static bool asan_shadow_offset_computed;
247 static vec<char *> sanitized_sections;
248 
249 /* Set of variable declarations that are going to be guarded by
250    use-after-scope sanitizer.  */
251 
252 static hash_set<tree> *asan_handled_variables = NULL;
253 
254 hash_set <tree> *asan_used_labels = NULL;
255 
256 /* Set the shadow offset from string VAL; return true on success.  */
257 
258 bool
259 set_asan_shadow_offset (const char *val)
260 {
261   char *endp;
262 
263   errno = 0;
264 #ifdef HAVE_LONG_LONG
265   asan_shadow_offset_value = strtoull (val, &endp, 0);
266 #else
267   asan_shadow_offset_value = strtoul (val, &endp, 0);
268 #endif
269   if (!(*val != '\0' && *endp == '\0' && errno == 0))
270     return false;
271 
272   asan_shadow_offset_computed = true;
273 
274   return true;
275 }
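
/* For example (illustrative): -fasan-shadow-offset=0x7fff8000 arrives
   here as VAL == "0x7fff8000"; the base argument of 0 lets strtoull
   auto-detect the 0x prefix.  */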
276 
277 /* Set list of user-defined sections that need to be sanitized.  */
278 
279 void
280 set_sanitized_sections (const char *sections)
281 {
282   char *pat;
283   unsigned i;
284   FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
285     free (pat);
286   sanitized_sections.truncate (0);
287 
288   for (const char *s = sections; *s; )
289     {
290       const char *end;
291       for (end = s; *end && *end != ','; ++end);
292       size_t len = end - s;
293       sanitized_sections.safe_push (xstrndup (s, len));
294       s = *end ? end + 1 : end;
295     }
296 }
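
/* For example (illustrative): -fsanitize-sections=.data.*,.mysec pushes
   the glob patterns ".data.*" and ".mysec", which section_sanitized_p
   below matches against section names using fnmatch.  */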
297 
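/* Return true if STMT is an ASAN_MARK internal call whose flag operand
   (argument 0) equals FLAG.  */
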
298 bool
299 asan_mark_p (gimple *stmt, enum asan_mark_flags flag)
300 {
301   return (gimple_call_internal_p (stmt, IFN_ASAN_MARK)
302 	  && tree_to_uhwi (gimple_call_arg (stmt, 0)) == flag);
303 }
304 
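/* Return true if stack variables should be instrumented: address
   sanitizing is enabled, stack protection is requested and the current
   function isn't marked with attribute no_sanitize_address.  */
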
305 bool
306 asan_sanitize_stack_p (void)
307 {
308   return ((flag_sanitize & SANITIZE_ADDRESS)
309 	  && ASAN_STACK
310 	  && !asan_no_sanitize_address_p ());
311 }
312 
313 /* Checks whether section SEC should be sanitized.  */
314 
315 static bool
316 section_sanitized_p (const char *sec)
317 {
318   char *pat;
319   unsigned i;
320   FOR_EACH_VEC_ELT (sanitized_sections, i, pat)
321     if (fnmatch (pat, sec, FNM_PERIOD) == 0)
322       return true;
323   return false;
324 }
325 
326 /* Return the asan shadow offset, computing the target default if needed.  */
327 
328 static unsigned HOST_WIDE_INT
329 asan_shadow_offset ()
330 {
331   if (!asan_shadow_offset_computed)
332     {
333       asan_shadow_offset_computed = true;
334       asan_shadow_offset_value = targetm.asan_shadow_offset ();
335     }
336   return asan_shadow_offset_value;
337 }
338 
339 alias_set_type asan_shadow_set = -1;
340 
341 /* Pointer types to 1, 2 or 4 byte integers in shadow memory.  A separate
342    alias set is used for all shadow memory accesses.  */
343 static GTY(()) tree shadow_ptr_types[3];
344 
345 /* Decl for __asan_option_detect_stack_use_after_return.  */
346 static GTY(()) tree asan_detect_stack_use_after_return;
347 
348 /* Hashtable support for memory references used by gimple
349    statements.  */
350 
351 /* This type represents a reference to a memory region.  */
352 struct asan_mem_ref
353 {
354   /* The expression of the beginning of the memory region.  */
355   tree start;
356 
357   /* The size of the access.  */
358   HOST_WIDE_INT access_size;
359 };
360 
361 object_allocator <asan_mem_ref> asan_mem_ref_pool ("asan_mem_ref");
362 
363 /* Initializes an instance of asan_mem_ref.  */
364 
365 static void
366 asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
367 {
368   ref->start = start;
369   ref->access_size = access_size;
370 }
371 
372 /* Allocate memory for an instance of asan_mem_ref from the
373    asan_mem_ref_pool memory pool and initialize it.
374    START is the address of (or the expression pointing to) the
375    beginning of the memory reference.  ACCESS_SIZE is the size of the
376    access to the referenced memory.  */
377 
378 static asan_mem_ref*
379 asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
380 {
381   asan_mem_ref *ref = asan_mem_ref_pool.allocate ();
382 
383   asan_mem_ref_init (ref, start, access_size);
384   return ref;
385 }
386 
387 /* Build and return a pointer to the end of the memory region that
388    starts at START and has length LEN.  */
389 
390 tree
391 asan_mem_ref_get_end (tree start, tree len)
392 {
393   if (len == NULL_TREE || integer_zerop (len))
394     return start;
395 
396   if (!ptrofftype_p (len))
397     len = convert_to_ptrofftype (len);
398 
399   return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
400 }
401 
402 /* Return a tree expression that represents the end of the referenced
403    memory region.  Beware that this function can actually build a new
404    tree expression.  */
405 
406 tree
407 asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
408 {
409   return asan_mem_ref_get_end (ref->start, len);
410 }
411 
412 struct asan_mem_ref_hasher : nofree_ptr_hash <asan_mem_ref>
413 {
414   static inline hashval_t hash (const asan_mem_ref *);
415   static inline bool equal (const asan_mem_ref *, const asan_mem_ref *);
416 };
417 
418 /* Hash a memory reference.  */
419 
420 inline hashval_t
421 asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
422 {
423   return iterative_hash_expr (mem_ref->start, 0);
424 }
425 
426 /* Compare two memory references.  Only their start expressions are
427    compared; the access sizes are ignored.  */
428 
429 inline bool
430 asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
431 			    const asan_mem_ref *m2)
432 {
433   return operand_equal_p (m1->start, m2->start, 0);
434 }
435 
436 static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
437 
438 /* Returns a reference to the hash table containing memory references.
439    This function ensures that the hash table is created.  Note that
440    this hash table is updated by the function
441    update_mem_ref_hash_table.  */
442 
443 static hash_table<asan_mem_ref_hasher> *
444 get_mem_ref_hash_table ()
445 {
446   if (!asan_mem_ref_ht)
447     asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);
448 
449   return asan_mem_ref_ht;
450 }
451 
452 /* Clear all entries from the memory references hash table.  */
453 
454 static void
455 empty_mem_ref_hash_table ()
456 {
457   if (asan_mem_ref_ht)
458     asan_mem_ref_ht->empty ();
459 }
460 
461 /* Free the memory references hash table.  */
462 
463 static void
464 free_mem_ref_resources ()
465 {
466   delete asan_mem_ref_ht;
467   asan_mem_ref_ht = NULL;
468 
469   asan_mem_ref_pool.release ();
470 }
471 
472 /* Return true iff the memory reference REF has been instrumented.  */
473 
474 static bool
475 has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
476 {
477   asan_mem_ref r;
478   asan_mem_ref_init (&r, ref, access_size);
479 
480   asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
481   return saved_ref && saved_ref->access_size >= access_size;
482 }
483 
484 /* Return true iff the memory reference REF has been instrumented.  */
485 
486 static bool
487 has_mem_ref_been_instrumented (const asan_mem_ref *ref)
488 {
489   return has_mem_ref_been_instrumented (ref->start, ref->access_size);
490 }
491 
492 /* Return true iff access to memory region starting at REF and of
493    length LEN has been instrumented.  */
494 
495 static bool
496 has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
497 {
498   HOST_WIDE_INT size_in_bytes
499     = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
500 
501   return size_in_bytes != -1
502     && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
503 }
504 
505 /* Set REF to the memory reference present in a gimple assignment
506    ASSIGNMENT.  Return true upon successful completion, false
507    otherwise.  */
508 
509 static bool
510 get_mem_ref_of_assignment (const gassign *assignment,
511 			   asan_mem_ref *ref,
512 			   bool *ref_is_store)
513 {
514   gcc_assert (gimple_assign_single_p (assignment));
515 
516   if (gimple_store_p (assignment)
517       && !gimple_clobber_p (assignment))
518     {
519       ref->start = gimple_assign_lhs (assignment);
520       *ref_is_store = true;
521     }
522   else if (gimple_assign_load_p (assignment))
523     {
524       ref->start = gimple_assign_rhs1 (assignment);
525       *ref_is_store = false;
526     }
527   else
528     return false;
529 
530   ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
531   return true;
532 }
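
/* For example (a sketch): for '*p = x' this sets REF->start to '*p',
   *REF_IS_STORE to true and REF->access_size to the byte size of the
   stored type; for 'x = *p' REF->start becomes '*p' and *REF_IS_STORE
   false.  */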
533 
534 /* Collect the memory references contained in a gimple statement that
535    represents a memory-accessing builtin call.  Return true if any was found.  */
536 
537 static bool
538 get_mem_refs_of_builtin_call (const gcall *call,
539 			      asan_mem_ref *src0,
540 			      tree *src0_len,
541 			      bool *src0_is_store,
542 			      asan_mem_ref *src1,
543 			      tree *src1_len,
544 			      bool *src1_is_store,
545 			      asan_mem_ref *dst,
546 			      tree *dst_len,
547 			      bool *dst_is_store,
548 			      bool *dest_is_deref,
549 			      bool *intercepted_p)
550 {
551   gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
552 
553   tree callee = gimple_call_fndecl (call);
554   tree source0 = NULL_TREE, source1 = NULL_TREE,
555     dest = NULL_TREE, len = NULL_TREE;
556   bool is_store = true, got_reference_p = false;
557   HOST_WIDE_INT access_size = 1;
558 
559   *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));
560 
561   switch (DECL_FUNCTION_CODE (callee))
562     {
563       /* (s, s, n) style memops.  */
564     case BUILT_IN_BCMP:
565     case BUILT_IN_MEMCMP:
566       source0 = gimple_call_arg (call, 0);
567       source1 = gimple_call_arg (call, 1);
568       len = gimple_call_arg (call, 2);
569       break;
570 
571       /* (src, dest, n) style memops.  */
572     case BUILT_IN_BCOPY:
573       source0 = gimple_call_arg (call, 0);
574       dest = gimple_call_arg (call, 1);
575       len = gimple_call_arg (call, 2);
576       break;
577 
578       /* (dest, src, n) style memops.  */
579     case BUILT_IN_MEMCPY:
580     case BUILT_IN_MEMCPY_CHK:
581     case BUILT_IN_MEMMOVE:
582     case BUILT_IN_MEMMOVE_CHK:
583     case BUILT_IN_MEMPCPY:
584     case BUILT_IN_MEMPCPY_CHK:
585       dest = gimple_call_arg (call, 0);
586       source0 = gimple_call_arg (call, 1);
587       len = gimple_call_arg (call, 2);
588       break;
589 
590       /* (dest, n) style memops.  */
591     case BUILT_IN_BZERO:
592       dest = gimple_call_arg (call, 0);
593       len = gimple_call_arg (call, 1);
594       break;
595 
596       /* (dest, x, n) style memops.  */
597     case BUILT_IN_MEMSET:
598     case BUILT_IN_MEMSET_CHK:
599       dest = gimple_call_arg (call, 0);
600       len = gimple_call_arg (call, 2);
601       break;
602 
603     case BUILT_IN_STRLEN:
604       source0 = gimple_call_arg (call, 0);
605       len = gimple_call_lhs (call);
606       break;
607 
608     /* And now the __atomic* and __sync builtins.
609        These are handled differently from the classical memory
610        access builtins above.  */
611 
612     case BUILT_IN_ATOMIC_LOAD_1:
613       is_store = false;
614       /* FALLTHRU */
615     case BUILT_IN_SYNC_FETCH_AND_ADD_1:
616     case BUILT_IN_SYNC_FETCH_AND_SUB_1:
617     case BUILT_IN_SYNC_FETCH_AND_OR_1:
618     case BUILT_IN_SYNC_FETCH_AND_AND_1:
619     case BUILT_IN_SYNC_FETCH_AND_XOR_1:
620     case BUILT_IN_SYNC_FETCH_AND_NAND_1:
621     case BUILT_IN_SYNC_ADD_AND_FETCH_1:
622     case BUILT_IN_SYNC_SUB_AND_FETCH_1:
623     case BUILT_IN_SYNC_OR_AND_FETCH_1:
624     case BUILT_IN_SYNC_AND_AND_FETCH_1:
625     case BUILT_IN_SYNC_XOR_AND_FETCH_1:
626     case BUILT_IN_SYNC_NAND_AND_FETCH_1:
627     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
628     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
629     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
630     case BUILT_IN_SYNC_LOCK_RELEASE_1:
631     case BUILT_IN_ATOMIC_EXCHANGE_1:
632     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
633     case BUILT_IN_ATOMIC_STORE_1:
634     case BUILT_IN_ATOMIC_ADD_FETCH_1:
635     case BUILT_IN_ATOMIC_SUB_FETCH_1:
636     case BUILT_IN_ATOMIC_AND_FETCH_1:
637     case BUILT_IN_ATOMIC_NAND_FETCH_1:
638     case BUILT_IN_ATOMIC_XOR_FETCH_1:
639     case BUILT_IN_ATOMIC_OR_FETCH_1:
640     case BUILT_IN_ATOMIC_FETCH_ADD_1:
641     case BUILT_IN_ATOMIC_FETCH_SUB_1:
642     case BUILT_IN_ATOMIC_FETCH_AND_1:
643     case BUILT_IN_ATOMIC_FETCH_NAND_1:
644     case BUILT_IN_ATOMIC_FETCH_XOR_1:
645     case BUILT_IN_ATOMIC_FETCH_OR_1:
646       access_size = 1;
647       goto do_atomic;
648 
649     case BUILT_IN_ATOMIC_LOAD_2:
650       is_store = false;
651       /* FALLTHRU */
652     case BUILT_IN_SYNC_FETCH_AND_ADD_2:
653     case BUILT_IN_SYNC_FETCH_AND_SUB_2:
654     case BUILT_IN_SYNC_FETCH_AND_OR_2:
655     case BUILT_IN_SYNC_FETCH_AND_AND_2:
656     case BUILT_IN_SYNC_FETCH_AND_XOR_2:
657     case BUILT_IN_SYNC_FETCH_AND_NAND_2:
658     case BUILT_IN_SYNC_ADD_AND_FETCH_2:
659     case BUILT_IN_SYNC_SUB_AND_FETCH_2:
660     case BUILT_IN_SYNC_OR_AND_FETCH_2:
661     case BUILT_IN_SYNC_AND_AND_FETCH_2:
662     case BUILT_IN_SYNC_XOR_AND_FETCH_2:
663     case BUILT_IN_SYNC_NAND_AND_FETCH_2:
664     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
665     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
666     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
667     case BUILT_IN_SYNC_LOCK_RELEASE_2:
668     case BUILT_IN_ATOMIC_EXCHANGE_2:
669     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
670     case BUILT_IN_ATOMIC_STORE_2:
671     case BUILT_IN_ATOMIC_ADD_FETCH_2:
672     case BUILT_IN_ATOMIC_SUB_FETCH_2:
673     case BUILT_IN_ATOMIC_AND_FETCH_2:
674     case BUILT_IN_ATOMIC_NAND_FETCH_2:
675     case BUILT_IN_ATOMIC_XOR_FETCH_2:
676     case BUILT_IN_ATOMIC_OR_FETCH_2:
677     case BUILT_IN_ATOMIC_FETCH_ADD_2:
678     case BUILT_IN_ATOMIC_FETCH_SUB_2:
679     case BUILT_IN_ATOMIC_FETCH_AND_2:
680     case BUILT_IN_ATOMIC_FETCH_NAND_2:
681     case BUILT_IN_ATOMIC_FETCH_XOR_2:
682     case BUILT_IN_ATOMIC_FETCH_OR_2:
683       access_size = 2;
684       goto do_atomic;
685 
686     case BUILT_IN_ATOMIC_LOAD_4:
687       is_store = false;
688       /* FALLTHRU */
689     case BUILT_IN_SYNC_FETCH_AND_ADD_4:
690     case BUILT_IN_SYNC_FETCH_AND_SUB_4:
691     case BUILT_IN_SYNC_FETCH_AND_OR_4:
692     case BUILT_IN_SYNC_FETCH_AND_AND_4:
693     case BUILT_IN_SYNC_FETCH_AND_XOR_4:
694     case BUILT_IN_SYNC_FETCH_AND_NAND_4:
695     case BUILT_IN_SYNC_ADD_AND_FETCH_4:
696     case BUILT_IN_SYNC_SUB_AND_FETCH_4:
697     case BUILT_IN_SYNC_OR_AND_FETCH_4:
698     case BUILT_IN_SYNC_AND_AND_FETCH_4:
699     case BUILT_IN_SYNC_XOR_AND_FETCH_4:
700     case BUILT_IN_SYNC_NAND_AND_FETCH_4:
701     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
702     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
703     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
704     case BUILT_IN_SYNC_LOCK_RELEASE_4:
705     case BUILT_IN_ATOMIC_EXCHANGE_4:
706     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
707     case BUILT_IN_ATOMIC_STORE_4:
708     case BUILT_IN_ATOMIC_ADD_FETCH_4:
709     case BUILT_IN_ATOMIC_SUB_FETCH_4:
710     case BUILT_IN_ATOMIC_AND_FETCH_4:
711     case BUILT_IN_ATOMIC_NAND_FETCH_4:
712     case BUILT_IN_ATOMIC_XOR_FETCH_4:
713     case BUILT_IN_ATOMIC_OR_FETCH_4:
714     case BUILT_IN_ATOMIC_FETCH_ADD_4:
715     case BUILT_IN_ATOMIC_FETCH_SUB_4:
716     case BUILT_IN_ATOMIC_FETCH_AND_4:
717     case BUILT_IN_ATOMIC_FETCH_NAND_4:
718     case BUILT_IN_ATOMIC_FETCH_XOR_4:
719     case BUILT_IN_ATOMIC_FETCH_OR_4:
720       access_size = 4;
721       goto do_atomic;
722 
723     case BUILT_IN_ATOMIC_LOAD_8:
724       is_store = false;
725       /* FALLTHRU */
726     case BUILT_IN_SYNC_FETCH_AND_ADD_8:
727     case BUILT_IN_SYNC_FETCH_AND_SUB_8:
728     case BUILT_IN_SYNC_FETCH_AND_OR_8:
729     case BUILT_IN_SYNC_FETCH_AND_AND_8:
730     case BUILT_IN_SYNC_FETCH_AND_XOR_8:
731     case BUILT_IN_SYNC_FETCH_AND_NAND_8:
732     case BUILT_IN_SYNC_ADD_AND_FETCH_8:
733     case BUILT_IN_SYNC_SUB_AND_FETCH_8:
734     case BUILT_IN_SYNC_OR_AND_FETCH_8:
735     case BUILT_IN_SYNC_AND_AND_FETCH_8:
736     case BUILT_IN_SYNC_XOR_AND_FETCH_8:
737     case BUILT_IN_SYNC_NAND_AND_FETCH_8:
738     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
739     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
740     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
741     case BUILT_IN_SYNC_LOCK_RELEASE_8:
742     case BUILT_IN_ATOMIC_EXCHANGE_8:
743     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
744     case BUILT_IN_ATOMIC_STORE_8:
745     case BUILT_IN_ATOMIC_ADD_FETCH_8:
746     case BUILT_IN_ATOMIC_SUB_FETCH_8:
747     case BUILT_IN_ATOMIC_AND_FETCH_8:
748     case BUILT_IN_ATOMIC_NAND_FETCH_8:
749     case BUILT_IN_ATOMIC_XOR_FETCH_8:
750     case BUILT_IN_ATOMIC_OR_FETCH_8:
751     case BUILT_IN_ATOMIC_FETCH_ADD_8:
752     case BUILT_IN_ATOMIC_FETCH_SUB_8:
753     case BUILT_IN_ATOMIC_FETCH_AND_8:
754     case BUILT_IN_ATOMIC_FETCH_NAND_8:
755     case BUILT_IN_ATOMIC_FETCH_XOR_8:
756     case BUILT_IN_ATOMIC_FETCH_OR_8:
757       access_size = 8;
758       goto do_atomic;
759 
760     case BUILT_IN_ATOMIC_LOAD_16:
761       is_store = false;
762       /* FALLTHRU */
763     case BUILT_IN_SYNC_FETCH_AND_ADD_16:
764     case BUILT_IN_SYNC_FETCH_AND_SUB_16:
765     case BUILT_IN_SYNC_FETCH_AND_OR_16:
766     case BUILT_IN_SYNC_FETCH_AND_AND_16:
767     case BUILT_IN_SYNC_FETCH_AND_XOR_16:
768     case BUILT_IN_SYNC_FETCH_AND_NAND_16:
769     case BUILT_IN_SYNC_ADD_AND_FETCH_16:
770     case BUILT_IN_SYNC_SUB_AND_FETCH_16:
771     case BUILT_IN_SYNC_OR_AND_FETCH_16:
772     case BUILT_IN_SYNC_AND_AND_FETCH_16:
773     case BUILT_IN_SYNC_XOR_AND_FETCH_16:
774     case BUILT_IN_SYNC_NAND_AND_FETCH_16:
775     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
776     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
777     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
778     case BUILT_IN_SYNC_LOCK_RELEASE_16:
779     case BUILT_IN_ATOMIC_EXCHANGE_16:
780     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
781     case BUILT_IN_ATOMIC_STORE_16:
782     case BUILT_IN_ATOMIC_ADD_FETCH_16:
783     case BUILT_IN_ATOMIC_SUB_FETCH_16:
784     case BUILT_IN_ATOMIC_AND_FETCH_16:
785     case BUILT_IN_ATOMIC_NAND_FETCH_16:
786     case BUILT_IN_ATOMIC_XOR_FETCH_16:
787     case BUILT_IN_ATOMIC_OR_FETCH_16:
788     case BUILT_IN_ATOMIC_FETCH_ADD_16:
789     case BUILT_IN_ATOMIC_FETCH_SUB_16:
790     case BUILT_IN_ATOMIC_FETCH_AND_16:
791     case BUILT_IN_ATOMIC_FETCH_NAND_16:
792     case BUILT_IN_ATOMIC_FETCH_XOR_16:
793     case BUILT_IN_ATOMIC_FETCH_OR_16:
794       access_size = 16;
795       /* FALLTHRU */
796     do_atomic:
797       {
798 	dest = gimple_call_arg (call, 0);
799 	/* DEST represents the address of a memory location.
800 	   instrument_derefs wants the memory location, so let's
801 	   dereference the address DEST before handing it to
802 	   instrument_derefs.  */
803 	tree type = build_nonstandard_integer_type (access_size
804 						    * BITS_PER_UNIT, 1);
805 	dest = build2 (MEM_REF, type, dest,
806 		       build_int_cst (build_pointer_type (char_type_node), 0));
807 	break;
808       }
809 
810     default:
811       /* The other memory access builtins are not instrumented in this
812 	 function because they either don't have any length parameter,
813 	 or their length parameter is just a limit.  */
814       break;
815     }
816 
817   if (len != NULL_TREE)
818     {
819       if (source0 != NULL_TREE)
820 	{
821 	  src0->start = source0;
822 	  src0->access_size = access_size;
823 	  *src0_len = len;
824 	  *src0_is_store = false;
825 	}
826 
827       if (source1 != NULL_TREE)
828 	{
829 	  src1->start = source1;
830 	  src1->access_size = access_size;
831 	  *src1_len = len;
832 	  *src1_is_store = false;
833 	}
834 
835       if (dest != NULL_TREE)
836 	{
837 	  dst->start = dest;
838 	  dst->access_size = access_size;
839 	  *dst_len = len;
840 	  *dst_is_store = true;
841 	}
842 
843       got_reference_p = true;
844     }
845   else if (dest)
846     {
847       dst->start = dest;
848       dst->access_size = access_size;
849       *dst_len = NULL_TREE;
850       *dst_is_store = is_store;
851       *dest_is_deref = true;
852       got_reference_p = true;
853     }
854 
855   return got_reference_p;
856 }
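
/* For instance (a sketch): for memcpy (d, s, n), DST is set to {d, 1}
   with *DST_LEN = n and *DST_IS_STORE = true, and SRC0 to {s, 1} with
   *SRC0_LEN = n, so both byte ranges get checked.  For
   __atomic_load_4 (p, order), a 32-bit unsigned MEM_REF dereferencing
   p is built instead and recorded as a 4-byte load with
   *DEST_IS_DEREF = true.  */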
857 
858 /* Return true iff a given gimple statement has been instrumented.
859    Note that the statement is "defined" by the memory references it
860    contains.  */
861 
862 static bool
863 has_stmt_been_instrumented_p (gimple *stmt)
864 {
865   if (gimple_assign_single_p (stmt))
866     {
867       bool r_is_store;
868       asan_mem_ref r;
869       asan_mem_ref_init (&r, NULL, 1);
870 
871       if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
872 				     &r_is_store))
873 	return has_mem_ref_been_instrumented (&r);
874     }
875   else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
876     {
877       asan_mem_ref src0, src1, dest;
878       asan_mem_ref_init (&src0, NULL, 1);
879       asan_mem_ref_init (&src1, NULL, 1);
880       asan_mem_ref_init (&dest, NULL, 1);
881 
882       tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
883       bool src0_is_store = false, src1_is_store = false,
884 	dest_is_store = false, dest_is_deref = false, intercepted_p = true;
885       if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
886 					&src0, &src0_len, &src0_is_store,
887 					&src1, &src1_len, &src1_is_store,
888 					&dest, &dest_len, &dest_is_store,
889 					&dest_is_deref, &intercepted_p))
890 	{
891 	  if (src0.start != NULL_TREE
892 	      && !has_mem_ref_been_instrumented (&src0, src0_len))
893 	    return false;
894 
895 	  if (src1.start != NULL_TREE
896 	      && !has_mem_ref_been_instrumented (&src1, src1_len))
897 	    return false;
898 
899 	  if (dest.start != NULL_TREE
900 	      && !has_mem_ref_been_instrumented (&dest, dest_len))
901 	    return false;
902 
903 	  return true;
904 	}
905     }
906   else if (is_gimple_call (stmt) && gimple_store_p (stmt))
907     {
908       asan_mem_ref r;
909       asan_mem_ref_init (&r, NULL, 1);
910 
911       r.start = gimple_call_lhs (stmt);
912       r.access_size = int_size_in_bytes (TREE_TYPE (r.start));
913       return has_mem_ref_been_instrumented (&r);
914     }
915 
916   return false;
917 }
918 
919 /* Insert a memory reference into the hash table.  */
920 
921 static void
922 update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
923 {
924   hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();
925 
926   asan_mem_ref r;
927   asan_mem_ref_init (&r, ref, access_size);
928 
929   asan_mem_ref **slot = ht->find_slot (&r, INSERT);
930   if (*slot == NULL || (*slot)->access_size < access_size)
931     *slot = asan_mem_ref_new (ref, access_size);
932 }
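
/* E.g. recording an 8-byte access to *P after a 4-byte one replaces the
   4-byte entry, so has_mem_ref_been_instrumented subsequently returns
   true for accesses to *P of up to 8 bytes.  */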
933 
934 /* Initialize shadow_ptr_types array.  */
935 
936 static void
937 asan_init_shadow_ptr_types (void)
938 {
939   asan_shadow_set = new_alias_set ();
940   tree types[3] = { signed_char_type_node, short_integer_type_node,
941 		    integer_type_node };
942 
943   for (unsigned i = 0; i < 3; i++)
944     {
945       shadow_ptr_types[i] = build_distinct_type_copy (types[i]);
946       TYPE_ALIAS_SET (shadow_ptr_types[i]) = asan_shadow_set;
947       shadow_ptr_types[i] = build_pointer_type (shadow_ptr_types[i]);
948     }
949 
950   initialize_sanitizer_builtins ();
951 }
952 
953 /* Create an ADDR_EXPR of a STRING_CST holding the PP pretty-printer text.  */
954 
955 static tree
956 asan_pp_string (pretty_printer *pp)
957 {
958   const char *buf = pp_formatted_text (pp);
959   size_t len = strlen (buf);
960   tree ret = build_string (len + 1, buf);
961   TREE_TYPE (ret)
962     = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
963 			build_index_type (size_int (len)));
964   TREE_READONLY (ret) = 1;
965   TREE_STATIC (ret) = 1;
966   return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
967 }
968 
969 /* Return a CONST_INT representing 4 consecutive shadow memory bytes.  */
970 
971 static rtx
972 asan_shadow_cst (unsigned char shadow_bytes[4])
973 {
974   int i;
975   unsigned HOST_WIDE_INT val = 0;
976   gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
977   for (i = 0; i < 4; i++)
978     val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
979 	   << (BITS_PER_UNIT * i);
980   return gen_int_mode (val, SImode);
981 }
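
/* For example, on a little-endian target shadow_bytes {0x00, 0x00,
   0x00, 0xF1} yields the SImode constant 0xF1000000, i.e.
   shadow_bytes[0] lands in the least significant byte.  */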
982 
983 /* Clear LEN bytes of shadow memory at SHADOW_MEM.  We can't emit a
984    library call here, though.  */
985 
986 static void
987 asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
988 {
989   rtx_insn *insn, *insns, *jump;
990   rtx_code_label *top_label;
991   rtx end, addr, tmp;
992 
993   start_sequence ();
994   clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
995   insns = get_insns ();
996   end_sequence ();
997   for (insn = insns; insn; insn = NEXT_INSN (insn))
998     if (CALL_P (insn))
999       break;
1000   if (insn == NULL_RTX)
1001     {
1002       emit_insn (insns);
1003       return;
1004     }
1005 
1006   gcc_assert ((len & 3) == 0);
1007   top_label = gen_label_rtx ();
1008   addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
1009   shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
1010   end = force_reg (Pmode, plus_constant (Pmode, addr, len));
1011   emit_label (top_label);
1012 
1013   emit_move_insn (shadow_mem, const0_rtx);
1014   tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
1015 			     true, OPTAB_LIB_WIDEN);
1016   if (tmp != addr)
1017     emit_move_insn (addr, tmp);
1018   emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
1019   jump = get_last_insn ();
1020   gcc_assert (JUMP_P (jump));
1021   add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
1022 }
1023 
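/* Emit a LASANPC<N> label at the start of the current function; the
   frame description emitted by asan_emit_stack_protection stores its
   address so the run-time library can report the function's PC.  */
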
1024 void
1025 asan_function_start (void)
1026 {
1027   section *fnsec = function_section (current_function_decl);
1028   switch_to_section (fnsec);
1029   ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
1030 			 current_function_funcdef_no);
1031 }
1032 
1033 /* Return number of shadow bytes that are occupied by a local variable
1034    of SIZE bytes.  */
1035 
1036 static unsigned HOST_WIDE_INT
1037 shadow_mem_size (unsigned HOST_WIDE_INT size)
1038 {
1039   return ROUND_UP (size, ASAN_SHADOW_GRANULARITY) / ASAN_SHADOW_GRANULARITY;
1040 }
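
/* For instance, a 9-byte variable occupies ROUND_UP (9, 8) / 8 == 2
   shadow bytes with the 8-byte shadow granularity.  */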
1041 
1042 /* Insert code to protect stack vars.  The prologue sequence should be emitted
1043    directly, epilogue sequence returned.  BASE is the register holding the
1044    stack base, to which the offsets in the OFFSETS array are relative.  The
1045    OFFSETS array contains pairs of offsets in reverse order, always the end
1046    offset of some gap that needs protection followed by its starting offset,
1047    and DECLS is an array of representative decls for each var partition.
1048    LENGTH is the length of the OFFSETS array; the DECLS array is LENGTH / 2 - 1
1049    elements long (OFFSETS includes the gap before the first variable as well
1050    as gaps after each stack variable).  PBASE is, if non-NULL, some pseudo
1051    register on which the stack vars' DECL_RTLs are based.  Either BASE should
1052    be assigned to PBASE (when not doing use-after-return protection) or the
1053    corresponding address based on the __asan_stack_malloc* return value.  */
1054 
1055 rtx_insn *
1056 asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
1057 			    HOST_WIDE_INT *offsets, tree *decls, int length)
1058 {
1059   rtx shadow_base, shadow_mem, ret, mem, orig_base;
1060   rtx_code_label *lab;
1061   rtx_insn *insns;
1062   char buf[32];
1063   unsigned char shadow_bytes[4];
1064   HOST_WIDE_INT base_offset = offsets[length - 1];
1065   HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
1066   HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
1067   HOST_WIDE_INT last_offset;
1068   int l;
1069   unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
1070   tree str_cst, decl, id;
1071   int use_after_return_class = -1;
1072 
1073   if (shadow_ptr_types[0] == NULL_TREE)
1074     asan_init_shadow_ptr_types ();
1075 
1076   /* First of all, prepare the description string.  */
1077   pretty_printer asan_pp;
1078 
1079   pp_decimal_int (&asan_pp, length / 2 - 1);
1080   pp_space (&asan_pp);
1081   for (l = length - 2; l; l -= 2)
1082     {
1083       tree decl = decls[l / 2 - 1];
1084       pp_wide_integer (&asan_pp, offsets[l] - base_offset);
1085       pp_space (&asan_pp);
1086       pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
1087       pp_space (&asan_pp);
1088       if (DECL_P (decl) && DECL_NAME (decl))
1089 	{
1090 	  pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
1091 	  pp_space (&asan_pp);
1092 	  pp_tree_identifier (&asan_pp, DECL_NAME (decl));
1093 	}
1094       else
1095 	pp_string (&asan_pp, "9 <unknown>");
1096       pp_space (&asan_pp);
1097     }
1098   str_cst = asan_pp_string (&asan_pp);
1099 
1100   /* Emit the prologue sequence.  */
1101   if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
1102       && ASAN_USE_AFTER_RETURN)
1103     {
1104       use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
1105       /* __asan_stack_malloc_N guarantees alignment
1106 	 N < 6 ? (64 << N) : 4096 bytes.  */
1107       if (alignb > (use_after_return_class < 6
1108 		    ? (64U << use_after_return_class) : 4096U))
1109 	use_after_return_class = -1;
1110       else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
1111 	base_align_bias = ((asan_frame_size + alignb - 1)
1112 			   & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
1113     }
1114   /* Align base if target is STRICT_ALIGNMENT.  */
1115   if (STRICT_ALIGNMENT)
1116     base = expand_binop (Pmode, and_optab, base,
1117 			 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
1118 					  << ASAN_SHADOW_SHIFT)
1119 					 / BITS_PER_UNIT), Pmode), NULL_RTX,
1120 			 1, OPTAB_DIRECT);
1121 
1122   if (use_after_return_class == -1 && pbase)
1123     emit_move_insn (pbase, base);
1124 
1125   base = expand_binop (Pmode, add_optab, base,
1126 		       gen_int_mode (base_offset - base_align_bias, Pmode),
1127 		       NULL_RTX, 1, OPTAB_DIRECT);
1128   orig_base = NULL_RTX;
1129   if (use_after_return_class != -1)
1130     {
1131       if (asan_detect_stack_use_after_return == NULL_TREE)
1132 	{
1133 	  id = get_identifier ("__asan_option_detect_stack_use_after_return");
1134 	  decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
1135 			     integer_type_node);
1136 	  SET_DECL_ASSEMBLER_NAME (decl, id);
1137 	  TREE_ADDRESSABLE (decl) = 1;
1138 	  DECL_ARTIFICIAL (decl) = 1;
1139 	  DECL_IGNORED_P (decl) = 1;
1140 	  DECL_EXTERNAL (decl) = 1;
1141 	  TREE_STATIC (decl) = 1;
1142 	  TREE_PUBLIC (decl) = 1;
1143 	  TREE_USED (decl) = 1;
1144 	  asan_detect_stack_use_after_return = decl;
1145 	}
1146       orig_base = gen_reg_rtx (Pmode);
1147       emit_move_insn (orig_base, base);
1148       ret = expand_normal (asan_detect_stack_use_after_return);
1149       lab = gen_label_rtx ();
1150       int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1151       emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1152 			       VOIDmode, 0, lab, very_likely);
1153       snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
1154 		use_after_return_class);
1155       ret = init_one_libfunc (buf);
1156       ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 1,
1157 				     GEN_INT (asan_frame_size
1158 					      + base_align_bias),
1159 				     TYPE_MODE (pointer_sized_int_node));
1160      /* __asan_stack_malloc_[n] returns a pointer to the fake stack if it
1161 	 succeeds and NULL otherwise.  If RET is NULL, jump over the
1162 	 BASE reassignment; otherwise, reassign BASE to RET.  */
1163       int very_unlikely = REG_BR_PROB_BASE / 2000 - 1;
1164       emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1165 			       VOIDmode, 0, lab, very_unlikely);
1166       ret = convert_memory_address (Pmode, ret);
1167       emit_move_insn (base, ret);
1168       emit_label (lab);
1169       emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
1170 					   gen_int_mode (base_align_bias
1171 							 - base_offset, Pmode),
1172 					   NULL_RTX, 1, OPTAB_DIRECT));
1173     }
1174   mem = gen_rtx_MEM (ptr_mode, base);
1175   mem = adjust_address (mem, VOIDmode, base_align_bias);
1176   emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
1177   mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1178   emit_move_insn (mem, expand_normal (str_cst));
1179   mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1180   ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
1181   id = get_identifier (buf);
1182   decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1183 		    VAR_DECL, id, char_type_node);
1184   SET_DECL_ASSEMBLER_NAME (decl, id);
1185   TREE_ADDRESSABLE (decl) = 1;
1186   TREE_READONLY (decl) = 1;
1187   DECL_ARTIFICIAL (decl) = 1;
1188   DECL_IGNORED_P (decl) = 1;
1189   TREE_STATIC (decl) = 1;
1190   TREE_PUBLIC (decl) = 0;
1191   TREE_USED (decl) = 1;
1192   DECL_INITIAL (decl) = decl;
1193   TREE_ASM_WRITTEN (decl) = 1;
1194   TREE_ASM_WRITTEN (id) = 1;
1195   emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
1196   shadow_base = expand_binop (Pmode, lshr_optab, base,
1197 			      GEN_INT (ASAN_SHADOW_SHIFT),
1198 			      NULL_RTX, 1, OPTAB_DIRECT);
1199   shadow_base
1200     = plus_constant (Pmode, shadow_base,
1201 		     asan_shadow_offset ()
1202 		     + (base_align_bias >> ASAN_SHADOW_SHIFT));
1203   gcc_assert (asan_shadow_set != -1
1204 	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1205   shadow_mem = gen_rtx_MEM (SImode, shadow_base);
1206   set_mem_alias_set (shadow_mem, asan_shadow_set);
1207   if (STRICT_ALIGNMENT)
1208     set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1209   prev_offset = base_offset;
1210   for (l = length; l; l -= 2)
1211     {
1212       if (l == 2)
1213 	cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1214       offset = offsets[l - 1];
1215       if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
1216 	{
1217 	  int i;
1218 	  HOST_WIDE_INT aoff
1219 	    = base_offset + ((offset - base_offset)
1220 			     & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1221 	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
1222 				       (aoff - prev_offset)
1223 				       >> ASAN_SHADOW_SHIFT);
1224 	  prev_offset = aoff;
1225 	  for (i = 0; i < 4; i++, aoff += ASAN_SHADOW_GRANULARITY)
1226 	    if (aoff < offset)
1227 	      {
1228 		if (aoff < offset - (HOST_WIDE_INT)ASAN_SHADOW_GRANULARITY + 1)
1229 		  shadow_bytes[i] = 0;
1230 		else
1231 		  shadow_bytes[i] = offset - aoff;
1232 	      }
1233 	    else
1234 	      shadow_bytes[i] = ASAN_STACK_MAGIC_MIDDLE;
1235 	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1236 	  offset = aoff;
1237 	}
1238       while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
1239 	{
1240 	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
1241 				       (offset - prev_offset)
1242 				       >> ASAN_SHADOW_SHIFT);
1243 	  prev_offset = offset;
1244 	  memset (shadow_bytes, cur_shadow_byte, 4);
1245 	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1246 	  offset += ASAN_RED_ZONE_SIZE;
1247 	}
1248       cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1249     }
1250   do_pending_stack_adjust ();
1251 
1252   /* Construct epilogue sequence.  */
1253   start_sequence ();
1254 
1255   lab = NULL;
1256   if (use_after_return_class != -1)
1257     {
1258       rtx_code_label *lab2 = gen_label_rtx ();
1259       char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
1260       int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1261       emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
1262 			       VOIDmode, 0, lab2, very_likely);
1263       shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1264       set_mem_alias_set (shadow_mem, asan_shadow_set);
1265       mem = gen_rtx_MEM (ptr_mode, base);
1266       mem = adjust_address (mem, VOIDmode, base_align_bias);
1267       emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
1268       unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
1269       if (use_after_return_class < 5
1270 	  && can_store_by_pieces (sz, builtin_memset_read_str, &c,
1271 				  BITS_PER_UNIT, true))
1272 	store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
1273 			 BITS_PER_UNIT, true, 0);
1274       else if (use_after_return_class >= 5
1275 	       || !set_storage_via_setmem (shadow_mem,
1276 					   GEN_INT (sz),
1277 					   gen_int_mode (c, QImode),
1278 					   BITS_PER_UNIT, BITS_PER_UNIT,
1279 					   -1, sz, sz, sz))
1280 	{
1281 	  snprintf (buf, sizeof buf, "__asan_stack_free_%d",
1282 		    use_after_return_class);
1283 	  ret = init_one_libfunc (buf);
1284 	  rtx addr = convert_memory_address (ptr_mode, base);
1285 	  rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
1286 	  emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
1287 			     GEN_INT (asan_frame_size + base_align_bias),
1288 			     TYPE_MODE (pointer_sized_int_node),
1289 			     orig_addr, ptr_mode);
1290 	}
1291       lab = gen_label_rtx ();
1292       emit_jump (lab);
1293       emit_label (lab2);
1294     }
1295 
1296   shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1297   set_mem_alias_set (shadow_mem, asan_shadow_set);
1298 
1299   if (STRICT_ALIGNMENT)
1300     set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1301 
1302   /* Unpoison shadow memory of a stack at the very end of a function.
1303      As we're poisoning stack variables at the end of their scope,
1304      shadow memory must be properly unpoisoned here.  The easiest approach
1305      is to collect all variables that should not be unpoisoned and to
1306      unpoison the shadow memory of the whole stack except the ranges
1307      occupied by these variables.  */
1308   last_offset = base_offset;
1309   HOST_WIDE_INT current_offset = last_offset;
1310   if (length)
1311     {
1312       HOST_WIDE_INT var_end_offset = 0;
1313       HOST_WIDE_INT stack_start = offsets[length - 1];
1314       gcc_assert (last_offset == stack_start);
1315 
1316       for (int l = length - 2; l > 0; l -= 2)
1317 	{
1318 	  HOST_WIDE_INT var_offset = offsets[l];
1319 	  current_offset = var_offset;
1320 	  var_end_offset = offsets[l - 1];
1321 	  HOST_WIDE_INT rounded_size = ROUND_UP (var_end_offset - var_offset,
1322 						 BITS_PER_UNIT);
1323 	  tree decl = decls[l / 2 - 1];	/* Representative decl of the partition.  */
1324 	  /* Should we unpoison the variable?  */
1325 	  if (asan_handled_variables != NULL
1326 	      && asan_handled_variables->contains (decl))
1327 	    {
1328 	      if (dump_file && (dump_flags & TDF_DETAILS))
1329 		{
1330 		  const char *n = (DECL_NAME (decl)
1331 				   ? IDENTIFIER_POINTER (DECL_NAME (decl))
1332 				   : "<unknown>");
1333 		  fprintf (dump_file, "Unpoisoning shadow stack for variable: "
1334 			   "%s (%" PRId64 "B)\n", n,
1335 			   var_end_offset - var_offset);
1336 		}
1337 
1338 	      unsigned HOST_WIDE_INT s
1339 		= shadow_mem_size (current_offset - last_offset);
1340 	      asan_clear_shadow (shadow_mem, s);
1341 	      HOST_WIDE_INT shift
1342 		= shadow_mem_size (current_offset - last_offset + rounded_size);
1343 	      shadow_mem = adjust_address (shadow_mem, VOIDmode, shift);
1344 	      last_offset = var_offset + rounded_size;
1345 	      current_offset = last_offset;
1346 	    }
1347 
1348 	}
1349 
1350       /* Handle last redzone.  */
1351       current_offset = offsets[0];
1352       asan_clear_shadow (shadow_mem,
1353 			 shadow_mem_size (current_offset - last_offset));
1354     }
1355 
1356   /* Clean-up set with instrumented stack variables.  */
1357   delete asan_handled_variables;
1358   asan_handled_variables = NULL;
1359   delete asan_used_labels;
1360   asan_used_labels = NULL;
1361 
1362   do_pending_stack_adjust ();
1363   if (lab)
1364     emit_label (lab);
1365 
1366   insns = get_insns ();
1367   end_sequence ();
1368   return insns;
1369 }
1370 
1371 /* Return true if DECL, a global var, might be overridden and therefore
1372    needs a local alias.  */
1373 
1374 static bool
1375 asan_needs_local_alias (tree decl)
1376 {
1377   return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1378 }
1379 
1380 /* Return true if DECL, a global var, is an artificial ODR indicator
1381    symbol and therefore doesn't need protection.  */
1382 
1383 static bool
1384 is_odr_indicator (tree decl)
1385 {
1386   return (DECL_ARTIFICIAL (decl)
1387 	  && lookup_attribute ("asan odr indicator", DECL_ATTRIBUTES (decl)));
1388 }
1389 
1390 /* Return true if DECL is a VAR_DECL that should be protected
1391    by Address Sanitizer, by appending a red zone with protected
1392    shadow memory after it and aligning it to at least
1393    ASAN_RED_ZONE_SIZE bytes.  */
1394 
1395 bool
1396 asan_protect_global (tree decl)
1397 {
1398   if (!ASAN_GLOBALS)
1399     return false;
1400 
1401   rtx rtl, symbol;
1402 
1403   if (TREE_CODE (decl) == STRING_CST)
1404     {
1405       /* Instrument all STRING_CSTs except those created
1406 	 by asan_pp_string here.  */
1407       if (shadow_ptr_types[0] != NULL_TREE
1408 	  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1409 	  && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1410 	return false;
1411       return true;
1412     }
1413   if (!VAR_P (decl)
1414       /* TLS vars aren't statically protectable.  */
1415       || DECL_THREAD_LOCAL_P (decl)
1416       /* Externs will be protected elsewhere.  */
1417       || DECL_EXTERNAL (decl)
1418       || !DECL_RTL_SET_P (decl)
1419       /* Comdat vars pose an ABI problem, we can't know if
1420 	 the var that is selected by the linker will have
1421 	 padding or not.  */
1422       || DECL_ONE_ONLY (decl)
1423       /* Similarly for common vars.  People can use -fno-common.
1424 	 Note: the Linux kernel is built with -fno-common, so we do instrument
1425 	 globals there even if it is C.  */
1426       || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
1427       /* Don't protect vars in a user-specified section: vars placed
1428 	 into a user section from multiple TUs are often assumed
1429 	 to form an array of such vars, and putting padding in there
1430 	 breaks this assumption.  */
1431       || (DECL_SECTION_NAME (decl) != NULL
1432 	  && !symtab_node::get (decl)->implicit_section
1433 	  && !section_sanitized_p (DECL_SECTION_NAME (decl)))
1434       || DECL_SIZE (decl) == 0
1435       || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1436       || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
1437       || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
1438       || TREE_TYPE (decl) == ubsan_get_source_location_type ()
1439       || is_odr_indicator (decl))
1440     return false;
1441 
1442   rtl = DECL_RTL (decl);
1443   if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1444     return false;
1445   symbol = XEXP (rtl, 0);
1446 
1447   if (CONSTANT_POOL_ADDRESS_P (symbol)
1448       || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1449     return false;
1450 
1451   if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1452     return false;
1453 
1454 #ifndef ASM_OUTPUT_DEF
1455   if (asan_needs_local_alias (decl))
1456     return false;
1457 #endif
1458 
1459   return true;
1460 }
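
/* Illustrative examples: a file-scope 'int g[100];' passes the checks
   above and is protected, while a TLS variable, an extern declaration,
   a comdat or public common variable, or a variable in a user section
   not matched by -fsanitize-sections= is rejected.  */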
1461 
1462 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1463    IS_STORE is either 1 (for a store) or 0 (for a load).  */
1464 
1465 static tree
1466 report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1467 		   int *nargs)
1468 {
1469   static enum built_in_function report[2][2][6]
1470     = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1471 	    BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1472 	    BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
1473 	  { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1474 	    BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1475 	    BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
1476 	{ { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
1477 	    BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
1478 	    BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
1479 	    BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
1480 	    BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
1481 	    BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
1482 	  { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
1483 	    BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
1484 	    BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
1485 	    BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
1486 	    BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
1487 	    BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
1488   if (size_in_bytes == -1)
1489     {
1490       *nargs = 2;
1491       return builtin_decl_implicit (report[recover_p][is_store][5]);
1492     }
1493   *nargs = 1;
1494   int size_log2 = exact_log2 (size_in_bytes);
1495   return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
1496 }
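
/* For example, a 4-byte store with recovery enabled selects
   report[1][1][exact_log2 (4)], i.e. BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
   with *NARGS == 1; an unknown size (SIZE_IN_BYTES == -1) selects the _N
   variant, which takes the address and the size as its two arguments.  */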
1497 
1498 /* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
1499    IS_STORE is either 1 (for a store) or 0 (for a load).  */
1500 
1501 static tree
1502 check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1503 	    int *nargs)
1504 {
1505   static enum built_in_function check[2][2][6]
1506     = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
1507 	    BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
1508 	    BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
1509 	  { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
1510 	    BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
1511 	    BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
1512 	{ { BUILT_IN_ASAN_LOAD1_NOABORT,
1513 	    BUILT_IN_ASAN_LOAD2_NOABORT,
1514 	    BUILT_IN_ASAN_LOAD4_NOABORT,
1515 	    BUILT_IN_ASAN_LOAD8_NOABORT,
1516 	    BUILT_IN_ASAN_LOAD16_NOABORT,
1517 	    BUILT_IN_ASAN_LOADN_NOABORT },
1518 	  { BUILT_IN_ASAN_STORE1_NOABORT,
1519 	    BUILT_IN_ASAN_STORE2_NOABORT,
1520 	    BUILT_IN_ASAN_STORE4_NOABORT,
1521 	    BUILT_IN_ASAN_STORE8_NOABORT,
1522 	    BUILT_IN_ASAN_STORE16_NOABORT,
1523 	    BUILT_IN_ASAN_STOREN_NOABORT } } };
1524   if (size_in_bytes == -1)
1525     {
1526       *nargs = 2;
1527       return builtin_decl_implicit (check[recover_p][is_store][5]);
1528     }
1529   *nargs = 1;
1530   int size_log2 = exact_log2 (size_in_bytes);
1531   return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
1532 }
1533 
1534 /* Split the current basic block and create a condition statement
1535    insertion point right before or after the statement pointed to by
1536    ITER.  Return an iterator to the point at which the caller might
1537    safely insert the condition statement.
1538 
1539    THEN_BLOCK must be set to the address of an uninitialized instance
1540    of basic_block.  The function will then set *THEN_BLOCK to the
1541    'then block' of the condition statement to be inserted by the
1542    caller.
1543 
1544    If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1545    *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1546 
1547    Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
1548    block' of the condition statement to be inserted by the caller.
1549 
1550    Note that *FALLTHROUGH_BLOCK is a new block that contains the
1551    statements starting from *ITER, and *THEN_BLOCK is a new empty
1552    block.
1553 
1554    *ITER is adjusted to always point to the first statement
1555     of the basic block *FALLTHROUGH_BLOCK.  That statement is the
1556     same as what ITER was pointing to prior to calling this function,
1557     if BEFORE_P is true; otherwise, it is its following statement.  */
1558 
1559 gimple_stmt_iterator
1560 create_cond_insert_point (gimple_stmt_iterator *iter,
1561 			  bool before_p,
1562 			  bool then_more_likely_p,
1563 			  bool create_then_fallthru_edge,
1564 			  basic_block *then_block,
1565 			  basic_block *fallthrough_block)
1566 {
1567   gimple_stmt_iterator gsi = *iter;
1568 
1569   if (!gsi_end_p (gsi) && before_p)
1570     gsi_prev (&gsi);
1571 
1572   basic_block cur_bb = gsi_bb (*iter);
1573 
1574   edge e = split_block (cur_bb, gsi_stmt (gsi));
1575 
1576   /* Get a hold on the 'condition block', the 'then block' and the
1577      'else block'.  */
1578   basic_block cond_bb = e->src;
1579   basic_block fallthru_bb = e->dest;
1580   basic_block then_bb = create_empty_bb (cond_bb);
1581   if (current_loops)
1582     {
1583       add_bb_to_loop (then_bb, cond_bb->loop_father);
1584       loops_state_set (LOOPS_NEED_FIXUP);
1585     }
1586 
1587   /* Set up the newly created 'then block'.  */
1588   e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1589   int fallthrough_probability
1590     = then_more_likely_p
1591     ? PROB_VERY_UNLIKELY
1592     : PROB_ALWAYS - PROB_VERY_UNLIKELY;
1593   e->probability = PROB_ALWAYS - fallthrough_probability;
1594   if (create_then_fallthru_edge)
1595     make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1596 
1597   /* Set up the fallthrough basic block.  */
1598   e = find_edge (cond_bb, fallthru_bb);
1599   e->flags = EDGE_FALSE_VALUE;
1600   e->count = cond_bb->count;
1601   e->probability = fallthrough_probability;
1602 
1603   /* Update dominance info for the newly created then_bb; note that
1604      fallthru_bb's dominance info has already been updated by
1605      split_block.  */
1606   if (dom_info_available_p (CDI_DOMINATORS))
1607     set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1608 
1609   *then_block = then_bb;
1610   *fallthrough_block = fallthru_bb;
1611   *iter = gsi_start_bb (fallthru_bb);
1612 
1613   return gsi_last_bb (cond_bb);
1614 }
1615 
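/* Sketch (illustration only) of the CFG shape created above:

          cond_bb
          /     \
   (true)/       \(false)
        v         v
     then_bb -> fallthru_bb

   where the then_bb -> fallthru_bb edge is only created when
   CREATE_THEN_FALLTHRU_EDGE is true.  The caller inserts its
   GIMPLE_COND at the returned iterator, i.e. at the end of cond_bb.  */
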
1616 /* Insert an if condition followed by a 'then block' right before the
1617    statement pointed to by ITER.  The fallthrough block -- which is the
1618    else block of the condition as well as the destination of the
1619    outgoing edge of the 'then block' -- starts with the statement
1620    pointed to by ITER.
1621 
1622    COND is the condition of the if.
1623 
1624    If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1625    'then block' is higher than the probability of the edge to the
1626    fallthrough block.
1627 
1628    Upon completion of the function, *THEN_BB is set to the newly
1629    inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1630    fallthrough block.
1631 
1632    *ITER is adjusted to still point to the same statement it was
1633    pointing to initially.  */
1634 
1635 static void
1636 insert_if_then_before_iter (gcond *cond,
1637 			    gimple_stmt_iterator *iter,
1638 			    bool then_more_likely_p,
1639 			    basic_block *then_bb,
1640 			    basic_block *fallthrough_bb)
1641 {
1642   gimple_stmt_iterator cond_insert_point =
1643     create_cond_insert_point (iter,
1644 			      /*before_p=*/true,
1645 			      then_more_likely_p,
1646 			      /*create_then_fallthru_edge=*/true,
1647 			      then_bb,
1648 			      fallthrough_bb);
1649   gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
1650 }
1651 
1652 /* Build (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().
1653    If RETURN_ADDRESS is set to true, return the shadow memory location
1654    instead of the value loaded from it.  */
1655 
1656 static tree
1657 build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
1658 			 tree base_addr, tree shadow_ptr_type,
1659 			 bool return_address = false)
1660 {
1661   tree t, uintptr_type = TREE_TYPE (base_addr);
1662   tree shadow_type = TREE_TYPE (shadow_ptr_type);
1663   gimple *g;
1664 
1665   t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
1666   g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
1667 			   base_addr, t);
1668   gimple_set_location (g, location);
1669   gsi_insert_after (gsi, g, GSI_NEW_STMT);
1670 
1671   t = build_int_cst (uintptr_type, asan_shadow_offset ());
1672   g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
1673 			   gimple_assign_lhs (g), t);
1674   gimple_set_location (g, location);
1675   gsi_insert_after (gsi, g, GSI_NEW_STMT);
1676 
1677   g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
1678 			   gimple_assign_lhs (g));
1679   gimple_set_location (g, location);
1680   gsi_insert_after (gsi, g, GSI_NEW_STMT);
1681 
1682   if (!return_address)
1683     {
1684       t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
1685 		  build_int_cst (shadow_ptr_type, 0));
1686       g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
1687       gimple_set_location (g, location);
1688       gsi_insert_after (gsi, g, GSI_NEW_STMT);
1689     }
1690 
1691   return gimple_assign_lhs (g);
1692 }
1693 
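/* As an illustration (x86_64 user-space parameters assumed, i.e.
   ASAN_SHADOW_SHIFT == 3 and asan_shadow_offset () == 0x7fff8000),
   the function above emits roughly:

     _1 = base_addr >> 3;
     _2 = _1 + 0x7fff8000;
     _3 = (signed char *) _2;
     _4 = *_3;            (omitted when RETURN_ADDRESS is true)

   and returns the last SSA name created.  */
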
1694 /* BASE can already be an SSA_NAME; in that case, do not create a
1695    new SSA_NAME for it.  */
1696 
1697 static tree
1698 maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
1699 		       bool before_p)
1700 {
1701   if (TREE_CODE (base) == SSA_NAME)
1702     return base;
1703   gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
1704 				  TREE_CODE (base), base);
1705   gimple_set_location (g, loc);
1706   if (before_p)
1707     gsi_insert_before (iter, g, GSI_SAME_STMT);
1708   else
1709     gsi_insert_after (iter, g, GSI_NEW_STMT);
1710   return gimple_assign_lhs (g);
1711 }
1712 
1713 /* LEN can already have the necessary size and precision;
1714    in that case, do not create a new variable.  */
1715 
1716 tree
1717 maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
1718 		       bool before_p)
1719 {
1720   if (ptrofftype_p (len))
1721     return len;
1722   gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
1723 				  NOP_EXPR, len);
1724   gimple_set_location (g, loc);
1725   if (before_p)
1726     gsi_insert_before (iter, g, GSI_SAME_STMT);
1727   else
1728     gsi_insert_after (iter, g, GSI_NEW_STMT);
1729   return gimple_assign_lhs (g);
1730 }
1731 
1732 /* Instrument the memory access instruction BASE.  Insert new
1733    statements before or after ITER.
1734 
1735    Note that the memory access represented by BASE can be either an
1736    SSA_NAME, or a non-SSA expression.  LOCATION is the source code
1737    location.  IS_STORE is TRUE for a store, FALSE for a load.
1738    BEFORE_P is TRUE for inserting the instrumentation code before
1739    ITER, FALSE for inserting it after ITER.  IS_SCALAR_ACCESS is TRUE
1740    for a scalar memory access and FALSE for memory region access.
1741    NON_ZERO_P is TRUE if memory region is guaranteed to have non-zero
1742    length.  ALIGN tells alignment of accessed memory object.
1743 
1744    START_INSTRUMENTED and END_INSTRUMENTED are TRUE if start/end of
1745    memory region have already been instrumented.
1746 
1747    If BEFORE_P is TRUE, *ITER is arranged to still point to the
1748    statement it was pointing to prior to calling this function,
1749    otherwise, it points to the statement logically following it.  */
1750 
1751 static void
1752 build_check_stmt (location_t loc, tree base, tree len,
1753 		  HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
1754 		  bool is_non_zero_len, bool before_p, bool is_store,
1755 		  bool is_scalar_access, unsigned int align = 0)
1756 {
1757   gimple_stmt_iterator gsi = *iter;
1758   gimple *g;
1759 
1760   gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
1761 
1762   gsi = *iter;
1763 
1764   base = unshare_expr (base);
1765   base = maybe_create_ssa_name (loc, base, &gsi, before_p);
1766 
1767   if (len)
1768     {
1769       len = unshare_expr (len);
1770       len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
1771     }
1772   else
1773     {
1774       gcc_assert (size_in_bytes != -1);
1775       len = build_int_cst (pointer_sized_int_node, size_in_bytes);
1776     }
1777 
1778   if (size_in_bytes > 1)
1779     {
1780       if ((size_in_bytes & (size_in_bytes - 1)) != 0
1781 	  || size_in_bytes > 16)
1782 	is_scalar_access = false;
1783       else if (align && align < size_in_bytes * BITS_PER_UNIT)
1784 	{
1785 	  /* On non-strict alignment targets, a
1786 	     16-byte access that is only 8-byte
1787 	     aligned results in a misaligned 2-byte
1788 	     shadow memory load, but it can still
1789 	     be handled using one read.  */
1790 	  if (size_in_bytes != 16
1791 	      || STRICT_ALIGNMENT
1792 	      || align < 8 * BITS_PER_UNIT)
1793 	    is_scalar_access = false;
1794 	}
1795     }
1796 
1797   HOST_WIDE_INT flags = 0;
1798   if (is_store)
1799     flags |= ASAN_CHECK_STORE;
1800   if (is_non_zero_len)
1801     flags |= ASAN_CHECK_NON_ZERO_LEN;
1802   if (is_scalar_access)
1803     flags |= ASAN_CHECK_SCALAR_ACCESS;
1804 
1805   g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
1806 				  build_int_cst (integer_type_node, flags),
1807 				  base, len,
1808 				  build_int_cst (integer_type_node,
1809 						 align / BITS_PER_UNIT));
1810   gimple_set_location (g, loc);
1811   if (before_p)
1812     gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1813   else
1814     {
1815       gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1816       gsi_next (&gsi);
1817       *iter = gsi;
1818     }
1819 }
1820 
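/* For example (illustration only), an aligned 4-byte scalar store to
   OBJ is translated above into an internal call of the shape

     ASAN_CHECK (flags, &obj, 4, 4);

   where FLAGS is the bitwise OR of ASAN_CHECK_STORE,
   ASAN_CHECK_NON_ZERO_LEN and ASAN_CHECK_SCALAR_ACCESS, and the last
   argument is the alignment in bytes.  The call is lowered later by
   asan_expand_check_ifn.  */
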
1821 /* If T represents a memory access, add instrumentation code before ITER.
1822    LOCATION is source code location.
1823    IS_STORE is either TRUE (for a store) or FALSE (for a load).  */
1824 
1825 static void
1826 instrument_derefs (gimple_stmt_iterator *iter, tree t,
1827 		   location_t location, bool is_store)
1828 {
1829   if (is_store && !ASAN_INSTRUMENT_WRITES)
1830     return;
1831   if (!is_store && !ASAN_INSTRUMENT_READS)
1832     return;
1833 
1834   tree type, base;
1835   HOST_WIDE_INT size_in_bytes;
1836   if (location == UNKNOWN_LOCATION)
1837     location = EXPR_LOCATION (t);
1838 
1839   type = TREE_TYPE (t);
1840   switch (TREE_CODE (t))
1841     {
1842     case ARRAY_REF:
1843     case COMPONENT_REF:
1844     case INDIRECT_REF:
1845     case MEM_REF:
1846     case VAR_DECL:
1847     case BIT_FIELD_REF:
1848       break;
1850     default:
1851       return;
1852     }
1853 
1854   size_in_bytes = int_size_in_bytes (type);
1855   if (size_in_bytes <= 0)
1856     return;
1857 
1858   HOST_WIDE_INT bitsize, bitpos;
1859   tree offset;
1860   machine_mode mode;
1861   int unsignedp, reversep, volatilep = 0;
1862   tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset, &mode,
1863 				    &unsignedp, &reversep, &volatilep);
1864 
1865   if (TREE_CODE (t) == COMPONENT_REF
1866       && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
1867     {
1868       tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
1869       instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
1870 				       TREE_OPERAND (t, 0), repr,
1871 				       TREE_OPERAND (t, 2)),
1872 			 location, is_store);
1873       return;
1874     }
1875 
1876   if (bitpos % BITS_PER_UNIT
1877       || bitsize != size_in_bytes * BITS_PER_UNIT)
1878     return;
1879 
1880   if (VAR_P (inner) && DECL_HARD_REGISTER (inner))
1881     return;
1882 
1883   if (VAR_P (inner)
1884       && offset == NULL_TREE
1885       && bitpos >= 0
1886       && DECL_SIZE (inner)
1887       && tree_fits_shwi_p (DECL_SIZE (inner))
1888       && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
1889     {
1890       if (DECL_THREAD_LOCAL_P (inner))
1891 	return;
1892       if (!ASAN_GLOBALS && is_global_var (inner))
1893         return;
1894       if (!TREE_STATIC (inner))
1895 	{
1896 	  /* Automatic vars in the current function will always be
1897 	     accessible.  */
1898 	  if (decl_function_context (inner) == current_function_decl
1899 	      && (!asan_sanitize_use_after_scope ()
1900 		  || !TREE_ADDRESSABLE (inner)))
1901 	    return;
1902 	}
1903       /* Always instrument external vars, as they might be dynamically
1904 	 initialized.  */
1905       else if (!DECL_EXTERNAL (inner))
1906 	{
1907 	  /* Static vars that are known not to be dynamically
1908 	     initialized will always be accessible.  */
1909 	  varpool_node *vnode = varpool_node::get (inner);
1910 	  if (vnode && !vnode->dynamically_initialized)
1911 	    return;
1912 	}
1913     }
1914 
1915   base = build_fold_addr_expr (t);
1916   if (!has_mem_ref_been_instrumented (base, size_in_bytes))
1917     {
1918       unsigned int align = get_object_alignment (t);
1919       build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
1920 			/*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
1921 			is_store, /*is_scalar_access*/true, align);
1922       update_mem_ref_hash_table (base, size_in_bytes);
1923       update_mem_ref_hash_table (t, size_in_bytes);
1924     }
1925 
1926 }
1927 
1928 /*  Insert a memory reference into the hash table if the access length
1929     can be determined at compile time.  */
1930 
1931 static void
1932 maybe_update_mem_ref_hash_table (tree base, tree len)
1933 {
1934   if (!POINTER_TYPE_P (TREE_TYPE (base))
1935       || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
1936     return;
1937 
1938   HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1939 
1940   if (size_in_bytes != -1)
1941     update_mem_ref_hash_table (base, size_in_bytes);
1942 }
1943 
1944 /* Instrument an access to a contiguous memory region that starts at
1945    the address pointed to by BASE, over a length of LEN (expressed in
1946    the sizeof (*BASE) bytes).  ITER points to the instruction before
1947    which the instrumentation instructions must be inserted.  LOCATION
1948    is the source location that the instrumentation instructions must
1949    have.  If IS_STORE is true, then the memory access is a store;
1950    otherwise, it's a load.  */
1951 
1952 static void
1953 instrument_mem_region_access (tree base, tree len,
1954 			      gimple_stmt_iterator *iter,
1955 			      location_t location, bool is_store)
1956 {
1957   if (!POINTER_TYPE_P (TREE_TYPE (base))
1958       || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1959       || integer_zerop (len))
1960     return;
1961 
1962   HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1963 
1964   if ((size_in_bytes == -1)
1965       || !has_mem_ref_been_instrumented (base, size_in_bytes))
1966     {
1967       build_check_stmt (location, base, len, size_in_bytes, iter,
1968 			/*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
1969 			is_store, /*is_scalar_access*/false, /*align*/0);
1970     }
1971 
1972   maybe_update_mem_ref_hash_table (base, len);
1973   *iter = gsi_for_stmt (gsi_stmt (*iter));
1974 }
1975 
1976 /* Instrument the call to a built-in memory access function that is
1977    pointed to by the iterator ITER.
1978 
1979    Upon completion, return TRUE iff *ITER has been advanced to the
1980    statement following the one it was originally pointing to.  */
1981 
1982 static bool
1983 instrument_builtin_call (gimple_stmt_iterator *iter)
1984 {
1985   if (!ASAN_MEMINTRIN)
1986     return false;
1987 
1988   bool iter_advanced_p = false;
1989   gcall *call = as_a <gcall *> (gsi_stmt (*iter));
1990 
1991   gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
1992 
1993   location_t loc = gimple_location (call);
1994 
1995   asan_mem_ref src0, src1, dest;
1996   asan_mem_ref_init (&src0, NULL, 1);
1997   asan_mem_ref_init (&src1, NULL, 1);
1998   asan_mem_ref_init (&dest, NULL, 1);
1999 
2000   tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
2001   bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
2002     dest_is_deref = false, intercepted_p = true;
2003 
2004   if (get_mem_refs_of_builtin_call (call,
2005 				    &src0, &src0_len, &src0_is_store,
2006 				    &src1, &src1_len, &src1_is_store,
2007 				    &dest, &dest_len, &dest_is_store,
2008 				    &dest_is_deref, &intercepted_p))
2009     {
2010       if (dest_is_deref)
2011 	{
2012 	  instrument_derefs (iter, dest.start, loc, dest_is_store);
2013 	  gsi_next (iter);
2014 	  iter_advanced_p = true;
2015 	}
2016       else if (!intercepted_p
2017 	       && (src0_len || src1_len || dest_len))
2018 	{
2019 	  if (src0.start != NULL_TREE)
2020 	    instrument_mem_region_access (src0.start, src0_len,
2021 					  iter, loc, /*is_store=*/false);
2022 	  if (src1.start != NULL_TREE)
2023 	    instrument_mem_region_access (src1.start, src1_len,
2024 					  iter, loc, /*is_store=*/false);
2025 	  if (dest.start != NULL_TREE)
2026 	    instrument_mem_region_access (dest.start, dest_len,
2027 					  iter, loc, /*is_store=*/true);
2028 
2029 	  *iter = gsi_for_stmt (call);
2030 	  gsi_next (iter);
2031 	  iter_advanced_p = true;
2032 	}
2033       else
2034 	{
2035 	  if (src0.start != NULL_TREE)
2036 	    maybe_update_mem_ref_hash_table (src0.start, src0_len);
2037 	  if (src1.start != NULL_TREE)
2038 	    maybe_update_mem_ref_hash_table (src1.start, src1_len);
2039 	  if (dest.start != NULL_TREE)
2040 	    maybe_update_mem_ref_hash_table (dest.start, dest_len);
2041 	}
2042     }
2043   return iter_advanced_p;
2044 }
2045 
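/* To illustrate: for memcpy (d, s, n), get_mem_refs_of_builtin_call
   describes [s, s + n) as a load and [d, d + n) as a store, and both
   regions are instrumented above; for builtins whose library entry
   points are intercepted by the run-time (INTERCEPTED_P), only the
   mem-ref hash table is updated, since the interceptor itself
   performs the checking.  */
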
2046 /*  Instrument the assignment statement pointed to by ITER if it is
2047     subject to instrumentation.  Return TRUE iff instrumentation
2048     actually happened.  In that case, the iterator ITER is advanced to
2049     the next logical expression following the one initially pointed to,
2050     and the relevant memory reference whose access has been
2051     instrumented is added to the memory references hash table.  */
2052 
2053 static bool
2054 maybe_instrument_assignment (gimple_stmt_iterator *iter)
2055 {
2056   gimple *s = gsi_stmt (*iter);
2057 
2058   gcc_assert (gimple_assign_single_p (s));
2059 
2060   tree ref_expr = NULL_TREE;
2061   bool is_store, is_instrumented = false;
2062 
2063   if (gimple_store_p (s))
2064     {
2065       ref_expr = gimple_assign_lhs (s);
2066       is_store = true;
2067       instrument_derefs (iter, ref_expr,
2068 			 gimple_location (s),
2069 			 is_store);
2070       is_instrumented = true;
2071     }
2072 
2073   if (gimple_assign_load_p (s))
2074     {
2075       ref_expr = gimple_assign_rhs1 (s);
2076       is_store = false;
2077       instrument_derefs (iter, ref_expr,
2078 			 gimple_location (s),
2079 			 is_store);
2080       is_instrumented = true;
2081     }
2082 
2083   if (is_instrumented)
2084     gsi_next (iter);
2085 
2086   return is_instrumented;
2087 }
2088 
2089 /* Instrument the function call pointed to by the iterator ITER, if it
2090    is subject to instrumentation.  At the moment, the only function
2091    calls that are instrumented are some built-in functions that access
2092    memory.  Look at instrument_builtin_call to learn more.
2093 
2094    Upon completion return TRUE iff *ITER was advanced to the statement
2095    following the one it was originally pointing to.  */
2096 
2097 static bool
2098 maybe_instrument_call (gimple_stmt_iterator *iter)
2099 {
2100   gimple *stmt = gsi_stmt (*iter);
2101   bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2102 
2103   if (is_builtin && instrument_builtin_call (iter))
2104     return true;
2105 
2106   if (gimple_call_noreturn_p (stmt))
2107     {
2108       if (is_builtin)
2109 	{
2110 	  tree callee = gimple_call_fndecl (stmt);
2111 	  switch (DECL_FUNCTION_CODE (callee))
2112 	    {
2113 	    case BUILT_IN_UNREACHABLE:
2114 	    case BUILT_IN_TRAP:
2115 	      /* Don't instrument these.  */
2116 	      return false;
2117 	    default:
2118 	      break;
2119 	    }
2120 	}
2121       tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2122       gimple *g = gimple_build_call (decl, 0);
2123       gimple_set_location (g, gimple_location (stmt));
2124       gsi_insert_before (iter, g, GSI_SAME_STMT);
2125     }
2126 
2127   bool instrumented = false;
2128   if (gimple_store_p (stmt))
2129     {
2130       tree ref_expr = gimple_call_lhs (stmt);
2131       instrument_derefs (iter, ref_expr,
2132 			 gimple_location (stmt),
2133 			 /*is_store=*/true);
2134 
2135       instrumented = true;
2136     }
2137 
2138   /* Walk through the gimple_call arguments and check them if needed.  */
2139   unsigned args_num = gimple_call_num_args (stmt);
2140   for (unsigned i = 0; i < args_num; ++i)
2141     {
2142       tree arg = gimple_call_arg (stmt, i);
2143       /* If ARG is not a non-aggregate register variable, the compiler in
2144 	 general creates a temporary for it and passes that temporary to the
2145 	 gimple call.  But in some cases, e.g. when passing by value a small
2146 	 structure that fits in a register, the compiler can elide the
2147 	 temporary.  In this case, we should check the argument.  */
2148       if (!is_gimple_reg (arg) && !is_gimple_min_invariant (arg))
2149 	{
2150 	  instrument_derefs (iter, arg,
2151 			     gimple_location (stmt),
2152 			     /*is_store=*/false);
2153 	  instrumented = true;
2154 	}
2155     }
2156   if (instrumented)
2157     gsi_next (iter);
2158   return instrumented;
2159 }
2160 
2161 /* Walk each instruction of all basic blocks and instrument those that
2162    represent memory references: loads, stores, or function calls.
2163    In a given basic block, this function avoids instrumenting memory
2164    references that have already been instrumented.  */
2165 
2166 static void
2167 transform_statements (void)
2168 {
2169   basic_block bb, last_bb = NULL;
2170   gimple_stmt_iterator i;
2171   int saved_last_basic_block = last_basic_block_for_fn (cfun);
2172 
2173   FOR_EACH_BB_FN (bb, cfun)
2174     {
2175       basic_block prev_bb = bb;
2176 
2177       if (bb->index >= saved_last_basic_block) continue;
2178 
2179       /* Flush the mem ref hash table, if current bb doesn't have
2180 	 exactly one predecessor, or if that predecessor (skipping
2181 	 over asan created basic blocks) isn't the last processed
2182 	 basic block.  Thus we effectively flush on extended basic
2183 	 block boundaries.  */
2184       while (single_pred_p (prev_bb))
2185 	{
2186 	  prev_bb = single_pred (prev_bb);
2187 	  if (prev_bb->index < saved_last_basic_block)
2188 	    break;
2189 	}
2190       if (prev_bb != last_bb)
2191 	empty_mem_ref_hash_table ();
2192       last_bb = bb;
2193 
2194       for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2195 	{
2196 	  gimple *s = gsi_stmt (i);
2197 
2198 	  if (has_stmt_been_instrumented_p (s))
2199 	    gsi_next (&i);
2200 	  else if (gimple_assign_single_p (s)
2201 		   && !gimple_clobber_p (s)
2202 		   && maybe_instrument_assignment (&i))
2203 	    /*  Nothing to do as maybe_instrument_assignment advanced
2204 		the iterator I.  */;
2205 	  else if (is_gimple_call (s) && maybe_instrument_call (&i))
2206 	    /*  Nothing to do as maybe_instrument_call
2207 		advanced the iterator I.  */;
2208 	  else
2209 	    {
2210 	      /* No instrumentation happened.
2211 
2212 		 If the current instruction is a function call that
2213 		 might free something, let's forget about the memory
2214 		 references that got instrumented.  Otherwise we might
2215 		 miss some instrumentation opportunities.  Do the same
2216 		 for an ASAN_MARK poisoning internal function.  */
2217 	      if (is_gimple_call (s)
2218 		  && (!nonfreeing_call_p (s)
2219 		      || asan_mark_p (s, ASAN_MARK_POISON)))
2220 		empty_mem_ref_hash_table ();
2221 
2222 	      gsi_next (&i);
2223 	    }
2224 	}
2225     }
2226   free_mem_ref_resources ();
2227 }
2228 
2229 /* Build
2230    __asan_before_dynamic_init (module_name)
2231    or
2232    __asan_after_dynamic_init ()
2233    call.  */
2234 
2235 tree
2236 asan_dynamic_init_call (bool after_p)
2237 {
2238   if (shadow_ptr_types[0] == NULL_TREE)
2239     asan_init_shadow_ptr_types ();
2240 
2241   tree fn = builtin_decl_implicit (after_p
2242 				   ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2243 				   : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2244   tree module_name_cst = NULL_TREE;
2245   if (!after_p)
2246     {
2247       pretty_printer module_name_pp;
2248       pp_string (&module_name_pp, main_input_filename);
2249 
2250       module_name_cst = asan_pp_string (&module_name_pp);
2251       module_name_cst = fold_convert (const_ptr_type_node,
2252 				      module_name_cst);
2253     }
2254 
2255   return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2256 }
2257 
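/* For a translation unit compiled from foo.cc this builds, roughly,
   __asan_before_dynamic_init ("foo.cc") or the argument-less
   __asan_after_dynamic_init () (file name illustrative; the actual
   argument is main_input_filename).  */
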
2258 /* Build
2259    struct __asan_global
2260    {
2261      const void *__beg;
2262      uptr __size;
2263      uptr __size_with_redzone;
2264      const void *__name;
2265      const void *__module_name;
2266      uptr __has_dynamic_init;
2267      __asan_global_source_location *__location;
2268      char *__odr_indicator;
2269    } type.  */
2270 
2271 static tree
2272 asan_global_struct (void)
2273 {
2274   static const char *field_names[]
2275     = { "__beg", "__size", "__size_with_redzone",
2276 	"__name", "__module_name", "__has_dynamic_init", "__location",
2277 	"__odr_indicator" };
2278   tree fields[ARRAY_SIZE (field_names)], ret;
2279   unsigned i;
2280 
2281   ret = make_node (RECORD_TYPE);
2282   for (i = 0; i < ARRAY_SIZE (field_names); i++)
2283     {
2284       fields[i]
2285 	= build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2286 		      get_identifier (field_names[i]),
2287 		      (i == 0 || i == 3) ? const_ptr_type_node
2288 		      : pointer_sized_int_node);
2289       DECL_CONTEXT (fields[i]) = ret;
2290       if (i)
2291 	DECL_CHAIN (fields[i - 1]) = fields[i];
2292     }
2293   tree type_decl = build_decl (input_location, TYPE_DECL,
2294 			       get_identifier ("__asan_global"), ret);
2295   DECL_IGNORED_P (type_decl) = 1;
2296   DECL_ARTIFICIAL (type_decl) = 1;
2297   TYPE_FIELDS (ret) = fields[0];
2298   TYPE_NAME (ret) = type_decl;
2299   TYPE_STUB_DECL (ret) = type_decl;
2300   layout_type (ret);
2301   return ret;
2302 }
2303 
2304 /* Create and return odr indicator symbol for DECL.
2305    TYPE is __asan_global struct type as returned by asan_global_struct.  */
2306 
2307 static tree
2308 create_odr_indicator (tree decl, tree type)
2309 {
2310   char *name;
2311   tree uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2312   tree decl_name
2313     = (HAS_DECL_ASSEMBLER_NAME_P (decl) ? DECL_ASSEMBLER_NAME (decl)
2314 					: DECL_NAME (decl));
2315   /* DECL_NAME theoretically might be NULL.  Bail out with 0 in this case.  */
2316   if (decl_name == NULL_TREE)
2317     return build_int_cst (uptr, 0);
2318   const char *dname = IDENTIFIER_POINTER (decl_name);
2319   if (HAS_DECL_ASSEMBLER_NAME_P (decl))
2320     dname = targetm.strip_name_encoding (dname);
2321   size_t len = strlen (dname) + sizeof ("__odr_asan_");
2322   name = XALLOCAVEC (char, len);
2323   snprintf (name, len, "__odr_asan_%s", dname);
2324 #ifndef NO_DOT_IN_LABEL
2325   name[sizeof ("__odr_asan") - 1] = '.';
2326 #elif !defined(NO_DOLLAR_IN_LABEL)
2327   name[sizeof ("__odr_asan") - 1] = '$';
2328 #endif
2329   tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (name),
2330 			 char_type_node);
2331   TREE_ADDRESSABLE (var) = 1;
2332   TREE_READONLY (var) = 0;
2333   TREE_THIS_VOLATILE (var) = 1;
2334   DECL_GIMPLE_REG_P (var) = 0;
2335   DECL_ARTIFICIAL (var) = 1;
2336   DECL_IGNORED_P (var) = 1;
2337   TREE_STATIC (var) = 1;
2338   TREE_PUBLIC (var) = 1;
2339   DECL_VISIBILITY (var) = DECL_VISIBILITY (decl);
2340   DECL_VISIBILITY_SPECIFIED (var) = DECL_VISIBILITY_SPECIFIED (decl);
2341 
2342   TREE_USED (var) = 1;
2343   tree ctor = build_constructor_va (TREE_TYPE (var), 1, NULL_TREE,
2344 				    build_int_cst (unsigned_type_node, 0));
2345   TREE_CONSTANT (ctor) = 1;
2346   TREE_STATIC (ctor) = 1;
2347   DECL_INITIAL (var) = ctor;
2348   DECL_ATTRIBUTES (var) = tree_cons (get_identifier ("asan odr indicator"),
2349 				     NULL, DECL_ATTRIBUTES (var));
2350   make_decl_rtl (var);
2351   varpool_node::finalize_decl (var);
2352   return fold_convert (uptr, build_fold_addr_expr (var));
2353 }
2354 
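/* For example, a public variable `foo' gets an indicator named
   __odr_asan.foo (or __odr_asan$foo, or __odr_asan_foo, depending on
   NO_DOT_IN_LABEL and NO_DOLLAR_IN_LABEL), which the run-time library
   uses to detect one-definition-rule violations across modules.  */
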
2355 /* Return true if DECL, a global var, might be overridden and needs
2356    an additional odr indicator symbol.  */
2357 
2358 static bool
2359 asan_needs_odr_indicator_p (tree decl)
2360 {
2361   /* Don't emit ODR indicators for kernel because:
2362      a) The kernel is written in C and thus doesn't need ODR indicators.
2363      b) Some kernel code may have assumptions about symbols containing specific
2364         patterns in their names.  Since ODR indicators contain original names
2365         of symbols they are emitted for, these assumptions would be broken for
2366         ODR indicator symbols.  */
2367   return (!(flag_sanitize & SANITIZE_KERNEL_ADDRESS)
2368 	  && !DECL_ARTIFICIAL (decl)
2369 	  && !DECL_WEAK (decl)
2370 	  && TREE_PUBLIC (decl));
2371 }
2372 
2373 /* Append description of a single global DECL into vector V.
2374    TYPE is __asan_global struct type as returned by asan_global_struct.  */
2375 
2376 static void
2377 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2378 {
2379   tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2380   unsigned HOST_WIDE_INT size;
2381   tree str_cst, module_name_cst, refdecl = decl;
2382   vec<constructor_elt, va_gc> *vinner = NULL;
2383 
2384   pretty_printer asan_pp, module_name_pp;
2385 
2386   if (DECL_NAME (decl))
2387     pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2388   else
2389     pp_string (&asan_pp, "<unknown>");
2390   str_cst = asan_pp_string (&asan_pp);
2391 
2392   pp_string (&module_name_pp, main_input_filename);
2393   module_name_cst = asan_pp_string (&module_name_pp);
2394 
2395   if (asan_needs_local_alias (decl))
2396     {
2397       char buf[20];
2398       ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2399       refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2400 			    VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2401       TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2402       TREE_READONLY (refdecl) = TREE_READONLY (decl);
2403       TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2404       DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2405       DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2406       DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2407       TREE_STATIC (refdecl) = 1;
2408       TREE_PUBLIC (refdecl) = 0;
2409       TREE_USED (refdecl) = 1;
2410       assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2411     }
2412 
2413   tree odr_indicator_ptr
2414     = (asan_needs_odr_indicator_p (decl) ? create_odr_indicator (decl, type)
2415 					 : build_int_cst (uptr, 0));
2416   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2417 			  fold_convert (const_ptr_type_node,
2418 					build_fold_addr_expr (refdecl)));
2419   size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2420   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2421   size += asan_red_zone_size (size);
2422   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2423   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2424 			  fold_convert (const_ptr_type_node, str_cst));
2425   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2426 			  fold_convert (const_ptr_type_node, module_name_cst));
2427   varpool_node *vnode = varpool_node::get (decl);
2428   int has_dynamic_init = 0;
2429   /* FIXME: Enable initialization order fiasco detection in LTO mode once
2430      a proper fix for PR 79061 is applied.  */
2431   if (!in_lto_p)
2432     has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2433   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2434 			  build_int_cst (uptr, has_dynamic_init));
2435   tree locptr = NULL_TREE;
2436   location_t loc = DECL_SOURCE_LOCATION (decl);
2437   expanded_location xloc = expand_location (loc);
2438   if (xloc.file != NULL)
2439     {
2440       static int lasanloccnt = 0;
2441       char buf[25];
2442       ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
2443       tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2444 			     ubsan_get_source_location_type ());
2445       TREE_STATIC (var) = 1;
2446       TREE_PUBLIC (var) = 0;
2447       DECL_ARTIFICIAL (var) = 1;
2448       DECL_IGNORED_P (var) = 1;
2449       pretty_printer filename_pp;
2450       pp_string (&filename_pp, xloc.file);
2451       tree str = asan_pp_string (&filename_pp);
2452       tree ctor = build_constructor_va (TREE_TYPE (var), 3,
2453 					NULL_TREE, str, NULL_TREE,
2454 					build_int_cst (unsigned_type_node,
2455 						       xloc.line), NULL_TREE,
2456 					build_int_cst (unsigned_type_node,
2457 						       xloc.column));
2458       TREE_CONSTANT (ctor) = 1;
2459       TREE_STATIC (ctor) = 1;
2460       DECL_INITIAL (var) = ctor;
2461       varpool_node::finalize_decl (var);
2462       locptr = fold_convert (uptr, build_fold_addr_expr (var));
2463     }
2464   else
2465     locptr = build_int_cst (uptr, 0);
2466   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
2467   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, odr_indicator_ptr);
2468   init = build_constructor (type, vinner);
2469   CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
2470 }
2471 
2472 /* Initialize sanitizer.def builtins if the FE hasn't initialized them.  */
2473 void
2474 initialize_sanitizer_builtins (void)
2475 {
2476   tree decl;
2477 
2478   if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2479     return;
2480 
2481   tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2482   tree BT_FN_VOID_PTR
2483     = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2484   tree BT_FN_VOID_CONST_PTR
2485     = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
2486   tree BT_FN_VOID_PTR_PTR
2487     = build_function_type_list (void_type_node, ptr_type_node,
2488 				ptr_type_node, NULL_TREE);
2489   tree BT_FN_VOID_PTR_PTR_PTR
2490     = build_function_type_list (void_type_node, ptr_type_node,
2491 				ptr_type_node, ptr_type_node, NULL_TREE);
2492   tree BT_FN_VOID_PTR_PTRMODE
2493     = build_function_type_list (void_type_node, ptr_type_node,
2494 				pointer_sized_int_node, NULL_TREE);
2495   tree BT_FN_VOID_INT
2496     = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2497   tree BT_FN_SIZE_CONST_PTR_INT
2498     = build_function_type_list (size_type_node, const_ptr_type_node,
2499 				integer_type_node, NULL_TREE);
2500   tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2501   tree BT_FN_IX_CONST_VPTR_INT[5];
2502   tree BT_FN_IX_VPTR_IX_INT[5];
2503   tree BT_FN_VOID_VPTR_IX_INT[5];
2504   tree vptr
2505     = build_pointer_type (build_qualified_type (void_type_node,
2506 						TYPE_QUAL_VOLATILE));
2507   tree cvptr
2508     = build_pointer_type (build_qualified_type (void_type_node,
2509 						TYPE_QUAL_VOLATILE
2510 						|TYPE_QUAL_CONST));
2511   tree boolt
2512     = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2513   int i;
2514   for (i = 0; i < 5; i++)
2515     {
2516       tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2517       BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2518 	= build_function_type_list (boolt, vptr, ptr_type_node, ix,
2519 				    integer_type_node, integer_type_node,
2520 				    NULL_TREE);
2521       BT_FN_IX_CONST_VPTR_INT[i]
2522 	= build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2523       BT_FN_IX_VPTR_IX_INT[i]
2524 	= build_function_type_list (ix, vptr, ix, integer_type_node,
2525 				    NULL_TREE);
2526       BT_FN_VOID_VPTR_IX_INT[i]
2527 	= build_function_type_list (void_type_node, vptr, ix,
2528 				    integer_type_node, NULL_TREE);
2529     }
2530 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2531 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2532 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2533 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2534 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2535 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2536 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2537 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2538 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2539 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2540 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2541 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2542 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2543 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2544 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2545 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2546 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2547 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2548 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2549 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2550 #undef ATTR_NOTHROW_LEAF_LIST
2551 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2552 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2553 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2554 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2555 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2556 #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2557 #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
2558   ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
2559 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2560 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2561   ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2562 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2563 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2564   /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2565 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2566 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2567   /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2568 #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
2569 #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
2570   /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2571 #undef ATTR_PURE_NOTHROW_LEAF_LIST
2572 #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
2573 #undef DEF_BUILTIN_STUB
2574 #define DEF_BUILTIN_STUB(ENUM, NAME)
2575 #undef DEF_SANITIZER_BUILTIN
2576 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2577   do {									\
2578     decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM,		\
2579 				 BUILT_IN_NORMAL, NAME, NULL_TREE);	\
2580     set_call_expr_flags (decl, ATTRS);					\
2581     set_builtin_decl (ENUM, decl, true);				\
2582   } while (0);
2583 
2584 #include "sanitizer.def"
2585 
2586   /* -fsanitize=object-size uses __builtin_object_size, but that might
2587      not be available for e.g. Fortran at this point.  We use
2588      DEF_SANITIZER_BUILTIN here only as a convenience macro.  */
2589   if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
2590       && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
2591     DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
2592 			   BT_FN_SIZE_CONST_PTR_INT,
2593 			   ATTR_PURE_NOTHROW_LEAF_LIST)
2594 
2595 #undef DEF_SANITIZER_BUILTIN
2596 #undef DEF_BUILTIN_STUB
2597 }
2598 
2599 /* Called via hash_table::traverse.  Count the number of emitted
2600    STRING_CSTs in the constant hash table.  */
2601 
2602 int
2603 count_string_csts (constant_descriptor_tree **slot,
2604 		   unsigned HOST_WIDE_INT *data)
2605 {
2606   struct constant_descriptor_tree *desc = *slot;
2607   if (TREE_CODE (desc->value) == STRING_CST
2608       && TREE_ASM_WRITTEN (desc->value)
2609       && asan_protect_global (desc->value))
2610     ++*data;
2611   return 1;
2612 }
2613 
2614 /* Helper structure to pass two parameters to
2615    add_string_csts.  */
2616 
2617 struct asan_add_string_csts_data
2618 {
2619   tree type;
2620   vec<constructor_elt, va_gc> *v;
2621 };
2622 
2623 /* Called via hash_table::traverse.  Call asan_add_global
2624    on emitted STRING_CSTs from the constant hash table.  */
2625 
2626 int
2627 add_string_csts (constant_descriptor_tree **slot,
2628 		 asan_add_string_csts_data *aascd)
2629 {
2630   struct constant_descriptor_tree *desc = *slot;
2631   if (TREE_CODE (desc->value) == STRING_CST
2632       && TREE_ASM_WRITTEN (desc->value)
2633       && asan_protect_global (desc->value))
2634     {
2635       asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2636 		       aascd->type, aascd->v);
2637     }
2638   return 1;
2639 }
2640 
2641 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2642    invoke ggc_collect.  */
2643 static GTY(()) tree asan_ctor_statements;
2644 
2645 /* Module-level instrumentation.
2646    - Insert __asan_init_vN() into the list of CTORs.
2647    - TODO: insert redzones around globals.
2648  */
2649 
2650 void
2651 asan_finish_file (void)
2652 {
2653   varpool_node *vnode;
2654   unsigned HOST_WIDE_INT gcount = 0;
2655 
2656   if (shadow_ptr_types[0] == NULL_TREE)
2657     asan_init_shadow_ptr_types ();
2658   /* Avoid instrumenting code in the asan ctors/dtors.
2659      We don't need to insert padding after the description strings,
2660      nor after .LASAN* array.  */
2661   flag_sanitize &= ~SANITIZE_ADDRESS;
2662 
2663   /* For user-space we want asan constructors to run first.
2664      The Linux kernel does not support priorities other than default, and the
2665      only other user of constructors is coverage.  So we run with the default
2666      priority.  */
2667   int priority = flag_sanitize & SANITIZE_USER_ADDRESS
2668                  ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
2669 
2670   if (flag_sanitize & SANITIZE_USER_ADDRESS)
2671     {
2672       tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2673       append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2674       fn = builtin_decl_implicit (BUILT_IN_ASAN_VERSION_MISMATCH_CHECK);
2675       append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2676     }
2677   FOR_EACH_DEFINED_VARIABLE (vnode)
2678     if (TREE_ASM_WRITTEN (vnode->decl)
2679 	&& asan_protect_global (vnode->decl))
2680       ++gcount;
2681   hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
2682   const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
2683     (&gcount);
2684   if (gcount)
2685     {
2686       tree type = asan_global_struct (), var, ctor;
2687       tree dtor_statements = NULL_TREE;
2688       vec<constructor_elt, va_gc> *v;
2689       char buf[20];
2690 
2691       type = build_array_type_nelts (type, gcount);
2692       ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2693       var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2694 			type);
2695       TREE_STATIC (var) = 1;
2696       TREE_PUBLIC (var) = 0;
2697       DECL_ARTIFICIAL (var) = 1;
2698       DECL_IGNORED_P (var) = 1;
2699       vec_alloc (v, gcount);
2700       FOR_EACH_DEFINED_VARIABLE (vnode)
2701 	if (TREE_ASM_WRITTEN (vnode->decl)
2702 	    && asan_protect_global (vnode->decl))
2703 	  asan_add_global (vnode->decl, TREE_TYPE (type), v);
2704       struct asan_add_string_csts_data aascd;
2705       aascd.type = TREE_TYPE (type);
2706       aascd.v = v;
2707       const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
2708        	(&aascd);
2709       ctor = build_constructor (type, v);
2710       TREE_CONSTANT (ctor) = 1;
2711       TREE_STATIC (ctor) = 1;
2712       DECL_INITIAL (var) = ctor;
2713       varpool_node::finalize_decl (var);
2714 
2715       tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
2716       tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
2717       append_to_statement_list (build_call_expr (fn, 2,
2718 						 build_fold_addr_expr (var),
2719 						 gcount_tree),
2720 				&asan_ctor_statements);
2721 
2722       fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2723       append_to_statement_list (build_call_expr (fn, 2,
2724 						 build_fold_addr_expr (var),
2725 						 gcount_tree),
2726 				&dtor_statements);
2727       cgraph_build_static_cdtor ('D', dtor_statements, priority);
2728     }
2729   if (asan_ctor_statements)
2730     cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
2731   flag_sanitize |= SANITIZE_ADDRESS;
2732 }
2733 
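/* The net effect is a constructor/destructor pair roughly equivalent
   to the following sketch (names illustrative):

     static void ctor ()    // high priority for user-space ASan
     {
       __asan_init ();
       __asan_version_mismatch_check_vN ();
       __asan_register_globals (&.LASAN0, gcount);
     }
     static void dtor ()
     {
       __asan_unregister_globals (&.LASAN0, gcount);
     }
 */
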
2734 /* Poison or unpoison (depending on IS_CLOBBER) shadow memory based on
2735    the SHADOW address.  Newly created statements are inserted at ITER
2736    with the given location LOC.  We mark SIZE shadow bytes, where
2737    LAST_CHUNK_SIZE is greater than zero when we are at the end of a
2738    variable.  */
2739 
2740 static void
2741 asan_store_shadow_bytes (gimple_stmt_iterator *iter, location_t loc,
2742 			 tree shadow,
2743 			 unsigned HOST_WIDE_INT base_addr_offset,
2744 			 bool is_clobber, unsigned size,
2745 			 unsigned last_chunk_size)
2746 {
2747   tree shadow_ptr_type;
2748 
2749   switch (size)
2750     {
2751     case 1:
2752       shadow_ptr_type = shadow_ptr_types[0];
2753       break;
2754     case 2:
2755       shadow_ptr_type = shadow_ptr_types[1];
2756       break;
2757     case 4:
2758       shadow_ptr_type = shadow_ptr_types[2];
2759       break;
2760     default:
2761       gcc_unreachable ();
2762     }
2763 
2764   unsigned char c = is_clobber ? ASAN_STACK_MAGIC_USE_AFTER_SCOPE : 0;
2765   unsigned HOST_WIDE_INT val = 0;
2766   unsigned last_pos = size;
2767   if (last_chunk_size && !is_clobber)
2768     last_pos = BYTES_BIG_ENDIAN ? 0 : size - 1;
2769   for (unsigned i = 0; i < size; ++i)
2770     {
2771       unsigned char shadow_c = c;
2772       if (i == last_pos)
2773 	shadow_c = last_chunk_size;
2774       val |= (unsigned HOST_WIDE_INT) shadow_c << (BITS_PER_UNIT * i);
2775     }
2776 
2777   /* Handle last chunk in unpoisoning.  */
2778   tree magic = build_int_cst (TREE_TYPE (shadow_ptr_type), val);
2779 
2780   tree dest = build2 (MEM_REF, TREE_TYPE (shadow_ptr_type), shadow,
2781 		      build_int_cst (shadow_ptr_type, base_addr_offset));
2782 
2783   gimple *g = gimple_build_assign (dest, magic);
2784   gimple_set_location (g, loc);
2785   gsi_insert_after (iter, g, GSI_NEW_STMT);
2786 }
2787 
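/* Worked example (illustration only): unpoisoning the last 13 bytes
   of a variable with a 2-byte shadow store gives, in memory order,
   the shadow bytes { 0x00, 0x05 }: the first 8-byte granule is fully
   addressable and the last granule has 5 addressable bytes.
   Poisoning instead stores ASAN_STACK_MAGIC_USE_AFTER_SCOPE into
   every shadow byte.  */
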
2788 /* Expand the ASAN_MARK builtins.  */
2789 
2790 bool
2791 asan_expand_mark_ifn (gimple_stmt_iterator *iter)
2792 {
2793   gimple *g = gsi_stmt (*iter);
2794   location_t loc = gimple_location (g);
2795   HOST_WIDE_INT flag = tree_to_shwi (gimple_call_arg (g, 0));
2796   bool is_poison = ((asan_mark_flags)flag) == ASAN_MARK_POISON;
2797 
2798   tree base = gimple_call_arg (g, 1);
2799   gcc_checking_assert (TREE_CODE (base) == ADDR_EXPR);
2800   tree decl = TREE_OPERAND (base, 0);
2801 
2802   /* For a nested function, we can have: ASAN_MARK (2, &FRAME.2.fp_input, 4) */
2803   if (TREE_CODE (decl) == COMPONENT_REF
2804       && DECL_NONLOCAL_FRAME (TREE_OPERAND (decl, 0)))
2805     decl = TREE_OPERAND (decl, 0);
2806 
2807   gcc_checking_assert (TREE_CODE (decl) == VAR_DECL);
2808   if (asan_handled_variables == NULL)
2809     asan_handled_variables = new hash_set<tree> (16);
2810   asan_handled_variables->add (decl);
2811   tree len = gimple_call_arg (g, 2);
2812 
2813   gcc_assert (tree_fits_shwi_p (len));
2814   unsigned HOST_WIDE_INT size_in_bytes = tree_to_shwi (len);
2815   gcc_assert (size_in_bytes);
2816 
2817   g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2818 			   NOP_EXPR, base);
2819   gimple_set_location (g, loc);
2820   gsi_replace (iter, g, false);
2821   tree base_addr = gimple_assign_lhs (g);
2822 
2823   /* Generate direct emission if size_in_bytes is small.  */
2824   if (size_in_bytes <= ASAN_PARAM_USE_AFTER_SCOPE_DIRECT_EMISSION_THRESHOLD)
2825     {
2826       unsigned HOST_WIDE_INT shadow_size = shadow_mem_size (size_in_bytes);
2827 
2828       tree shadow = build_shadow_mem_access (iter, loc, base_addr,
2829 					     shadow_ptr_types[0], true);
2830 
2831       for (unsigned HOST_WIDE_INT offset = 0; offset < shadow_size;)
2832 	{
2833 	  unsigned size = 1;
2834 	  if (shadow_size - offset >= 4)
2835 	    size = 4;
2836 	  else if (shadow_size - offset >= 2)
2837 	    size = 2;
2838 
2839 	  unsigned HOST_WIDE_INT last_chunk_size = 0;
2840 	  unsigned HOST_WIDE_INT s = (offset + size) * ASAN_SHADOW_GRANULARITY;
2841 	  if (s > size_in_bytes)
2842 	    last_chunk_size = ASAN_SHADOW_GRANULARITY - (s - size_in_bytes);
2843 
2844 	  asan_store_shadow_bytes (iter, loc, shadow, offset, is_poison,
2845 				   size, last_chunk_size);
2846 	  offset += size;
2847 	}
2848     }
2849   else
2850     {
2851       g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2852 			       NOP_EXPR, len);
2853       gimple_set_location (g, loc);
2854       gsi_insert_before (iter, g, GSI_SAME_STMT);
2855       tree sz_arg = gimple_assign_lhs (g);
2856 
2857       tree fun
2858 	= builtin_decl_implicit (is_poison ? BUILT_IN_ASAN_POISON_STACK_MEMORY
2859 				 : BUILT_IN_ASAN_UNPOISON_STACK_MEMORY);
2860       g = gimple_build_call (fun, 2, base_addr, sz_arg);
2861       gimple_set_location (g, loc);
2862       gsi_insert_after (iter, g, GSI_NEW_STMT);
2863     }
2864 
2865   return false;
2866 }
2867 
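/* For instance, ASAN_MARK (UNPOISON, &v, 13) with the usual 8-byte
   shadow granularity needs shadow_mem_size (13) == 2 shadow bytes,
   which is below the default direct-emission threshold, so a single
   2-byte shadow store with LAST_CHUNK_SIZE == 5 is emitted; larger
   objects fall back to the __asan_{,un}poison_stack_memory run-time
   calls.  */
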
2868 /* Expand the ASAN_{LOAD,STORE} builtins.  */
2869 
2870 bool
2871 asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
2872 {
2873   gimple *g = gsi_stmt (*iter);
2874   location_t loc = gimple_location (g);
2875   bool recover_p;
2876   if (flag_sanitize & SANITIZE_USER_ADDRESS)
2877     recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
2878   else
2879     recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
2880 
2881   HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
2882   gcc_assert (flags < ASAN_CHECK_LAST);
2883   bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
2884   bool is_store = (flags & ASAN_CHECK_STORE) != 0;
2885   bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
2886 
2887   tree base = gimple_call_arg (g, 1);
2888   tree len = gimple_call_arg (g, 2);
2889   HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
2890 
2891   HOST_WIDE_INT size_in_bytes
2892     = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2893 
2894   if (use_calls)
2895     {
2896       /* Instrument using callbacks.  */
2897       gimple *g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2898 				      NOP_EXPR, base);
2899       gimple_set_location (g, loc);
2900       gsi_insert_before (iter, g, GSI_SAME_STMT);
2901       tree base_addr = gimple_assign_lhs (g);
2902 
2903       int nargs;
2904       tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
2905       if (nargs == 1)
2906 	g = gimple_build_call (fun, 1, base_addr);
2907       else
2908 	{
2909 	  gcc_assert (nargs == 2);
2910 	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2911 				   NOP_EXPR, len);
2912 	  gimple_set_location (g, loc);
2913 	  gsi_insert_before (iter, g, GSI_SAME_STMT);
2914 	  tree sz_arg = gimple_assign_lhs (g);
2915 	  g = gimple_build_call (fun, nargs, base_addr, sz_arg);
2916 	}
2917       gimple_set_location (g, loc);
2918       gsi_replace (iter, g, false);
2919       return false;
2920     }
2921 
2922   HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
2923 
2924   tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
2925   tree shadow_type = TREE_TYPE (shadow_ptr_type);
2926 
2927   gimple_stmt_iterator gsi = *iter;
2928 
2929   if (!is_non_zero_len)
2930     {
2931       /* So, the length of the memory area to asan-protect is not
2932 	 known to be non-zero.  Let's guard the generated instrumentation
2933 	 code like:
2934 
2935 	 if (len != 0)
2936 	   {
2937 	     //asan instrumentation code goes here.
2938 	   }
2939 	 // fallthrough instructions, starting with *ITER.  */
2940 
2941       g = gimple_build_cond (NE_EXPR,
2942 			    len,
2943 			    build_int_cst (TREE_TYPE (len), 0),
2944 			    NULL_TREE, NULL_TREE);
2945       gimple_set_location (g, loc);
2946 
2947       basic_block then_bb, fallthrough_bb;
2948       insert_if_then_before_iter (as_a <gcond *> (g), iter,
2949 				  /*then_more_likely_p=*/true,
2950 				  &then_bb, &fallthrough_bb);
2951       /* Note that fallthrough_bb starts with the statement that was
2952 	 pointed to by ITER.  */
2953 
2954       /* The 'then block' of the 'if (len != 0)' condition is where
2955 	we'll generate the asan instrumentation code now.  */
2956       gsi = gsi_last_bb (then_bb);
2957     }
2958 
2959   /* Get an iterator on the point where we can add the condition
2960      statement for the instrumentation.  */
2961   basic_block then_bb, else_bb;
2962   gsi = create_cond_insert_point (&gsi, /*before_p*/false,
2963 				  /*then_more_likely_p=*/false,
2964 				  /*create_then_fallthru_edge*/recover_p,
2965 				  &then_bb,
2966 				  &else_bb);
2967 
2968   g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2969 			   NOP_EXPR, base);
2970   gimple_set_location (g, loc);
2971   gsi_insert_before (&gsi, g, GSI_NEW_STMT);
2972   tree base_addr = gimple_assign_lhs (g);
2973 
2974   tree t = NULL_TREE;
2975   if (real_size_in_bytes >= 8)
2976     {
2977       tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2978 					     shadow_ptr_type);
2979       t = shadow;
2980     }
2981   else
2982     {
2983       /* Slow path for 1, 2 and 4 byte accesses.  */
2984       /* Test (shadow != 0)
2985 	 & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow).  */
2986       tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2987 					     shadow_ptr_type);
2988       gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
2989       gimple_seq seq = NULL;
2990       gimple_seq_add_stmt (&seq, shadow_test);
2991       /* Aligned accesses (>= 8 bytes) can test just
2992 	 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
2993 	 to be 0.  */
2994       if (align < 8)
2995 	{
2996 	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2997 						   base_addr, 7));
2998 	  gimple_seq_add_stmt (&seq,
2999 			       build_type_cast (shadow_type,
3000 						gimple_seq_last (seq)));
3001 	  if (real_size_in_bytes > 1)
3002 	    gimple_seq_add_stmt (&seq,
3003 				 build_assign (PLUS_EXPR,
3004 					       gimple_seq_last (seq),
3005 					       real_size_in_bytes - 1));
3006 	  t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
3007 	}
3008       else
3009 	t = build_int_cst (shadow_type, real_size_in_bytes - 1);
3010       gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
3011       gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
3012 					       gimple_seq_last (seq)));
3013       t = gimple_assign_lhs (gimple_seq_last (seq));
3014       gimple_seq_set_location (seq, loc);
3015       gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
3016 
3017       /* For non-constant, misaligned or otherwise weird access sizes,
3018 	 check the first and the last byte.  */
3019       if (size_in_bytes == -1)
3020 	{
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   MINUS_EXPR, len,
				   build_int_cst (pointer_sized_int_node, 1));
	  gimple_set_location (g, loc);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	  tree last = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   PLUS_EXPR, base_addr, last);
	  gimple_set_location (g, loc);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	  tree base_end_addr = gimple_assign_lhs (g);

	  tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
						 shadow_ptr_type);
	  gimple *shadow_test = build_assign (NE_EXPR, shadow, 0);
	  gimple_seq seq = NULL;
	  gimple_seq_add_stmt (&seq, shadow_test);
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
						   base_end_addr, 7));
	  gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
						      gimple_seq_last (seq)));
	  gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
						   gimple_seq_last (seq),
						   shadow));
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
						   gimple_seq_last (seq)));
	  gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
						   gimple_seq_last (seq)));
	  t = gimple_assign_lhs (gimple_seq_last (seq));
	  gimple_seq_set_location (seq, loc);
	  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
	}
    }

  g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
			 NULL_TREE, NULL_TREE);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  /* Generate call to the run-time library (e.g. __asan_report_load8).  */
  gsi = gsi_start_bb (then_bb);
  int nargs;
  tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
  g = gimple_build_call (fun, nargs, base_addr, len);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  gsi_remove (iter, true);
  *iter = gsi_start_bb (else_bb);

  return true;
}

/* Create ASAN shadow variable for a VAR_DECL which has been rewritten
   into SSA.  Already seen VAR_DECLs are stored in SHADOW_VARS_MAPPING.  */

static tree
create_asan_shadow_var (tree var_decl,
			hash_map<tree, tree> &shadow_vars_mapping)
{
  tree *slot = shadow_vars_mapping.get (var_decl);
  if (slot == NULL)
    {
      tree shadow_var = copy_node (var_decl);

      copy_body_data id;
      memset (&id, 0, sizeof (copy_body_data));
      id.src_fn = id.dst_fn = current_function_decl;
      copy_decl_for_dup_finish (&id, var_decl, shadow_var);

      DECL_ARTIFICIAL (shadow_var) = 1;
      DECL_IGNORED_P (shadow_var) = 1;
      DECL_SEEN_IN_BIND_EXPR_P (shadow_var) = 0;
      gimple_add_tmp_var (shadow_var);

      shadow_vars_mapping.put (var_decl, shadow_var);
      return shadow_var;
    }
  else
    return *slot;
}

/* Expand the ASAN_POISON internal function: poison the shadow memory
   of the variable backing its LHS and turn every real (non-debug) use
   of that SSA name into a run-time error report.  */
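
/* A rough before/after sketch of the expansion (all names are
   illustrative):

     x_1 = ASAN_POISON ();	     ASAN_MARK (POISON, &shadow_x, size);
     ...			=>   ...
     use (x_1);			     __asan_report_load<size> (&shadow_x);
				     use (x_1);

   SHADOW_X is the variable created by create_asan_shadow_var above;
   uses wrapped in IFN_ASAN_POISON_USE are replaced by the report call
   itself and reported as stores.  */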

bool
asan_expand_poison_ifn (gimple_stmt_iterator *iter,
			bool *need_commit_edge_insert,
			hash_map<tree, tree> &shadow_vars_mapping)
{
  gimple *g = gsi_stmt (*iter);
  tree poisoned_var = gimple_call_lhs (g);
  if (!poisoned_var || has_zero_uses (poisoned_var))
    {
      gsi_remove (iter, true);
      return true;
    }

  if (SSA_NAME_VAR (poisoned_var) == NULL_TREE)
    SET_SSA_NAME_VAR_OR_IDENTIFIER (poisoned_var,
				    create_tmp_var (TREE_TYPE (poisoned_var)));

  tree shadow_var = create_asan_shadow_var (SSA_NAME_VAR (poisoned_var),
					    shadow_vars_mapping);

  bool recover_p;
  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    recover_p = (flag_sanitize_recover & SANITIZE_USER_ADDRESS) != 0;
  else
    recover_p = (flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
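  /* With recovery enabled, report_error_func below picks the _noabort
     variants of the __asan_report_* routines, so execution may continue
     past the report.  */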
  tree size = DECL_SIZE_UNIT (shadow_var);
  gimple *poison_call
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node,
						 ASAN_MARK_POISON),
				  build_fold_addr_expr (shadow_var), size);
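  /* The IFN_ASAN_MARK call built above poisons SIZE bytes of shadow
     memory at &SHADOW_VAR; it is itself lowered later, when
     asan_expand_mark_ifn processes the statement.  */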

  gimple *use;
  imm_use_iterator imm_iter;
  FOR_EACH_IMM_USE_STMT (use, imm_iter, poisoned_var)
    {
      if (is_gimple_debug (use))
	continue;

      int nargs;
      bool store_p = gimple_call_internal_p (use, IFN_ASAN_POISON_USE);
      tree fun = report_error_func (store_p, recover_p, tree_to_uhwi (size),
				    &nargs);

      gcall *call = gimple_build_call (fun, 1,
				       build_fold_addr_expr (shadow_var));
      gimple_set_location (call, gimple_location (use));
      gimple *call_to_insert = call;

      /* The USE can be a gimple PHI node.  If so, insert the call on
	 all edges leading to the PHI node.  */
      if (is_a <gphi *> (use))
	{
	  gphi *phi = dyn_cast<gphi *> (use);
	  for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
	    if (gimple_phi_arg_def (phi, i) == poisoned_var)
	      {
		edge e = gimple_phi_arg_edge (phi, i);

		/* Do not insert on an edge we can't split.  */
		if (e->flags & EDGE_ABNORMAL)
		  continue;

		if (call_to_insert == NULL)
		  call_to_insert = gimple_copy (call);

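		/* gsi_insert_seq_on_edge only queues the statement on
		   the edge; the caller must run gsi_commit_edge_inserts
		   afterwards, which is what the flag below requests.  */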
		gsi_insert_seq_on_edge (e, call_to_insert);
		*need_commit_edge_insert = true;
		call_to_insert = NULL;
	      }
	}
      else
	{
	  gimple_stmt_iterator gsi = gsi_for_stmt (use);
	  if (store_p)
	    gsi_replace (&gsi, call, true);
	  else
	    gsi_insert_before (&gsi, call, GSI_NEW_STMT);
	}
    }

  SSA_NAME_IS_DEFAULT_DEF (poisoned_var) = true;
  SSA_NAME_DEF_STMT (poisoned_var) = gimple_build_nop ();
  gsi_replace (iter, poison_call, false);

  return true;
}

/* Instrument the current function.  */

static unsigned int
asan_instrument (void)
{
  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  transform_statements ();
  return 0;
}

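/* Gate for the asan passes: instrument only if address sanitizing is
   enabled and the current function does not carry the
   no_sanitize_address attribute.  */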
static bool
gate_asan (void)
{
  return (flag_sanitize & SANITIZE_ADDRESS) != 0
	  && !lookup_attribute ("no_sanitize_address",
				DECL_ATTRIBUTES (current_function_decl));
}

namespace {

const pass_data pass_data_asan =
{
  GIMPLE_PASS, /* type */
  "asan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan : public gimple_opt_pass
{
public:
  pass_asan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_asan (m_ctxt); }
  virtual bool gate (function *) { return gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan

} // anon namespace

gimple_opt_pass *
make_pass_asan (gcc::context *ctxt)
{
  return new pass_asan (ctxt);
}

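/* pass_asan_O0 below registers the same instrumentation a second time,
   gated to run only when optimizations are disabled; the main "asan"
   pass is scheduled inside the optimization pipeline and therefore does
   not run at -O0.  */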
namespace {

const pass_data pass_data_asan_O0 =
{
  GIMPLE_PASS, /* type */
  "asan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan_O0 : public gimple_opt_pass
{
public:
  pass_asan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return !optimize && gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan_O0

} // anon namespace

gimple_opt_pass *
make_pass_asan_O0 (gcc::context *ctxt)
{
  return new pass_asan_O0 (ctxt);
}

#include "gt-asan.h"