xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/asan.c (revision bdc22b2e01993381dcefeff2bc9b56ca75a4235c)
1 /* AddressSanitizer, a fast memory error detector.
2    Copyright (C) 2012-2015 Free Software Foundation, Inc.
3    Contributed by Kostya Serebryany <kcc@google.com>
4 
5 This file is part of GCC.
6 
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11 
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3.  If not see
19 <http://www.gnu.org/licenses/>.  */
20 
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "hash-set.h"
26 #include "machmode.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "options.h"
33 #include "wide-int.h"
34 #include "inchash.h"
35 #include "tree.h"
36 #include "fold-const.h"
37 #include "hash-table.h"
38 #include "predict.h"
39 #include "tm.h"
40 #include "hard-reg-set.h"
41 #include "function.h"
42 #include "dominance.h"
43 #include "cfg.h"
44 #include "cfganal.h"
45 #include "basic-block.h"
46 #include "tree-ssa-alias.h"
47 #include "internal-fn.h"
48 #include "gimple-expr.h"
49 #include "is-a.h"
50 #include "gimple.h"
51 #include "gimplify.h"
52 #include "gimple-iterator.h"
53 #include "calls.h"
54 #include "varasm.h"
55 #include "stor-layout.h"
56 #include "tree-iterator.h"
57 #include "hash-map.h"
58 #include "plugin-api.h"
59 #include "ipa-ref.h"
60 #include "cgraph.h"
61 #include "stringpool.h"
62 #include "tree-ssanames.h"
63 #include "tree-pass.h"
64 #include "asan.h"
65 #include "gimple-pretty-print.h"
66 #include "target.h"
67 #include "hashtab.h"
68 #include "rtl.h"
69 #include "flags.h"
70 #include "statistics.h"
71 #include "real.h"
72 #include "fixed-value.h"
73 #include "insn-config.h"
74 #include "expmed.h"
75 #include "dojump.h"
76 #include "explow.h"
77 #include "emit-rtl.h"
78 #include "stmt.h"
79 #include "expr.h"
80 #include "insn-codes.h"
81 #include "optabs.h"
82 #include "output.h"
83 #include "tm_p.h"
84 #include "langhooks.h"
85 #include "alloc-pool.h"
86 #include "cfgloop.h"
87 #include "gimple-builder.h"
88 #include "ubsan.h"
89 #include "params.h"
90 #include "builtins.h"
91 
92 /* AddressSanitizer finds out-of-bounds and use-after-free bugs
93    with <2x slowdown on average.
94 
95    The tool consists of two parts:
96    instrumentation module (this file) and a run-time library.
97    The instrumentation module adds a run-time check before every memory insn.
98      For an 8- or 16-byte load accessing address X:
99        ShadowAddr = (X >> 3) + Offset
100        ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
101        if (ShadowValue)
102 	 __asan_report_load8(X);
103      For a load of N bytes (N=1, 2 or 4) from address X:
104        ShadowAddr = (X >> 3) + Offset
105        ShadowValue = *(char*)ShadowAddr;
106        if (ShadowValue)
107 	 if ((X & 7) + N - 1 >= ShadowValue)
108 	   __asan_report_loadN(X);
109    Stores are instrumented similarly, but using __asan_report_storeN functions.
110    A call to __asan_init_vN() is inserted into the list of module CTORs.
111    N is the version number of the AddressSanitizer API. The changes between the
112    API versions are listed in libsanitizer/asan/asan_interface_internal.h.
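
   As a minimal illustrative sketch in plain C -- not the GIMPLE the
   pass actually emits, and with 'offset' standing for the target's
   shadow offset -- the check for an N-byte access (N = 1, 2 or 4) is:

     unsigned char shadow = *(unsigned char *) ((x >> 3) + offset);
     if (shadow != 0 && ((x & 7) + n - 1) >= shadow)
       __asan_report_loadN (x);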
113 
114    The run-time library redefines malloc (so that red zones are inserted around
115    the allocated memory) and free (so that reuse of freed memory is delayed),
116    and provides the __asan_report* and __asan_init_vN functions.
117 
118    Read more:
119    http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm
120 
121    The current implementation supports detection of out-of-bounds and
122    use-after-free in the heap, on the stack and for global variables.
123 
124    [Protection of stack variables]
125 
126    To understand how detection of out-of-bounds and use-after-free works
127    for stack variables, let's look at this example on x86_64 where the
128    stack grows downward:
129 
130      int
131      foo ()
132      {
133        char a[23] = {0};
134        int b[2] = {0};
135 
136        a[5] = 1;
137        b[1] = 2;
138 
139        return a[5] + b[1];
140      }
141 
142    For this function, the stack protected by asan will be organized as
143    follows, from the top of the stack to the bottom:
144 
145    Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']
146 
147    Slot 2/ [8 bytes of red zone, added to the space of 'a' to make
148 	   the next slot 32-byte aligned; this one is called a Partial
149 	   Redzone; this 32-byte alignment is an asan constraint]
150 
151    Slot 3/ [24 bytes for variable 'a']
152 
153    Slot 4/ [red zone of 32 bytes called 'Middle RedZone']
154 
155    Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]
156 
157    Slot 6/ [8 bytes for variable 'b']
158 
159    Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
160 	    'LEFT RedZone']
161 
162    The 32 bytes of LEFT red zone at the bottom of the stack can be
163    decomposed as follows:
164 
165      1/ The first 8 bytes contain a magical asan number that is always
166      0x41B58AB3.
167 
168      2/ The following 8 bytes contain a pointer to a string (to be
169      parsed at runtime by the asan run-time library), whose format is
170      the following:
171 
172       "<function-name> <space> <num-of-variables-on-the-stack>
173       (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
174       <length-of-var-in-bytes> ){n} "
175 
176 	where '(...){n}' means the content inside the parentheses occurs 'n'
177 	times, with 'n' being the number of variables on the stack.
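
	As an illustration of that grammar, for the function foo above
	(two variables, 'a' of 24 bytes and 'b' of 8 bytes) and with
	hypothetical 32-byte aligned offsets of 32 and 96, the string
	would read: "foo 2 32 24 96 8 ".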
178 
179      3/ The following 8 bytes contain the PC of the current function, which
180      will be used by the run-time library to print an error message.
181 
182      4/ The following 8 bytes are reserved for internal use by the run-time.
183 
184    The shadow memory for that stack layout is going to look like this:
185 
186      - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
187        The F1 byte pattern is a magic number called
188        ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
189        the memory for that shadow byte is part of the LEFT red zone
190        intended to sit below the variables on the stack.
191 
192      - content of shadow memory 8 bytes for slots 6 and 5:
193        0xF4F4F400.  The F4 byte pattern is a magic number
194        called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
195        memory region for this shadow byte is a PARTIAL red zone
196        intended to pad a variable A, so that the slot following
197        {A,padding} is 32-byte aligned.
198 
199        Note that the least significant byte of this shadow memory
200        content being 00 means that the 8 bytes of memory it
201        corresponds to (the memory of variable 'b') are
202        addressable.
203 
204      - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
205        The F2 byte pattern is a magic number called
206        ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
207        region for this shadow byte is a MIDDLE red zone intended to
208        sit between two 32-byte aligned slots of {variable,padding}.
209 
210      - content of shadow memory 8 bytes for slots 3 and 2:
211        0xF4000000.  This represents the concatenation of
212        variable 'a' and the partial red zone following it, like what we
213        had for variable 'b'.  The least significant 3 bytes being 00
214        means that the 24 bytes of variable 'a' are addressable.
215 
216      - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
217        The F3 byte pattern is a magic number called
218        ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
219        region for this shadow byte is a RIGHT red zone intended to sit
220        at the top of the variables of the stack.
221 
222    Note that the real variable layout is done in expand_used_vars in
223    cfgexpand.c.  When Address Sanitizer is enabled, that function lays out
224    stack variables as well as the different red zones, emits some
225    prologue code to populate the shadow memory so as to poison (mark as
226    non-accessible) the regions of the red zones and mark the regions of
227    stack variables as accessible, and emits some epilogue code to
228    un-poison (mark as accessible) the regions of red zones right before
229    the function exits.
230 
231    [Protection of global variables]
232 
233    The basic idea is to insert a red zone between two global variables
234    and install a constructor function that calls the asan runtime to
235    populate the relevant shadow memory regions at load time.
236 
237    So the global variables are laid out so as to insert a red zone between
238    them.  The red zones are sized so that each variable starts on a
239    32-byte boundary.
240 
241    Then a constructor function is installed so that, for each global
242    variable, it calls the runtime asan library function
243    __asan_register_globals with an instance of this type:
244 
245      struct __asan_global
246      {
247        // Address of the beginning of the global variable.
248        const void *__beg;
249 
250        // Initial size of the global variable.
251        uptr __size;
252 
253        // Size of the global variable + size of the red zone.  This
254        //   size is 32-byte aligned.
255        uptr __size_with_redzone;
256 
257        // Name of the global variable.
258        const void *__name;
259 
260        // Name of the module where the global variable is declared.
261        const void *__module_name;
262 
263        // 1 if it has dynamic initialization, 0 otherwise.
264        uptr __has_dynamic_init;
265 
266        // A pointer to a struct that contains the source location; may be NULL.
267        __asan_global_source_location *__location;
268      }
269 
270    A destructor function that calls the runtime asan library function
271    __asan_unregister_globals is also installed.  */
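
/* As a conceptual C sketch only -- the compiler really builds GIMPLE
   for this, and the array name and count below are illustrative -- the
   constructor and destructor described above behave like:

     static struct __asan_global tu_globals[N];  // one entry per global

     static void __attribute__ ((constructor))
     asan_ctor (void)
     {
       __asan_init_vN ();
       __asan_register_globals (tu_globals, N);
     }

     static void __attribute__ ((destructor))
     asan_dtor (void)
     {
       __asan_unregister_globals (tu_globals, N);
     }  */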
272 
273 static unsigned HOST_WIDE_INT asan_shadow_offset_value;
274 static bool asan_shadow_offset_computed;
275 
276 /* Sets shadow offset to value in string VAL.  */
277 
278 bool
279 set_asan_shadow_offset (const char *val)
280 {
281   char *endp;
282 
283   errno = 0;
284 #ifdef HAVE_LONG_LONG
285   asan_shadow_offset_value = strtoull (val, &endp, 0);
286 #else
287   asan_shadow_offset_value = strtoul (val, &endp, 0);
288 #endif
289   if (!(*val != '\0' && *endp == '\0' && errno == 0))
290     return false;
291 
292   asan_shadow_offset_computed = true;
293 
294   return true;
295 }
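
/* For example, the -fasan-shadow-offset= option handler passes its
   argument here; a hypothetical direct call

     set_asan_shadow_offset ("0x7fff8000")

   succeeds because strtoull with base 0 accepts decimal, octal and
   hexadecimal spellings alike.  */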
296 
297 /* Returns Asan shadow offset.  */
298 
299 static unsigned HOST_WIDE_INT
300 asan_shadow_offset ()
301 {
302   if (!asan_shadow_offset_computed)
303     {
304       asan_shadow_offset_computed = true;
305       asan_shadow_offset_value = targetm.asan_shadow_offset ();
306     }
307   return asan_shadow_offset_value;
308 }
309 
310 alias_set_type asan_shadow_set = -1;
311 
312 /* Pointer types to 1- resp. 2-byte integers in shadow memory.  A separate
313    alias set is used for all shadow memory accesses.  */
314 static GTY(()) tree shadow_ptr_types[2];
315 
316 /* Decl for __asan_option_detect_stack_use_after_return.  */
317 static GTY(()) tree asan_detect_stack_use_after_return;
318 
319 /* Various flags for Asan builtins.  */
320 enum asan_check_flags
321 {
322   ASAN_CHECK_STORE = 1 << 0,
323   ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
324   ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
325   ASAN_CHECK_LAST = 1 << 3
326 };
327 
328 /* Hashtable support for memory references used by gimple
329    statements.  */
330 
331 /* This type represents a reference to a memory region.  */
332 struct asan_mem_ref
333 {
334   /* The expression of the beginning of the memory region.  */
335   tree start;
336 
337   /* The size of the access.  */
338   HOST_WIDE_INT access_size;
339 };
340 
341 static alloc_pool asan_mem_ref_alloc_pool;
342 
343 /* This creates the alloc pool used to store the instances of
344    asan_mem_ref that are stored in the hash table asan_mem_ref_ht.  */
345 
346 static alloc_pool
347 asan_mem_ref_get_alloc_pool ()
348 {
349   if (asan_mem_ref_alloc_pool == NULL)
350     asan_mem_ref_alloc_pool = create_alloc_pool ("asan_mem_ref",
351 						 sizeof (asan_mem_ref),
352 						 10);
353   return asan_mem_ref_alloc_pool;
355 }
356 
357 /* Initializes an instance of asan_mem_ref.  */
358 
359 static void
360 asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
361 {
362   ref->start = start;
363   ref->access_size = access_size;
364 }
365 
366 /* Allocates memory for an instance of asan_mem_ref into the memory
367    pool returned by asan_mem_ref_get_alloc_pool and initializes it.
368    START is the address of (or the expression pointing to) the
369    beginning of memory reference.  ACCESS_SIZE is the size of the
370    access to the referenced memory.  */
371 
372 static asan_mem_ref*
373 asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
374 {
375   asan_mem_ref *ref =
376     (asan_mem_ref *) pool_alloc (asan_mem_ref_get_alloc_pool ());
377 
378   asan_mem_ref_init (ref, start, access_size);
379   return ref;
380 }
381 
382 /* This builds and returns a pointer to the end of the memory region
383    that starts at START and has length LEN.  */
384 
385 tree
386 asan_mem_ref_get_end (tree start, tree len)
387 {
388   if (len == NULL_TREE || integer_zerop (len))
389     return start;
390 
391   if (!ptrofftype_p (len))
392     len = convert_to_ptrofftype (len);
393 
394   return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
395 }
396 
397 /*  Return a tree expression that represents the end of the referenced
398     memory region.  Beware that this function can actually build a new
399     tree expression.  */
400 
401 tree
402 asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
403 {
404   return asan_mem_ref_get_end (ref->start, len);
405 }
406 
407 struct asan_mem_ref_hasher
408   : typed_noop_remove <asan_mem_ref>
409 {
410   typedef asan_mem_ref value_type;
411   typedef asan_mem_ref compare_type;
412 
413   static inline hashval_t hash (const value_type *);
414   static inline bool equal (const value_type *, const compare_type *);
415 };
416 
417 /* Hash a memory reference.  */
418 
419 inline hashval_t
420 asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
421 {
422   return iterative_hash_expr (mem_ref->start, 0);
423 }
424 
425 /* Compare two memory references.  Two references are equal when their
426    START expressions are equal; the access sizes are not compared.  */
427 
428 inline bool
429 asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
430 			    const asan_mem_ref *m2)
431 {
432   return operand_equal_p (m1->start, m2->start, 0);
433 }
434 
435 static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;
436 
437 /* Returns a reference to the hash table containing memory references.
438    This function ensures that the hash table is created.  Note that
439    this hash table is updated by the function
440    update_mem_ref_hash_table.  */
441 
442 static hash_table<asan_mem_ref_hasher> *
443 get_mem_ref_hash_table ()
444 {
445   if (!asan_mem_ref_ht)
446     asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);
447 
448   return asan_mem_ref_ht;
449 }
450 
451 /* Clear all entries from the memory references hash table.  */
452 
453 static void
454 empty_mem_ref_hash_table ()
455 {
456   if (asan_mem_ref_ht)
457     asan_mem_ref_ht->empty ();
458 }
459 
460 /* Free the memory references hash table.  */
461 
462 static void
463 free_mem_ref_resources ()
464 {
465   delete asan_mem_ref_ht;
466   asan_mem_ref_ht = NULL;
467 
468   if (asan_mem_ref_alloc_pool)
469     {
470       free_alloc_pool (asan_mem_ref_alloc_pool);
471       asan_mem_ref_alloc_pool = NULL;
472     }
473 }
474 
475 /* Return true iff the memory reference REF has been instrumented.  */
476 
477 static bool
478 has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
479 {
480   asan_mem_ref r;
481   asan_mem_ref_init (&r, ref, access_size);
482 
483   asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
484   return saved_ref && saved_ref->access_size >= access_size;
485 }
486 
487 /* Return true iff the memory reference REF has been instrumented.  */
488 
489 static bool
490 has_mem_ref_been_instrumented (const asan_mem_ref *ref)
491 {
492   return has_mem_ref_been_instrumented (ref->start, ref->access_size);
493 }
494 
495 /* Return true iff access to memory region starting at REF and of
496    length LEN has been instrumented.  */
497 
498 static bool
499 has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
500 {
501   HOST_WIDE_INT size_in_bytes
502     = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
503 
504   return size_in_bytes != -1
505     && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
506 }
507 
508 /* Set REF to the memory reference present in a gimple assignment
509    ASSIGNMENT.  Return true upon successful completion, false
510    otherwise.  */
511 
512 static bool
513 get_mem_ref_of_assignment (const gassign *assignment,
514 			   asan_mem_ref *ref,
515 			   bool *ref_is_store)
516 {
517   gcc_assert (gimple_assign_single_p (assignment));
518 
519   if (gimple_store_p (assignment)
520       && !gimple_clobber_p (assignment))
521     {
522       ref->start = gimple_assign_lhs (assignment);
523       *ref_is_store = true;
524     }
525   else if (gimple_assign_load_p (assignment))
526     {
527       ref->start = gimple_assign_rhs1 (assignment);
528       *ref_is_store = false;
529     }
530   else
531     return false;
532 
533   ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
534   return true;
535 }
536 
537 /* Return the memory references contained in a gimple statement
538    representing a builtin call that has to do with memory access.  */
539 
540 static bool
541 get_mem_refs_of_builtin_call (const gcall *call,
542 			      asan_mem_ref *src0,
543 			      tree *src0_len,
544 			      bool *src0_is_store,
545 			      asan_mem_ref *src1,
546 			      tree *src1_len,
547 			      bool *src1_is_store,
548 			      asan_mem_ref *dst,
549 			      tree *dst_len,
550 			      bool *dst_is_store,
551 			      bool *dest_is_deref,
552 			      bool *intercepted_p)
553 {
554   gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
555 
556   tree callee = gimple_call_fndecl (call);
557   tree source0 = NULL_TREE, source1 = NULL_TREE,
558     dest = NULL_TREE, len = NULL_TREE;
559   bool is_store = true, got_reference_p = false;
560   HOST_WIDE_INT access_size = 1;
561 
562   *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));
563 
564   switch (DECL_FUNCTION_CODE (callee))
565     {
566       /* (s, s, n) style memops.  */
567     case BUILT_IN_BCMP:
568     case BUILT_IN_MEMCMP:
569       source0 = gimple_call_arg (call, 0);
570       source1 = gimple_call_arg (call, 1);
571       len = gimple_call_arg (call, 2);
572       break;
573 
574       /* (src, dest, n) style memops.  */
575     case BUILT_IN_BCOPY:
576       source0 = gimple_call_arg (call, 0);
577       dest = gimple_call_arg (call, 1);
578       len = gimple_call_arg (call, 2);
579       break;
580 
581       /* (dest, src, n) style memops.  */
582     case BUILT_IN_MEMCPY:
583     case BUILT_IN_MEMCPY_CHK:
584     case BUILT_IN_MEMMOVE:
585     case BUILT_IN_MEMMOVE_CHK:
586     case BUILT_IN_MEMPCPY:
587     case BUILT_IN_MEMPCPY_CHK:
588       dest = gimple_call_arg (call, 0);
589       source0 = gimple_call_arg (call, 1);
590       len = gimple_call_arg (call, 2);
591       break;
592 
593       /* (dest, n) style memops.  */
594     case BUILT_IN_BZERO:
595       dest = gimple_call_arg (call, 0);
596       len = gimple_call_arg (call, 1);
597       break;
598 
599       /* (dest, x, n) style memops.  */
600     case BUILT_IN_MEMSET:
601     case BUILT_IN_MEMSET_CHK:
602       dest = gimple_call_arg (call, 0);
603       len = gimple_call_arg (call, 2);
604       break;
605 
606     case BUILT_IN_STRLEN:
607       source0 = gimple_call_arg (call, 0);
608       len = gimple_call_lhs (call);
609       break;
610 
611     /* And now the __atomic* and __sync builtins.
612        These are handled differently from the classical memory
613        access builtins above.  */
614 
615     case BUILT_IN_ATOMIC_LOAD_1:
616       is_store = false;
617       /* FALLTHRU */
618     case BUILT_IN_SYNC_FETCH_AND_ADD_1:
619     case BUILT_IN_SYNC_FETCH_AND_SUB_1:
620     case BUILT_IN_SYNC_FETCH_AND_OR_1:
621     case BUILT_IN_SYNC_FETCH_AND_AND_1:
622     case BUILT_IN_SYNC_FETCH_AND_XOR_1:
623     case BUILT_IN_SYNC_FETCH_AND_NAND_1:
624     case BUILT_IN_SYNC_ADD_AND_FETCH_1:
625     case BUILT_IN_SYNC_SUB_AND_FETCH_1:
626     case BUILT_IN_SYNC_OR_AND_FETCH_1:
627     case BUILT_IN_SYNC_AND_AND_FETCH_1:
628     case BUILT_IN_SYNC_XOR_AND_FETCH_1:
629     case BUILT_IN_SYNC_NAND_AND_FETCH_1:
630     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
631     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
632     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
633     case BUILT_IN_SYNC_LOCK_RELEASE_1:
634     case BUILT_IN_ATOMIC_EXCHANGE_1:
635     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
636     case BUILT_IN_ATOMIC_STORE_1:
637     case BUILT_IN_ATOMIC_ADD_FETCH_1:
638     case BUILT_IN_ATOMIC_SUB_FETCH_1:
639     case BUILT_IN_ATOMIC_AND_FETCH_1:
640     case BUILT_IN_ATOMIC_NAND_FETCH_1:
641     case BUILT_IN_ATOMIC_XOR_FETCH_1:
642     case BUILT_IN_ATOMIC_OR_FETCH_1:
643     case BUILT_IN_ATOMIC_FETCH_ADD_1:
644     case BUILT_IN_ATOMIC_FETCH_SUB_1:
645     case BUILT_IN_ATOMIC_FETCH_AND_1:
646     case BUILT_IN_ATOMIC_FETCH_NAND_1:
647     case BUILT_IN_ATOMIC_FETCH_XOR_1:
648     case BUILT_IN_ATOMIC_FETCH_OR_1:
649       access_size = 1;
650       goto do_atomic;
651 
652     case BUILT_IN_ATOMIC_LOAD_2:
653       is_store = false;
654       /* FALLTHRU */
655     case BUILT_IN_SYNC_FETCH_AND_ADD_2:
656     case BUILT_IN_SYNC_FETCH_AND_SUB_2:
657     case BUILT_IN_SYNC_FETCH_AND_OR_2:
658     case BUILT_IN_SYNC_FETCH_AND_AND_2:
659     case BUILT_IN_SYNC_FETCH_AND_XOR_2:
660     case BUILT_IN_SYNC_FETCH_AND_NAND_2:
661     case BUILT_IN_SYNC_ADD_AND_FETCH_2:
662     case BUILT_IN_SYNC_SUB_AND_FETCH_2:
663     case BUILT_IN_SYNC_OR_AND_FETCH_2:
664     case BUILT_IN_SYNC_AND_AND_FETCH_2:
665     case BUILT_IN_SYNC_XOR_AND_FETCH_2:
666     case BUILT_IN_SYNC_NAND_AND_FETCH_2:
667     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
668     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
669     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
670     case BUILT_IN_SYNC_LOCK_RELEASE_2:
671     case BUILT_IN_ATOMIC_EXCHANGE_2:
672     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
673     case BUILT_IN_ATOMIC_STORE_2:
674     case BUILT_IN_ATOMIC_ADD_FETCH_2:
675     case BUILT_IN_ATOMIC_SUB_FETCH_2:
676     case BUILT_IN_ATOMIC_AND_FETCH_2:
677     case BUILT_IN_ATOMIC_NAND_FETCH_2:
678     case BUILT_IN_ATOMIC_XOR_FETCH_2:
679     case BUILT_IN_ATOMIC_OR_FETCH_2:
680     case BUILT_IN_ATOMIC_FETCH_ADD_2:
681     case BUILT_IN_ATOMIC_FETCH_SUB_2:
682     case BUILT_IN_ATOMIC_FETCH_AND_2:
683     case BUILT_IN_ATOMIC_FETCH_NAND_2:
684     case BUILT_IN_ATOMIC_FETCH_XOR_2:
685     case BUILT_IN_ATOMIC_FETCH_OR_2:
686       access_size = 2;
687       goto do_atomic;
688 
689     case BUILT_IN_ATOMIC_LOAD_4:
690       is_store = false;
691       /* FALLTHRU */
692     case BUILT_IN_SYNC_FETCH_AND_ADD_4:
693     case BUILT_IN_SYNC_FETCH_AND_SUB_4:
694     case BUILT_IN_SYNC_FETCH_AND_OR_4:
695     case BUILT_IN_SYNC_FETCH_AND_AND_4:
696     case BUILT_IN_SYNC_FETCH_AND_XOR_4:
697     case BUILT_IN_SYNC_FETCH_AND_NAND_4:
698     case BUILT_IN_SYNC_ADD_AND_FETCH_4:
699     case BUILT_IN_SYNC_SUB_AND_FETCH_4:
700     case BUILT_IN_SYNC_OR_AND_FETCH_4:
701     case BUILT_IN_SYNC_AND_AND_FETCH_4:
702     case BUILT_IN_SYNC_XOR_AND_FETCH_4:
703     case BUILT_IN_SYNC_NAND_AND_FETCH_4:
704     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
705     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
706     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
707     case BUILT_IN_SYNC_LOCK_RELEASE_4:
708     case BUILT_IN_ATOMIC_EXCHANGE_4:
709     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
710     case BUILT_IN_ATOMIC_STORE_4:
711     case BUILT_IN_ATOMIC_ADD_FETCH_4:
712     case BUILT_IN_ATOMIC_SUB_FETCH_4:
713     case BUILT_IN_ATOMIC_AND_FETCH_4:
714     case BUILT_IN_ATOMIC_NAND_FETCH_4:
715     case BUILT_IN_ATOMIC_XOR_FETCH_4:
716     case BUILT_IN_ATOMIC_OR_FETCH_4:
717     case BUILT_IN_ATOMIC_FETCH_ADD_4:
718     case BUILT_IN_ATOMIC_FETCH_SUB_4:
719     case BUILT_IN_ATOMIC_FETCH_AND_4:
720     case BUILT_IN_ATOMIC_FETCH_NAND_4:
721     case BUILT_IN_ATOMIC_FETCH_XOR_4:
722     case BUILT_IN_ATOMIC_FETCH_OR_4:
723       access_size = 4;
724       goto do_atomic;
725 
726     case BUILT_IN_ATOMIC_LOAD_8:
727       is_store = false;
728       /* FALLTHRU */
729     case BUILT_IN_SYNC_FETCH_AND_ADD_8:
730     case BUILT_IN_SYNC_FETCH_AND_SUB_8:
731     case BUILT_IN_SYNC_FETCH_AND_OR_8:
732     case BUILT_IN_SYNC_FETCH_AND_AND_8:
733     case BUILT_IN_SYNC_FETCH_AND_XOR_8:
734     case BUILT_IN_SYNC_FETCH_AND_NAND_8:
735     case BUILT_IN_SYNC_ADD_AND_FETCH_8:
736     case BUILT_IN_SYNC_SUB_AND_FETCH_8:
737     case BUILT_IN_SYNC_OR_AND_FETCH_8:
738     case BUILT_IN_SYNC_AND_AND_FETCH_8:
739     case BUILT_IN_SYNC_XOR_AND_FETCH_8:
740     case BUILT_IN_SYNC_NAND_AND_FETCH_8:
741     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
742     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
743     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
744     case BUILT_IN_SYNC_LOCK_RELEASE_8:
745     case BUILT_IN_ATOMIC_EXCHANGE_8:
746     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
747     case BUILT_IN_ATOMIC_STORE_8:
748     case BUILT_IN_ATOMIC_ADD_FETCH_8:
749     case BUILT_IN_ATOMIC_SUB_FETCH_8:
750     case BUILT_IN_ATOMIC_AND_FETCH_8:
751     case BUILT_IN_ATOMIC_NAND_FETCH_8:
752     case BUILT_IN_ATOMIC_XOR_FETCH_8:
753     case BUILT_IN_ATOMIC_OR_FETCH_8:
754     case BUILT_IN_ATOMIC_FETCH_ADD_8:
755     case BUILT_IN_ATOMIC_FETCH_SUB_8:
756     case BUILT_IN_ATOMIC_FETCH_AND_8:
757     case BUILT_IN_ATOMIC_FETCH_NAND_8:
758     case BUILT_IN_ATOMIC_FETCH_XOR_8:
759     case BUILT_IN_ATOMIC_FETCH_OR_8:
760       access_size = 8;
761       goto do_atomic;
762 
763     case BUILT_IN_ATOMIC_LOAD_16:
764       is_store = false;
765       /* FALLTHRU */
766     case BUILT_IN_SYNC_FETCH_AND_ADD_16:
767     case BUILT_IN_SYNC_FETCH_AND_SUB_16:
768     case BUILT_IN_SYNC_FETCH_AND_OR_16:
769     case BUILT_IN_SYNC_FETCH_AND_AND_16:
770     case BUILT_IN_SYNC_FETCH_AND_XOR_16:
771     case BUILT_IN_SYNC_FETCH_AND_NAND_16:
772     case BUILT_IN_SYNC_ADD_AND_FETCH_16:
773     case BUILT_IN_SYNC_SUB_AND_FETCH_16:
774     case BUILT_IN_SYNC_OR_AND_FETCH_16:
775     case BUILT_IN_SYNC_AND_AND_FETCH_16:
776     case BUILT_IN_SYNC_XOR_AND_FETCH_16:
777     case BUILT_IN_SYNC_NAND_AND_FETCH_16:
778     case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
779     case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
780     case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
781     case BUILT_IN_SYNC_LOCK_RELEASE_16:
782     case BUILT_IN_ATOMIC_EXCHANGE_16:
783     case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
784     case BUILT_IN_ATOMIC_STORE_16:
785     case BUILT_IN_ATOMIC_ADD_FETCH_16:
786     case BUILT_IN_ATOMIC_SUB_FETCH_16:
787     case BUILT_IN_ATOMIC_AND_FETCH_16:
788     case BUILT_IN_ATOMIC_NAND_FETCH_16:
789     case BUILT_IN_ATOMIC_XOR_FETCH_16:
790     case BUILT_IN_ATOMIC_OR_FETCH_16:
791     case BUILT_IN_ATOMIC_FETCH_ADD_16:
792     case BUILT_IN_ATOMIC_FETCH_SUB_16:
793     case BUILT_IN_ATOMIC_FETCH_AND_16:
794     case BUILT_IN_ATOMIC_FETCH_NAND_16:
795     case BUILT_IN_ATOMIC_FETCH_XOR_16:
796     case BUILT_IN_ATOMIC_FETCH_OR_16:
797       access_size = 16;
798       /* FALLTHRU */
799     do_atomic:
800       {
801 	dest = gimple_call_arg (call, 0);
802 	/* DEST represents the address of a memory location.
803 	   instrument_derefs wants the memory location, so let's
804 	   dereference the address DEST before handing it to
805 	   instrument_derefs.  */
806 	tree type = build_nonstandard_integer_type (access_size
807 						    * BITS_PER_UNIT, 1);
808 	dest = build2 (MEM_REF, type, dest,
809 		       build_int_cst (build_pointer_type (char_type_node), 0));
810 	break;
811       }
812 
813     default:
814       /* The other memory access builtins are not instrumented in this
815 	 function because they either don't have any length parameter,
816 	 or their length parameter is just a limit.  */
817       break;
818     }
819 
820   if (len != NULL_TREE)
821     {
822       if (source0 != NULL_TREE)
823 	{
824 	  src0->start = source0;
825 	  src0->access_size = access_size;
826 	  *src0_len = len;
827 	  *src0_is_store = false;
828 	}
829 
830       if (source1 != NULL_TREE)
831 	{
832 	  src1->start = source1;
833 	  src1->access_size = access_size;
834 	  *src1_len = len;
835 	  *src1_is_store = false;
836 	}
837 
838       if (dest != NULL_TREE)
839 	{
840 	  dst->start = dest;
841 	  dst->access_size = access_size;
842 	  *dst_len = len;
843 	  *dst_is_store = true;
844 	}
845 
846       got_reference_p = true;
847     }
848   else if (dest)
849     {
850       dst->start = dest;
851       dst->access_size = access_size;
852       *dst_len = NULL_TREE;
853       *dst_is_store = is_store;
854       *dest_is_deref = true;
855       got_reference_p = true;
856     }
857 
858   return got_reference_p;
859 }
860 
861 /* Return true iff a given gimple statement has been instrumented.
862    Note that the statement is "defined" by the memory references it
863    contains.  */
864 
865 static bool
866 has_stmt_been_instrumented_p (gimple stmt)
867 {
868   if (gimple_assign_single_p (stmt))
869     {
870       bool r_is_store;
871       asan_mem_ref r;
872       asan_mem_ref_init (&r, NULL, 1);
873 
874       if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
875 				     &r_is_store))
876 	return has_mem_ref_been_instrumented (&r);
877     }
878   else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
879     {
880       asan_mem_ref src0, src1, dest;
881       asan_mem_ref_init (&src0, NULL, 1);
882       asan_mem_ref_init (&src1, NULL, 1);
883       asan_mem_ref_init (&dest, NULL, 1);
884 
885       tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
886       bool src0_is_store = false, src1_is_store = false,
887 	dest_is_store = false, dest_is_deref = false, intercepted_p = true;
888       if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
889 					&src0, &src0_len, &src0_is_store,
890 					&src1, &src1_len, &src1_is_store,
891 					&dest, &dest_len, &dest_is_store,
892 					&dest_is_deref, &intercepted_p))
893 	{
894 	  if (src0.start != NULL_TREE
895 	      && !has_mem_ref_been_instrumented (&src0, src0_len))
896 	    return false;
897 
898 	  if (src1.start != NULL_TREE
899 	      && !has_mem_ref_been_instrumented (&src1, src1_len))
900 	    return false;
901 
902 	  if (dest.start != NULL_TREE
903 	      && !has_mem_ref_been_instrumented (&dest, dest_len))
904 	    return false;
905 
906 	  return true;
907 	}
908     }
909   return false;
910 }
911 
912 /*  Insert a memory reference into the hash table.  */
913 
914 static void
915 update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
916 {
917   hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();
918 
919   asan_mem_ref r;
920   asan_mem_ref_init (&r, ref, access_size);
921 
922   asan_mem_ref **slot = ht->find_slot (&r, INSERT);
923   if (*slot == NULL || (*slot)->access_size < access_size)
924     *slot = asan_mem_ref_new (ref, access_size);
925 }
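
/* For example (sketch; REF is some memory reference tree), when the
   pass sees two back-to-back accesses to the same location:

     update_mem_ref_hash_table (ref, 4);
     ...
     if (has_mem_ref_been_instrumented (ref, 4))  // true: skip the check
       ...

   the recorded access size (4) already covers the second access, so no
   second run-time check is emitted for it.  */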
926 
927 /* Initialize shadow_ptr_types array.  */
928 
929 static void
930 asan_init_shadow_ptr_types (void)
931 {
932   asan_shadow_set = new_alias_set ();
933   shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
934   TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
935   shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
936   shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
937   TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
938   shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
939   initialize_sanitizer_builtins ();
940 }
941 
942 /* Create ADDR_EXPR of STRING_CST with the PP pretty printer text.  */
943 
944 static tree
945 asan_pp_string (pretty_printer *pp)
946 {
947   const char *buf = pp_formatted_text (pp);
948   size_t len = strlen (buf);
949   tree ret = build_string (len + 1, buf);
950   TREE_TYPE (ret)
951     = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
952 			build_index_type (size_int (len)));
953   TREE_READONLY (ret) = 1;
954   TREE_STATIC (ret) = 1;
955   return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
956 }
957 
958 /* Return a CONST_INT representing 4 consecutive shadow memory bytes.  */
959 
960 static rtx
961 asan_shadow_cst (unsigned char shadow_bytes[4])
962 {
963   int i;
964   unsigned HOST_WIDE_INT val = 0;
965   gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
966   for (i = 0; i < 4; i++)
967     val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
968 	   << (BITS_PER_UNIT * i);
969   return gen_int_mode (val, SImode);
970 }
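
/* E.g. on a little-endian target the pattern from the file-header
   example, shadow_bytes = { 0x00, 0x00, 0x00, 0xF4 }, yields the
   SImode constant 0xF4000000: byte I lands at bit position
   BITS_PER_UNIT * I.  */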
971 
972 /* Clear shadow memory at SHADOW_MEM, LEN bytes.  We can't emit a library
973    call here, though.  */
974 
975 static void
976 asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
977 {
978   rtx_insn *insn, *insns, *jump;
979   rtx_code_label *top_label;
980   rtx end, addr, tmp;
981 
982   start_sequence ();
983   clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
984   insns = get_insns ();
985   end_sequence ();
986   for (insn = insns; insn; insn = NEXT_INSN (insn))
987     if (CALL_P (insn))
988       break;
989   if (insn == NULL_RTX)
990     {
991       emit_insn (insns);
992       return;
993     }
994 
995   gcc_assert ((len & 3) == 0);
996   top_label = gen_label_rtx ();
997   addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
998   shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
999   end = force_reg (Pmode, plus_constant (Pmode, addr, len));
1000   emit_label (top_label);
1001 
1002   emit_move_insn (shadow_mem, const0_rtx);
1003   tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
1004 			     true, OPTAB_LIB_WIDEN);
1005   if (tmp != addr)
1006     emit_move_insn (addr, tmp);
1007   emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
1008   jump = get_last_insn ();
1009   gcc_assert (JUMP_P (jump));
1010   add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
1011 }
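
/* When clear_storage above would have emitted a library call, the loop
   emitted instead corresponds to this C sketch (LEN is a multiple of 4;
   shadow memory is cleared one SImode word at a time):

     uint32_t *p = (uint32_t *) addr, *end = (uint32_t *) (addr + len);
     do
       *p++ = 0;
     while (p < end);  */

/* Emit the LASANPC internal label at the start of the current function;
   asan_emit_stack_protection below stores this label's address in the
   stack frame so that the run-time library can report the function's
   PC.  */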
1012 
1013 void
1014 asan_function_start (void)
1015 {
1016   section *fnsec = function_section (current_function_decl);
1017   switch_to_section (fnsec);
1018   ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
1019 			 current_function_funcdef_no);
1020 }
1021 
1022 /* Insert code to protect stack vars.  The prologue sequence should be emitted
1023    directly, the epilogue sequence returned.  BASE is the register holding the
1024    stack base, relative to which the OFFSETS array offsets are expressed.
1025    The OFFSETS array contains pairs of offsets in reverse order: always the
1026    end offset of some gap that needs protection followed by its starting
1027    offset, and DECLS is an array of representative decls for each var partition.
1028    LENGTH is the length of the OFFSETS array; the DECLS array is LENGTH / 2 - 1
1029    elements long (OFFSETS includes the gap before the first variable as well
1030    as the gaps after each stack variable).  PBASE is, if non-NULL, some pseudo
1031    register on which the stack vars' DECL_RTLs are based.  Either BASE should
1032    be assigned to PBASE (when not doing use-after-return protection), or the
1033    corresponding address based on the __asan_stack_malloc* return value.  */
1034 
1035 rtx_insn *
1036 asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
1037 			    HOST_WIDE_INT *offsets, tree *decls, int length)
1038 {
1039   rtx shadow_base, shadow_mem, ret, mem, orig_base;
1040   rtx_code_label *lab;
1041   rtx_insn *insns;
1042   char buf[30];
1043   unsigned char shadow_bytes[4];
1044   HOST_WIDE_INT base_offset = offsets[length - 1];
1045   HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
1046   HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
1047   HOST_WIDE_INT last_offset, last_size;
1048   int l;
1049   unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
1050   tree str_cst, decl, id;
1051   int use_after_return_class = -1;
1052 
1053   if (shadow_ptr_types[0] == NULL_TREE)
1054     asan_init_shadow_ptr_types ();
1055 
1056   /* First of all, prepare the description string.  */
1057   pretty_printer asan_pp;
1058 
1059   pp_decimal_int (&asan_pp, length / 2 - 1);
1060   pp_space (&asan_pp);
1061   for (l = length - 2; l; l -= 2)
1062     {
1063       tree decl = decls[l / 2 - 1];
1064       pp_wide_integer (&asan_pp, offsets[l] - base_offset);
1065       pp_space (&asan_pp);
1066       pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
1067       pp_space (&asan_pp);
1068       if (DECL_P (decl) && DECL_NAME (decl))
1069 	{
1070 	  pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
1071 	  pp_space (&asan_pp);
1072 	  pp_tree_identifier (&asan_pp, DECL_NAME (decl));
1073 	}
1074       else
1075 	pp_string (&asan_pp, "9 <unknown>");
1076       pp_space (&asan_pp);
1077     }
1078   str_cst = asan_pp_string (&asan_pp);
1079 
1080   /* Emit the prologue sequence.  */
1081   if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
1082       && ASAN_USE_AFTER_RETURN)
1083     {
1084       use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
1085       /* __asan_stack_malloc_N guarantees alignment
1086 	 N < 6 ? (64 << N) : 4096 bytes.  */
1087       if (alignb > (use_after_return_class < 6
1088 		    ? (64U << use_after_return_class) : 4096U))
1089 	use_after_return_class = -1;
1090       else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
1091 	base_align_bias = ((asan_frame_size + alignb - 1)
1092 			   & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
1093     }
1094   /* Align base if target is STRICT_ALIGNMENT.  */
1095   if (STRICT_ALIGNMENT)
1096     base = expand_binop (Pmode, and_optab, base,
1097 			 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
1098 					  << ASAN_SHADOW_SHIFT)
1099 					 / BITS_PER_UNIT), Pmode), NULL_RTX,
1100 			 1, OPTAB_DIRECT);
1101 
1102   if (use_after_return_class == -1 && pbase)
1103     emit_move_insn (pbase, base);
1104 
1105   base = expand_binop (Pmode, add_optab, base,
1106 		       gen_int_mode (base_offset - base_align_bias, Pmode),
1107 		       NULL_RTX, 1, OPTAB_DIRECT);
1108   orig_base = NULL_RTX;
1109   if (use_after_return_class != -1)
1110     {
1111       if (asan_detect_stack_use_after_return == NULL_TREE)
1112 	{
1113 	  id = get_identifier ("__asan_option_detect_stack_use_after_return");
1114 	  decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
1115 			     integer_type_node);
1116 	  SET_DECL_ASSEMBLER_NAME (decl, id);
1117 	  TREE_ADDRESSABLE (decl) = 1;
1118 	  DECL_ARTIFICIAL (decl) = 1;
1119 	  DECL_IGNORED_P (decl) = 1;
1120 	  DECL_EXTERNAL (decl) = 1;
1121 	  TREE_STATIC (decl) = 1;
1122 	  TREE_PUBLIC (decl) = 1;
1123 	  TREE_USED (decl) = 1;
1124 	  asan_detect_stack_use_after_return = decl;
1125 	}
1126       orig_base = gen_reg_rtx (Pmode);
1127       emit_move_insn (orig_base, base);
1128       ret = expand_normal (asan_detect_stack_use_after_return);
1129       lab = gen_label_rtx ();
1130       int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1131       emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
1132 			       VOIDmode, 0, lab, very_likely);
1133       snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
1134 		use_after_return_class);
1135       ret = init_one_libfunc (buf);
1136       rtx addr = convert_memory_address (ptr_mode, base);
1137       ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2,
1138 				     GEN_INT (asan_frame_size
1139 					      + base_align_bias),
1140 				     TYPE_MODE (pointer_sized_int_node),
1141 				     addr, ptr_mode);
1142       ret = convert_memory_address (Pmode, ret);
1143       emit_move_insn (base, ret);
1144       emit_label (lab);
1145       emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
1146 					   gen_int_mode (base_align_bias
1147 							 - base_offset, Pmode),
1148 					   NULL_RTX, 1, OPTAB_DIRECT));
1149     }
1150   mem = gen_rtx_MEM (ptr_mode, base);
1151   mem = adjust_address (mem, VOIDmode, base_align_bias);
1152   emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
1153   mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1154   emit_move_insn (mem, expand_normal (str_cst));
1155   mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
1156   ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
1157   id = get_identifier (buf);
1158   decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1159 		    VAR_DECL, id, char_type_node);
1160   SET_DECL_ASSEMBLER_NAME (decl, id);
1161   TREE_ADDRESSABLE (decl) = 1;
1162   TREE_READONLY (decl) = 1;
1163   DECL_ARTIFICIAL (decl) = 1;
1164   DECL_IGNORED_P (decl) = 1;
1165   TREE_STATIC (decl) = 1;
1166   TREE_PUBLIC (decl) = 0;
1167   TREE_USED (decl) = 1;
1168   DECL_INITIAL (decl) = decl;
1169   TREE_ASM_WRITTEN (decl) = 1;
1170   TREE_ASM_WRITTEN (id) = 1;
1171   emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
1172   shadow_base = expand_binop (Pmode, lshr_optab, base,
1173 			      GEN_INT (ASAN_SHADOW_SHIFT),
1174 			      NULL_RTX, 1, OPTAB_DIRECT);
1175   shadow_base
1176     = plus_constant (Pmode, shadow_base,
1177 		     asan_shadow_offset ()
1178 		     + (base_align_bias >> ASAN_SHADOW_SHIFT));
1179   gcc_assert (asan_shadow_set != -1
1180 	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
1181   shadow_mem = gen_rtx_MEM (SImode, shadow_base);
1182   set_mem_alias_set (shadow_mem, asan_shadow_set);
1183   if (STRICT_ALIGNMENT)
1184     set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1185   prev_offset = base_offset;
1186   for (l = length; l; l -= 2)
1187     {
1188       if (l == 2)
1189 	cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
1190       offset = offsets[l - 1];
1191       if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
1192 	{
1193 	  int i;
1194 	  HOST_WIDE_INT aoff
1195 	    = base_offset + ((offset - base_offset)
1196 			     & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1197 	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
1198 				       (aoff - prev_offset)
1199 				       >> ASAN_SHADOW_SHIFT);
1200 	  prev_offset = aoff;
1201 	  for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
1202 	    if (aoff < offset)
1203 	      {
1204 		if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
1205 		  shadow_bytes[i] = 0;
1206 		else
1207 		  shadow_bytes[i] = offset - aoff;
1208 	      }
1209 	    else
1210 	      shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
1211 	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1212 	  offset = aoff;
1213 	}
1214       while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
1215 	{
1216 	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
1217 				       (offset - prev_offset)
1218 				       >> ASAN_SHADOW_SHIFT);
1219 	  prev_offset = offset;
1220 	  memset (shadow_bytes, cur_shadow_byte, 4);
1221 	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
1222 	  offset += ASAN_RED_ZONE_SIZE;
1223 	}
1224       cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
1225     }
1226   do_pending_stack_adjust ();
1227 
1228   /* Construct epilogue sequence.  */
1229   start_sequence ();
1230 
1231   lab = NULL;
1232   if (use_after_return_class != -1)
1233     {
1234       rtx_code_label *lab2 = gen_label_rtx ();
1235       char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
1236       int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
1237       emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
1238 			       VOIDmode, 0, lab2, very_likely);
1239       shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1240       set_mem_alias_set (shadow_mem, asan_shadow_set);
1241       mem = gen_rtx_MEM (ptr_mode, base);
1242       mem = adjust_address (mem, VOIDmode, base_align_bias);
1243       emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
1244       unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
1245       if (use_after_return_class < 5
1246 	  && can_store_by_pieces (sz, builtin_memset_read_str, &c,
1247 				  BITS_PER_UNIT, true))
1248 	store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
1249 			 BITS_PER_UNIT, true, 0);
1250       else if (use_after_return_class >= 5
1251 	       || !set_storage_via_setmem (shadow_mem,
1252 					   GEN_INT (sz),
1253 					   gen_int_mode (c, QImode),
1254 					   BITS_PER_UNIT, BITS_PER_UNIT,
1255 					   -1, sz, sz, sz))
1256 	{
1257 	  snprintf (buf, sizeof buf, "__asan_stack_free_%d",
1258 		    use_after_return_class);
1259 	  ret = init_one_libfunc (buf);
1260 	  rtx addr = convert_memory_address (ptr_mode, base);
1261 	  rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
1262 	  emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
1263 			     GEN_INT (asan_frame_size + base_align_bias),
1264 			     TYPE_MODE (pointer_sized_int_node),
1265 			     orig_addr, ptr_mode);
1266 	}
1267       lab = gen_label_rtx ();
1268       emit_jump (lab);
1269       emit_label (lab2);
1270     }
1271 
1272   shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
1273   set_mem_alias_set (shadow_mem, asan_shadow_set);
1274 
1275   if (STRICT_ALIGNMENT)
1276     set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
1277 
1278   prev_offset = base_offset;
1279   last_offset = base_offset;
1280   last_size = 0;
1281   for (l = length; l; l -= 2)
1282     {
1283       offset = base_offset + ((offsets[l - 1] - base_offset)
1284 			     & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
1285       if (last_offset + last_size != offset)
1286 	{
1287 	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
1288 				       (last_offset - prev_offset)
1289 				       >> ASAN_SHADOW_SHIFT);
1290 	  prev_offset = last_offset;
1291 	  asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1292 	  last_offset = offset;
1293 	  last_size = 0;
1294 	}
1295       last_size += base_offset + ((offsets[l - 2] - base_offset)
1296 				  & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
1297 		   - offset;
1298     }
1299   if (last_size)
1300     {
1301       shadow_mem = adjust_address (shadow_mem, VOIDmode,
1302 				   (last_offset - prev_offset)
1303 				   >> ASAN_SHADOW_SHIFT);
1304       asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
1305     }
1306 
1307   do_pending_stack_adjust ();
1308   if (lab)
1309     emit_label (lab);
1310 
1311   insns = get_insns ();
1312   end_sequence ();
1313   return insns;
1314 }
1315 
1316 /* Return true if DECL, a global var, might be overridden and therefore
1317    needs a local alias.  */
1318 
1319 static bool
1320 asan_needs_local_alias (tree decl)
1321 {
1322   return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
1323 }
1324 
1325 /* Return true if DECL is a VAR_DECL that should be protected
1326    by Address Sanitizer, by appending a red zone with protected
1327    shadow memory after it and aligning it to at least
1328    ASAN_RED_ZONE_SIZE bytes.  */
1329 
1330 bool
1331 asan_protect_global (tree decl)
1332 {
1333   if (!ASAN_GLOBALS)
1334     return false;
1335 
1336   rtx rtl, symbol;
1337 
1338   if (TREE_CODE (decl) == STRING_CST)
1339     {
1340       /* Instrument all STRING_CSTs except those created
1341 	 by asan_pp_string here.  */
1342       if (shadow_ptr_types[0] != NULL_TREE
1343 	  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1344 	  && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
1345 	return false;
1346       return true;
1347     }
1348   if (TREE_CODE (decl) != VAR_DECL
1349       /* TLS vars aren't statically protectable.  */
1350       || DECL_THREAD_LOCAL_P (decl)
1351       /* Externs will be protected elsewhere.  */
1352       || DECL_EXTERNAL (decl)
1353       || !DECL_RTL_SET_P (decl)
1354       /* Comdat vars pose an ABI problem: we can't know whether
1355 	 the var that is selected by the linker will have
1356 	 padding or not.  */
1357       || DECL_ONE_ONLY (decl)
1358       /* Similarly for common vars.  People can use -fno-common.
1359 	 Note: Linux kernel is built with -fno-common, so we do instrument
1360 	 globals there even if it is C.  */
1361       || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
1362       /* Don't protect vars placed in a user-specified section: vars
1363 	 placed into such a section from multiple TUs are often assumed
1364 	 to form an array of such vars, and putting padding in there
1365 	 breaks this assumption.  */
1366       || (DECL_SECTION_NAME (decl) != NULL
1367 	  && !symtab_node::get (decl)->implicit_section)
1368       || DECL_SIZE (decl) == 0
1369       || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
1370       || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
1371       || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
1372       || TREE_TYPE (decl) == ubsan_get_source_location_type ())
1373     return false;
1374 
1375   rtl = DECL_RTL (decl);
1376   if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
1377     return false;
1378   symbol = XEXP (rtl, 0);
1379 
1380   if (CONSTANT_POOL_ADDRESS_P (symbol)
1381       || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
1382     return false;
1383 
1384   if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
1385     return false;
1386 
1387 #ifndef ASM_OUTPUT_DEF
1388   if (asan_needs_local_alias (decl))
1389     return false;
1390 #endif
1391 
1392   return true;
1393 }
1394 
1395 /* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
1396    IS_STORE is either 1 (for a store) or 0 (for a load).  */
1397 
1398 static tree
1399 report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1400 		   int *nargs)
1401 {
1402   static enum built_in_function report[2][2][6]
1403     = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
1404 	    BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
1405 	    BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
1406 	  { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
1407 	    BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
1408 	    BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
1409 	{ { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
1410 	    BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
1411 	    BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
1412 	    BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
1413 	    BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
1414 	    BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
1415 	  { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
1416 	    BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
1417 	    BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
1418 	    BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
1419 	    BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
1420 	    BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
1421   if (size_in_bytes == -1)
1422     {
1423       *nargs = 2;
1424       return builtin_decl_implicit (report[recover_p][is_store][5]);
1425     }
1426   *nargs = 1;
1427   int size_log2 = exact_log2 (size_in_bytes);
1428   return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
1429 }
1430 
1431 /* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
1432    IS_STORE is either 1 (for a store) or 0 (for a load).  */
1433 
1434 static tree
1435 check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
1436 	    int *nargs)
1437 {
1438   static enum built_in_function check[2][2][6]
1439     = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
1440 	    BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
1441 	    BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
1442 	  { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
1443 	    BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
1444 	    BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
1445 	{ { BUILT_IN_ASAN_LOAD1_NOABORT,
1446 	    BUILT_IN_ASAN_LOAD2_NOABORT,
1447 	    BUILT_IN_ASAN_LOAD4_NOABORT,
1448 	    BUILT_IN_ASAN_LOAD8_NOABORT,
1449 	    BUILT_IN_ASAN_LOAD16_NOABORT,
1450 	    BUILT_IN_ASAN_LOADN_NOABORT },
1451 	  { BUILT_IN_ASAN_STORE1_NOABORT,
1452 	    BUILT_IN_ASAN_STORE2_NOABORT,
1453 	    BUILT_IN_ASAN_STORE4_NOABORT,
1454 	    BUILT_IN_ASAN_STORE8_NOABORT,
1455 	    BUILT_IN_ASAN_STORE16_NOABORT,
1456 	    BUILT_IN_ASAN_STOREN_NOABORT } } };
1457   if (size_in_bytes == -1)
1458     {
1459       *nargs = 2;
1460       return builtin_decl_implicit (check[recover_p][is_store][5]);
1461     }
1462   *nargs = 1;
1463   int size_log2 = exact_log2 (size_in_bytes);
1464   return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
1465 }
1466 
1467 /* Split the current basic block and create a condition statement
1468    insertion point right before or after the statement pointed to by
1469    ITER.  Return an iterator to the point at which the caller might
1470    safely insert the condition statement.
1471 
1472    THEN_BLOCK must be set to the address of an uninitialized instance
1473    of basic_block.  The function will then set *THEN_BLOCK to the
1474    'then block' of the condition statement to be inserted by the
1475    caller.
1476 
1477    If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
1478    *THEN_BLOCK to *FALLTHROUGH_BLOCK.
1479 
1480    Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
1481    block' of the condition statement to be inserted by the caller.
1482 
1483    Note that *FALLTHROUGH_BLOCK is a new block that contains the
1484    statements starting from *ITER, and *THEN_BLOCK is a new empty
1485    block.
1486 
1487    *ITER is adjusted to always point to the first statement of the
1488     basic block *FALLTHROUGH_BLOCK.  That statement is the same as
1489     what ITER was pointing to prior to calling this function if
1490     BEFORE_P is true; otherwise, it is its following statement.  */
1491 
1492 gimple_stmt_iterator
1493 create_cond_insert_point (gimple_stmt_iterator *iter,
1494 			  bool before_p,
1495 			  bool then_more_likely_p,
1496 			  bool create_then_fallthru_edge,
1497 			  basic_block *then_block,
1498 			  basic_block *fallthrough_block)
1499 {
1500   gimple_stmt_iterator gsi = *iter;
1501 
1502   if (!gsi_end_p (gsi) && before_p)
1503     gsi_prev (&gsi);
1504 
1505   basic_block cur_bb = gsi_bb (*iter);
1506 
1507   edge e = split_block (cur_bb, gsi_stmt (gsi));
1508 
1509   /* Get a hold on the 'condition block', the 'then block' and the
1510      'else block'.  */
1511   basic_block cond_bb = e->src;
1512   basic_block fallthru_bb = e->dest;
1513   basic_block then_bb = create_empty_bb (cond_bb);
1514   if (current_loops)
1515     {
1516       add_bb_to_loop (then_bb, cond_bb->loop_father);
1517       loops_state_set (LOOPS_NEED_FIXUP);
1518     }
1519 
1520   /* Set up the newly created 'then block'.  */
1521   e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
1522   int fallthrough_probability
1523     = then_more_likely_p
1524     ? PROB_VERY_UNLIKELY
1525     : PROB_ALWAYS - PROB_VERY_UNLIKELY;
1526   e->probability = PROB_ALWAYS - fallthrough_probability;
1527   if (create_then_fallthru_edge)
1528     make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);
1529 
1530   /* Set up the fallthrough basic block.  */
1531   e = find_edge (cond_bb, fallthru_bb);
1532   e->flags = EDGE_FALSE_VALUE;
1533   e->count = cond_bb->count;
1534   e->probability = fallthrough_probability;
1535 
1536   /* Update dominance info for the newly created then_bb; note that
1537      fallthru_bb's dominance info has already been updated by
1538      split_block.  */
1539   if (dom_info_available_p (CDI_DOMINATORS))
1540     set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);
1541 
1542   *then_block = then_bb;
1543   *fallthrough_block = fallthru_bb;
1544   *iter = gsi_start_bb (fallthru_bb);
1545 
1546   return gsi_last_bb (cond_bb);
1547 }
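
/* For illustration only, the CFG shape created above (a sketch;
   edge probabilities depend on THEN_MORE_LIKELY_P):

            COND_BB        ends at the returned iterator, where the
            /     \        caller inserts its GIMPLE_COND
       (TRUE)   (FALSE)
          /         \
     THEN_BB     FALLTHRU_BB    starts with the old *ITER statement
          \         /
          (FALLTHRU edge, created only if CREATE_THEN_FALLTHRU_EDGE)  */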
1548 
1549 /* Insert an if condition followed by a 'then block' right before the
1550    statement pointed to by ITER.  The fallthrough block -- which is the
1551    else block of the condition as well as the destination of the
1552    outgoing edge of the 'then block' -- starts with the statement
1553    pointed to by ITER.
1554 
1555    COND is the condition of the if.
1556 
1557    If THEN_MORE_LIKELY_P is true, the probability of the edge to the
1558    'then block' is higher than the probability of the edge to the
1559    fallthrough block.
1560 
1561    Upon completion of the function, *THEN_BB is set to the newly
1562    inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
1563    fallthrough block.
1564 
1565    *ITER is adjusted to still point to the same statement it was
1566    pointing to initially.  */
1567 
1568 static void
1569 insert_if_then_before_iter (gcond *cond,
1570 			    gimple_stmt_iterator *iter,
1571 			    bool then_more_likely_p,
1572 			    basic_block *then_bb,
1573 			    basic_block *fallthrough_bb)
1574 {
1575   gimple_stmt_iterator cond_insert_point =
1576     create_cond_insert_point (iter,
1577 			      /*before_p=*/true,
1578 			      then_more_likely_p,
1579 			      /*create_then_fallthru_edge=*/true,
1580 			      then_bb,
1581 			      fallthrough_bb);
1582   gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
1583 }
1584 
1585 /* Build a load of the shadow value at address
1586    (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().  */
1587 
1588 static tree
1589 build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
1590 			 tree base_addr, tree shadow_ptr_type)
1591 {
1592   tree t, uintptr_type = TREE_TYPE (base_addr);
1593   tree shadow_type = TREE_TYPE (shadow_ptr_type);
1594   gimple g;
1595 
1596   t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
1597   g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
1598 			   base_addr, t);
1599   gimple_set_location (g, location);
1600   gsi_insert_after (gsi, g, GSI_NEW_STMT);
1601 
1602   t = build_int_cst (uintptr_type, asan_shadow_offset ());
1603   g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
1604 			   gimple_assign_lhs (g), t);
1605   gimple_set_location (g, location);
1606   gsi_insert_after (gsi, g, GSI_NEW_STMT);
1607 
1608   g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
1609 			   gimple_assign_lhs (g));
1610   gimple_set_location (g, location);
1611   gsi_insert_after (gsi, g, GSI_NEW_STMT);
1612 
1613   t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
1614 	      build_int_cst (shadow_ptr_type, 0));
1615   g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
1616   gimple_set_location (g, location);
1617   gsi_insert_after (gsi, g, GSI_NEW_STMT);
1618   return gimple_assign_lhs (g);
1619 }
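
/* For illustration only, the GIMPLE sequence emitted above looks
   roughly like this (a sketch; SSA names are invented):

     _1 = base_addr >> ASAN_SHADOW_SHIFT;
     _2 = _1 + asan_shadow_offset ();
     _3 = (shadow_ptr_type) _2;
     _4 = MEM[(shadow_type *)_3];

   and _4, the loaded shadow value, is returned to the caller.  */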
1620 
1621 /* BASE can already be an SSA_NAME; in that case, do not create a
1622    new SSA_NAME for it.  */
1623 
1624 static tree
1625 maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
1626 		       bool before_p)
1627 {
1628   if (TREE_CODE (base) == SSA_NAME)
1629     return base;
1630   gimple g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
1631 				  TREE_CODE (base), base);
1632   gimple_set_location (g, loc);
1633   if (before_p)
1634     gsi_insert_before (iter, g, GSI_SAME_STMT);
1635   else
1636     gsi_insert_after (iter, g, GSI_NEW_STMT);
1637   return gimple_assign_lhs (g);
1638 }
1639 
1640 /* LEN can already have necessary size and precision;
1641    in that case, do not create a new variable.  */
1642 
1643 tree
1644 maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
1645 		       bool before_p)
1646 {
1647   if (ptrofftype_p (len))
1648     return len;
1649   gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
1650 				  NOP_EXPR, len);
1651   gimple_set_location (g, loc);
1652   if (before_p)
1653     gsi_insert_before (iter, g, GSI_SAME_STMT);
1654   else
1655     gsi_insert_after (iter, g, GSI_NEW_STMT);
1656   return gimple_assign_lhs (g);
1657 }
1658 
1659 /* Instrument the memory access expression BASE.  Insert new
1660    statements before or after ITER.
1661 
1662    Note that the memory access represented by BASE can be either an
1663    SSA_NAME, or a non-SSA expression.  LOCATION is the source code
1664    location.  IS_STORE is TRUE for a store, FALSE for a load.
1665    BEFORE_P is TRUE for inserting the instrumentation code before
1666    ITER, FALSE for inserting it after ITER.  IS_SCALAR_ACCESS is TRUE
1667    for a scalar memory access and FALSE for memory region access.
1668    IS_NON_ZERO_LEN is TRUE if the memory region is guaranteed to have
1669    non-zero length.  ALIGN is the alignment of the accessed memory object.
1673 
1674    If BEFORE_P is TRUE, *ITER is arranged to still point to the
1675    statement it was pointing to prior to calling this function,
1676    otherwise, it points to the statement logically following it.  */
1677 
1678 static void
1679 build_check_stmt (location_t loc, tree base, tree len,
1680 		  HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
1681 		  bool is_non_zero_len, bool before_p, bool is_store,
1682 		  bool is_scalar_access, unsigned int align = 0)
1683 {
1684   gimple_stmt_iterator gsi = *iter;
1685   gimple g;
1686 
1687   gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));
1690 
1691   base = unshare_expr (base);
1692   base = maybe_create_ssa_name (loc, base, &gsi, before_p);
1693 
1694   if (len)
1695     {
1696       len = unshare_expr (len);
1697       len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
1698     }
1699   else
1700     {
1701       gcc_assert (size_in_bytes != -1);
1702       len = build_int_cst (pointer_sized_int_node, size_in_bytes);
1703     }
1704 
1705   if (size_in_bytes > 1)
1706     {
1707       if ((size_in_bytes & (size_in_bytes - 1)) != 0
1708 	  || size_in_bytes > 16)
1709 	is_scalar_access = false;
1710       else if (align && align < size_in_bytes * BITS_PER_UNIT)
1711 	{
1712 	  /* On non-strict alignment targets, if a
1713 	     16-byte access is only 8-byte aligned,
1714 	     this results in a misaligned 2-byte
1715 	     shadow memory load, which can still
1716 	     be handled with a single read.  */
1717 	  if (size_in_bytes != 16
1718 	      || STRICT_ALIGNMENT
1719 	      || align < 8 * BITS_PER_UNIT)
1720 	    is_scalar_access = false;
1721 	}
1722     }
1723 
1724   HOST_WIDE_INT flags = 0;
1725   if (is_store)
1726     flags |= ASAN_CHECK_STORE;
1727   if (is_non_zero_len)
1728     flags |= ASAN_CHECK_NON_ZERO_LEN;
1729   if (is_scalar_access)
1730     flags |= ASAN_CHECK_SCALAR_ACCESS;
1731 
1732   g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
1733 				  build_int_cst (integer_type_node, flags),
1734 				  base, len,
1735 				  build_int_cst (integer_type_node,
1736 						 align / BITS_PER_UNIT));
1737   gimple_set_location (g, loc);
1738   if (before_p)
1739     gsi_insert_before (&gsi, g, GSI_SAME_STMT);
1740   else
1741     {
1742       gsi_insert_after (&gsi, g, GSI_NEW_STMT);
1743       gsi_next (&gsi);
1744       *iter = gsi;
1745     }
1746 }
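
/* For illustration only, for an aligned 4-byte store the statement
   built above prints roughly as (a sketch; FLAGS is the OR of the
   ASAN_CHECK_* bits computed above):

     .ASAN_CHECK (flags, base_1, 4, 4);

   The internal call is kept until late and expanded into a real
   shadow memory test in asan_expand_check_ifn below.  */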
1747 
1748 /* If T represents a memory access, add instrumentation code before ITER.
1749    LOCATION is source code location.
1750    IS_STORE is either TRUE (for a store) or FALSE (for a load).  */
1751 
1752 static void
1753 instrument_derefs (gimple_stmt_iterator *iter, tree t,
1754 		   location_t location, bool is_store)
1755 {
1756   if (is_store && !ASAN_INSTRUMENT_WRITES)
1757     return;
1758   if (!is_store && !ASAN_INSTRUMENT_READS)
1759     return;
1760 
1761   tree type, base;
1762   HOST_WIDE_INT size_in_bytes;
1763 
1764   type = TREE_TYPE (t);
1765   switch (TREE_CODE (t))
1766     {
1767     case ARRAY_REF:
1768     case COMPONENT_REF:
1769     case INDIRECT_REF:
1770     case MEM_REF:
1771     case VAR_DECL:
1772     case BIT_FIELD_REF:
1773       break;
1775     default:
1776       return;
1777     }
1778 
1779   size_in_bytes = int_size_in_bytes (type);
1780   if (size_in_bytes <= 0)
1781     return;
1782 
1783   HOST_WIDE_INT bitsize, bitpos;
1784   tree offset;
1785   machine_mode mode;
1786   int volatilep = 0, unsignedp = 0;
1787   tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
1788 				    &mode, &unsignedp, &volatilep, false);
1789 
1790   if (TREE_CODE (t) == COMPONENT_REF
1791       && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
1792     {
1793       tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
1794       instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
1795 				       TREE_OPERAND (t, 0), repr,
1796 				       TREE_OPERAND (t, 2)),
1797 			 location, is_store);
1798       return;
1799     }
1800 
1801   if (bitpos % BITS_PER_UNIT
1802       || bitsize != size_in_bytes * BITS_PER_UNIT)
1803     return;
1804 
1805   if (TREE_CODE (inner) == VAR_DECL && DECL_HARD_REGISTER (inner))
1806     return;
1807 
1808   if (TREE_CODE (inner) == VAR_DECL
1809       && offset == NULL_TREE
1810       && bitpos >= 0
1811       && DECL_SIZE (inner)
1812       && tree_fits_shwi_p (DECL_SIZE (inner))
1813       && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
1814     {
1815       if (DECL_THREAD_LOCAL_P (inner))
1816 	return;
1817       if (!ASAN_GLOBALS && is_global_var (inner))
1818         return;
1819       if (!TREE_STATIC (inner))
1820 	{
1821 	  /* Automatic vars in the current function will always be
1822 	     accessible.  */
1823 	  if (decl_function_context (inner) == current_function_decl)
1824 	    return;
1825 	}
1826       /* Always instrument external vars, as they might be dynamically
1827 	 initialized.  */
1828       else if (!DECL_EXTERNAL (inner))
1829 	{
1830 	  /* For static vars, if they are known not to be dynamically
1831 	     initialized, they will always be accessible.  */
1832 	  varpool_node *vnode = varpool_node::get (inner);
1833 	  if (vnode && !vnode->dynamically_initialized)
1834 	    return;
1835 	}
1836     }
1837 
1838   base = build_fold_addr_expr (t);
1839   if (!has_mem_ref_been_instrumented (base, size_in_bytes))
1840     {
1841       unsigned int align = get_object_alignment (t);
1842       build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
1843 			/*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
1844 			is_store, /*is_scalar_access*/true, align);
1845       update_mem_ref_hash_table (base, size_in_bytes);
1846       update_mem_ref_hash_table (t, size_in_bytes);
1847     }
1849 }
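
/* For illustration only, given

     struct S { char c; int i; } s;
     ...
     s.i = 42;

   this function sees the COMPONENT_REF s.i, computes
   size_in_bytes == 4, takes base == &s.i and emits a 4-byte scalar
   store check before the assignment (a sketch; whether the check is
   really emitted also depends on the hash table lookup and on the
   static/automatic variable tests above).  */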
1850 
1851 /*  Insert a memory reference into the hash table if access length
1852     can be determined at compile time.  */
1853 
1854 static void
1855 maybe_update_mem_ref_hash_table (tree base, tree len)
1856 {
1857   if (!POINTER_TYPE_P (TREE_TYPE (base))
1858       || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
1859     return;
1860 
1861   HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1862 
1863   if (size_in_bytes != -1)
1864     update_mem_ref_hash_table (base, size_in_bytes);
1865 }
1866 
1867 /* Instrument an access to a contiguous memory region that starts at
1868    the address pointed to by BASE, over a length of LEN (expressed in
1869    the sizeof (*BASE) bytes).  ITER points to the instruction before
1870    which the instrumentation instructions must be inserted.  LOCATION
1871    is the source location that the instrumentation instructions must
1872    have.  If IS_STORE is true, then the memory access is a store;
1873    otherwise, it's a load.  */
1874 
1875 static void
1876 instrument_mem_region_access (tree base, tree len,
1877 			      gimple_stmt_iterator *iter,
1878 			      location_t location, bool is_store)
1879 {
1880   if (!POINTER_TYPE_P (TREE_TYPE (base))
1881       || !INTEGRAL_TYPE_P (TREE_TYPE (len))
1882       || integer_zerop (len))
1883     return;
1884 
1885   HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
1886 
1887   if ((size_in_bytes == -1)
1888       || !has_mem_ref_been_instrumented (base, size_in_bytes))
1889     {
1890       build_check_stmt (location, base, len, size_in_bytes, iter,
1891 			/*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
1892 			is_store, /*is_scalar_access*/false, /*align*/0);
1893     }
1894 
1895   maybe_update_mem_ref_hash_table (base, len);
1896   *iter = gsi_for_stmt (gsi_stmt (*iter));
1897 }
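
/* For illustration only, a region access of LEN bytes at BASE yields
   a single check covering the whole range (a sketch):

     .ASAN_CHECK (flags, base_1, len_2, 0);

   with ASAN_CHECK_SCALAR_ACCESS clear in FLAGS, so the later
   expansion tests the first and the last byte of the region.  */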
1898 
1899 /* Instrument the call to a built-in memory access function that is
1900    pointed to by the iterator ITER.
1901 
1902    Upon completion, return TRUE iff *ITER has been advanced to the
1903    statement following the one it was originally pointing to.  */
1904 
1905 static bool
1906 instrument_builtin_call (gimple_stmt_iterator *iter)
1907 {
1908   if (!ASAN_MEMINTRIN)
1909     return false;
1910 
1911   bool iter_advanced_p = false;
1912   gcall *call = as_a <gcall *> (gsi_stmt (*iter));
1913 
1914   gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
1915 
1916   location_t loc = gimple_location (call);
1917 
1918   asan_mem_ref src0, src1, dest;
1919   asan_mem_ref_init (&src0, NULL, 1);
1920   asan_mem_ref_init (&src1, NULL, 1);
1921   asan_mem_ref_init (&dest, NULL, 1);
1922 
1923   tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1924   bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
1925     dest_is_deref = false, intercepted_p = true;
1926 
1927   if (get_mem_refs_of_builtin_call (call,
1928 				    &src0, &src0_len, &src0_is_store,
1929 				    &src1, &src1_len, &src1_is_store,
1930 				    &dest, &dest_len, &dest_is_store,
1931 				    &dest_is_deref, &intercepted_p))
1932     {
1933       if (dest_is_deref)
1934 	{
1935 	  instrument_derefs (iter, dest.start, loc, dest_is_store);
1936 	  gsi_next (iter);
1937 	  iter_advanced_p = true;
1938 	}
1939       else if (!intercepted_p
1940 	       && (src0_len || src1_len || dest_len))
1941 	{
1942 	  if (src0.start != NULL_TREE)
1943 	    instrument_mem_region_access (src0.start, src0_len,
1944 					  iter, loc, /*is_store=*/false);
1945 	  if (src1.start != NULL_TREE)
1946 	    instrument_mem_region_access (src1.start, src1_len,
1947 					  iter, loc, /*is_store=*/false);
1948 	  if (dest.start != NULL_TREE)
1949 	    instrument_mem_region_access (dest.start, dest_len,
1950 					  iter, loc, /*is_store=*/true);
1951 
1952 	  *iter = gsi_for_stmt (call);
1953 	  gsi_next (iter);
1954 	  iter_advanced_p = true;
1955 	}
1956       else
1957 	{
1958 	  if (src0.start != NULL_TREE)
1959 	    maybe_update_mem_ref_hash_table (src0.start, src0_len);
1960 	  if (src1.start != NULL_TREE)
1961 	    maybe_update_mem_ref_hash_table (src1.start, src1_len);
1962 	  if (dest.start != NULL_TREE)
1963 	    maybe_update_mem_ref_hash_table (dest.start, dest_len);
1964 	}
1965     }
1966   return iter_advanced_p;
1967 }
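
/* For illustration only, for a call such as

     memcpy (d, s, n);

   get_mem_refs_of_builtin_call reports SRC0 == s (a load of N bytes)
   and DEST == d (a store of N bytes), so two region checks are
   emitted before the call (a sketch; when the call is intercepted by
   the run-time library, INTERCEPTED_P is true and only the hash
   table is updated instead).  */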
1968 
1969 /*  Instrument the assignment statement ITER if it is subject to
1970     instrumentation.  Return TRUE iff instrumentation actually
1971     happened.  In that case, the iterator ITER is advanced to the
1972     statement following the one initially pointed to by ITER, and the
1973     memory reference whose access has been instrumented is added to
1974     the memory references hash table.  */
1975 
1976 static bool
1977 maybe_instrument_assignment (gimple_stmt_iterator *iter)
1978 {
1979   gimple s = gsi_stmt (*iter);
1980 
1981   gcc_assert (gimple_assign_single_p (s));
1982 
1983   tree ref_expr = NULL_TREE;
1984   bool is_store, is_instrumented = false;
1985 
1986   if (gimple_store_p (s))
1987     {
1988       ref_expr = gimple_assign_lhs (s);
1989       is_store = true;
1990       instrument_derefs (iter, ref_expr,
1991 			 gimple_location (s),
1992 			 is_store);
1993       is_instrumented = true;
1994     }
1995 
1996   if (gimple_assign_load_p (s))
1997     {
1998       ref_expr = gimple_assign_rhs1 (s);
1999       is_store = false;
2000       instrument_derefs (iter, ref_expr,
2001 			 gimple_location (s),
2002 			 is_store);
2003       is_instrumented = true;
2004     }
2005 
2006   if (is_instrumented)
2007     gsi_next (iter);
2008 
2009   return is_instrumented;
2010 }
2011 
2012 /* Instrument the function call pointed to by the iterator ITER, if it
2013    is subject to instrumentation.  At the moment, the only function
2014    calls that are instrumented are some built-in functions that access
2015    memory.  Look at instrument_builtin_call to learn more.
2016 
2017    Upon completion return TRUE iff *ITER was advanced to the statement
2018    following the one it was originally pointing to.  */
2019 
2020 static bool
2021 maybe_instrument_call (gimple_stmt_iterator *iter)
2022 {
2023   gimple stmt = gsi_stmt (*iter);
2024   bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
2025 
2026   if (is_builtin && instrument_builtin_call (iter))
2027     return true;
2028 
2029   if (gimple_call_noreturn_p (stmt))
2030     {
2031       if (is_builtin)
2032 	{
2033 	  tree callee = gimple_call_fndecl (stmt);
2034 	  switch (DECL_FUNCTION_CODE (callee))
2035 	    {
2036 	    case BUILT_IN_UNREACHABLE:
2037 	    case BUILT_IN_TRAP:
2038 	      /* Don't instrument these.  */
2039 	      return false;
2040 	    default:
2041 	      break;
2042 	    }
2043 	}
2044       tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
2045       gimple g = gimple_build_call (decl, 0);
2046       gimple_set_location (g, gimple_location (stmt));
2047       gsi_insert_before (iter, g, GSI_SAME_STMT);
2048     }
2049   return false;
2050 }
2051 
2052 /* Walk each instruction of all basic blocks and instrument those that
2053    represent memory references: loads, stores, or function calls.
2054    In a given basic block, this function avoids instrumenting memory
2055    references that have already been instrumented.  */
2056 
2057 static void
2058 transform_statements (void)
2059 {
2060   basic_block bb, last_bb = NULL;
2061   gimple_stmt_iterator i;
2062   int saved_last_basic_block = last_basic_block_for_fn (cfun);
2063 
2064   FOR_EACH_BB_FN (bb, cfun)
2065     {
2066       basic_block prev_bb = bb;
2067 
2068       if (bb->index >= saved_last_basic_block)
	continue;
2069 
2070       /* Flush the mem ref hash table, if current bb doesn't have
2071 	 exactly one predecessor, or if that predecessor (skipping
2072 	 over asan created basic blocks) isn't the last processed
2073 	 basic block.  Thus we effectively flush on extended basic
2074 	 block boundaries.  */
2075       while (single_pred_p (prev_bb))
2076 	{
2077 	  prev_bb = single_pred (prev_bb);
2078 	  if (prev_bb->index < saved_last_basic_block)
2079 	    break;
2080 	}
2081       if (prev_bb != last_bb)
2082 	empty_mem_ref_hash_table ();
2083       last_bb = bb;
2084 
2085       for (i = gsi_start_bb (bb); !gsi_end_p (i);)
2086 	{
2087 	  gimple s = gsi_stmt (i);
2088 
2089 	  if (has_stmt_been_instrumented_p (s))
2090 	    gsi_next (&i);
2091 	  else if (gimple_assign_single_p (s)
2092 		   && !gimple_clobber_p (s)
2093 		   && maybe_instrument_assignment (&i))
2094 	    /*  Nothing to do as maybe_instrument_assignment advanced
2095 		the iterator I.  */;
2096 	  else if (is_gimple_call (s) && maybe_instrument_call (&i))
2097 	    /*  Nothing to do as maybe_instrument_call
2098 		advanced the iterator I.  */;
2099 	  else
2100 	    {
2101 	      /* No instrumentation happened.
2102 
2103 		 If the current instruction is a function call that
2104 		 might free something, let's forget about the memory
2105 		 references that got instrumented.  Otherwise we might
2106 		 miss some instrumentation opportunities.  */
2107 	      if (is_gimple_call (s) && !nonfreeing_call_p (s))
2108 		empty_mem_ref_hash_table ();
2109 
2110 	      gsi_next (&i);
2111 	    }
2112 	}
2113     }
2114   free_mem_ref_resources ();
2115 }
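
/* For illustration only, consider a diamond CFG A -> {B, C} -> D,
   processed in that order (a sketch of the flushing policy above):
   B keeps the hash table because its single predecessor A was the
   last processed block; C flushes it because the last processed
   block is now B; D flushes it because it has two predecessors.  */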
2116 
2117 /* Build
2118    __asan_before_dynamic_init (module_name)
2119    or
2120    __asan_after_dynamic_init ()
2121    call.  */
2122 
2123 tree
2124 asan_dynamic_init_call (bool after_p)
2125 {
2126   if (shadow_ptr_types[0] == NULL_TREE)
2127     asan_init_shadow_ptr_types ();
2128 
2129   tree fn = builtin_decl_implicit (after_p
2130 				   ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
2131 				   : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
2132   tree module_name_cst = NULL_TREE;
2133   if (!after_p)
2134     {
2135       pretty_printer module_name_pp;
2136       pp_string (&module_name_pp, main_input_filename);
2137 
2138       module_name_cst = asan_pp_string (&module_name_pp);
2139       module_name_cst = fold_convert (const_ptr_type_node,
2140 				      module_name_cst);
2141     }
2142 
2143   return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
2144 }
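
/* For illustration only, the calls built above bracket the dynamic
   initialization of one module's globals (a sketch; "file.c" stands
   for main_input_filename):

     __asan_before_dynamic_init ("file.c");
     ... run the dynamic initializers ...
     __asan_after_dynamic_init ();  */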
2145 
2146 /* Build
2147    struct __asan_global
2148    {
2149      const void *__beg;
2150      uptr __size;
2151      uptr __size_with_redzone;
2152      const void *__name;
2153      const void *__module_name;
2154      uptr __has_dynamic_init;
2155      __asan_global_source_location *__location;
2156    } type.  */
2157 
2158 static tree
2159 asan_global_struct (void)
2160 {
2161   static const char *field_names[7]
2162     = { "__beg", "__size", "__size_with_redzone",
2163 	"__name", "__module_name", "__has_dynamic_init", "__location"};
2164   tree fields[7], ret;
2165   int i;
2166 
2167   ret = make_node (RECORD_TYPE);
2168   for (i = 0; i < 7; i++)
2169     {
2170       fields[i]
2171 	= build_decl (UNKNOWN_LOCATION, FIELD_DECL,
2172 		      get_identifier (field_names[i]),
2173 		      (i == 0 || i == 3) ? const_ptr_type_node
2174 		      : pointer_sized_int_node);
2175       DECL_CONTEXT (fields[i]) = ret;
2176       if (i)
2177 	DECL_CHAIN (fields[i - 1]) = fields[i];
2178     }
2179   tree type_decl = build_decl (input_location, TYPE_DECL,
2180 			       get_identifier ("__asan_global"), ret);
2181   DECL_IGNORED_P (type_decl) = 1;
2182   DECL_ARTIFICIAL (type_decl) = 1;
2183   TYPE_FIELDS (ret) = fields[0];
2184   TYPE_NAME (ret) = type_decl;
2185   TYPE_STUB_DECL (ret) = type_decl;
2186   layout_type (ret);
2187   return ret;
2188 }
2189 
2190 /* Append description of a single global DECL into vector V.
2191    TYPE is __asan_global struct type as returned by asan_global_struct.  */
2192 
2193 static void
2194 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
2195 {
2196   tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2197   unsigned HOST_WIDE_INT size;
2198   tree str_cst, module_name_cst, refdecl = decl;
2199   vec<constructor_elt, va_gc> *vinner = NULL;
2200 
2201   pretty_printer asan_pp, module_name_pp;
2202 
2203   if (DECL_NAME (decl))
2204     pp_tree_identifier (&asan_pp, DECL_NAME (decl));
2205   else
2206     pp_string (&asan_pp, "<unknown>");
2207   str_cst = asan_pp_string (&asan_pp);
2208 
2209   pp_string (&module_name_pp, main_input_filename);
2210   module_name_cst = asan_pp_string (&module_name_pp);
2211 
2212   if (asan_needs_local_alias (decl))
2213     {
2214       char buf[20];
2215       ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2216       refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2217 			    VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2218       TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2219       TREE_READONLY (refdecl) = TREE_READONLY (decl);
2220       TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2221       DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2222       DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2223       DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2224       TREE_STATIC (refdecl) = 1;
2225       TREE_PUBLIC (refdecl) = 0;
2226       TREE_USED (refdecl) = 1;
2227       assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2228     }
2229 
2230   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2231 			  fold_convert (const_ptr_type_node,
2232 					build_fold_addr_expr (refdecl)));
2233   size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
2234   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2235   size += asan_red_zone_size (size);
2236   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2237   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2238 			  fold_convert (const_ptr_type_node, str_cst));
2239   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2240 			  fold_convert (const_ptr_type_node, module_name_cst));
2241   varpool_node *vnode = varpool_node::get (decl);
2242   int has_dynamic_init = 0;
2243   /* FIXME: Enable initialization order fiasco detection in LTO mode once
2244      a proper fix for PR 79061 is applied.  */
2245   if (!in_lto_p)
2246     has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
2247   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2248 			  build_int_cst (uptr, has_dynamic_init));
2249   tree locptr = NULL_TREE;
2250   location_t loc = DECL_SOURCE_LOCATION (decl);
2251   expanded_location xloc = expand_location (loc);
2252   if (xloc.file != NULL)
2253     {
2254       static int lasanloccnt = 0;
2255       char buf[25];
2256       ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
2257       tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2258 			     ubsan_get_source_location_type ());
2259       TREE_STATIC (var) = 1;
2260       TREE_PUBLIC (var) = 0;
2261       DECL_ARTIFICIAL (var) = 1;
2262       DECL_IGNORED_P (var) = 1;
2263       pretty_printer filename_pp;
2264       pp_string (&filename_pp, xloc.file);
2265       tree str = asan_pp_string (&filename_pp);
2266       tree ctor = build_constructor_va (TREE_TYPE (var), 3,
2267 					NULL_TREE, str, NULL_TREE,
2268 					build_int_cst (unsigned_type_node,
2269 						       xloc.line), NULL_TREE,
2270 					build_int_cst (unsigned_type_node,
2271 						       xloc.column));
2272       TREE_CONSTANT (ctor) = 1;
2273       TREE_STATIC (ctor) = 1;
2274       DECL_INITIAL (var) = ctor;
2275       varpool_node::finalize_decl (var);
2276       locptr = fold_convert (uptr, build_fold_addr_expr (var));
2277     }
2278   else
2279     locptr = build_int_cst (uptr, 0);
2280   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
2281   init = build_constructor (type, vinner);
2282   CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
2283 }
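
/* For illustration only, the record appended above for a global like
   'int g;' defined in file.c is roughly (a sketch, in field order):

     { &g (or its local .LASAN alias), sizeof g,
       sizeof g + asan_red_zone_size (sizeof g), "g", "file.c",
       has_dynamic_init, &.LASANLOC<n> (or 0 if no location) }  */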
2284 
2285 /* Initialize sanitizer.def builtins if the FE hasn't initialized them.  */
2286 void
2287 initialize_sanitizer_builtins (void)
2288 {
2289   tree decl;
2290 
2291   if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2292     return;
2293 
2294   tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2295   tree BT_FN_VOID_PTR
2296     = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2297   tree BT_FN_VOID_CONST_PTR
2298     = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
2299   tree BT_FN_VOID_PTR_PTR
2300     = build_function_type_list (void_type_node, ptr_type_node,
2301 				ptr_type_node, NULL_TREE);
2302   tree BT_FN_VOID_PTR_PTR_PTR
2303     = build_function_type_list (void_type_node, ptr_type_node,
2304 				ptr_type_node, ptr_type_node, NULL_TREE);
2305   tree BT_FN_VOID_PTR_PTRMODE
2306     = build_function_type_list (void_type_node, ptr_type_node,
2307 				pointer_sized_int_node, NULL_TREE);
2308   tree BT_FN_VOID_INT
2309     = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2310   tree BT_FN_SIZE_CONST_PTR_INT
2311     = build_function_type_list (size_type_node, const_ptr_type_node,
2312 				integer_type_node, NULL_TREE);
2313   tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2314   tree BT_FN_IX_CONST_VPTR_INT[5];
2315   tree BT_FN_IX_VPTR_IX_INT[5];
2316   tree BT_FN_VOID_VPTR_IX_INT[5];
2317   tree vptr
2318     = build_pointer_type (build_qualified_type (void_type_node,
2319 						TYPE_QUAL_VOLATILE));
2320   tree cvptr
2321     = build_pointer_type (build_qualified_type (void_type_node,
2322 						TYPE_QUAL_VOLATILE
2323 						|TYPE_QUAL_CONST));
2324   tree boolt
2325     = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2326   int i;
2327   for (i = 0; i < 5; i++)
2328     {
2329       tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2330       BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2331 	= build_function_type_list (boolt, vptr, ptr_type_node, ix,
2332 				    integer_type_node, integer_type_node,
2333 				    NULL_TREE);
2334       BT_FN_IX_CONST_VPTR_INT[i]
2335 	= build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2336       BT_FN_IX_VPTR_IX_INT[i]
2337 	= build_function_type_list (ix, vptr, ix, integer_type_node,
2338 				    NULL_TREE);
2339       BT_FN_VOID_VPTR_IX_INT[i]
2340 	= build_function_type_list (void_type_node, vptr, ix,
2341 				    integer_type_node, NULL_TREE);
2342     }
2343 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2344 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2345 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2346 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2347 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2348 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2349 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2350 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2351 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2352 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2353 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2354 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2355 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2356 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2357 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2358 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2359 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2360 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2361 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2362 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2363 #undef ATTR_NOTHROW_LEAF_LIST
2364 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2365 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2366 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2367 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2368 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2369 #undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2370 #define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
2371   ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
2372 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2373 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2374   ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2375 #undef ATTR_COLD_NOTHROW_LEAF_LIST
2376 #define ATTR_COLD_NOTHROW_LEAF_LIST \
2377   /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
2378 #undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
2379 #define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
2380   /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
2381 #undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
2382 #define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
2383   /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
2384 #undef ATTR_PURE_NOTHROW_LEAF_LIST
2385 #define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
2386 #undef DEF_SANITIZER_BUILTIN
2387 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2388   decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM,		\
2389 			       BUILT_IN_NORMAL, NAME, NULL_TREE);	\
2390   set_call_expr_flags (decl, ATTRS);					\
2391   set_builtin_decl (ENUM, decl, true);
2392 
2393 #include "sanitizer.def"
2394 
2395   /* -fsanitize=object-size uses __builtin_object_size, but that might
2396      not be available for e.g. Fortran at this point.  We use
2397      DEF_SANITIZER_BUILTIN here only as a convenience macro.  */
2398   if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
2399       && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
2400     DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
2401 			   BT_FN_SIZE_CONST_PTR_INT,
2402 			   ATTR_PURE_NOTHROW_LEAF_LIST)
2403 
2404 #undef DEF_SANITIZER_BUILTIN
2405 }
2406 
2407 /* Called via hash_table::traverse.  Count the number of emitted
2408    STRING_CSTs in the constant hash table.  */
2409 
2410 int
2411 count_string_csts (constant_descriptor_tree **slot,
2412 		   unsigned HOST_WIDE_INT *data)
2413 {
2414   struct constant_descriptor_tree *desc = *slot;
2415   if (TREE_CODE (desc->value) == STRING_CST
2416       && TREE_ASM_WRITTEN (desc->value)
2417       && asan_protect_global (desc->value))
2418     ++*data;
2419   return 1;
2420 }
2421 
2422 /* Helper structure to pass two parameters to
2423    add_string_csts.  */
2424 
2425 struct asan_add_string_csts_data
2426 {
2427   tree type;
2428   vec<constructor_elt, va_gc> *v;
2429 };
2430 
2431 /* Called via hash_table::traverse.  Call asan_add_global
2432    on emitted STRING_CSTs from the constant hash table.  */
2433 
2434 int
2435 add_string_csts (constant_descriptor_tree **slot,
2436 		 asan_add_string_csts_data *aascd)
2437 {
2438   struct constant_descriptor_tree *desc = *slot;
2439   if (TREE_CODE (desc->value) == STRING_CST
2440       && TREE_ASM_WRITTEN (desc->value)
2441       && asan_protect_global (desc->value))
2442     {
2443       asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2444 		       aascd->type, aascd->v);
2445     }
2446   return 1;
2447 }
2448 
2449 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2450    invoke ggc_collect.  */
2451 static GTY(()) tree asan_ctor_statements;
2452 
2453 /* Module-level instrumentation.
2454    - Insert __asan_init_vN() into the list of CTORs.
2455    - TODO: insert redzones around globals.
2456  */
2457 
2458 void
2459 asan_finish_file (void)
2460 {
2461   varpool_node *vnode;
2462   unsigned HOST_WIDE_INT gcount = 0;
2463 
2464   if (shadow_ptr_types[0] == NULL_TREE)
2465     asan_init_shadow_ptr_types ();
2466   /* Avoid instrumenting code in the asan ctors/dtors.
2467      We don't need to insert padding after the description strings,
2468      nor after the .LASAN* array.  */
2469   flag_sanitize &= ~SANITIZE_ADDRESS;
2470 
2471   /* For user-space we want asan constructors to run first.
2472      The Linux kernel does not support priorities other than the default,
2473      and the only other user of constructors is coverage, so we run with
2474      the default priority.  */
2475   int priority = flag_sanitize & SANITIZE_USER_ADDRESS
2476                  ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;
2477 
2478   if (flag_sanitize & SANITIZE_USER_ADDRESS)
2479     {
2480       tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2481       append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2482     }
2483   FOR_EACH_DEFINED_VARIABLE (vnode)
2484     if (TREE_ASM_WRITTEN (vnode->decl)
2485 	&& asan_protect_global (vnode->decl))
2486       ++gcount;
2487   hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
2488   const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
2489     (&gcount);
2490   if (gcount)
2491     {
2492       tree type = asan_global_struct (), var, ctor;
2493       tree dtor_statements = NULL_TREE;
2494       vec<constructor_elt, va_gc> *v;
2495       char buf[20];
2496 
2497       type = build_array_type_nelts (type, gcount);
2498       ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2499       var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2500 			type);
2501       TREE_STATIC (var) = 1;
2502       TREE_PUBLIC (var) = 0;
2503       DECL_ARTIFICIAL (var) = 1;
2504       DECL_IGNORED_P (var) = 1;
2505       vec_alloc (v, gcount);
2506       FOR_EACH_DEFINED_VARIABLE (vnode)
2507 	if (TREE_ASM_WRITTEN (vnode->decl)
2508 	    && asan_protect_global (vnode->decl))
2509 	  asan_add_global (vnode->decl, TREE_TYPE (type), v);
2510       struct asan_add_string_csts_data aascd;
2511       aascd.type = TREE_TYPE (type);
2512       aascd.v = v;
2513       const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
2514        	(&aascd);
2515       ctor = build_constructor (type, v);
2516       TREE_CONSTANT (ctor) = 1;
2517       TREE_STATIC (ctor) = 1;
2518       DECL_INITIAL (var) = ctor;
2519       varpool_node::finalize_decl (var);
2520 
2521       tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
2522       tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
2523       append_to_statement_list (build_call_expr (fn, 2,
2524 						 build_fold_addr_expr (var),
2525 						 gcount_tree),
2526 				&asan_ctor_statements);
2527 
2528       fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2529       append_to_statement_list (build_call_expr (fn, 2,
2530 						 build_fold_addr_expr (var),
2531 						 gcount_tree),
2532 				&dtor_statements);
2533       cgraph_build_static_cdtor ('D', dtor_statements, priority);
2534     }
2535   if (asan_ctor_statements)
2536     cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
2537   flag_sanitize |= SANITIZE_ADDRESS;
2538 }
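
/* For illustration only, the module-level code built above amounts to
   (a sketch; .LASAN0 is the emitted array of __asan_global records
   and GCOUNT its element count):

     constructor:  __asan_init ();
                   __asan_register_globals (&.LASAN0, gcount);
     destructor:   __asan_unregister_globals (&.LASAN0, gcount);

   with the constructor running at MAX_RESERVED_INIT_PRIORITY - 1 for
   user-space ASan and at the default priority for the kernel.  */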
2539 
2540 /* Expand the IFN_ASAN_CHECK internal calls built by build_check_stmt.  */
2541 
2542 bool
2543 asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
2544 {
2545   gimple g = gsi_stmt (*iter);
2546   location_t loc = gimple_location (g);
2547 
2548   bool recover_p
2549     = (flag_sanitize & flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;
2550 
2551   HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
2552   gcc_assert (flags < ASAN_CHECK_LAST);
2553   bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
2554   bool is_store = (flags & ASAN_CHECK_STORE) != 0;
2555   bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;
2556 
2557   tree base = gimple_call_arg (g, 1);
2558   tree len = gimple_call_arg (g, 2);
2559   HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));
2560 
2561   HOST_WIDE_INT size_in_bytes
2562     = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
2563 
2564   if (use_calls)
2565     {
2566       /* Instrument using callbacks.  */
2567       gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2568 				      NOP_EXPR, base);
2569       gimple_set_location (g, loc);
2570       gsi_insert_before (iter, g, GSI_SAME_STMT);
2571       tree base_addr = gimple_assign_lhs (g);
2572 
2573       int nargs;
2574       tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
2575       if (nargs == 1)
2576 	g = gimple_build_call (fun, 1, base_addr);
2577       else
2578 	{
2579 	  gcc_assert (nargs == 2);
2580 	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2581 				   NOP_EXPR, len);
2582 	  gimple_set_location (g, loc);
2583 	  gsi_insert_before (iter, g, GSI_SAME_STMT);
2584 	  tree sz_arg = gimple_assign_lhs (g);
2585 	  g = gimple_build_call (fun, nargs, base_addr, sz_arg);
2586 	}
2587       gimple_set_location (g, loc);
2588       gsi_replace (iter, g, false);
2589       return false;
2590     }
2591 
2592   HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;
2593 
2594   tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
2595   tree shadow_type = TREE_TYPE (shadow_ptr_type);
2596 
2597   gimple_stmt_iterator gsi = *iter;
2598 
2599   if (!is_non_zero_len)
2600     {
2601       /* The length of the memory area to asan-protect is not known
2602	 to be non-zero at compile time.  Let's guard the generated
2603	 instrumentation code like:
2604 
2605	 if (len != 0)
2606	   {
2607	     // asan instrumentation code goes here.
2608	   }
2609	 // fallthrough instructions, starting with *ITER.  */
2610 
2611       g = gimple_build_cond (NE_EXPR,
2612 			    len,
2613 			    build_int_cst (TREE_TYPE (len), 0),
2614 			    NULL_TREE, NULL_TREE);
2615       gimple_set_location (g, loc);
2616 
2617       basic_block then_bb, fallthrough_bb;
2618       insert_if_then_before_iter (as_a <gcond *> (g), iter,
2619 				  /*then_more_likely_p=*/true,
2620 				  &then_bb, &fallthrough_bb);
2621       /* Note that fallthrough_bb starts with the statement that was
2622	 pointed to by ITER.  */
2623 
2624       /* The 'then block' of the 'if (len != 0)' condition is where
2625	 we'll generate the asan instrumentation code now.  */
2626       gsi = gsi_last_bb (then_bb);
2627     }
2628 
2629   /* Get an iterator on the point where we can add the condition
2630      statement for the instrumentation.  */
2631   basic_block then_bb, else_bb;
2632   gsi = create_cond_insert_point (&gsi, /*before_p*/false,
2633 				  /*then_more_likely_p=*/false,
2634 				  /*create_then_fallthru_edge*/recover_p,
2635 				  &then_bb,
2636 				  &else_bb);
2637 
2638   g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2639 			   NOP_EXPR, base);
2640   gimple_set_location (g, loc);
2641   gsi_insert_before (&gsi, g, GSI_NEW_STMT);
2642   tree base_addr = gimple_assign_lhs (g);
2643 
2644   tree t = NULL_TREE;
2645   if (real_size_in_bytes >= 8)
2646     {
2647       tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2648 					     shadow_ptr_type);
2649       t = shadow;
2650     }
2651   else
2652     {
2653       /* Slow path for 1, 2 and 4 byte accesses.  */
2654       /* Test (shadow != 0)
2655	 & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow).  */
2656       tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
2657 					     shadow_ptr_type);
2658       gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
2659       gimple_seq seq = NULL;
2660       gimple_seq_add_stmt (&seq, shadow_test);
2661       /* Accesses aligned to at least 8 bytes can test just
2662 	 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
2663 	 to be 0.  */
2664       if (align < 8)
2665 	{
2666 	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2667 						   base_addr, 7));
2668 	  gimple_seq_add_stmt (&seq,
2669 			       build_type_cast (shadow_type,
2670 						gimple_seq_last (seq)));
2671 	  if (real_size_in_bytes > 1)
2672 	    gimple_seq_add_stmt (&seq,
2673 				 build_assign (PLUS_EXPR,
2674 					       gimple_seq_last (seq),
2675 					       real_size_in_bytes - 1));
2676 	  t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
2677 	}
2678       else
2679 	t = build_int_cst (shadow_type, real_size_in_bytes - 1);
2680       gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
2681       gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2682 					       gimple_seq_last (seq)));
2683       t = gimple_assign_lhs (gimple_seq_last (seq));
2684       gimple_seq_set_location (seq, loc);
2685       gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2686 
2687       /* For non-constant, misaligned or otherwise weird access sizes,
2688	 check the first and last bytes.  */
2689       if (size_in_bytes == -1)
2690 	{
2691 	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2692 				   MINUS_EXPR, len,
2693 				   build_int_cst (pointer_sized_int_node, 1));
2694 	  gimple_set_location (g, loc);
2695 	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2696 	  tree last = gimple_assign_lhs (g);
2697 	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
2698 				   PLUS_EXPR, base_addr, last);
2699 	  gimple_set_location (g, loc);
2700 	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2701 	  tree base_end_addr = gimple_assign_lhs (g);
2702 
2703 	  tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
2704 						 shadow_ptr_type);
2705 	  gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
2706 	  gimple_seq seq = NULL;
2707 	  gimple_seq_add_stmt (&seq, shadow_test);
2708 	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
2709 						   base_end_addr, 7));
2710 	  gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
2711 						      gimple_seq_last (seq)));
2712 	  gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
2713 						   gimple_seq_last (seq),
2714 						   shadow));
2715 	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
2716 						   gimple_seq_last (seq)));
2717 	  gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
2718 						   gimple_seq_last (seq)));
2719 	  t = gimple_assign_lhs (gimple_seq_last (seq));
2720 	  gimple_seq_set_location (seq, loc);
2721 	  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
2722 	}
2723     }
2724 
2725   g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
2726 			 NULL_TREE, NULL_TREE);
2727   gimple_set_location (g, loc);
2728   gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2729 
2730   /* Generate call to the run-time library (e.g. __asan_report_load8).  */
2731   gsi = gsi_start_bb (then_bb);
2732   int nargs;
2733   tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
2734   g = gimple_build_call (fun, nargs, base_addr, len);
2735   gimple_set_location (g, loc);
2736   gsi_insert_after (&gsi, g, GSI_NEW_STMT);
2737 
2738   gsi_remove (iter, true);
2739   *iter = gsi_start_bb (else_bb);
2740 
2741   return true;
2742 }
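
/* For illustration only, a scalar 8-byte check
   .ASAN_CHECK (flags, base, 8, 8) expands inline into roughly
   (a sketch; SSA names are invented):

     base_addr_1 = (uintptr_t) base;
     shadow_2 = *(char *) ((base_addr_1 >> ASAN_SHADOW_SHIFT)
			   + asan_shadow_offset ());
     if (shadow_2 != 0)
       __asan_report_store8 (base_addr_1);  // load flavor for reads

   For 1-, 2- and 4-byte accesses the condition additionally checks
   ((base_addr & 7) + size - 1) >= shadow, as built in the slow path
   above.  */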
2743 
2744 /* Instrument the current function.  */
2745 
2746 static unsigned int
2747 asan_instrument (void)
2748 {
2749   if (shadow_ptr_types[0] == NULL_TREE)
2750     asan_init_shadow_ptr_types ();
2751   transform_statements ();
2752   return 0;
2753 }
2754 
2755 static bool
2756 gate_asan (void)
2757 {
2758   return (flag_sanitize & SANITIZE_ADDRESS) != 0
2759 	  && !lookup_attribute ("no_sanitize_address",
2760 				DECL_ATTRIBUTES (current_function_decl));
2761 }
2762 
2763 namespace {
2764 
2765 const pass_data pass_data_asan =
2766 {
2767   GIMPLE_PASS, /* type */
2768   "asan", /* name */
2769   OPTGROUP_NONE, /* optinfo_flags */
2770   TV_NONE, /* tv_id */
2771   ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2772   0, /* properties_provided */
2773   0, /* properties_destroyed */
2774   0, /* todo_flags_start */
2775   TODO_update_ssa, /* todo_flags_finish */
2776 };
2777 
2778 class pass_asan : public gimple_opt_pass
2779 {
2780 public:
2781   pass_asan (gcc::context *ctxt)
2782     : gimple_opt_pass (pass_data_asan, ctxt)
2783   {}
2784 
2785   /* opt_pass methods: */
2786   opt_pass * clone () { return new pass_asan (m_ctxt); }
2787   virtual bool gate (function *) { return gate_asan (); }
2788   virtual unsigned int execute (function *) { return asan_instrument (); }
2789 
2790 }; // class pass_asan
2791 
2792 } // anon namespace
2793 
2794 gimple_opt_pass *
2795 make_pass_asan (gcc::context *ctxt)
2796 {
2797   return new pass_asan (ctxt);
2798 }
2799 
2800 namespace {
2801 
2802 const pass_data pass_data_asan_O0 =
2803 {
2804   GIMPLE_PASS, /* type */
2805   "asan0", /* name */
2806   OPTGROUP_NONE, /* optinfo_flags */
2807   TV_NONE, /* tv_id */
2808   ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
2809   0, /* properties_provided */
2810   0, /* properties_destroyed */
2811   0, /* todo_flags_start */
2812   TODO_update_ssa, /* todo_flags_finish */
2813 };
2814 
2815 class pass_asan_O0 : public gimple_opt_pass
2816 {
2817 public:
2818   pass_asan_O0 (gcc::context *ctxt)
2819     : gimple_opt_pass (pass_data_asan_O0, ctxt)
2820   {}
2821 
2822   /* opt_pass methods: */
2823   virtual bool gate (function *) { return !optimize && gate_asan (); }
2824   virtual unsigned int execute (function *) { return asan_instrument (); }
2825 
2826 }; // class pass_asan_O0
2827 
2828 } // anon namespace
2829 
2830 gimple_opt_pass *
2831 make_pass_asan_O0 (gcc::context *ctxt)
2832 {
2833   return new pass_asan_O0 (ctxt);
2834 }
2835 
2836 #include "gt-asan.h"
2837