/* AddressSanitizer, a fast memory error detector.
   Copyright (C) 2012-2013 Free Software Foundation, Inc.
   Contributed by Kostya Serebryany <kcc@google.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "gimple.h"
#include "tree-iterator.h"
#include "tree-flow.h"
#include "tree-pass.h"
#include "asan.h"
#include "gimple-pretty-print.h"
#include "target.h"
#include "expr.h"
#include "optabs.h"
#include "output.h"
#include "tm_p.h"
#include "langhooks.h"
#include "hash-table.h"
#include "alloc-pool.h"

/* AddressSanitizer finds out-of-bounds and use-after-free bugs
   with <2x slowdown on average.

   The tool consists of two parts:
   instrumentation module (this file) and a run-time library.
   The instrumentation module adds a run-time check before every memory insn.
     For an 8- or 16-byte load accessing address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
       if (ShadowValue)
	 __asan_report_load8(X);
     For a load of N bytes (N=1, 2 or 4) from address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;
       if (ShadowValue)
	 if ((X & 7) + N - 1 >= ShadowValue)
	   __asan_report_loadN(X);
   Stores are instrumented similarly, but using __asan_report_storeN functions.
   A call to __asan_init() is inserted into the list of module CTORs.
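
   As a worked example (illustrative numbers only): for a 2-byte load
   at an address X with (X & 7) == 5 whose shadow byte holds the value
   6, the check computes (X & 7) + N - 1 == 5 + 2 - 1 == 6, and since
   6 >= 6 the access overlaps the first non-addressable byte, so
   __asan_report_load2(X) is called.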

   The run-time library redefines malloc (so that redzones are inserted around
   the allocated memory) and free (so that reuse of freed memory is delayed),
   and provides the __asan_report* and __asan_init functions.

   Read more:
   http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm

   The current implementation supports detection of out-of-bounds and
   use-after-free in the heap, on the stack and for global variables.

   [Protection of stack variables]

   To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
   stack grows downward:

     int
     foo ()
     {
       char a[23] = {0};
       int b[2] = {0};

       a[5] = 1;
       b[1] = 2;

       return a[5] + b[1];
     }

   For this function, the stack protected by asan will be organized as
   follows, from the top of the stack to the bottom:

   Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']

   Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
	   the next slot be 32-byte aligned; this one is called Partial
	   Redzone; this 32-byte alignment is an asan constraint]

   Slot 3/ [24 bytes for variable 'a']

   Slot 4/ [red zone of 32 bytes called 'Middle RedZone']

   Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]

   Slot 6/ [8 bytes for variable 'b']

   Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
	    'LEFT RedZone']

   The 32 bytes of LEFT red zone at the bottom of the stack can be
   decomposed as such:

     1/ The first 8 bytes contain a magic asan number that is always
     0x41B58AB3.

     2/ The following 8 bytes contain a pointer to a string (to be
     parsed at runtime by the runtime asan library), whose format is
     the following:

      "<function-name> <space> <num-of-variables-on-the-stack>
      (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
      <length-of-var-in-bytes> <space> <length-of-var-name> <space>
      <var-name> ){n} "

	where '(...){n}' means the content inside the parentheses occurs 'n'
	times, with 'n' being the number of variables on the stack.  An
	example string is shown right after this enumeration.

      3/ The following 16 bytes of the red zone have no particular
      format.
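
      For the foo function above, the string could look like this
      (a sketch only: the exact offsets and the order of the variables
      depend on the layout computed in cfgexpand.c):

       "foo 2 32 8 1 b 96 24 1 a "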

   The shadow memory for that stack layout is going to look like this:

     - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
       The F1 byte pattern is a magic number called
       ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
       the memory for that shadow byte is part of the LEFT red zone
       intended to sit at the bottom of the variables on the stack.

     - content of shadow memory 8 bytes for slots 6 and 5:
       0xF4F4F400.  The F4 byte pattern is a magic number
       called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
       memory region for this shadow byte is a PARTIAL red zone
       intended to pad a variable A, so that the slot following
       {A,padding} is 32-byte aligned.

       Note that the fact that the least significant byte of this
       shadow memory content is 00 means that 8 bytes of its
       corresponding memory (which corresponds to the memory of
       variable 'b') are addressable.

     - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
       The F2 byte pattern is a magic number called
       ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
       region for this shadow byte is a MIDDLE red zone intended to
       sit between two 32-byte aligned slots of {variable,padding}.

     - content of shadow memory 8 bytes for slots 3 and 2:
       0xF4000000.  This is the concatenation of
       variable 'a' and the partial red zone following it, like what we
       had for variable 'b'.  The least significant 3 bytes being 00
       means that the 24 bytes of variable 'a' are addressable.

     - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
       The F3 byte pattern is a magic number called
       ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
       region for this shadow byte is a RIGHT red zone intended to sit
       at the top of the variables of the stack.

   Note that the real variable layout is done in expand_used_vars in
   cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
   stack variables as well as the different red zones, emits some
   prologue code to populate the shadow memory so as to poison (mark as
   non-accessible) the regions of the red zones and mark the regions of
   stack variables as accessible, and emits some epilogue code to
   un-poison (mark as accessible) the regions of red zones right before
   the function exits.

   [Protection of global variables]

   The basic idea is to insert a red zone between two global variables
   and install a constructor function that calls the asan runtime to do
   the populating of the relevant shadow memory regions at load time.

   So the global variables are laid out so as to insert a red zone between
   them.  The red zones are sized so that each variable starts on a
   32-byte boundary.

   Then a constructor function is installed so that, for each global
   variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:

     struct __asan_global
     {
       // Address of the beginning of the global variable.
       const void *__beg;

       // Initial size of the global variable.
       uptr __size;

       // Size of the global variable + size of the red zone.  This
       //   size is 32-byte aligned.
       uptr __size_with_redzone;

       // Name of the global variable.
       const void *__name;

       // This is always set to NULL for now.
       uptr __has_dynamic_init;
     }
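
   Conceptually, the generated constructor thus boils down to
   something like the following sketch, where GLOBALS and N stand for
   the array of __asan_global instances and its length:

     static void
     asan_ctor (void)
     {
       __asan_register_globals (GLOBALS, N);
     }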

   A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */

alias_set_type asan_shadow_set = -1;

/* Pointer types to 1 resp. 2 byte integers in shadow memory.  A separate
   alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[2];

/* Hashtable support for memory references used by gimple
   statements.  */

/* This type represents a reference to a memory region.  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access (can be 1, 2, 4, 8, 16 for now).  */
  char access_size;
};

static alloc_pool asan_mem_ref_alloc_pool;

/* This creates the alloc pool used to store the instances of
   asan_mem_ref that are stored in the hash table asan_mem_ref_ht.  */

static alloc_pool
asan_mem_ref_get_alloc_pool ()
{
  if (asan_mem_ref_alloc_pool == NULL)
    asan_mem_ref_alloc_pool = create_alloc_pool ("asan_mem_ref",
						 sizeof (asan_mem_ref),
						 10);
  return asan_mem_ref_alloc_pool;
}

/* Initializes an instance of asan_mem_ref.  */

static void
asan_mem_ref_init (asan_mem_ref *ref, tree start, char access_size)
{
  ref->start = start;
  ref->access_size = access_size;
}

/* Allocates memory for an instance of asan_mem_ref in the memory
   pool returned by asan_mem_ref_get_alloc_pool and initializes it.
   START is the address of (or the expression pointing to) the
   beginning of the memory reference.  ACCESS_SIZE is the size of the
   access to the referenced memory.  */

static asan_mem_ref*
asan_mem_ref_new (tree start, char access_size)
{
  asan_mem_ref *ref =
    (asan_mem_ref *) pool_alloc (asan_mem_ref_get_alloc_pool ());

  asan_mem_ref_init (ref, start, access_size);
  return ref;
}

/* This builds and returns a pointer to the end of the memory region
   that starts at START and is of length LEN.  */

tree
asan_mem_ref_get_end (tree start, tree len)
{
  if (len == NULL_TREE || integer_zerop (len))
    return start;

  return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
}

/* Return a tree expression that represents the end of the referenced
   memory region.  Beware that this function can actually build a new
   tree expression.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}

struct asan_mem_ref_hasher
  : typed_noop_remove <asan_mem_ref>
{
  typedef asan_mem_ref value_type;
  typedef asan_mem_ref compare_type;

  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash a memory reference.  */

inline hashval_t
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
{
  hashval_t h = iterative_hash_expr (mem_ref->start, 0);
  h = iterative_hash_hashval_t (h, mem_ref->access_size);
  return h;
}

/* Compare two memory references.  We accept the length of either
   memory reference to be NULL_TREE.  */

inline bool
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
			    const asan_mem_ref *m2)
{
  return (m1->access_size == m2->access_size
	  && operand_equal_p (m1->start, m2->start, 0));
}

static hash_table <asan_mem_ref_hasher> asan_mem_ref_ht;

/* Returns a reference to the hash table containing memory references.
   This function ensures that the hash table is created.  Note that
   this hash table is updated by the function
   update_mem_ref_hash_table.  */

static hash_table <asan_mem_ref_hasher> &
get_mem_ref_hash_table ()
{
  if (!asan_mem_ref_ht.is_created ())
    asan_mem_ref_ht.create (10);

  return asan_mem_ref_ht;
}

/* Clear all entries from the memory references hash table.  */

static void
empty_mem_ref_hash_table ()
{
  if (asan_mem_ref_ht.is_created ())
    asan_mem_ref_ht.empty ();
}

/* Free the memory references hash table.  */

static void
free_mem_ref_resources ()
{
  if (asan_mem_ref_ht.is_created ())
    asan_mem_ref_ht.dispose ();

  if (asan_mem_ref_alloc_pool)
    {
      free_alloc_pool (asan_mem_ref_alloc_pool);
      asan_mem_ref_alloc_pool = NULL;
    }
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (tree ref, char access_size)
{
  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  return (get_mem_ref_hash_table ().find (&r) != NULL);
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}

/* Return true iff access to memory region starting at REF and of
   length LEN has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
{
  /* First let's see if the address of the beginning of REF has been
     instrumented.  */
  if (!has_mem_ref_been_instrumented (ref))
    return false;

  if (len != 0)
    {
      /* Let's see if the end of the region has been instrumented.  */
      if (!has_mem_ref_been_instrumented (asan_mem_ref_get_end (ref, len),
					  ref->access_size))
	return false;
    }
  return true;
}

/* Set REF to the memory reference present in a gimple assignment
   ASSIGNMENT.  Return true upon successful completion, false
   otherwise.  */

static bool
get_mem_ref_of_assignment (const gimple assignment,
			   asan_mem_ref *ref,
			   bool *ref_is_store)
{
  gcc_assert (gimple_assign_single_p (assignment));

  if (gimple_store_p (assignment))
    {
      ref->start = gimple_assign_lhs (assignment);
      *ref_is_store = true;
    }
  else if (gimple_assign_load_p (assignment))
    {
      ref->start = gimple_assign_rhs1 (assignment);
      *ref_is_store = false;
    }
  else
    return false;

  ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
  return true;
}

/* Return the memory references contained in a gimple statement
   representing a builtin call that has to do with memory access.  */
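
/* For example, for a call to __builtin_memcpy (d, s, n), the cases
   below yield *DST = {d, 1}, *SRC0 = {s, 1}, *DST_LEN and *SRC0_LEN
   set to n, *DST_IS_STORE = true, *SRC0_IS_STORE = false, and a
   return value of true.  */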

static bool
get_mem_refs_of_builtin_call (const gimple call,
			      asan_mem_ref *src0,
			      tree *src0_len,
			      bool *src0_is_store,
			      asan_mem_ref *src1,
			      tree *src1_len,
			      bool *src1_is_store,
			      asan_mem_ref *dst,
			      tree *dst_len,
			      bool *dst_is_store,
			      bool *dest_is_deref)
{
  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true, got_reference_p = false;
  char access_size = 1;

  switch (DECL_FUNCTION_CODE (callee))
    {
      /* (s, s, n) style memops.  */
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (src, dest, n) style memops.  */
    case BUILT_IN_BCOPY:
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, n) style memops.  */
    case BUILT_IN_BZERO:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

      /* (dest, x, n) style memops.  */
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      source0 = gimple_call_arg (call, 0);
      len = gimple_call_lhs (call);
      break;

    /* And now the __atomic* and __sync builtins.
       These are handled differently from the classical memory
       access builtins above.  */

    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      is_store = false;
      /* fall through.  */

    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:

    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:

    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:

    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:

    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:

    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:

    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:

    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:

    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:

    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:

    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:

    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:

    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:

    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:

    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:

    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:

    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:

    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:

    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:

    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:

    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:

    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:

    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:

    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:

    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:

    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:

    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:

    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:

    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:

    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      {
	dest = gimple_call_arg (call, 0);
	/* DEST represents the address of a memory location.
	   instrument_derefs wants the memory location, so let's
	   dereference the address DEST before handing it to
	   instrument_derefs.  */
	if (TREE_CODE (dest) == ADDR_EXPR)
	  dest = TREE_OPERAND (dest, 0);
	else if (TREE_CODE (dest) == SSA_NAME)
	  dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
			 dest, build_int_cst (TREE_TYPE (dest), 0));
	else
	  gcc_unreachable ();

	access_size = int_size_in_bytes (TREE_TYPE (dest));
      }

    default:
      /* The other builtin memory accesses are not instrumented in this
	 function because they either don't have any length parameter,
	 or their length parameter is just a limit.  */
      break;
    }

  if (len != NULL_TREE)
    {
      if (source0 != NULL_TREE)
	{
	  src0->start = source0;
	  src0->access_size = access_size;
	  *src0_len = len;
	  *src0_is_store = false;
	}

      if (source1 != NULL_TREE)
	{
	  src1->start = source1;
	  src1->access_size = access_size;
	  *src1_len = len;
	  *src1_is_store = false;
	}

      if (dest != NULL_TREE)
	{
	  dst->start = dest;
	  dst->access_size = access_size;
	  *dst_len = len;
	  *dst_is_store = true;
	}

      got_reference_p = true;
    }
  else if (dest)
    {
      dst->start = dest;
      dst->access_size = access_size;
      *dst_len = NULL_TREE;
      *dst_is_store = is_store;
      *dest_is_deref = true;
      got_reference_p = true;
    }

  return got_reference_p;
}

/* Return true iff a given gimple statement has been instrumented.
   Note that the statement is "defined" by the memory references it
   contains.  */

static bool
has_stmt_been_instrumented_p (gimple stmt)
{
  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (stmt, &r, &r_is_store))
	return has_mem_ref_been_instrumented (&r);
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
	dest_is_store = false, dest_is_deref = false;
      if (get_mem_refs_of_builtin_call (stmt,
					&src0, &src0_len, &src0_is_store,
					&src1, &src1_len, &src1_is_store,
					&dest, &dest_len, &dest_is_store,
					&dest_is_deref))
	{
	  if (src0.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src0, src0_len))
	    return false;

	  if (src1.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src1, src1_len))
	    return false;

	  if (dest.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&dest, dest_len))
	    return false;

	  return true;
	}
    }
  return false;
}

/* Insert a memory reference into the hash table.  */

static void
update_mem_ref_hash_table (tree ref, char access_size)
{
  hash_table <asan_mem_ref_hasher> ht = get_mem_ref_hash_table ();

  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref **slot = ht.find_slot (&r, INSERT);
  if (*slot == NULL)
    *slot = asan_mem_ref_new (ref, access_size);
}

/* Initialize shadow_ptr_types array.  */

static void
asan_init_shadow_ptr_types (void)
{
  asan_shadow_set = new_alias_set ();
  shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
  shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
  shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
  shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
  initialize_sanitizer_builtins ();
}

/* Asan pretty-printer, used for building the description STRING_CSTs.  */
static pretty_printer asan_pp;
static bool asan_pp_initialized;

/* Initialize asan_pp.  */

static void
asan_pp_initialize (void)
{
  pp_construct (&asan_pp, /* prefix */NULL, /* line-width */0);
  asan_pp_initialized = true;
}

/* Create ADDR_EXPR of STRING_CST with asan_pp text.  */

static tree
asan_pp_string (void)
{
  const char *buf = pp_base_formatted_text (&asan_pp);
  size_t len = strlen (buf);
  tree ret = build_string (len + 1, buf);
  TREE_TYPE (ret)
    = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
			build_index_type (size_int (len)));
  TREE_READONLY (ret) = 1;
  TREE_STATIC (ret) = 1;
  return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
}

/* Return a CONST_INT representing 4 subsequent shadow memory bytes.  */
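
/* For instance, on a little-endian target the shadow bytes
   { 0x00, 0x00, 0x00, 0xF4 } are packed into the SImode constant
   0xF4000000: byte I is shifted into bits [8*I, 8*I+7].  */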

static rtx
asan_shadow_cst (unsigned char shadow_bytes[4])
{
  int i;
  unsigned HOST_WIDE_INT val = 0;
  gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
  for (i = 0; i < 4; i++)
    val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
	   << (BITS_PER_UNIT * i);
  return GEN_INT (trunc_int_for_mode (val, SImode));
}

/* Clear shadow memory at SHADOW_MEM, LEN bytes.  We can't emit a
   library call here though.  */

static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx insn, insns, top_label, end, addr, tmp, jump;

  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;
  if (insn == NULL_RTX)
    {
      emit_insn (insns);
      return;
    }

  gcc_assert ((len & 3) == 0);
  top_label = gen_label_rtx ();
  addr = force_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, GEN_INT (4), addr,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  add_reg_note (jump, REG_BR_PROB, GEN_INT (REG_BR_PROB_BASE * 80 / 100));
}

/* Insert code to protect stack vars.  The prologue sequence should be emitted
   directly, the epilogue sequence is returned.  BASE is the register holding
   the stack base, relative to which the offsets in the OFFSETS array are
   expressed.  The OFFSETS array contains pairs of offsets in reverse order,
   always the end offset of some gap that needs protection followed by its
   starting offset, and DECLS is an array of representative decls for each
   var partition.  LENGTH is the length of the OFFSETS array; the DECLS array
   is LENGTH / 2 - 1 elements long (OFFSETS includes the gap before the first
   variable as well as the gaps after each stack variable).  */

rtx
asan_emit_stack_protection (rtx base, HOST_WIDE_INT *offsets, tree *decls,
			    int length)
{
  rtx shadow_base, shadow_mem, ret, mem;
  unsigned char shadow_bytes[4];
  HOST_WIDE_INT base_offset = offsets[length - 1], offset, prev_offset;
  HOST_WIDE_INT last_offset, last_size;
  int l;
  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
  tree str_cst;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  /* First of all, prepare the description string.  */
  if (!asan_pp_initialized)
    asan_pp_initialize ();

  pp_clear_output_area (&asan_pp);
  if (DECL_NAME (current_function_decl))
    pp_base_tree_identifier (&asan_pp, DECL_NAME (current_function_decl));
  else
    pp_string (&asan_pp, "<unknown>");
  pp_space (&asan_pp);
  pp_decimal_int (&asan_pp, length / 2 - 1);
  pp_space (&asan_pp);
  for (l = length - 2; l; l -= 2)
    {
      tree decl = decls[l / 2 - 1];
      pp_wide_integer (&asan_pp, offsets[l] - base_offset);
      pp_space (&asan_pp);
      pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
      pp_space (&asan_pp);
      if (DECL_P (decl) && DECL_NAME (decl))
	{
	  pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
	  pp_space (&asan_pp);
	  pp_base_tree_identifier (&asan_pp, DECL_NAME (decl));
	}
      else
	pp_string (&asan_pp, "9 <unknown>");
      pp_space (&asan_pp);
    }
  str_cst = asan_pp_string ();

  /* Emit the prologue sequence.  */
  base = expand_binop (Pmode, add_optab, base, GEN_INT (base_offset),
		       NULL_RTX, 1, OPTAB_DIRECT);
  mem = gen_rtx_MEM (ptr_mode, base);
  emit_move_insn (mem, GEN_INT (ASAN_STACK_FRAME_MAGIC));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  emit_move_insn (mem, expand_normal (str_cst));
  shadow_base = expand_binop (Pmode, lshr_optab, base,
			      GEN_INT (ASAN_SHADOW_SHIFT),
			      NULL_RTX, 1, OPTAB_DIRECT);
  shadow_base = expand_binop (Pmode, add_optab, shadow_base,
			      GEN_INT (targetm.asan_shadow_offset ()),
			      NULL_RTX, 1, OPTAB_DIRECT);
  gcc_assert (asan_shadow_set != -1
	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
  shadow_mem = gen_rtx_MEM (SImode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  prev_offset = base_offset;
  for (l = length; l; l -= 2)
    {
      if (l == 2)
	cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
      offset = offsets[l - 1];
      if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
	{
	  int i;
	  HOST_WIDE_INT aoff
	    = base_offset + ((offset - base_offset)
			     & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (aoff - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = aoff;
	  for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
	    if (aoff < offset)
	      {
		if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
		  shadow_bytes[i] = 0;
		else
		  shadow_bytes[i] = offset - aoff;
	      }
	    else
	      shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset = aoff;
	}
      while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = offset;
	  memset (shadow_bytes, cur_shadow_byte, 4);
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset += ASAN_RED_ZONE_SIZE;
	}
      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
    }
  do_pending_stack_adjust ();

  /* Construct epilogue sequence.  */
  start_sequence ();

  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  prev_offset = base_offset;
  last_offset = base_offset;
  last_size = 0;
  for (l = length; l; l -= 2)
    {
      offset = base_offset + ((offsets[l - 1] - base_offset)
			     & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
      if (last_offset + last_size != offset)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (last_offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = last_offset;
	  asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
	  last_offset = offset;
	  last_size = 0;
	}
      last_size += base_offset + ((offsets[l - 2] - base_offset)
				  & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
		   - offset;
    }
  if (last_size)
    {
      shadow_mem = adjust_address (shadow_mem, VOIDmode,
				   (last_offset - prev_offset)
				   >> ASAN_SHADOW_SHIFT);
      asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
    }

  do_pending_stack_adjust ();

  ret = get_insns ();
  end_sequence ();
  return ret;
}

/* Return true if DECL, a global var, might be overridden and therefore
   needs a local alias.  */

static bool
asan_needs_local_alias (tree decl)
{
  return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
}

/* Return true if DECL is a VAR_DECL that should be protected
   by Address Sanitizer, by appending a red zone with protected
   shadow memory after it and aligning it to at least
   ASAN_RED_ZONE_SIZE bytes.  */

bool
asan_protect_global (tree decl)
{
  rtx rtl, symbol;

  if (TREE_CODE (decl) == STRING_CST)
    {
      /* Instrument all STRING_CSTs except those created
	 by asan_pp_string here.  */
      if (shadow_ptr_types[0] != NULL_TREE
	  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
	  && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
	return false;
      return true;
    }
  if (TREE_CODE (decl) != VAR_DECL
      /* TLS vars aren't statically protectable.  */
      || DECL_THREAD_LOCAL_P (decl)
      /* Externs will be protected elsewhere.  */
      || DECL_EXTERNAL (decl)
      || !DECL_RTL_SET_P (decl)
      /* Comdat vars pose an ABI problem, we can't know if
	 the var that is selected by the linker will have
	 padding or not.  */
      || DECL_ONE_ONLY (decl)
      /* Similarly for common vars.  People can use -fno-common.  */
      || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
      /* Don't protect if using user section, often vars placed
	 into user section from multiple TUs are then assumed
	 to be an array of such vars, putting padding in there
	 breaks this assumption.  */
      || (DECL_SECTION_NAME (decl) != NULL_TREE
	  && !DECL_HAS_IMPLICIT_SECTION_NAME_P (decl))
      || DECL_SIZE (decl) == 0
      || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
      || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
      || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE)
    return false;

  rtl = DECL_RTL (decl);
  if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
    return false;
  symbol = XEXP (rtl, 0);

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    return false;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return false;

#ifndef ASM_OUTPUT_DEF
  if (asan_needs_local_alias (decl))
    return false;
#endif

  return true;
}

/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16}.
   IS_STORE is either 1 (for a store) or 0 (for a load).
   SIZE_IN_BYTES is one of 1, 2, 4, 8, 16.  */
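
/* E.g. report_error_func (false, 4) returns the decl of
   BUILT_IN_ASAN_REPORT_LOAD4, i.e. __asan_report_load4.  */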

static tree
report_error_func (bool is_store, int size_in_bytes)
{
  static enum built_in_function report[2][5]
    = { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
	  BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
	  BUILT_IN_ASAN_REPORT_LOAD16 },
	{ BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
	  BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
	  BUILT_IN_ASAN_REPORT_STORE16 } };
  return builtin_decl_implicit (report[is_store][exact_log2 (size_in_bytes)]);
}

#define PROB_VERY_UNLIKELY	(REG_BR_PROB_BASE / 2000 - 1)
#define PROB_ALWAYS		(REG_BR_PROB_BASE)

/* Split the current basic block and create a condition statement
   insertion point right before or after the statement pointed to by
   ITER.  Return an iterator to the point at which the caller might
   safely insert the condition statement.

   THEN_BLOCK must be set to the address of an uninitialized instance
   of basic_block.  The function will then set *THEN_BLOCK to the
   'then block' of the condition statement to be inserted by the
   caller.

   If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
   *THEN_BLOCK to *FALLTHROUGH_BLOCK.

   Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.

   Note that *FALLTHROUGH_BLOCK is a new block that contains the
   statements starting from *ITER, and *THEN_BLOCK is a new empty
   block.

   *ITER is adjusted to always point to the first statement of the
   basic block *FALLTHROUGH_BLOCK.  That statement is the same as
   what ITER was pointing to prior to calling this function, if
   BEFORE_P is true; otherwise, it is its following statement.  */
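
/* Schematically, the resulting control flow is (a sketch):

       COND_BB ----------> THEN_BB (new, empty)
          \                   /
           \                 / (only if CREATE_THEN_FALLTHRU_EDGE)
            v               v
         FALLTHROUGH_BB (starts with the statements from *ITER)  */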

static gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
			  bool before_p,
			  bool then_more_likely_p,
			  bool create_then_fallthru_edge,
			  basic_block *then_block,
			  basic_block *fallthrough_block)
{
  gimple_stmt_iterator gsi = *iter;

  if (!gsi_end_p (gsi) && before_p)
    gsi_prev (&gsi);

  basic_block cur_bb = gsi_bb (*iter);

  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  int fallthrough_probability
    = then_more_likely_p
    ? PROB_VERY_UNLIKELY
    : PROB_ALWAYS - PROB_VERY_UNLIKELY;
  e->probability = PROB_ALWAYS - fallthrough_probability;
  if (create_then_fallthru_edge)
    make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  return gsi_last_bb (cond_bb);
}

/* Insert an if condition followed by a 'then block' right before the
   statement pointed to by ITER.  The fallthrough block -- which is the
   else block of the condition as well as the destination of the
   outgoing edge of the 'then block' -- starts with the statement
   pointed to by ITER.

   COND is the condition of the if.

   If THEN_MORE_LIKELY_P is true, the probability of the edge to the
   'then block' is higher than the probability of the edge to the
   fallthrough block.

   Upon completion of the function, *THEN_BB is set to the newly
   inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
   fallthrough block.

   *ITER is adjusted to still point to the same statement it was
   pointing to initially.  */

static void
insert_if_then_before_iter (gimple cond,
			    gimple_stmt_iterator *iter,
			    bool then_more_likely_p,
			    basic_block *then_bb,
			    basic_block *fallthrough_bb)
{
  gimple_stmt_iterator cond_insert_point =
    create_cond_insert_point (iter,
			      /*before_p=*/true,
			      then_more_likely_p,
			      /*create_then_fallthru_edge=*/true,
			      then_bb,
			      fallthrough_bb);
  gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
}

/* Instrument the memory access instruction BASE.  Insert new
   statements before or after ITER.

   Note that the memory access represented by BASE can be either an
   SSA_NAME, or a non-SSA expression.  LOCATION is the source code
   location.  IS_STORE is TRUE for a store, FALSE for a load.
   BEFORE_P is TRUE for inserting the instrumentation code before
   ITER, FALSE for inserting it after ITER.  SIZE_IN_BYTES is one of
   1, 2, 4, 8, 16.

   If BEFORE_P is TRUE, *ITER is arranged to still point to the
   statement it was pointing to prior to calling this function,
   otherwise, it points to the statement logically following it.  */
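
/* For an 8-byte load, the emitted gimple is roughly (a sketch):

     _1 = (uintptr_type) base;
     _2 = _1 >> ASAN_SHADOW_SHIFT;
     _3 = _2 + <targetm.asan_shadow_offset ()>;
     _4 = *(shadow type *) _3;
     if (_4 != 0)
       __asan_report_load8 (_1);  */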

static void
build_check_stmt (location_t location, tree base, gimple_stmt_iterator *iter,
		  bool before_p, bool is_store, int size_in_bytes)
{
  gimple_stmt_iterator gsi;
  basic_block then_bb, else_bb;
  tree t, base_addr, shadow;
  gimple g;
  tree shadow_ptr_type = shadow_ptr_types[size_in_bytes == 16 ? 1 : 0];
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  tree uintptr_type
    = build_nonstandard_integer_type (TYPE_PRECISION (TREE_TYPE (base)), 1);
  tree base_ssa = base;

  /* Get an iterator on the point where we can add the condition
     statement for the instrumentation.  */
  gsi = create_cond_insert_point (iter, before_p,
				  /*then_more_likely_p=*/false,
				  /*create_then_fallthru_edge=*/false,
				  &then_bb,
				  &else_bb);

  base = unshare_expr (base);

  /* BASE can already be an SSA_NAME; in that case, do not create a
     new SSA_NAME for it.  */
  if (TREE_CODE (base) != SSA_NAME)
    {
      g = gimple_build_assign_with_ops (TREE_CODE (base),
					make_ssa_name (TREE_TYPE (base), NULL),
					base, NULL_TREE);
      gimple_set_location (g, location);
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
      base_ssa = gimple_assign_lhs (g);
    }

  g = gimple_build_assign_with_ops (NOP_EXPR,
				    make_ssa_name (uintptr_type, NULL),
				    base_ssa, NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
  base_addr = gimple_assign_lhs (g);

  /* Build
     (base_addr >> ASAN_SHADOW_SHIFT) + targetm.asan_shadow_offset ().  */

  t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
  g = gimple_build_assign_with_ops (RSHIFT_EXPR,
				    make_ssa_name (uintptr_type, NULL),
				    base_addr, t);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  t = build_int_cst (uintptr_type, targetm.asan_shadow_offset ());
  g = gimple_build_assign_with_ops (PLUS_EXPR,
				    make_ssa_name (uintptr_type, NULL),
				    gimple_assign_lhs (g), t);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  g = gimple_build_assign_with_ops (NOP_EXPR,
				    make_ssa_name (shadow_ptr_type, NULL),
				    gimple_assign_lhs (g), NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
	      build_int_cst (shadow_ptr_type, 0));
  g = gimple_build_assign_with_ops (MEM_REF,
				    make_ssa_name (shadow_type, NULL),
				    t, NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
  shadow = gimple_assign_lhs (g);

  if (size_in_bytes < 8)
    {
      /* Slow path for 1-, 2- and 4-byte accesses.
	 Test (shadow != 0)
	      & ((base_addr & 7) + (size_in_bytes - 1) >= shadow).  */
      g = gimple_build_assign_with_ops (NE_EXPR,
					make_ssa_name (boolean_type_node,
						       NULL),
					shadow,
					build_int_cst (shadow_type, 0));
      gimple_set_location (g, location);
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
      t = gimple_assign_lhs (g);

      g = gimple_build_assign_with_ops (BIT_AND_EXPR,
					make_ssa_name (uintptr_type,
						       NULL),
					base_addr,
					build_int_cst (uintptr_type, 7));
      gimple_set_location (g, location);
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);

      g = gimple_build_assign_with_ops (NOP_EXPR,
					make_ssa_name (shadow_type,
						       NULL),
					gimple_assign_lhs (g), NULL_TREE);
      gimple_set_location (g, location);
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);

      if (size_in_bytes > 1)
	{
	  g = gimple_build_assign_with_ops (PLUS_EXPR,
					    make_ssa_name (shadow_type,
							   NULL),
					    gimple_assign_lhs (g),
					    build_int_cst (shadow_type,
							   size_in_bytes - 1));
	  gimple_set_location (g, location);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	}

      g = gimple_build_assign_with_ops (GE_EXPR,
					make_ssa_name (boolean_type_node,
						       NULL),
					gimple_assign_lhs (g),
					shadow);
      gimple_set_location (g, location);
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);

      g = gimple_build_assign_with_ops (BIT_AND_EXPR,
					make_ssa_name (boolean_type_node,
						       NULL),
					t, gimple_assign_lhs (g));
      gimple_set_location (g, location);
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
      t = gimple_assign_lhs (g);
    }
  else
    t = shadow;

  g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
			 NULL_TREE, NULL_TREE);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  /* Generate call to the run-time library (e.g. __asan_report_load8).  */
  gsi = gsi_start_bb (then_bb);
  g = gimple_build_call (report_error_func (is_store, size_in_bytes),
			 1, base_addr);
  gimple_set_location (g, location);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  *iter = gsi_start_bb (else_bb);
}

/* If T represents a memory access, add instrumentation code before ITER.
   LOCATION is the source code location.
   IS_STORE is either TRUE (for a store) or FALSE (for a load).  */

static void
instrument_derefs (gimple_stmt_iterator *iter, tree t,
		   location_t location, bool is_store)
{
  tree type, base;
  HOST_WIDE_INT size_in_bytes;

  type = TREE_TYPE (t);
  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
    case INDIRECT_REF:
    case MEM_REF:
      break;
    default:
      return;
    }

  size_in_bytes = int_size_in_bytes (type);
  if ((size_in_bytes & (size_in_bytes - 1)) != 0
      || (unsigned HOST_WIDE_INT) size_in_bytes - 1 >= 16)
    return;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  get_inner_reference (t, &bitsize, &bitpos, &offset,
		       &mode, &unsignedp, &volatilep, false);
  if (bitpos % (size_in_bytes * BITS_PER_UNIT)
      || bitsize != size_in_bytes * BITS_PER_UNIT)
    {
      if (TREE_CODE (t) == COMPONENT_REF
	  && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
	{
	  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
	  instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
					   TREE_OPERAND (t, 0), repr,
					   NULL_TREE), location, is_store);
	}
      return;
    }

  base = build_fold_addr_expr (t);
  if (!has_mem_ref_been_instrumented (base, size_in_bytes))
    {
      build_check_stmt (location, base, iter, /*before_p=*/true,
			is_store, size_in_bytes);
      update_mem_ref_hash_table (base, size_in_bytes);
      update_mem_ref_hash_table (t, size_in_bytes);
    }
}

/* Instrument an access to a contiguous memory region that starts at
   the address pointed to by BASE, over a length of LEN (expressed in
   units of sizeof (*BASE) bytes).  ITER points to the instruction before
   which the instrumentation instructions must be inserted.  LOCATION
   is the source location that the instrumentation instructions must
   have.  If IS_STORE is true, then the memory access is a store;
   otherwise, it's a load.  */

static void
instrument_mem_region_access (tree base, tree len,
			      gimple_stmt_iterator *iter,
			      location_t location, bool is_store)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base))
      || !INTEGRAL_TYPE_P (TREE_TYPE (len))
      || integer_zerop (len))
    return;

  gimple_stmt_iterator gsi = *iter;

  basic_block fallthrough_bb = NULL, then_bb = NULL;

  /* If the beginning of the memory region has already been
     instrumented, do not instrument it.  */
  bool start_instrumented = has_mem_ref_been_instrumented (base, 1);

  /* If the end of the memory region has already been instrumented, do
     not instrument it.  */
  tree end = asan_mem_ref_get_end (base, len);
  bool end_instrumented = has_mem_ref_been_instrumented (end, 1);

  if (start_instrumented && end_instrumented)
    return;

  if (!is_gimple_constant (len))
    {
      /* So, the length of the memory area to asan-protect is
	 non-constant.  Let's guard the generated instrumentation code
	 like:

	 if (len != 0)
	   {
	     // asan instrumentation code goes here.
	   }
	 // fallthrough instructions, starting with *ITER.  */

      gimple g = gimple_build_cond (NE_EXPR,
				    len,
				    build_int_cst (TREE_TYPE (len), 0),
				    NULL_TREE, NULL_TREE);
      gimple_set_location (g, location);
      insert_if_then_before_iter (g, iter, /*then_more_likely_p=*/true,
				  &then_bb, &fallthrough_bb);
      /* Note that fallthrough_bb starts with the statement that was
	 pointed to by ITER.  */

      /* The 'then block' of the 'if (len != 0)' condition is where
	 we'll generate the asan instrumentation code now.  */
      gsi = gsi_last_bb (then_bb);
    }
1571 
1572   if (!start_instrumented)
1573     {
1574       /* Instrument the beginning of the memory region to be accessed,
1575 	 and arrange for the rest of the instrumentation code to be
1576 	 inserted in the then block *after* the current gsi.  */
1577       build_check_stmt (location, base, &gsi, /*before_p=*/true, is_store, 1);
1578 
1579       if (then_bb)
1580 	/* We are in the case where the length of the region is not
1581 	   constant; so instrumentation code is being generated in the
1582 	   'then block' of the 'if (len != 0)' condition.  Let's arrange
1583 	   for the subsequent instrumentation statements to go in the
1584 	   'then block'.  */
1585 	gsi = gsi_last_bb (then_bb);
1586       else
1587         {
1588           *iter = gsi;
1589 	  /* The length is a non-zero constant here, so this access is
1590 	     really instrumented and can safely be remembered.  (An unknown
1591 	     length might be zero at run time, so it must not be recorded.)  */
1592           update_mem_ref_hash_table (base, 1);
1593 	}
1594     }
1595 
1596   if (end_instrumented)
1597     return;
1598 
1599   /* We want to instrument the access at the end of the memory region,
1600      which is at (base + len - 1).  */
1601 
1602   /* offset = len - 1;  */
1603   len = unshare_expr (len);
1604   tree offset;
1605   gimple_seq seq = NULL;
1606   if (TREE_CODE (len) == INTEGER_CST)
1607     offset = fold_build2 (MINUS_EXPR, size_type_node,
1608 			  fold_convert (size_type_node, len),
1609 			  build_int_cst (size_type_node, 1));
1610   else
1611     {
1612       gimple g;
1613       tree t;
1614 
1615       if (TREE_CODE (len) != SSA_NAME)
1616 	{
1617 	  t = make_ssa_name (TREE_TYPE (len), NULL);
1618 	  g = gimple_build_assign_with_ops (TREE_CODE (len), t, len, NULL);
1619 	  gimple_set_location (g, location);
1620 	  gimple_seq_add_stmt_without_update (&seq, g);
1621 	  len = t;
1622 	}
1623       if (!useless_type_conversion_p (size_type_node, TREE_TYPE (len)))
1624 	{
1625 	  t = make_ssa_name (size_type_node, NULL);
1626 	  g = gimple_build_assign_with_ops (NOP_EXPR, t, len, NULL);
1627 	  gimple_set_location (g, location);
1628 	  gimple_seq_add_stmt_without_update (&seq, g);
1629 	  len = t;
1630 	}
1631 
1632       t = make_ssa_name (size_type_node, NULL);
1633       g = gimple_build_assign_with_ops (MINUS_EXPR, t, len,
1634 					build_int_cst (size_type_node, 1));
1635       gimple_set_location (g, location);
1636       gimple_seq_add_stmt_without_update (&seq, g);
1637       offset = gimple_assign_lhs (g);
1638     }
1639 
1640   /* _1 = base;  */
1641   base = unshare_expr (base);
1642   gimple region_end =
1643     gimple_build_assign_with_ops (TREE_CODE (base),
1644 				  make_ssa_name (TREE_TYPE (base), NULL),
1645 				  base, NULL);
1646   gimple_set_location (region_end, location);
1647   gimple_seq_add_stmt_without_update (&seq, region_end);
1648 
1649   /* _2 = _1 + offset;  */
1650   region_end =
1651     gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
1652 				  make_ssa_name (TREE_TYPE (base), NULL),
1653 				  gimple_assign_lhs (region_end),
1654 				  offset);
1655   gimple_set_location (region_end, location);
1656   gimple_seq_add_stmt_without_update (&seq, region_end);
1657   gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);
1658 
1659   /* instrument access at _2;  */
1660   gsi = gsi_for_stmt (region_end);
1661   build_check_stmt (location, gimple_assign_lhs (region_end),
1662 		    &gsi, /*before_p=*/false, is_store, 1);
1663 
1664   if (then_bb == NULL)
1665     update_mem_ref_hash_table (end, 1);
1666 
1667   *iter = gsi_for_stmt (gsi_stmt (*iter));
1668 }
1669 
1670 /* Instrument the call (to the builtin strlen function) pointed to by
1671    ITER.
1672 
1673    This function instruments the access to the first byte of the
1674    argument, right before the call.  After the call it instruments the
1675    access to the last byte of the argument; it uses the result of the
1676    call to deduce the offset of that last byte.
1677 
1678    Upon completion, iff the call has actually been instrumented, this
1679    function returns TRUE and *ITER points to the statement logically
1680    following the built-in strlen function call *ITER was initially
1681    pointing to.  Otherwise, the function returns FALSE and *ITER
1682    remains unchanged.  */
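
/* Sketch of the transformation (SSA names illustrative): a call

     n_5 = strlen (str_2);

   becomes roughly

     _1 = (char *) str_2;
     check *_1;			// read check of the first byte
     n_5 = strlen (str_2);
     _2 = _1 + n_5;
     check *_2;			// read check of the terminating NUL  */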
1683 
1684 static bool
1685 instrument_strlen_call (gimple_stmt_iterator *iter)
1686 {
1687   gimple call = gsi_stmt (*iter);
1688   gcc_assert (is_gimple_call (call));
1689 
1690   tree callee = gimple_call_fndecl (call);
1691   gcc_assert (is_builtin_fn (callee)
1692 	      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
1693 	      && DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN);
1694 
1695   tree len = gimple_call_lhs (call);
1696   if (len == NULL)
1697     /* Some passes might clear the return value of the strlen call;
1698        bail out in that case.  Return FALSE as we are not advancing
1699        *ITER.  */
1700     return false;
1701   gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (len)));
1702 
1703   location_t loc = gimple_location (call);
1704   tree str_arg = gimple_call_arg (call, 0);
1705 
1706   /* Instrument the access to the first byte of str_arg, i.e.:
1707 
1708      _1 = str_arg; instrument (_1); */
1709   tree cptr_type = build_pointer_type (char_type_node);
1710   gimple str_arg_ssa =
1711     gimple_build_assign_with_ops (NOP_EXPR,
1712 				  make_ssa_name (cptr_type, NULL),
1713 				  str_arg, NULL);
1714   gimple_set_location (str_arg_ssa, loc);
1715   gimple_stmt_iterator gsi = *iter;
1716   gsi_insert_before (&gsi, str_arg_ssa, GSI_NEW_STMT);
1717   build_check_stmt (loc, gimple_assign_lhs (str_arg_ssa), &gsi,
1718 		    /*before_p=*/false, /*is_store=*/false, 1);
1719 
1720   /* If we initially had an instruction like:
1721 
1722 	 int n = strlen (str)
1723 
1724      we now want to instrument the access to str[n], after the
1725      instruction above.  */
1726 
1727   /* So let's build the access to str[n] that is, access through the
1728      pointer_plus expr: (_1 + len).  */
1729   gimple stmt =
1730     gimple_build_assign_with_ops (POINTER_PLUS_EXPR,
1731 				  make_ssa_name (cptr_type, NULL),
1732 				  gimple_assign_lhs (str_arg_ssa),
1733 				  len);
1734   gimple_set_location (stmt, loc);
1735   gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
1736 
1737   build_check_stmt (loc, gimple_assign_lhs (stmt), &gsi,
1738 		    /*before_p=*/false, /*is_store=*/false, 1);
1739 
1740   /* Ensure that iter points to the statement logically following the
1741      one it was initially pointing to.  */
1742   *iter = gsi;
1743   /* As *ITER has been advanced to point to the next statement, let's
1744      return true to inform transform_statements that it shouldn't
1745      advance *ITER anymore; otherwise it would skip that next
1746      statement, which wouldn't be instrumented.  */
1747   return true;
1748 }
1749 
1750 /* Instrument the call to a built-in memory access function that is
1751    pointed to by the iterator ITER.
1752 
1753    Upon completion, return TRUE iff *ITER has been advanced to the
1754    statement following the one it was originally pointing to.  */
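
/* For a call such as memcpy (d, s, n), get_mem_refs_of_builtin_call
   below reports S as a read region and D as a written region, each of
   length N, and both are handed to instrument_mem_region_access.
   Builtins that merely dereference one pointer argument (the atomic
   memory builtins, for instance) set DEST_IS_DEREF instead and get a
   plain instrument_derefs check.  */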
1755 
1756 static bool
1757 instrument_builtin_call (gimple_stmt_iterator *iter)
1758 {
1759   bool iter_advanced_p = false;
1760   gimple call = gsi_stmt (*iter);
1761 
1762   gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));
1763 
1764   tree callee = gimple_call_fndecl (call);
1765   location_t loc = gimple_location (call);
1766 
1767   if (DECL_FUNCTION_CODE (callee) == BUILT_IN_STRLEN)
1768     iter_advanced_p = instrument_strlen_call (iter);
1769   else
1770     {
1771       asan_mem_ref src0, src1, dest;
1772       asan_mem_ref_init (&src0, NULL, 1);
1773       asan_mem_ref_init (&src1, NULL, 1);
1774       asan_mem_ref_init (&dest, NULL, 1);
1775 
1776       tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
1777       bool src0_is_store = false, src1_is_store = false,
1778 	dest_is_store = false, dest_is_deref = false;
1779 
1780       if (get_mem_refs_of_builtin_call (call,
1781 					&src0, &src0_len, &src0_is_store,
1782 					&src1, &src1_len, &src1_is_store,
1783 					&dest, &dest_len, &dest_is_store,
1784 					&dest_is_deref))
1785 	{
1786 	  if (dest_is_deref)
1787 	    {
1788 	      instrument_derefs (iter, dest.start, loc, dest_is_store);
1789 	      gsi_next (iter);
1790 	      iter_advanced_p = true;
1791 	    }
1792 	  else if (src0_len || src1_len || dest_len)
1793 	    {
1794 	      if (src0.start != NULL_TREE)
1795 		instrument_mem_region_access (src0.start, src0_len,
1796 					      iter, loc, /*is_store=*/false);
1797 	      if (src1.start != NULL_TREE)
1798 		instrument_mem_region_access (src1.start, src1_len,
1799 					      iter, loc, /*is_store=*/false);
1800 	      if (dest.start != NULL_TREE)
1801 		instrument_mem_region_access (dest.start, dest_len,
1802 					      iter, loc, /*is_store=*/true);
1803 	      *iter = gsi_for_stmt (call);
1804 	      gsi_next (iter);
1805 	      iter_advanced_p = true;
1806 	    }
1807 	}
1808     }
1809   return iter_advanced_p;
1810 }
1811 
1812 /* Instrument the assignment statement pointed to by ITER if it is
1813    subject to instrumentation.  Return TRUE iff instrumentation
1814    actually happened.  In that case, the iterator ITER is advanced to
1815    the next logical expression following the one initially pointed to
1816    by ITER, and the relevant memory reference whose access has been
1817    instrumented is added to the memory references hash table.  */
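
/* E.g. for the single assignments

     x_1 = a[i_2];	// load: the RHS reference is instrumented
     a[i_2] = x_1;	// store: the LHS reference is instrumented

   the shadow check is emitted right before the statement itself.  */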
1818 
1819 static bool
1820 maybe_instrument_assignment (gimple_stmt_iterator *iter)
1821 {
1822   gimple s = gsi_stmt (*iter);
1823 
1824   gcc_assert (gimple_assign_single_p (s));
1825 
1826   tree ref_expr = NULL_TREE;
1827   bool is_store, is_instrumented = false;
1828 
1829   if (gimple_store_p (s))
1830     {
1831       ref_expr = gimple_assign_lhs (s);
1832       is_store = true;
1833       instrument_derefs (iter, ref_expr,
1834 			 gimple_location (s),
1835 			 is_store);
1836       is_instrumented = true;
1837     }
1838 
1839   if (gimple_assign_load_p (s))
1840     {
1841       ref_expr = gimple_assign_rhs1 (s);
1842       is_store = false;
1843       instrument_derefs (iter, ref_expr,
1844 			 gimple_location (s),
1845 			 is_store);
1846       is_instrumented = true;
1847     }
1848 
1849   if (is_instrumented)
1850     gsi_next (iter);
1851 
1852   return is_instrumented;
1853 }
1854 
1855 /* Instrument the function call pointed to by the iterator ITER, if it
1856    is subject to instrumentation.  At the moment, the only function
1857    calls that are instrumented are some built-in functions that access
1858    memory.  Look at instrument_builtin_call to learn more.
1859 
1860    Upon completion return TRUE iff *ITER was advanced to the statement
1861    following the one it was originally pointing to.  */
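
/* Independently of the builtin handling, this function also arranges
   for every noreturn call (except trivial traps) to be preceded by a
   run-time notification, roughly:

     __builtin___asan_handle_no_return ();
     abort ();

   so the run-time library can unpoison the current thread's stack,
   since the abandoned frames will never pop normally.  */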
1862 
1863 static bool
1864 maybe_instrument_call (gimple_stmt_iterator *iter)
1865 {
1866   gimple stmt = gsi_stmt (*iter);
1867   bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);
1868 
1869   if (is_builtin && instrument_builtin_call (iter))
1870     return true;
1871 
1872   if (gimple_call_noreturn_p (stmt))
1873     {
1874       if (is_builtin)
1875 	{
1876 	  tree callee = gimple_call_fndecl (stmt);
1877 	  switch (DECL_FUNCTION_CODE (callee))
1878 	    {
1879 	    case BUILT_IN_UNREACHABLE:
1880 	    case BUILT_IN_TRAP:
1881 	      /* Don't instrument these.  */
1882 	      return false;
	    default:
	      break;
1883 	    }
1884 	}
1885       tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
1886       gimple g = gimple_build_call (decl, 0);
1887       gimple_set_location (g, gimple_location (stmt));
1888       gsi_insert_before (iter, g, GSI_SAME_STMT);
1889     }
1890   return false;
1891 }
1892 
1893 /* Walk each instruction in all basic blocks and instrument those
1894    that represent memory references: loads, stores, or function
1895    calls.  Within a given extended basic block, this function avoids
1896    instrumenting memory references already instrumented there.  */
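
/* For instance, within one such extended basic block:

     a[i_1] = 1;	// instrumented, then recorded in the hash table
     x_2 = a[i_1];	// same reference: found in the table, skipped
     free (p_3);	// may free memory: the hash table is flushed
     x_4 = a[i_1];	// instrumented again  */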
1897 
1898 static void
1899 transform_statements (void)
1900 {
1901   basic_block bb, last_bb = NULL;
1902   gimple_stmt_iterator i;
1903   int saved_last_basic_block = last_basic_block;
1904 
1905   FOR_EACH_BB (bb)
1906     {
1907       basic_block prev_bb = bb;
1908 
1909       if (bb->index >= saved_last_basic_block) continue;
1910 
1911       /* Flush the mem ref hash table, if current bb doesn't have
1912 	 exactly one predecessor, or if that predecessor (skipping
1913 	 over asan created basic blocks) isn't the last processed
1914 	 basic block.  Thus we effectively flush on extended basic
1915 	 block boundaries.  */
1916       while (single_pred_p (prev_bb))
1917 	{
1918 	  prev_bb = single_pred (prev_bb);
1919 	  if (prev_bb->index < saved_last_basic_block)
1920 	    break;
1921 	}
1922       if (prev_bb != last_bb)
1923 	empty_mem_ref_hash_table ();
1924       last_bb = bb;
1925 
1926       for (i = gsi_start_bb (bb); !gsi_end_p (i);)
1927 	{
1928 	  gimple s = gsi_stmt (i);
1929 
1930 	  if (has_stmt_been_instrumented_p (s))
1931 	    gsi_next (&i);
1932 	  else if (gimple_assign_single_p (s)
1933 		   && maybe_instrument_assignment (&i))
1934 	    /*  Nothing to do as maybe_instrument_assignment advanced
1935 		the iterator I.  */;
1936 	  else if (is_gimple_call (s) && maybe_instrument_call (&i))
1937 	    /*  Nothing to do as maybe_instrument_call
1938 		advanced the iterator I.  */;
1939 	  else
1940 	    {
1941 	      /* No instrumentation happened.
1942 
1943 		 If the current instruction is a function call that
1944 		 might free something, let's forget about the memory
1945 		 references that got instrumented.  Otherwise we might
1946 		 miss some instrumentation opportunities.  */
1947 	      if (is_gimple_call (s) && !nonfreeing_call_p (s))
1948 		empty_mem_ref_hash_table ();
1949 
1950 	      gsi_next (&i);
1951 	    }
1952 	}
1953     }
1954   free_mem_ref_resources ();
1955 }
1956 
1957 /* Build
1958    struct __asan_global
1959    {
1960      const void *__beg;
1961      uptr __size;
1962      uptr __size_with_redzone;
1963      const void *__name;
1964      uptr __has_dynamic_init;
1965    } type.  */
1966 
1967 static tree
1968 asan_global_struct (void)
1969 {
1970   static const char *field_names[5]
1971     = { "__beg", "__size", "__size_with_redzone",
1972 	"__name", "__has_dynamic_init" };
1973   tree fields[5], ret;
1974   int i;
1975 
1976   ret = make_node (RECORD_TYPE);
1977   for (i = 0; i < 5; i++)
1978     {
1979       fields[i]
1980 	= build_decl (UNKNOWN_LOCATION, FIELD_DECL,
1981 		      get_identifier (field_names[i]),
1982 		      (i == 0 || i == 3) ? const_ptr_type_node
1983 		      : build_nonstandard_integer_type (POINTER_SIZE, 1));
1984       DECL_CONTEXT (fields[i]) = ret;
1985       if (i)
1986 	DECL_CHAIN (fields[i - 1]) = fields[i];
1987     }
1988   TYPE_FIELDS (ret) = fields[0];
1989   TYPE_NAME (ret) = get_identifier ("__asan_global");
1990   layout_type (ret);
1991   return ret;
1992 }
1993 
1994 /* Append the description of a single global DECL to vector V.
1995    TYPE is the __asan_global struct type returned by asan_global_struct.  */
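
/* For a global like

     int g;

   defined in foo.c, the initializer appended to V is roughly

     { &g (or its local .LASANx alias), sizeof (g),
       sizeof (g) plus the red zone size, "g (foo.c)", 0 }

   with the two sizes emitted as uptr constants.  */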
1996 
1997 static void
1998 asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
1999 {
2000   tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
2001   unsigned HOST_WIDE_INT size;
2002   tree str_cst, refdecl = decl;
2003   vec<constructor_elt, va_gc> *vinner = NULL;
2004 
2005   if (!asan_pp_initialized)
2006     asan_pp_initialize ();
2007 
2008   pp_clear_output_area (&asan_pp);
2009   if (DECL_NAME (decl))
2010     pp_base_tree_identifier (&asan_pp, DECL_NAME (decl));
2011   else
2012     pp_string (&asan_pp, "<unknown>");
2013   pp_space (&asan_pp);
2014   pp_left_paren (&asan_pp);
2015   pp_string (&asan_pp, main_input_filename);
2016   pp_right_paren (&asan_pp);
2017   str_cst = asan_pp_string ();
2018 
2019   if (asan_needs_local_alias (decl))
2020     {
2021       char buf[20];
2022       ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
2023       refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
2024 			    VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
2025       TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
2026       TREE_READONLY (refdecl) = TREE_READONLY (decl);
2027       TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
2028       DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
2029       DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
2030       DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
2031       TREE_STATIC (refdecl) = 1;
2032       TREE_PUBLIC (refdecl) = 0;
2033       TREE_USED (refdecl) = 1;
2034       assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
2035     }
2036 
2037   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2038 			  fold_convert (const_ptr_type_node,
2039 					build_fold_addr_expr (refdecl)));
2040   size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
2041   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2042   size += asan_red_zone_size (size);
2043   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
2044   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
2045 			  fold_convert (const_ptr_type_node, str_cst));
2046   CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, 0));
2047   init = build_constructor (type, vinner);
2048   CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
2049 }
2050 
2051 /* Initialize sanitizer.def builtins if the FE hasn't initialized them.  */
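
/* For instance, a sanitizer.def entry along the lines of

     DEF_SANITIZER_BUILTIN (BUILT_IN_ASAN_REPORT_LOAD4,
			    "__asan_report_load4", BT_FN_VOID_PTR,
			    ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST)

   expands, through the DEF_SANITIZER_BUILTIN definition below, into an
   add_builtin_function call registering __builtin___asan_report_load4
   with the void (void *) function type built in this function.  */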
2052 void
2053 initialize_sanitizer_builtins (void)
2054 {
2055   tree decl;
2056 
2057   if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
2058     return;
2059 
2060   tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
2061   tree BT_FN_VOID_PTR
2062     = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2063   tree BT_FN_VOID_PTR_PTRMODE
2064     = build_function_type_list (void_type_node, ptr_type_node,
2065 				build_nonstandard_integer_type (POINTER_SIZE,
2066 								1), NULL_TREE);
2067   tree BT_FN_VOID_INT
2068     = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
2069   tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
2070   tree BT_FN_IX_CONST_VPTR_INT[5];
2071   tree BT_FN_IX_VPTR_IX_INT[5];
2072   tree BT_FN_VOID_VPTR_IX_INT[5];
2073   tree vptr
2074     = build_pointer_type (build_qualified_type (void_type_node,
2075 						TYPE_QUAL_VOLATILE));
2076   tree cvptr
2077     = build_pointer_type (build_qualified_type (void_type_node,
2078 						TYPE_QUAL_VOLATILE
2079 						|TYPE_QUAL_CONST));
2080   tree boolt
2081     = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
2082   int i;
2083   for (i = 0; i < 5; i++)
2084     {
2085       tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
2086       BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
2087 	= build_function_type_list (boolt, vptr, ptr_type_node, ix,
2088 				    integer_type_node, integer_type_node,
2089 				    NULL_TREE);
2090       BT_FN_IX_CONST_VPTR_INT[i]
2091 	= build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
2092       BT_FN_IX_VPTR_IX_INT[i]
2093 	= build_function_type_list (ix, vptr, ix, integer_type_node,
2094 				    NULL_TREE);
2095       BT_FN_VOID_VPTR_IX_INT[i]
2096 	= build_function_type_list (void_type_node, vptr, ix,
2097 				    integer_type_node, NULL_TREE);
2098     }
2099 #define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
2100 #define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
2101 #define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
2102 #define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
2103 #define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
2104 #define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
2105 #define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
2106 #define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
2107 #define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
2108 #define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
2109 #define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
2110 #define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
2111 #define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
2112 #define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
2113 #define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
2114 #define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
2115 #define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
2116 #define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
2117 #define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
2118 #define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
2119 #undef ATTR_NOTHROW_LEAF_LIST
2120 #define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
2121 #undef ATTR_TMPURE_NOTHROW_LEAF_LIST
2122 #define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
2123 #undef ATTR_NORETURN_NOTHROW_LEAF_LIST
2124 #define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
2125 #undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
2126 #define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
2127   ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
2128 #undef DEF_SANITIZER_BUILTIN
2129 #define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
2130   decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM,		\
2131 			       BUILT_IN_NORMAL, NAME, NULL_TREE);	\
2132   set_call_expr_flags (decl, ATTRS);					\
2133   set_builtin_decl (ENUM, decl, true);
2134 
2135 #include "sanitizer.def"
2136 
2137 #undef DEF_SANITIZER_BUILTIN
2138 }
2139 
2140 /* Called via htab_traverse.  Count number of emitted
2141    STRING_CSTs in the constant hash table.  */
2142 
2143 static int
2144 count_string_csts (void **slot, void *data)
2145 {
2146   struct constant_descriptor_tree *desc
2147     = (struct constant_descriptor_tree *) *slot;
2148   if (TREE_CODE (desc->value) == STRING_CST
2149       && TREE_ASM_WRITTEN (desc->value)
2150       && asan_protect_global (desc->value))
2151     ++*((unsigned HOST_WIDE_INT *) data);
2152   return 1;
2153 }
2154 
2155 /* Helper structure to pass two parameters to
2156    add_string_csts.  */
2157 
2158 struct asan_add_string_csts_data
2159 {
2160   tree type;
2161   vec<constructor_elt, va_gc> *v;
2162 };
2163 
2164 /* Called via htab_traverse.  Call asan_add_global
2165    on emitted STRING_CSTs from the constant hash table.  */
2166 
2167 static int
2168 add_string_csts (void **slot, void *data)
2169 {
2170   struct constant_descriptor_tree *desc
2171     = (struct constant_descriptor_tree *) *slot;
2172   if (TREE_CODE (desc->value) == STRING_CST
2173       && TREE_ASM_WRITTEN (desc->value)
2174       && asan_protect_global (desc->value))
2175     {
2176       struct asan_add_string_csts_data *aascd
2177 	= (struct asan_add_string_csts_data *) data;
2178       asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
2179 		       aascd->type, aascd->v);
2180     }
2181   return 1;
2182 }
2183 
2184 /* Needs to be GTY(()), because cgraph_build_static_cdtor may
2185    invoke ggc_collect.  */
2186 static GTY(()) tree asan_ctor_statements;
2187 
2188 /* Module-level instrumentation.
2189    - Insert __asan_init() into the list of CTORs.
2190    - TODO: insert redzones around globals.
2191  */
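
/* Concretely, for a translation unit with protected globals, this
   emits (sketched) a high-priority static constructor doing

     __asan_init ();
     __asan_register_globals (&.LASAN0, gcount);

   plus a matching destructor calling __asan_unregister_globals, where
   .LASAN0 is the internal array of __asan_global descriptors built
   below.  */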
2192 
2193 void
2194 asan_finish_file (void)
2195 {
2196   struct varpool_node *vnode;
2197   unsigned HOST_WIDE_INT gcount = 0;
2198 
2199   if (shadow_ptr_types[0] == NULL_TREE)
2200     asan_init_shadow_ptr_types ();
2201   /* Avoid instrumenting code in the asan ctors/dtors.
2202      We don't need to insert padding after the description strings,
2203      nor after the .LASAN* array.  */
2204   flag_asan = 0;
2205 
2206   tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
2207   append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
2208   FOR_EACH_DEFINED_VARIABLE (vnode)
2209     if (TREE_ASM_WRITTEN (vnode->symbol.decl)
2210 	&& asan_protect_global (vnode->symbol.decl))
2211       ++gcount;
2212   htab_t const_desc_htab = constant_pool_htab ();
2213   htab_traverse (const_desc_htab, count_string_csts, &gcount);
2214   if (gcount)
2215     {
2216       tree type = asan_global_struct (), var, ctor;
2217       tree uptr = build_nonstandard_integer_type (POINTER_SIZE, 1);
2218       tree dtor_statements = NULL_TREE;
2219       vec<constructor_elt, va_gc> *v;
2220       char buf[20];
2221 
2222       type = build_array_type_nelts (type, gcount);
2223       ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
2224       var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
2225 			type);
2226       TREE_STATIC (var) = 1;
2227       TREE_PUBLIC (var) = 0;
2228       DECL_ARTIFICIAL (var) = 1;
2229       DECL_IGNORED_P (var) = 1;
2230       vec_alloc (v, gcount);
2231       FOR_EACH_DEFINED_VARIABLE (vnode)
2232 	if (TREE_ASM_WRITTEN (vnode->symbol.decl)
2233 	    && asan_protect_global (vnode->symbol.decl))
2234 	  asan_add_global (vnode->symbol.decl, TREE_TYPE (type), v);
2235       struct asan_add_string_csts_data aascd;
2236       aascd.type = TREE_TYPE (type);
2237       aascd.v = v;
2238       htab_traverse (const_desc_htab, add_string_csts, &aascd);
2239       ctor = build_constructor (type, v);
2240       TREE_CONSTANT (ctor) = 1;
2241       TREE_STATIC (ctor) = 1;
2242       DECL_INITIAL (var) = ctor;
2243       varpool_assemble_decl (varpool_node_for_decl (var));
2244 
2245       fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
2246       append_to_statement_list (build_call_expr (fn, 2,
2247 						 build_fold_addr_expr (var),
2248 						 build_int_cst (uptr, gcount)),
2249 				&asan_ctor_statements);
2250 
2251       fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
2252       append_to_statement_list (build_call_expr (fn, 2,
2253 						 build_fold_addr_expr (var),
2254 						 build_int_cst (uptr, gcount)),
2255 				&dtor_statements);
2256       cgraph_build_static_cdtor ('D', dtor_statements,
2257 				 MAX_RESERVED_INIT_PRIORITY - 1);
2258     }
2259   cgraph_build_static_cdtor ('I', asan_ctor_statements,
2260 			     MAX_RESERVED_INIT_PRIORITY - 1);
2261   flag_asan = 1;
2262 }
2263 
2264 /* Instrument the current function.  */
2265 
2266 static unsigned int
2267 asan_instrument (void)
2268 {
2269   if (shadow_ptr_types[0] == NULL_TREE)
2270     asan_init_shadow_ptr_types ();
2271   transform_statements ();
2272   return 0;
2273 }
2274 
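/* Gate: run the pass iff address sanitization is enabled and the
   current function has not opted out via the no_sanitize_address
   attribute.  */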
2275 static bool
2276 gate_asan (void)
2277 {
2278   return flag_asan != 0
2279 	  && !lookup_attribute ("no_sanitize_address",
2280 				DECL_ATTRIBUTES (current_function_decl));
2281 }
2282 
2283 struct gimple_opt_pass pass_asan =
2284 {
2285  {
2286   GIMPLE_PASS,
2287   "asan",				/* name  */
2288   OPTGROUP_NONE,			/* optinfo_flags */
2289   gate_asan,				/* gate  */
2290   asan_instrument,			/* execute  */
2291   NULL,					/* sub  */
2292   NULL,					/* next  */
2293   0,					/* static_pass_number  */
2294   TV_NONE,				/* tv_id  */
2295   PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required  */
2296   0,					/* properties_provided  */
2297   0,					/* properties_destroyed  */
2298   0,					/* todo_flags_start  */
2299   TODO_verify_flow | TODO_verify_stmts
2300   | TODO_update_ssa			/* todo_flags_finish  */
2301  }
2302 };
2303 
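/* Gate for the -O0 variant of the pass: run it only when not
   optimizing.  */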
2304 static bool
2305 gate_asan_O0 (void)
2306 {
2307   return !optimize && gate_asan ();
2308 }
2309 
2310 struct gimple_opt_pass pass_asan_O0 =
2311 {
2312  {
2313   GIMPLE_PASS,
2314   "asan0",				/* name  */
2315   OPTGROUP_NONE,			/* optinfo_flags */
2316   gate_asan_O0,				/* gate  */
2317   asan_instrument,			/* execute  */
2318   NULL,					/* sub  */
2319   NULL,					/* next  */
2320   0,					/* static_pass_number  */
2321   TV_NONE,				/* tv_id  */
2322   PROP_ssa | PROP_cfg | PROP_gimple_leh,/* properties_required  */
2323   0,					/* properties_provided  */
2324   0,					/* properties_destroyed  */
2325   0,					/* todo_flags_start  */
2326   TODO_verify_flow | TODO_verify_stmts
2327   | TODO_update_ssa			/* todo_flags_finish  */
2328  }
2329 };
2330 
2331 #include "gt-asan.h"
2332