xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-ssa.c (revision e6c7e151de239c49d2e38720a061ed9d1fa99309)
1 /* Miscellaneous SSA utility functions.
2    Copyright (C) 2001-2017 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "gimple.h"
26 #include "cfghooks.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "diagnostic-core.h"
31 #include "fold-const.h"
32 #include "stor-layout.h"
33 #include "gimple-fold.h"
34 #include "gimplify.h"
35 #include "gimple-iterator.h"
36 #include "gimple-walk.h"
37 #include "tree-ssa-loop-manip.h"
38 #include "tree-into-ssa.h"
39 #include "tree-ssa.h"
40 #include "cfgloop.h"
41 #include "cfgexpand.h"
42 #include "tree-cfg.h"
43 #include "tree-dfa.h"
44 #include "asan.h"
45 
/* Pointer map of variable mappings, keyed by edge.  Lazily allocated by
   redirect_edge_var_map_add; stays NULL until the first mapping is
   recorded.  */
static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;
48 
49 
50 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */
51 
52 void
53 redirect_edge_var_map_add (edge e, tree result, tree def, source_location locus)
54 {
55   edge_var_map new_node;
56 
57   if (edge_var_maps == NULL)
58     edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;
59 
60   auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
61   new_node.def = def;
62   new_node.result = result;
63   new_node.locus = locus;
64 
65   slot.safe_push (new_node);
66 }
67 
68 
69 /* Clear the var mappings in edge E.  */
70 
71 void
72 redirect_edge_var_map_clear (edge e)
73 {
74   if (!edge_var_maps)
75     return;
76 
77   auto_vec<edge_var_map> *head = edge_var_maps->get (e);
78 
79   if (head)
80     head->release ();
81 }
82 
83 
/* Duplicate the redirected var mappings in OLDE in NEWE.

   This assumes a hash_map can have multiple edges mapping to the same
   var_map (many to one mapping), since we don't remove the previous mappings.
   */

void
redirect_edge_var_map_dup (edge newe, edge olde)
{
  if (!edge_var_maps)
    return;

  /* Insert NEWE's slot *before* looking up OLDE: get_or_insert may
     grow and rehash the table, which would invalidate a previously
     fetched pointer into it.  Do not reorder these two calls.  */
  auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
  auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
  if (!old_head)
    return;

  new_head->safe_splice (*old_head);
}
103 
104 
105 /* Return the variable mappings for a given edge.  If there is none, return
106    NULL.  */
107 
108 vec<edge_var_map> *
109 redirect_edge_var_map_vector (edge e)
110 {
111   /* Hey, what kind of idiot would... you'd be surprised.  */
112   if (!edge_var_maps)
113     return NULL;
114 
115   auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
116   if (!slot)
117     return NULL;
118 
119   return slot;
120 }
121 
122 /* Clear the edge variable mappings.  */
123 
124 void
125 redirect_edge_var_map_empty (void)
126 {
127   if (edge_var_maps)
128     edge_var_maps->empty ();
129 }
130 
131 
132 /* Remove the corresponding arguments from the PHI nodes in E's
133    destination block and redirect it to DEST.  Return redirected edge.
134    The list of removed arguments is stored in a vector accessed
135    through edge_var_maps.  */
136 
137 edge
138 ssa_redirect_edge (edge e, basic_block dest)
139 {
140   gphi_iterator gsi;
141   gphi *phi;
142 
143   redirect_edge_var_map_clear (e);
144 
145   /* Remove the appropriate PHI arguments in E's destination block.  */
146   for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
147     {
148       tree def;
149       source_location locus ;
150 
151       phi = gsi.phi ();
152       def = gimple_phi_arg_def (phi, e->dest_idx);
153       locus = gimple_phi_arg_location (phi, e->dest_idx);
154 
155       if (def == NULL_TREE)
156 	continue;
157 
158       redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
159     }
160 
161   e = redirect_edge_succ_nodup (e, dest);
162 
163   return e;
164 }
165 
166 
167 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
168    E->dest.  */
169 
170 void
171 flush_pending_stmts (edge e)
172 {
173   gphi *phi;
174   edge_var_map *vm;
175   int i;
176   gphi_iterator gsi;
177 
178   vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
179   if (!v)
180     return;
181 
182   for (gsi = gsi_start_phis (e->dest), i = 0;
183        !gsi_end_p (gsi) && v->iterate (i, &vm);
184        gsi_next (&gsi), i++)
185     {
186       tree def;
187 
188       phi = gsi.phi ();
189       def = redirect_edge_var_map_def (vm);
190       add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
191     }
192 
193   redirect_edge_var_map_clear (e);
194 }
195 
196 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
197    GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
198    expression with a different value.
199 
200    This will update any annotations (say debug bind stmts) referring
201    to the original LHS, so that they use the RHS instead.  This is
202    done even if NLHS and LHS are the same, for it is understood that
203    the RHS will be modified afterwards, and NLHS will not be assigned
204    an equivalent value.
205 
206    Adjusting any non-annotation uses of the LHS, if needed, is a
207    responsibility of the caller.
208 
209    The effect of this call should be pretty much the same as that of
210    inserting a copy of STMT before STMT, and then removing the
211    original stmt, at which time gsi_remove() would have update
212    annotations, but using this function saves all the inserting,
213    copying and removing.  */
214 
215 void
216 gimple_replace_ssa_lhs (gimple *stmt, tree nlhs)
217 {
218   if (MAY_HAVE_DEBUG_STMTS)
219     {
220       tree lhs = gimple_get_lhs (stmt);
221 
222       gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
223 
224       insert_debug_temp_for_var_def (NULL, lhs);
225     }
226 
227   gimple_set_lhs (stmt, nlhs);
228 }
229 
230 
231 /* Given a tree for an expression for which we might want to emit
232    locations or values in debug information (generally a variable, but
233    we might deal with other kinds of trees in the future), return the
234    tree that should be used as the variable of a DEBUG_BIND STMT or
235    VAR_LOCATION INSN or NOTE.  Return NULL if VAR is not to be tracked.  */
236 
237 tree
238 target_for_debug_bind (tree var)
239 {
240   if (!MAY_HAVE_DEBUG_STMTS)
241     return NULL_TREE;
242 
243   if (TREE_CODE (var) == SSA_NAME)
244     {
245       var = SSA_NAME_VAR (var);
246       if (var == NULL_TREE)
247 	return NULL_TREE;
248     }
249 
250   if ((!VAR_P (var) || VAR_DECL_IS_VIRTUAL_OPERAND (var))
251       && TREE_CODE (var) != PARM_DECL)
252     return NULL_TREE;
253 
254   if (DECL_HAS_VALUE_EXPR_P (var))
255     return target_for_debug_bind (DECL_VALUE_EXPR (var));
256 
257   if (DECL_IGNORED_P (var))
258     return NULL_TREE;
259 
260   /* var-tracking only tracks registers.  */
261   if (!is_gimple_reg_type (TREE_TYPE (var)))
262     return NULL_TREE;
263 
264   return var;
265 }
266 
267 /* Called via walk_tree, look for SSA_NAMEs that have already been
268    released.  */
269 
270 static tree
271 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
272 {
273   struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
274 
275   if (wi && wi->is_lhs)
276     return NULL_TREE;
277 
278   if (TREE_CODE (*tp) == SSA_NAME)
279     {
280       if (SSA_NAME_IN_FREE_LIST (*tp))
281 	return *tp;
282 
283       *walk_subtrees = 0;
284     }
285   else if (IS_TYPE_OR_DECL_P (*tp))
286     *walk_subtrees = 0;
287 
288   return NULL_TREE;
289 }
290 
/* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
   by other DEBUG stmts, and replace uses of the DEF with the
   newly-created debug temp.

   GSI, if non-NULL, is the insertion point; otherwise VAR's defining
   statement is located via SSA_NAME_DEF_STMT.  */

void
insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
{
  imm_use_iterator imm_iter;
  use_operand_p use_p;
  gimple *stmt;
  gimple *def_stmt = NULL;
  int usecount = 0;
  tree value = NULL;

  if (!MAY_HAVE_DEBUG_STMTS)
    return;

  /* If this name has already been registered for replacement, do nothing
     as anything that uses this name isn't in SSA form.  */
  if (name_registered_for_update_p (var))
    return;

  /* Check whether there are debug stmts that reference this variable and,
     if there are, decide whether we should use a debug temp.  USECOUNT
     never needs to exceed 2: one debug use that is exactly VAR may take
     the value directly, anything more forces a temp.  */
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      stmt = USE_STMT (use_p);

      if (!gimple_debug_bind_p (stmt))
	continue;

      if (usecount++)
	break;

      if (gimple_debug_bind_get_value (stmt) != var)
	{
	  /* Count this as an additional use, so as to make sure we
	     use a temp unless VAR's definition has a SINGLE_RHS that
	     can be shared.  */
	  usecount++;
	  break;
	}
    }

  /* No debug uses at all: nothing to preserve.  */
  if (!usecount)
    return;

  if (gsi)
    def_stmt = gsi_stmt (*gsi);
  else
    def_stmt = SSA_NAME_DEF_STMT (var);

  /* If we didn't get an insertion point, and the stmt has already
     been removed, we won't be able to insert the debug bind stmt, so
     we'll have to drop debug information.  */
  if (gimple_code (def_stmt) == GIMPLE_PHI)
    {
      /* A degenerate PHI (all args equal) yields a usable value; a
	 non-degenerate one cannot be expressed in a debug bind.  */
      value = degenerate_phi_result (as_a <gphi *> (def_stmt));
      if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
	value = NULL;
      /* error_mark_node is what fixup_noreturn_call changes PHI arguments
	 to.  */
      else if (value == error_mark_node)
	value = NULL;
    }
  else if (is_gimple_assign (def_stmt))
    {
      bool no_value = false;

      if (!dom_info_available_p (CDI_DOMINATORS))
	{
	  struct walk_stmt_info wi;

	  memset (&wi, 0, sizeof (wi));

	  /* When removing blocks without following reverse dominance
	     order, we may sometimes encounter SSA_NAMEs that have
	     already been released, referenced in other SSA_DEFs that
	     we're about to release.  Consider:

	     <bb X>:
	     v_1 = foo;

	     <bb Y>:
	     w_2 = v_1 + bar;
	     # DEBUG w => w_2

	     If we deleted BB X first, propagating the value of w_2
	     won't do us any good.  It's too late to recover their
	     original definition of v_1: when it was deleted, it was
	     only referenced in other DEFs, it couldn't possibly know
	     it should have been retained, and propagating every
	     single DEF just in case it might have to be propagated
	     into a DEBUG STMT would probably be too wasteful.

	     When dominator information is not readily available, we
	     check for and accept some loss of debug information.  But
	     if it is available, there's no excuse for us to remove
	     blocks in the wrong order, so we don't even check for
	     dead SSA NAMEs.  SSA verification shall catch any
	     errors.  */
	  if ((!gsi && !gimple_bb (def_stmt))
	      || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
	    no_value = true;
	}

      if (!no_value)
	value = gimple_assign_rhs_to_tree (def_stmt);
    }

  if (value)
    {
      /* If there's a single use of VAR, and VAR is the entire debug
	 expression (usecount would have been incremented again
	 otherwise), and the definition involves only constants and
	 SSA names, then we can propagate VALUE into this single use,
	 avoiding the temp.

	 We can also avoid using a temp if VALUE can be shared and
	 propagated into all uses, without generating expressions that
	 wouldn't be valid gimple RHSs.

	 Other cases that would require unsharing or non-gimple RHSs
	 are deferred to a debug temp, although we could avoid temps
	 at the expense of duplication of expressions.  */

      if (CONSTANT_CLASS_P (value)
	  || gimple_code (def_stmt) == GIMPLE_PHI
	  || (usecount == 1
	      && (!gimple_assign_single_p (def_stmt)
		  || is_gimple_min_invariant (value)))
	  || is_gimple_reg (value))
	;
      else
	{
	  /* Build a debug temp D, emit "# DEBUG D => VALUE" before the
	     definition, and bind the debug uses to D instead.  */
	  gdebug *def_temp;
	  tree vexpr = make_node (DEBUG_EXPR_DECL);

	  def_temp = gimple_build_debug_bind (vexpr,
					      unshare_expr (value),
					      def_stmt);

	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (value);
	  if (DECL_P (value))
	    SET_DECL_MODE (vexpr, DECL_MODE (value));
	  else
	    SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (value)));

	  if (gsi)
	    gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
	  else
	    {
	      gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
	      gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
	    }

	  value = vexpr;
	}
    }

  /* Rewrite every debug bind that uses VAR: propagate VALUE into it,
     or reset the bind if no value could be recovered.  */
  FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
    {
      if (!gimple_debug_bind_p (stmt))
	continue;

      if (value)
	{
	  FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
	    /* VALUE is unshared into each use: it may be an arbitrary
	       RHS expression and the same tree must not appear twice
	       in the IL.  (unshare_expr is a no-op on decls such as a
	       DEBUG_EXPR_DECL, so sharable values are unaffected.)  */
	    SET_USE (use_p, unshare_expr (value));
	  /* If we didn't replace uses with a debug decl fold the
	     resulting expression.  Otherwise we end up with invalid IL.  */
	  if (TREE_CODE (value) != DEBUG_EXPR_DECL)
	    {
	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	      fold_stmt_inplace (&gsi);
	    }
	}
      else
	gimple_debug_bind_reset_value (stmt);

      update_stmt (stmt);
    }
}
480 
481 
482 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
483    other DEBUG stmts, and replace uses of the DEF with the
484    newly-created debug temp.  */
485 
486 void
487 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
488 {
489   gimple *stmt;
490   ssa_op_iter op_iter;
491   def_operand_p def_p;
492 
493   if (!MAY_HAVE_DEBUG_STMTS)
494     return;
495 
496   stmt = gsi_stmt (*gsi);
497 
498   FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
499     {
500       tree var = DEF_FROM_PTR (def_p);
501 
502       if (TREE_CODE (var) != SSA_NAME)
503 	continue;
504 
505       insert_debug_temp_for_var_def (gsi, var);
506     }
507 }
508 
509 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT.  */
510 
511 void
512 reset_debug_uses (gimple *stmt)
513 {
514   ssa_op_iter op_iter;
515   def_operand_p def_p;
516   imm_use_iterator imm_iter;
517   gimple *use_stmt;
518 
519   if (!MAY_HAVE_DEBUG_STMTS)
520     return;
521 
522   FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
523     {
524       tree var = DEF_FROM_PTR (def_p);
525 
526       if (TREE_CODE (var) != SSA_NAME)
527 	continue;
528 
529       FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
530 	{
531 	  if (!gimple_debug_bind_p (use_stmt))
532 	    continue;
533 
534 	  gimple_debug_bind_reset_value (use_stmt);
535 	  update_stmt (use_stmt);
536 	}
537     }
538 }
539 
/* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
   dominated stmts before their dominators, so that release_ssa_defs
   stands a chance of propagating DEFs into debug bind stmts.  */

void
release_defs_bitset (bitmap toremove)
{
  unsigned j;
  bitmap_iterator bi;

  /* Performing a topological sort is probably overkill, this will
     most likely run in slightly superlinear time, rather than the
     pathological quadratic worst case.  */
  while (!bitmap_empty_p (toremove))
    {
      /* Bits cannot be cleared while EXECUTE_IF_SET_IN_BITMAP is
	 walking the bitmap, so each removal is buffered in
	 TO_REMOVE_BIT and applied one iteration late (with a final
	 flush after the loop).  -1U is the "nothing pending" marker.  */
      unsigned to_remove_bit = -1U;
      EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
	{
	  if (to_remove_bit != -1U)
	    {
	      bitmap_clear_bit (toremove, to_remove_bit);
	      to_remove_bit = -1U;
	    }

	  bool remove_now = true;
	  tree var = ssa_name (j);
	  gimple *stmt;
	  imm_use_iterator uit;

	  FOR_EACH_IMM_USE_STMT (stmt, uit, var)
	    {
	      ssa_op_iter dit;
	      def_operand_p def_p;

	      /* We can't propagate PHI nodes into debug stmts.  */
	      if (gimple_code (stmt) == GIMPLE_PHI
		  || is_gimple_debug (stmt))
		continue;

	      /* If we find another definition to remove that uses
		 the one we're looking at, defer the removal of this
		 one, so that it can be propagated into debug stmts
		 after the other is.  */
	      FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
		{
		  tree odef = DEF_FROM_PTR (def_p);

		  if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
		    {
		      remove_now = false;
		      break;
		    }
		}

	      if (!remove_now)
		BREAK_FROM_IMM_USE_STMT (uit);
	    }

	  if (remove_now)
	    {
	      gimple *def = SSA_NAME_DEF_STMT (var);
	      gimple_stmt_iterator gsi = gsi_for_stmt (def);

	      if (gimple_code (def) == GIMPLE_PHI)
		remove_phi_node (&gsi, true);
	      else
		{
		  gsi_remove (&gsi, true);
		  release_defs (def);
		}

	      /* Defer clearing this bit until the iterator has moved
		 past it (or the walk has finished).  */
	      to_remove_bit = j;
	    }
	}
      /* Flush the last buffered removal of this pass.  */
      if (to_remove_bit != -1U)
	bitmap_clear_bit (toremove, to_remove_bit);
    }

}
619 
620 /* Verify virtual SSA form.  */
621 
622 bool
623 verify_vssa (basic_block bb, tree current_vdef, sbitmap visited)
624 {
625   bool err = false;
626 
627   if (bitmap_bit_p (visited, bb->index))
628     return false;
629 
630   bitmap_set_bit (visited, bb->index);
631 
632   /* Pick up the single virtual PHI def.  */
633   gphi *phi = NULL;
634   for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
635        gsi_next (&si))
636     {
637       tree res = gimple_phi_result (si.phi ());
638       if (virtual_operand_p (res))
639 	{
640 	  if (phi)
641 	    {
642 	      error ("multiple virtual PHI nodes in BB %d", bb->index);
643 	      print_gimple_stmt (stderr, phi, 0, 0);
644 	      print_gimple_stmt (stderr, si.phi (), 0, 0);
645 	      err = true;
646 	    }
647 	  else
648 	    phi = si.phi ();
649 	}
650     }
651   if (phi)
652     {
653       current_vdef = gimple_phi_result (phi);
654       if (TREE_CODE (current_vdef) != SSA_NAME)
655 	{
656 	  error ("virtual definition is not an SSA name");
657 	  print_gimple_stmt (stderr, phi, 0, 0);
658 	  err = true;
659 	}
660     }
661 
662   /* Verify stmts.  */
663   for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
664        gsi_next (&gsi))
665     {
666       gimple *stmt = gsi_stmt (gsi);
667       tree vuse = gimple_vuse (stmt);
668       if (vuse)
669 	{
670 	  if (vuse != current_vdef)
671 	    {
672 	      error ("stmt with wrong VUSE");
673 	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
674 	      fprintf (stderr, "expected ");
675 	      print_generic_expr (stderr, current_vdef, 0);
676 	      fprintf (stderr, "\n");
677 	      err = true;
678 	    }
679 	  tree vdef = gimple_vdef (stmt);
680 	  if (vdef)
681 	    {
682 	      current_vdef = vdef;
683 	      if (TREE_CODE (current_vdef) != SSA_NAME)
684 		{
685 		  error ("virtual definition is not an SSA name");
686 		  print_gimple_stmt (stderr, phi, 0, 0);
687 		  err = true;
688 		}
689 	    }
690 	}
691     }
692 
693   /* Verify destination PHI uses and recurse.  */
694   edge_iterator ei;
695   edge e;
696   FOR_EACH_EDGE (e, ei, bb->succs)
697     {
698       gphi *phi = get_virtual_phi (e->dest);
699       if (phi
700 	  && PHI_ARG_DEF_FROM_EDGE (phi, e) != current_vdef)
701 	{
702 	  error ("PHI node with wrong VUSE on edge from BB %d",
703 		 e->src->index);
704 	  print_gimple_stmt (stderr, phi, 0, TDF_VOPS);
705 	  fprintf (stderr, "expected ");
706 	  print_generic_expr (stderr, current_vdef, 0);
707 	  fprintf (stderr, "\n");
708 	  err = true;
709 	}
710 
711       /* Recurse.  */
712       err |= verify_vssa (e->dest, current_vdef, visited);
713     }
714 
715   return err;
716 }
717 
/* Return true if SSA_NAME is malformed.

   IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
      operand.

   NOTE(review): despite the historical summary "and mark it visited",
   this function does not set TREE_VISITED; that bookkeeping is done
   by verify_use.  The checks run in a fixed order and report at most
   one problem per call.  */

static bool
verify_ssa_name (tree ssa_name, bool is_virtual)
{
  if (TREE_CODE (ssa_name) != SSA_NAME)
    {
      error ("expected an SSA_NAME object");
      return true;
    }

  if (SSA_NAME_IN_FREE_LIST (ssa_name))
    {
      error ("found an SSA_NAME that had been released into the free pool");
      return true;
    }

  /* A name derived from a symbol must keep that symbol's type.
     Anonymous names (SSA_NAME_VAR == NULL) are exempt.  */
  if (SSA_NAME_VAR (ssa_name) != NULL_TREE
      && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
    {
      error ("type mismatch between an SSA_NAME and its symbol");
      return true;
    }

  if (is_virtual && !virtual_operand_p (ssa_name))
    {
      error ("found a virtual definition for a GIMPLE register");
      return true;
    }

  /* All virtual operands must be versions of the single VOP decl.  */
  if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
    {
      error ("virtual SSA name for non-VOP decl");
      return true;
    }

  if (!is_virtual && virtual_operand_p (ssa_name))
    {
      error ("found a real definition for a non-register");
      return true;
    }

  /* Default definitions are defined by an empty (GIMPLE_NOP) stmt.  */
  if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
      && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
    {
      error ("found a default name with a non-empty defining statement");
      return true;
    }

  return false;
}
772 
773 
774 /* Return true if the definition of SSA_NAME at block BB is malformed.
775 
776    STMT is the statement where SSA_NAME is created.
777 
778    DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
779       version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
780       it means that the block in that array slot contains the
781       definition of SSA_NAME.
782 
783    IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */
784 
785 static bool
786 verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
787 	    gimple *stmt, bool is_virtual)
788 {
789   if (verify_ssa_name (ssa_name, is_virtual))
790     goto err;
791 
792   if (SSA_NAME_VAR (ssa_name)
793       && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
794       && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
795     {
796       error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
797       goto err;
798     }
799 
800   if (definition_block[SSA_NAME_VERSION (ssa_name)])
801     {
802       error ("SSA_NAME created in two different blocks %i and %i",
803 	     definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
804       goto err;
805     }
806 
807   definition_block[SSA_NAME_VERSION (ssa_name)] = bb;
808 
809   if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
810     {
811       error ("SSA_NAME_DEF_STMT is wrong");
812       fprintf (stderr, "Expected definition statement:\n");
813       print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
814       fprintf (stderr, "\nActual definition statement:\n");
815       print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
816       goto err;
817     }
818 
819   return false;
820 
821 err:
822   fprintf (stderr, "while verifying SSA_NAME ");
823   print_generic_expr (stderr, ssa_name, 0);
824   fprintf (stderr, " in statement\n");
825   print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
826 
827   return true;
828 }
829 
830 
/* Return true if the use of SSA_NAME at statement STMT in block BB is
   malformed.

   DEF_BB is the block where SSA_NAME was found to be created.

   IDOM contains immediate dominator information for the flowgraph.

   CHECK_ABNORMAL is true if the caller wants to check whether this use
      is flowing through an abnormal edge (only used when checking PHI
      arguments).

   If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
     that are defined before STMT in basic block BB.  */

static bool
verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
	    gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb)
{
  bool err = false;
  tree ssa_name = USE_FROM_PTR (use_p);

  /* Verify the imm-use chain once per name; TREE_VISITED serves as
     the "already checked" flag (cleared by verify_ssa before the
     walk).  */
  if (!TREE_VISITED (ssa_name))
    if (verify_imm_links (stderr, ssa_name))
      err = true;

  TREE_VISITED (ssa_name) = 1;

  if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
      && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
    ; /* Default definitions have empty statements.  Nothing to do.  */
  else if (!def_bb)
    {
      error ("missing definition");
      err = true;
    }
  else if (bb != def_bb
	   && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
    {
      error ("definition in block %i does not dominate use in block %i",
	     def_bb->index, bb->index);
      err = true;
    }
  else if (bb == def_bb
	   && names_defined_in_bb != NULL
	   && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
    {
      /* Same-block def must appear textually before the use.  */
      error ("definition in block %i follows the use", def_bb->index);
      err = true;
    }

  if (check_abnormal
      && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
    {
      error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
      err = true;
    }

  /* Make sure the use is in an appropriate list by checking the previous
     element to make sure it's the same.  The list head is an artificial
     node whose USE pointer is NULL and whose loc.ssa_name identifies
     the owning name; every other node's own use must resolve to the
     same name.  */
  if (use_p->prev == NULL)
    {
      error ("no immediate_use list");
      err = true;
    }
  else
    {
      tree listvar;
      if (use_p->prev->use == NULL)
	listvar = use_p->prev->loc.ssa_name;
      else
	listvar = USE_FROM_PTR (use_p->prev);
      if (listvar != ssa_name)
        {
	  error ("wrong immediate use list");
	  err = true;
	}
    }

  if (err)
    {
      fprintf (stderr, "for SSA_NAME: ");
      print_generic_expr (stderr, ssa_name, TDF_VOPS);
      fprintf (stderr, " in statement:\n");
      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
    }

  return err;
}
919 
920 
/* Return true if any of the arguments for PHI node PHI at block BB is
   malformed.

   DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
      version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
      it means that the block in that array slot contains the
      definition of SSA_NAME.  */

static bool
verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
{
  edge e;
  bool err = false;
  size_t i, phi_num_args = gimple_phi_num_args (phi);

  /* PHI arguments and incoming edges must pair up one-to-one.  */
  if (EDGE_COUNT (bb->preds) != phi_num_args)
    {
      error ("incoming edge count does not match number of PHI arguments");
      err = true;
      goto error;
    }

  for (i = 0; i < phi_num_args; i++)
    {
      use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
      tree op = USE_FROM_PTR (op_p);

      /* Argument I flows in over predecessor edge I.  */
      e = EDGE_PRED (bb, i);

      if (op == NULL_TREE)
	{
	  error ("PHI argument is missing for edge %d->%d",
	         e->src->index,
		 e->dest->index);
	  err = true;
	  goto error;
	}

      if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
	{
	  error ("PHI argument is not SSA_NAME, or invariant");
	  err = true;
	}

      /* An SSA argument must be well formed and its definition must
	 dominate the source of the incoming edge.  (Plain assignment
	 to ERR is fine: a true value cannot survive into this branch
	 from the check above, and errors are flushed each
	 iteration.)  */
      if (TREE_CODE (op) == SSA_NAME)
	{
	  err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
	  err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
			     op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
	}

      /* Taking the address of a decl requires TREE_ADDRESSABLE.  */
      if (TREE_CODE (op) == ADDR_EXPR)
	{
	  tree base = TREE_OPERAND (op, 0);
	  while (handled_component_p (base))
	    base = TREE_OPERAND (base, 0);
	  if ((VAR_P (base)
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	      && !TREE_ADDRESSABLE (base))
	    {
	      error ("address taken, but ADDRESSABLE bit not set");
	      err = true;
	    }
	}

      if (e->dest != bb)
	{
	  error ("wrong edge %d->%d for PHI argument",
	         e->src->index, e->dest->index);
	  err = true;
	}

      /* Report the offending argument and stop at the first bad one.  */
      if (err)
	{
	  fprintf (stderr, "PHI argument\n");
	  print_generic_stmt (stderr, op, TDF_VOPS);
	  goto error;
	}
    }

error:
  if (err)
    {
      fprintf (stderr, "for PHI node\n");
      print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
    }


  return err;
}
1012 
1013 
/* Verify common invariants in the SSA web.
   TODO: verify the variable annotations.

   CHECK_MODIFIED_STMT, when true, reports statements still marked
   modified after an optimization pass.  CHECK_SSA_OPERANDS, when true,
   additionally verifies the SSA operand caches against the IL.
   On any failure this aborts compilation via internal_error, so no
   cleanup is attempted on the error path.  */

DEBUG_FUNCTION void
verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
{
  basic_block bb;
  /* definition_block[SSA version] records the basic block that defines
     that version; filled during the first pass, consulted when
     verifying uses in the second.  */
  basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
  ssa_op_iter iter;
  tree op;
  /* Remember the dominator-info state so we can restore it at the end.  */
  enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
  bitmap names_defined_in_bb = BITMAP_ALLOC (NULL);

  gcc_assert (!need_ssa_update_p (cfun));

  timevar_push (TV_TREE_SSA_VERIFY);

    {
      /* Keep track of SSA names present in the IL.  The bare scope
	 limits the lifetime of the ssa_info hash_map to this first
	 pass over all SSA names.  */
      size_t i;
      tree name;
      hash_map <void *, tree> ssa_info;

      FOR_EACH_SSA_NAME (i, name, cfun)
	{
	  gimple *stmt;
	  /* Clear TREE_VISITED; verify_ssa_name/verify_def use it to
	     detect names seen more than once.  */
	  TREE_VISITED (name) = 0;

	  verify_ssa_name (name, virtual_operand_p (name));

	  stmt = SSA_NAME_DEF_STMT (name);
	  if (!gimple_nop_p (stmt))
	    {
	      basic_block bb = gimple_bb (stmt);
	      if (verify_def (bb, definition_block,
			      name, stmt, virtual_operand_p (name)))
		goto err;
	    }

	  /* Pointer and integral names carry auxiliary info
	     (points-to resp. range info); that info must not be
	     shared between two different SSA names.  */
	  void *info = NULL;
	  if (POINTER_TYPE_P (TREE_TYPE (name)))
	    info = SSA_NAME_PTR_INFO (name);
	  else if (INTEGRAL_TYPE_P (TREE_TYPE (name)))
	    info = SSA_NAME_RANGE_INFO (name);
	  if (info)
	    {
	      bool existed;
	      tree &val = ssa_info.get_or_insert (info, &existed);
	      if (existed)
		{
		  error ("shared SSA name info");
		  print_generic_expr (stderr, val, 0);
		  fprintf (stderr, " and ");
		  print_generic_expr (stderr, name, 0);
		  fprintf (stderr, "\n");
		  goto err;
		}
	      else
		val = name;
	    }
	}
    }

  /* verify_use/verify_phi_args rely on dominance queries.  */
  calculate_dominance_info (CDI_DOMINATORS);

  /* Now verify all the uses and make sure they agree with the definitions
     found in the previous pass.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      edge e;
      edge_iterator ei;

      /* Make sure that all edges have a clear 'aux' field.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->aux)
	    {
	      error ("AUX pointer initialized for edge %d->%d", e->src->index,
		      e->dest->index);
	      goto err;
	    }
	}

      /* Verify the arguments for every PHI node in the block.  */
      for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  if (verify_phi_args (phi, bb, definition_block))
	    goto err;

	  bitmap_set_bit (names_defined_in_bb,
			  SSA_NAME_VERSION (gimple_phi_result (phi)));
	}

      /* Now verify all the uses and vuses in every statement of the block.  */
      for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  use_operand_p use_p;

	  if (check_modified_stmt && gimple_modified_p (stmt))
	    {
	      error ("stmt (%p) marked modified after optimization pass: ",
		     (void *)stmt);
	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
	      goto err;
	    }

	  if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
	    {
	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
	      goto err;
	    }

	  /* Debug binds without a value have no operands to check.  */
	  if (gimple_debug_bind_p (stmt)
	      && !gimple_debug_bind_has_value_p (stmt))
	    continue;

	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
	    {
	      op = USE_FROM_PTR (use_p);
	      if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
			      use_p, stmt, false, names_defined_in_bb))
		goto err;
	    }

	  /* Every SSA name defined here must point back at this
	     statement as its defining statement.  */
	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
	    {
	      if (SSA_NAME_DEF_STMT (op) != stmt)
		{
		  error ("SSA_NAME_DEF_STMT is wrong");
		  fprintf (stderr, "Expected definition statement:\n");
		  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
		  fprintf (stderr, "\nActual definition statement:\n");
		  print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
				     4, TDF_VOPS);
		  goto err;
		}
	      bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
	    }
	}

      /* The bitmap is per-block; reset it for the next block.  */
      bitmap_clear (names_defined_in_bb);
    }

  free (definition_block);

  /* If the function has a virtual operand chain, walk and verify it
     starting from the default definition of the VOP.  */
  if (gimple_vop (cfun)
      && ssa_default_def (cfun, gimple_vop (cfun)))
    {
      auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);
      bitmap_clear (visited);
      if (verify_vssa (ENTRY_BLOCK_PTR_FOR_FN (cfun),
		       ssa_default_def (cfun, gimple_vop (cfun)), visited))
	goto err;
    }

  /* Restore the dominance information to its prior known state, so
     that we do not perturb the compiler's subsequent behavior.  */
  if (orig_dom_state == DOM_NONE)
    free_dominance_info (CDI_DOMINATORS);
  else
    set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);

  BITMAP_FREE (names_defined_in_bb);
  timevar_pop (TV_TREE_SSA_VERIFY);
  return;

err:
  internal_error ("verify_ssa failed");
}
1186 
1187 
/* Initialize global DFA and SSA structures for function FN.

   Allocates FN's gimple_df zero-initialized in GC memory, creates the
   default-definitions hash table, resets the points-to solution for
   escaped variables and initializes the SSA name table with no
   pre-reserved entries.  */

void
init_tree_ssa (struct function *fn)
{
  fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
  /* 20 is only an initial size hint for the hash table.  */
  fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
  pt_solution_reset (&fn->gimple_df->escaped);
  init_ssanames (fn, 0);
}
1198 
/* Deallocate memory associated with SSA data structures for function FN.
   Mirrors init_tree_ssa: releases the SSA name table, operand caches,
   default-def table, points-to solution and decls-to-pointers map,
   then drops gimple_df itself.  */

void
delete_tree_ssa (struct function *fn)
{
  fini_ssanames (fn);

  /* We no longer maintain the SSA operand cache at this point.  */
  if (ssa_operands_active (fn))
    fini_ssa_operands (fn);

  fn->gimple_df->default_defs->empty ();
  fn->gimple_df->default_defs = NULL;
  pt_solution_reset (&fn->gimple_df->escaped);
  if (fn->gimple_df->decls_to_pointers != NULL)
    delete fn->gimple_df->decls_to_pointers;
  fn->gimple_df->decls_to_pointers = NULL;
  /* gimple_df is GC-allocated; clearing the pointer is sufficient.  */
  fn->gimple_df = NULL;

  /* We no longer need the edge variable maps.  */
  redirect_edge_var_map_empty ();
}
1221 
1222 /* Return true if EXPR is a useless type conversion, otherwise return
1223    false.  */
1224 
1225 bool
1226 tree_ssa_useless_type_conversion (tree expr)
1227 {
1228   /* If we have an assignment that merely uses a NOP_EXPR to change
1229      the top of the RHS to the type of the LHS and the type conversion
1230      is "safe", then strip away the type conversion so that we can
1231      enter LHS = RHS into the const_and_copies table.  */
1232   if (CONVERT_EXPR_P (expr)
1233       || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1234       || TREE_CODE (expr) == NON_LVALUE_EXPR)
1235     return useless_type_conversion_p
1236       (TREE_TYPE (expr),
1237        TREE_TYPE (TREE_OPERAND (expr, 0)));
1238 
1239   return false;
1240 }
1241 
1242 /* Strip conversions from EXP according to
1243    tree_ssa_useless_type_conversion and return the resulting
1244    expression.  */
1245 
1246 tree
1247 tree_ssa_strip_useless_type_conversions (tree exp)
1248 {
1249   while (tree_ssa_useless_type_conversion (exp))
1250     exp = TREE_OPERAND (exp, 0);
1251   return exp;
1252 }
1253 
1254 /* Return true if T, as SSA_NAME, has an implicit default defined value.  */
1255 
1256 bool
1257 ssa_defined_default_def_p (tree t)
1258 {
1259   tree var = SSA_NAME_VAR (t);
1260 
1261   if (!var)
1262     ;
1263   /* Parameters get their initial value from the function entry.  */
1264   else if (TREE_CODE (var) == PARM_DECL)
1265     return true;
1266   /* When returning by reference the return address is actually a hidden
1267      parameter.  */
1268   else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
1269     return true;
1270   /* Hard register variables get their initial value from the ether.  */
1271   else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1272     return true;
1273 
1274   return false;
1275 }
1276 
1277 
1278 /* Return true if T, an SSA_NAME, has an undefined value.  PARTIAL is what
1279    should be returned if the value is only partially undefined.  */
1280 
1281 bool
1282 ssa_undefined_value_p (tree t, bool partial)
1283 {
1284   gimple *def_stmt;
1285 
1286   if (ssa_defined_default_def_p (t))
1287     return false;
1288 
1289   /* The value is undefined iff its definition statement is empty.  */
1290   def_stmt = SSA_NAME_DEF_STMT (t);
1291   if (gimple_nop_p (def_stmt))
1292     return true;
1293 
1294   /* Check if the complex was not only partially defined.  */
1295   if (partial && is_gimple_assign (def_stmt)
1296       && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
1297     {
1298       tree rhs1, rhs2;
1299 
1300       rhs1 = gimple_assign_rhs1 (def_stmt);
1301       rhs2 = gimple_assign_rhs2 (def_stmt);
1302       return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
1303 	     || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
1304     }
1305   return false;
1306 }
1307 
1308 
1309 /* Return TRUE iff STMT, a gimple statement, references an undefined
1310    SSA name.  */
1311 
1312 bool
1313 gimple_uses_undefined_value_p (gimple *stmt)
1314 {
1315   ssa_op_iter iter;
1316   tree op;
1317 
1318   FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
1319     if (ssa_undefined_value_p (op))
1320       return true;
1321 
1322   return false;
1323 }
1324 
1325 
1326 
/* If necessary, rewrite the base of the reference tree *TP from
   a MEM_REF to a plain or converted symbol, so that the symbol can be
   renamed into SSA.  Only decls recorded in SUITABLE_FOR_RENAMING are
   touched.  The rewrites here must mirror the conditions checked by
   non_rewritable_mem_ref_base below.  */

static void
maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
{
  tree sym;

  /* Strip component references to get at the innermost base.  */
  while (handled_component_p (*tp))
    tp = &TREE_OPERAND (*tp, 0);
  /* Only handle MEM_REF (&decl + offset) of a non-addressable,
     renamable decl accessed with a register type.  */
  if (TREE_CODE (*tp) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
      && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
      && DECL_P (sym)
      && !TREE_ADDRESSABLE (sym)
      && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
      && is_gimple_reg_type (TREE_TYPE (*tp))
      && ! VOID_TYPE_P (TREE_TYPE (*tp)))
    {
      /* An element extract from a vector becomes a BIT_FIELD_REF
	 when the offset is a multiple of the element size.  */
      if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
	  && useless_type_conversion_p (TREE_TYPE (*tp),
					TREE_TYPE (TREE_TYPE (sym)))
	  && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
			    TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
	{
	  *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
			TYPE_SIZE (TREE_TYPE (*tp)),
			/* Convert the byte offset to a bit position.  */
			int_const_binop (MULT_EXPR,
					 bitsize_int (BITS_PER_UNIT),
					 TREE_OPERAND (*tp, 1)));
	}
      /* A component extract from a complex value becomes a
	 REALPART_EXPR (offset zero) or IMAGPART_EXPR.  */
      else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
	       && useless_type_conversion_p (TREE_TYPE (*tp),
					     TREE_TYPE (TREE_TYPE (sym))))
	{
	  *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
			? REALPART_EXPR : IMAGPART_EXPR,
			TREE_TYPE (*tp), sym);
	}
      /* A full-size access at offset zero is the decl itself, possibly
	 wrapped in a VIEW_CONVERT_EXPR when the types differ.  */
      else if (integer_zerop (TREE_OPERAND (*tp, 1))
	       && DECL_SIZE (sym) == TYPE_SIZE (TREE_TYPE (*tp)))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (*tp),
					  TREE_TYPE (sym)))
	    *tp = build1 (VIEW_CONVERT_EXPR,
			  TREE_TYPE (*tp), sym);
	  else
	    *tp = sym;
	}
      /* Otherwise an in-bounds, byte-aligned partial read of the decl
	 becomes a BIT_FIELD_REF — but not for bitfield-precision
	 integral types, which would need extra conversion.  */
      else if (DECL_SIZE (sym)
	       && TREE_CODE (DECL_SIZE (sym)) == INTEGER_CST
	       && mem_ref_offset (*tp) >= 0
	       && wi::leu_p (mem_ref_offset (*tp)
			     + wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp))),
			     wi::to_offset (DECL_SIZE_UNIT (sym)))
	       && (! INTEGRAL_TYPE_P (TREE_TYPE (*tp))
		   || (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp)))
		       == TYPE_PRECISION (TREE_TYPE (*tp))))
	       && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp))),
				  BITS_PER_UNIT) == 0)
	{
	  *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
			TYPE_SIZE (TREE_TYPE (*tp)),
			wide_int_to_tree (bitsizetype,
					  mem_ref_offset (*tp)
					  << LOG2_BITS_PER_UNIT));
	}
    }
}
1396 
/* For a tree REF return its base if it is the base of a MEM_REF
   that cannot be rewritten into SSA form.  Otherwise return NULL_TREE.
   The conditions under which NULL_TREE is returned must stay in sync
   with the rewrites performed by maybe_rewrite_mem_ref_base above.  */

static tree
non_rewritable_mem_ref_base (tree ref)
{
  tree base;

  /* A plain decl does not need it set.  */
  if (DECL_P (ref))
    return NULL_TREE;

  /* strip_invariant_refs yields NULL for non-invariant references;
     fall back to the plain base address in that case.  */
  if (! (base = CONST_CAST_TREE (strip_invariant_refs (ref))))
    {
      base = get_base_address (ref);
      if (DECL_P (base))
	return base;
      return NULL_TREE;
    }

  /* But watch out for MEM_REFs we cannot lower to a
     VIEW_CONVERT_EXPR or a BIT_FIELD_REF.  */
  if (TREE_CODE (base) == MEM_REF
      && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
      if (! DECL_P (decl))
	return NULL_TREE;
      /* Non-register types, void accesses and volatility mismatches
	 cannot be rewritten at all.  */
      if (! is_gimple_reg_type (TREE_TYPE (base))
	  || VOID_TYPE_P (TREE_TYPE (base))
	  || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base))
	return decl;
      /* An in-bounds, element-aligned extract from a vector or complex
	 decl is rewritable (BIT_FIELD_REF / REAL-IMAGPART_EXPR).  */
      if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
	   || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
	  && useless_type_conversion_p (TREE_TYPE (base),
					TREE_TYPE (TREE_TYPE (decl)))
	  && wi::fits_uhwi_p (mem_ref_offset (base))
	  && wi::gtu_p (wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
			mem_ref_offset (base))
	  && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
			    TYPE_SIZE_UNIT (TREE_TYPE (base))))
	return NULL_TREE;
      /* For same sizes and zero offset we can use a VIEW_CONVERT_EXPR.  */
      if (integer_zerop (TREE_OPERAND (base, 1))
	  && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (base)))
	return NULL_TREE;
      /* For integral typed extracts we can use a BIT_FIELD_REF.  */
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) == INTEGER_CST
	  && mem_ref_offset (base) >= 0
	  && wi::leu_p (mem_ref_offset (base)
			+ wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (base))),
			wi::to_offset (DECL_SIZE_UNIT (decl)))
	  /* ???  We can't handle bitfield precision extracts without
	     either using an alternate type for the BIT_FIELD_REF and
	     then doing a conversion or possibly adjusting the offset
	     according to endianness.  */
	  && (! INTEGRAL_TYPE_P (TREE_TYPE (base))
	      || (wi::to_offset (TYPE_SIZE (TREE_TYPE (base)))
		  == TYPE_PRECISION (TREE_TYPE (base))))
	  && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (base))),
			     BITS_PER_UNIT) == 0)
	return NULL_TREE;
      /* Not one of the lowerable forms: the decl must stay in memory.  */
      return decl;
    }

  return NULL_TREE;
}
1465 
/* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
   Otherwise return false.  */
1468 
static bool
non_rewritable_lvalue_p (tree lhs)
{
  /* A plain decl is always rewritable.  */
  if (DECL_P (lhs))
    return false;

  /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
     a reasonably efficient manner... */
  if ((TREE_CODE (lhs) == REALPART_EXPR
       || TREE_CODE (lhs) == IMAGPART_EXPR)
      && DECL_P (TREE_OPERAND (lhs, 0)))
    return false;

  /* ???  The following could be relaxed allowing component
     references that do not change the access size.  */
  if (TREE_CODE (lhs) == MEM_REF
      && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);

      /* A decl that is wrapped inside a MEM-REF that covers
	 it full is also rewritable.  */
      if (integer_zerop (TREE_OPERAND (lhs, 1))
	  && DECL_P (decl)
	  && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
	  /* If the dynamic type of the decl has larger precision than
	     the decl itself we can't use the decls type for SSA rewriting.  */
	  && ((! INTEGRAL_TYPE_P (TREE_TYPE (decl))
	       || compare_tree_int (DECL_SIZE (decl),
				    TYPE_PRECISION (TREE_TYPE (decl))) == 0)
	      || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		  && (TYPE_PRECISION (TREE_TYPE (decl))
		      >= TYPE_PRECISION (TREE_TYPE (lhs)))))
	  /* Make sure we are not re-writing non-float copying into float
	     copying as that can incur normalization.  */
	  && (! FLOAT_TYPE_P (TREE_TYPE (decl))
	      || types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (decl)))
	  && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
	return false;

      /* A vector-insert using a MEM_REF or ARRAY_REF is rewritable
	 using a BIT_INSERT_EXPR.  The offset must select a whole,
	 in-bounds element of the vector.  */
      if (DECL_P (decl)
	  && VECTOR_TYPE_P (TREE_TYPE (decl))
	  && TYPE_MODE (TREE_TYPE (decl)) != BLKmode
	  && types_compatible_p (TREE_TYPE (lhs),
				 TREE_TYPE (TREE_TYPE (decl)))
	  && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1))
	  && tree_int_cst_lt (TREE_OPERAND (lhs, 1),
			      TYPE_SIZE_UNIT (TREE_TYPE (decl)))
	  && (tree_to_uhwi (TREE_OPERAND (lhs, 1))
	      % tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0)
	return false;
    }

  /* A vector-insert using a BIT_FIELD_REF is rewritable using
     BIT_INSERT_EXPR.  The bit position must be element-aligned.  */
  if (TREE_CODE (lhs) == BIT_FIELD_REF
      && DECL_P (TREE_OPERAND (lhs, 0))
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
      && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
      && types_compatible_p (TREE_TYPE (lhs),
			     TREE_TYPE (TREE_TYPE (TREE_OPERAND (lhs, 0))))
      && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
	  % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))) == 0)
    return false;

  /* Anything else must stay in memory.  */
  return true;
}
1539 
/* When possible, clear TREE_ADDRESSABLE bit or set DECL_GIMPLE_REG_P bit
   and mark the variable VAR for conversion into SSA by recording it in
   SUITABLE_FOR_RENAMING.  */
1543 
static void
maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
		    bitmap suitable_for_renaming)
{
  /* Global Variables, result decls cannot be changed.  Neither can
     variables whose address is still taken.  */
  if (is_global_var (var)
      || TREE_CODE (var) == RESULT_DECL
      || bitmap_bit_p (addresses_taken, DECL_UID (var)))
    return;

  if (TREE_ADDRESSABLE (var)
      /* Do not change TREE_ADDRESSABLE if we need to preserve var as
	 a non-register.  Otherwise we are confused and forget to
	 add virtual operands for it.  */
      && (!is_gimple_reg_type (TREE_TYPE (var))
	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
	  || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
	  || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
    {
      TREE_ADDRESSABLE (var) = 0;
      /* Only now that the flag is clear can is_gimple_reg succeed.  */
      if (is_gimple_reg (var))
	bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
	{
	  fprintf (dump_file, "No longer having address taken: ");
	  print_generic_expr (dump_file, var, 0);
	  fprintf (dump_file, "\n");
	}
    }

  /* Promote complex/vector variables to gimple registers when nothing
     requires them to stay in memory, they are not volatile and they
     are not hard-register variables.  */
  if (!DECL_GIMPLE_REG_P (var)
      && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
      && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (var)
      && (!VAR_P (var) || !DECL_HARD_REGISTER (var)))
    {
      DECL_GIMPLE_REG_P (var) = 1;
      bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
      if (dump_file)
	{
	  fprintf (dump_file, "Now a gimple register: ");
	  print_generic_expr (dump_file, var, 0);
	  fprintf (dump_file, "\n");
	}
    }
}
1591 
/* Return true when STMT is ASAN mark where second argument is an address
   of a local variable that could become a gimple register (i.e. whose
   address-taken flag is the only thing keeping it in memory).  Variables
   carrying the use-after-scope attribute are excluded.  */

static bool
is_asan_mark_p (gimple *stmt)
{
  if (!gimple_call_internal_p (stmt, IFN_ASAN_MARK))
    return false;

  tree addr = get_base_address (gimple_call_arg (stmt, 1));
  if (TREE_CODE (addr) == ADDR_EXPR
      && VAR_P (TREE_OPERAND (addr, 0)))
    {
      tree var = TREE_OPERAND (addr, 0);
      if (lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			    DECL_ATTRIBUTES (var)))
	return false;

      /* Temporarily clear TREE_ADDRESSABLE so is_gimple_reg answers
	 for the hypothetical case where VAR loses its address-taken
	 flag; restore the original value before returning.  */
      unsigned addressable = TREE_ADDRESSABLE (var);
      TREE_ADDRESSABLE (var) = 0;
      bool r = is_gimple_reg (var);
      TREE_ADDRESSABLE (var) = addressable;
      return r;
    }

  return false;
}
1619 
1620 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.  */
1621 
1622 void
1623 execute_update_addresses_taken (void)
1624 {
1625   basic_block bb;
1626   bitmap addresses_taken = BITMAP_ALLOC (NULL);
1627   bitmap not_reg_needs = BITMAP_ALLOC (NULL);
1628   bitmap suitable_for_renaming = BITMAP_ALLOC (NULL);
1629   tree var;
1630   unsigned i;
1631 
1632   timevar_push (TV_ADDRESS_TAKEN);
1633 
1634   /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1635      the function body.  */
1636   FOR_EACH_BB_FN (bb, cfun)
1637     {
1638       for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1639 	   gsi_next (&gsi))
1640 	{
1641 	  gimple *stmt = gsi_stmt (gsi);
1642 	  enum gimple_code code = gimple_code (stmt);
1643 	  tree decl;
1644 
1645 	  if (code == GIMPLE_CALL)
1646 	    {
1647 	      if (optimize_atomic_compare_exchange_p (stmt))
1648 		{
1649 		  /* For __atomic_compare_exchange_N if the second argument
1650 		     is &var, don't mark var addressable;
1651 		     if it becomes non-addressable, we'll rewrite it into
1652 		     ATOMIC_COMPARE_EXCHANGE call.  */
1653 		  tree arg = gimple_call_arg (stmt, 1);
1654 		  gimple_call_set_arg (stmt, 1, null_pointer_node);
1655 		  gimple_ior_addresses_taken (addresses_taken, stmt);
1656 		  gimple_call_set_arg (stmt, 1, arg);
1657 		}
1658 	      else if (is_asan_mark_p (stmt)
1659 		       || gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER))
1660 		;
1661 	      else
1662 		gimple_ior_addresses_taken (addresses_taken, stmt);
1663 	    }
1664 	  else
1665 	    /* Note all addresses taken by the stmt.  */
1666 	    gimple_ior_addresses_taken (addresses_taken, stmt);
1667 
1668 	  /* If we have a call or an assignment, see if the lhs contains
1669 	     a local decl that requires not to be a gimple register.  */
1670 	  if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
1671 	    {
1672               tree lhs = gimple_get_lhs (stmt);
1673               if (lhs
1674 		  && TREE_CODE (lhs) != SSA_NAME
1675 		  && ((code == GIMPLE_CALL && ! DECL_P (lhs))
1676 		      || non_rewritable_lvalue_p (lhs)))
1677 		{
1678 		  decl = get_base_address (lhs);
1679 		  if (DECL_P (decl))
1680 		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1681                 }
1682 	    }
1683 
1684 	  if (gimple_assign_single_p (stmt))
1685 	    {
1686 	      tree rhs = gimple_assign_rhs1 (stmt);
1687 	      if ((decl = non_rewritable_mem_ref_base (rhs)))
1688 		bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1689 	    }
1690 
1691 	  else if (code == GIMPLE_CALL)
1692 	    {
1693 	      for (i = 0; i < gimple_call_num_args (stmt); ++i)
1694 		{
1695 		  tree arg = gimple_call_arg (stmt, i);
1696 		  if ((decl = non_rewritable_mem_ref_base (arg)))
1697 		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1698 		}
1699 	    }
1700 
1701 	  else if (code == GIMPLE_ASM)
1702 	    {
1703 	      gasm *asm_stmt = as_a <gasm *> (stmt);
1704 	      for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1705 		{
1706 		  tree link = gimple_asm_output_op (asm_stmt, i);
1707 		  tree lhs = TREE_VALUE (link);
1708 		  if (TREE_CODE (lhs) != SSA_NAME)
1709 		    {
1710 		      decl = get_base_address (lhs);
1711 		      if (DECL_P (decl)
1712 			  && (non_rewritable_lvalue_p (lhs)
1713 			      /* We cannot move required conversions from
1714 				 the lhs to the rhs in asm statements, so
1715 				 require we do not need any.  */
1716 			      || !useless_type_conversion_p
1717 			            (TREE_TYPE (lhs), TREE_TYPE (decl))))
1718 			bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1719 		    }
1720 		}
1721 	      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1722 		{
1723 		  tree link = gimple_asm_input_op (asm_stmt, i);
1724 		  if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
1725 		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1726 		}
1727 	    }
1728 	}
1729 
1730       for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1731 	   gsi_next (&gsi))
1732 	{
1733 	  size_t i;
1734 	  gphi *phi = gsi.phi ();
1735 
1736 	  for (i = 0; i < gimple_phi_num_args (phi); i++)
1737 	    {
1738 	      tree op = PHI_ARG_DEF (phi, i), var;
1739 	      if (TREE_CODE (op) == ADDR_EXPR
1740 		  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
1741 		  && DECL_P (var))
1742 		bitmap_set_bit (addresses_taken, DECL_UID (var));
1743 	    }
1744 	}
1745     }
1746 
1747   /* We cannot iterate over all referenced vars because that can contain
1748      unused vars from BLOCK trees, which causes code generation differences
1749      for -g vs. -g0.  */
1750   for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
1751     maybe_optimize_var (var, addresses_taken, not_reg_needs,
1752 			suitable_for_renaming);
1753 
1754   FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
1755     maybe_optimize_var (var, addresses_taken, not_reg_needs,
1756 			suitable_for_renaming);
1757 
1758   /* Operand caches need to be recomputed for operands referencing the updated
1759      variables and operands need to be rewritten to expose bare symbols.  */
1760   if (!bitmap_empty_p (suitable_for_renaming))
1761     {
1762       FOR_EACH_BB_FN (bb, cfun)
1763 	for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
1764 	  {
1765 	    gimple *stmt = gsi_stmt (gsi);
1766 
1767 	    /* Re-write TARGET_MEM_REFs of symbols we want to
1768 	       rewrite into SSA form.  */
1769 	    if (gimple_assign_single_p (stmt))
1770 	      {
1771 		tree lhs = gimple_assign_lhs (stmt);
1772 		tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
1773 		tree sym;
1774 
1775 		/* Rewrite LHS IMAG/REALPART_EXPR similar to
1776 		   gimplify_modify_expr_complex_part.  */
1777 		if ((TREE_CODE (lhs) == IMAGPART_EXPR
1778 		     || TREE_CODE (lhs) == REALPART_EXPR)
1779 		    && DECL_P (TREE_OPERAND (lhs, 0))
1780 		    && bitmap_bit_p (suitable_for_renaming,
1781 				     DECL_UID (TREE_OPERAND (lhs, 0))))
1782 		  {
1783 		    tree other = make_ssa_name (TREE_TYPE (lhs));
1784 		    tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
1785 					? REALPART_EXPR : IMAGPART_EXPR,
1786 					TREE_TYPE (other),
1787 					TREE_OPERAND (lhs, 0));
1788 		    gimple *load = gimple_build_assign (other, lrhs);
1789 		    location_t loc = gimple_location (stmt);
1790 		    gimple_set_location (load, loc);
1791 		    gimple_set_vuse (load, gimple_vuse (stmt));
1792 		    gsi_insert_before (&gsi, load, GSI_SAME_STMT);
1793 		    gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
1794 		    gimple_assign_set_rhs_with_ops
1795 		      (&gsi, COMPLEX_EXPR,
1796 		       TREE_CODE (lhs) == IMAGPART_EXPR
1797 		       ? other : gimple_assign_rhs1 (stmt),
1798 		       TREE_CODE (lhs) == IMAGPART_EXPR
1799 		       ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
1800 		    stmt = gsi_stmt (gsi);
1801 		    unlink_stmt_vdef (stmt);
1802 		    update_stmt (stmt);
1803 		    continue;
1804 		  }
1805 
1806 		/* Rewrite a vector insert via a BIT_FIELD_REF on the LHS
1807 		   into a BIT_INSERT_EXPR.  */
1808 		if (TREE_CODE (lhs) == BIT_FIELD_REF
1809 		    && DECL_P (TREE_OPERAND (lhs, 0))
1810 		    && bitmap_bit_p (suitable_for_renaming,
1811 				     DECL_UID (TREE_OPERAND (lhs, 0)))
1812 		    && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
1813 		    && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
1814 		    && types_compatible_p (TREE_TYPE (lhs),
1815 					   TREE_TYPE (TREE_TYPE
1816 						       (TREE_OPERAND (lhs, 0))))
1817 		    && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
1818 			% tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs))) == 0))
1819 		  {
1820 		    tree var = TREE_OPERAND (lhs, 0);
1821 		    tree val = gimple_assign_rhs1 (stmt);
1822 		    tree bitpos = TREE_OPERAND (lhs, 2);
1823 		    gimple_assign_set_lhs (stmt, var);
1824 		    gimple_assign_set_rhs_with_ops
1825 		      (&gsi, BIT_INSERT_EXPR, var, val, bitpos);
1826 		    stmt = gsi_stmt (gsi);
1827 		    unlink_stmt_vdef (stmt);
1828 		    update_stmt (stmt);
1829 		    continue;
1830 		  }
1831 
1832 		/* Rewrite a vector insert using a MEM_REF on the LHS
1833 		   into a BIT_INSERT_EXPR.  */
1834 		if (TREE_CODE (lhs) == MEM_REF
1835 		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1836 		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1837 		    && DECL_P (sym)
1838 		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
1839 		    && VECTOR_TYPE_P (TREE_TYPE (sym))
1840 		    && TYPE_MODE (TREE_TYPE (sym)) != BLKmode
1841 		    && types_compatible_p (TREE_TYPE (lhs),
1842 					   TREE_TYPE (TREE_TYPE (sym)))
1843 		    && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1))
1844 		    && tree_int_cst_lt (TREE_OPERAND (lhs, 1),
1845 					TYPE_SIZE_UNIT (TREE_TYPE (sym)))
1846 		    && (tree_to_uhwi (TREE_OPERAND (lhs, 1))
1847 			% tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0)
1848 		  {
1849 		    tree val = gimple_assign_rhs1 (stmt);
1850 		    tree bitpos
1851 		      = wide_int_to_tree (bitsizetype,
1852 					  mem_ref_offset (lhs) * BITS_PER_UNIT);
1853 		    gimple_assign_set_lhs (stmt, sym);
1854 		    gimple_assign_set_rhs_with_ops
1855 		      (&gsi, BIT_INSERT_EXPR, sym, val, bitpos);
1856 		    stmt = gsi_stmt (gsi);
1857 		    unlink_stmt_vdef (stmt);
1858 		    update_stmt (stmt);
1859 		    continue;
1860 		  }
1861 
1862 		/* We shouldn't have any fancy wrapping of
1863 		   component-refs on the LHS, but look through
1864 		   VIEW_CONVERT_EXPRs as that is easy.  */
1865 		while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
1866 		  lhs = TREE_OPERAND (lhs, 0);
1867 		if (TREE_CODE (lhs) == MEM_REF
1868 		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1869 		    && integer_zerop (TREE_OPERAND (lhs, 1))
1870 		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1871 		    && DECL_P (sym)
1872 		    && !TREE_ADDRESSABLE (sym)
1873 		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1874 		  lhs = sym;
1875 		else
1876 		  lhs = gimple_assign_lhs (stmt);
1877 
1878 		/* Rewrite the RHS and make sure the resulting assignment
1879 		   is validly typed.  */
1880 		maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
1881 		rhs = gimple_assign_rhs1 (stmt);
1882 		if (gimple_assign_lhs (stmt) != lhs
1883 		    && !useless_type_conversion_p (TREE_TYPE (lhs),
1884 						   TREE_TYPE (rhs)))
1885 		  {
1886 		    if (gimple_clobber_p (stmt))
1887 		      {
1888 			rhs = build_constructor (TREE_TYPE (lhs), NULL);
1889 			TREE_THIS_VOLATILE (rhs) = 1;
1890 		      }
1891 		    else
1892 		      rhs = fold_build1 (VIEW_CONVERT_EXPR,
1893 					 TREE_TYPE (lhs), rhs);
1894 		  }
1895 		if (gimple_assign_lhs (stmt) != lhs)
1896 		  gimple_assign_set_lhs (stmt, lhs);
1897 
1898 		if (gimple_assign_rhs1 (stmt) != rhs)
1899 		  {
1900 		    gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1901 		    gimple_assign_set_rhs_from_tree (&gsi, rhs);
1902 		  }
1903 	      }
1904 
1905 	    else if (gimple_code (stmt) == GIMPLE_CALL)
1906 	      {
1907 		unsigned i;
1908 		if (optimize_atomic_compare_exchange_p (stmt))
1909 		  {
1910 		    tree expected = gimple_call_arg (stmt, 1);
1911 		    if (bitmap_bit_p (suitable_for_renaming,
1912 				      DECL_UID (TREE_OPERAND (expected, 0))))
1913 		      {
1914 			fold_builtin_atomic_compare_exchange (&gsi);
1915 			continue;
1916 		      }
1917 		  }
1918 		else if (is_asan_mark_p (stmt))
1919 		  {
1920 		    tree var = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
1921 		    if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
1922 		      {
1923 			unlink_stmt_vdef (stmt);
1924 			if (asan_mark_p (stmt, ASAN_MARK_POISON))
1925 			  {
1926 			    gcall *call
1927 			      = gimple_build_call_internal (IFN_ASAN_POISON, 0);
1928 			    gimple_call_set_lhs (call, var);
1929 			    gsi_replace (&gsi, call, GSI_SAME_STMT);
1930 			  }
1931 			else
1932 			  {
1933 			    /* In ASAN_MARK (UNPOISON, &b, ...) the variable
1934 			       is uninitialized.  Avoid dependencies on
1935 			       previous out of scope value.  */
1936 			    tree clobber
1937 			      = build_constructor (TREE_TYPE (var), NULL);
1938 			    TREE_THIS_VOLATILE (clobber) = 1;
1939 			    gimple *g = gimple_build_assign (var, clobber);
1940 			    gsi_replace (&gsi, g, GSI_SAME_STMT);
1941 			  }
1942 			continue;
1943 		      }
1944 		  }
1945 		else if (gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER))
1946 		  for (i = 1; i < gimple_call_num_args (stmt); i++)
1947 		    {
1948 		      tree *argp = gimple_call_arg_ptr (stmt, i);
1949 		      if (*argp == null_pointer_node)
1950 			continue;
1951 		      gcc_assert (TREE_CODE (*argp) == ADDR_EXPR
1952 				  && VAR_P (TREE_OPERAND (*argp, 0)));
1953 		      tree var = TREE_OPERAND (*argp, 0);
1954 		      if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
1955 			*argp = null_pointer_node;
1956 		    }
1957 		for (i = 0; i < gimple_call_num_args (stmt); ++i)
1958 		  {
1959 		    tree *argp = gimple_call_arg_ptr (stmt, i);
1960 		    maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
1961 		  }
1962 	      }
1963 
1964 	    else if (gimple_code (stmt) == GIMPLE_ASM)
1965 	      {
1966 		gasm *asm_stmt = as_a <gasm *> (stmt);
1967 		unsigned i;
1968 		for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1969 		  {
1970 		    tree link = gimple_asm_output_op (asm_stmt, i);
1971 		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
1972 						suitable_for_renaming);
1973 		  }
1974 		for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1975 		  {
1976 		    tree link = gimple_asm_input_op (asm_stmt, i);
1977 		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
1978 						suitable_for_renaming);
1979 		  }
1980 	      }
1981 
1982 	    else if (gimple_debug_bind_p (stmt)
1983 		     && gimple_debug_bind_has_value_p (stmt))
1984 	      {
1985 		tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
1986 		tree decl;
1987 		maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
1988 		decl = non_rewritable_mem_ref_base (*valuep);
1989 		if (decl
1990 		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
1991 		  gimple_debug_bind_reset_value (stmt);
1992 	      }
1993 
1994 	    if (gimple_references_memory_p (stmt)
1995 		|| is_gimple_debug (stmt))
1996 	      update_stmt (stmt);
1997 
1998 	    gsi_next (&gsi);
1999 	  }
2000 
2001       /* Update SSA form here, we are called as non-pass as well.  */
2002       if (number_of_loops (cfun) > 1
2003 	  && loops_state_satisfies_p (LOOP_CLOSED_SSA))
2004 	rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
2005       else
2006 	update_ssa (TODO_update_ssa);
2007     }
2008 
2009   BITMAP_FREE (not_reg_needs);
2010   BITMAP_FREE (addresses_taken);
2011   BITMAP_FREE (suitable_for_renaming);
2012   timevar_pop (TV_ADDRESS_TAKEN);
2013 }
2014 
namespace {

/* Pass descriptor for the "addressables" pass.  Note the pass itself
   does no work in an execute hook; all of its effect comes from the
   TODO_update_address_taken finish flag, which the pass manager acts on
   after the (empty) pass runs.  The initializer is positional and must
   match the field order of struct pass_data declared in tree-pass.h.  */

const pass_data pass_data_update_address_taken =
{
  GIMPLE_PASS, /* type */
  "addressables", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_ADDRESS_TAKEN, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_address_taken, /* todo_flags_finish */
};

/* Trivial gimple_opt_pass wrapper around the descriptor above.  It
   overrides no opt_pass methods, so running it only triggers the
   TODO_update_address_taken cleanup via todo_flags_finish.  */

class pass_update_address_taken : public gimple_opt_pass
{
public:
  pass_update_address_taken (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_update_address_taken, ctxt)
  {}

  /* opt_pass methods: */

}; // class pass_update_address_taken

} // anon namespace
2042 
2043 gimple_opt_pass *
2044 make_pass_update_address_taken (gcc::context *ctxt)
2045 {
2046   return new pass_update_address_taken (ctxt);
2047 }
2048