1 /* Miscellaneous SSA utility functions.
2    Copyright (C) 2001-2019 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "gimple.h"
26 #include "cfghooks.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "diagnostic-core.h"
31 #include "fold-const.h"
32 #include "stor-layout.h"
33 #include "gimple-fold.h"
34 #include "gimplify.h"
35 #include "gimple-iterator.h"
36 #include "gimple-walk.h"
37 #include "tree-ssa-loop-manip.h"
38 #include "tree-into-ssa.h"
39 #include "tree-ssa.h"
40 #include "cfgloop.h"
41 #include "cfgexpand.h"
42 #include "tree-cfg.h"
43 #include "tree-dfa.h"
44 #include "stringpool.h"
45 #include "attribs.h"
46 #include "asan.h"
47 
48 /* Hash map of variable mappings, keyed by edge.  */
49 static hash_map<edge, auto_vec<edge_var_map> > *edge_var_maps;
50 
51 
52 /* Add a mapping with PHI RESULT and PHI DEF associated with edge E.  */
53 
54 void
55 redirect_edge_var_map_add (edge e, tree result, tree def, location_t locus)
56 {
57   edge_var_map new_node;
58 
59   if (edge_var_maps == NULL)
60     edge_var_maps = new hash_map<edge, auto_vec<edge_var_map> >;
61 
62   auto_vec<edge_var_map> &slot = edge_var_maps->get_or_insert (e);
63   new_node.def = def;
64   new_node.result = result;
65   new_node.locus = locus;
66 
67   slot.safe_push (new_node);
68 }
69 
70 
71 /* Clear the var mappings in edge E.  */
72 
73 void
74 redirect_edge_var_map_clear (edge e)
75 {
76   if (!edge_var_maps)
77     return;
78 
79   auto_vec<edge_var_map> *head = edge_var_maps->get (e);
80 
81   if (head)
82     head->release ();
83 }
84 
85 
86 /* Duplicate the redirected var mappings from OLDE onto NEWE.
87 
88    This assumes a hash_map can have multiple edges mapping to the
89    same var_map (a many-to-one mapping), since we don't remove the
90    previous mappings.  */
91 
92 void
93 redirect_edge_var_map_dup (edge newe, edge olde)
94 {
95   if (!edge_var_maps)
96     return;
97 
98   auto_vec<edge_var_map> *new_head = &edge_var_maps->get_or_insert (newe);
99   auto_vec<edge_var_map> *old_head = edge_var_maps->get (olde);
100   if (!old_head)
101     return;
102 
103   new_head->safe_splice (*old_head);
104 }
105 
106 
107 /* Return the variable mappings for a given edge.  If there is none, return
108    NULL.  */
109 
110 vec<edge_var_map> *
111 redirect_edge_var_map_vector (edge e)
112 {
113   /* The map is created lazily, so it may not exist yet.  */
114   if (!edge_var_maps)
115     return NULL;
116 
117   auto_vec<edge_var_map> *slot = edge_var_maps->get (e);
118   if (!slot)
119     return NULL;
120 
121   return slot;
122 }
123 
124 /* Clear the edge variable mappings.  */
125 
126 void
127 redirect_edge_var_map_empty (void)
128 {
129   if (edge_var_maps)
130     edge_var_maps->empty ();
131 }
132 
133 
134 /* Remove the corresponding arguments from the PHI nodes in E's
135    destination block and redirect it to DEST.  Return redirected edge.
136    The list of removed arguments is stored in a vector accessed
137    through edge_var_maps.  */
138 
139 edge
140 ssa_redirect_edge (edge e, basic_block dest)
141 {
142   gphi_iterator gsi;
143   gphi *phi;
144 
145   redirect_edge_var_map_clear (e);
146 
147   /* Remove the appropriate PHI arguments in E's destination block.
148      If we are redirecting a copied edge, the destination has neither
149      PHI argument space reserved nor an interesting argument.  */
150   if (! (e->dest->flags & BB_DUPLICATED))
151     for (gsi = gsi_start_phis (e->dest); !gsi_end_p (gsi); gsi_next (&gsi))
152       {
153 	tree def;
154 	location_t locus;
155 
156 	phi = gsi.phi ();
157 	def = gimple_phi_arg_def (phi, e->dest_idx);
158 	locus = gimple_phi_arg_location (phi, e->dest_idx);
159 
160 	if (def == NULL_TREE)
161 	  continue;
162 
163 	redirect_edge_var_map_add (e, gimple_phi_result (phi), def, locus);
164       }
165 
166   e = redirect_edge_succ_nodup (e, dest);
167 
168   return e;
169 }
170 
171 
172 /* Add PHI arguments queued in PENDING_STMT list on edge E to edge
173    E->dest.  */
174 
175 void
176 flush_pending_stmts (edge e)
177 {
178   gphi *phi;
179   edge_var_map *vm;
180   int i;
181   gphi_iterator gsi;
182 
183   vec<edge_var_map> *v = redirect_edge_var_map_vector (e);
184   if (!v)
185     return;
186 
187   for (gsi = gsi_start_phis (e->dest), i = 0;
188        !gsi_end_p (gsi) && v->iterate (i, &vm);
189        gsi_next (&gsi), i++)
190     {
191       tree def;
192 
193       phi = gsi.phi ();
194       def = redirect_edge_var_map_def (vm);
195       add_phi_arg (phi, def, e, redirect_edge_var_map_location (vm));
196     }
197 
198   redirect_edge_var_map_clear (e);
199 }
200 
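/* A minimal usage sketch of the two helpers above, assuming a
   hypothetical caller that owns an edge E and a basic_block NEW_DEST;
   this is not taken from any particular pass, it only illustrates the
   intended pairing:

     edge e2 = ssa_redirect_edge (e, new_dest);
     ...  create or reorder PHI nodes in NEW_DEST here ...
     flush_pending_stmts (e2);

   ssa_redirect_edge saves the PHI arguments that E carried into the
   per-edge var map before redirecting; flush_pending_stmts later walks
   the PHI nodes of the new destination in order and re-attaches the
   saved arguments to the redirected edge.  */
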
201 /* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
202    GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
203    expression with a different value.
204 
205    This will update any annotations (say debug bind stmts) referring
206    to the original LHS, so that they use the RHS instead.  This is
207    done even if NLHS and LHS are the same, for it is understood that
208    the RHS will be modified afterwards, and NLHS will not be assigned
209    an equivalent value.
210 
211    Adjusting any non-annotation uses of the LHS, if needed, is a
212    responsibility of the caller.
213 
214    The effect of this call should be pretty much the same as that of
215    inserting a copy of STMT before STMT, and then removing the
216    original stmt, at which time gsi_remove() would have updated the
217    annotations, but using this function saves all the inserting,
218    copying and removing.  */
219 
220 void
221 gimple_replace_ssa_lhs (gimple *stmt, tree nlhs)
222 {
223   if (MAY_HAVE_DEBUG_BIND_STMTS)
224     {
225       tree lhs = gimple_get_lhs (stmt);
226 
227       gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
228 
229       insert_debug_temp_for_var_def (NULL, lhs);
230     }
231 
232   gimple_set_lhs (stmt, nlhs);
233 }
234 
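/* A hypothetical use of the helper above: a pass about to replace the
   RHS of `x_3 = a_1 + b_2;' with an unrelated value first calls

     gimple_replace_ssa_lhs (stmt, gimple_get_lhs (stmt));

   so that any debug binds mentioning x_3 are redirected to a debug
   temp holding `a_1 + b_2' before the RHS changes.  */
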
235 
236 /* Given a tree for an expression for which we might want to emit
237    locations or values in debug information (generally a variable, but
238    we might deal with other kinds of trees in the future), return the
239    tree that should be used as the variable of a DEBUG_BIND STMT or
240    VAR_LOCATION INSN or NOTE.  Return NULL if VAR is not to be tracked.  */
241 
242 tree
243 target_for_debug_bind (tree var)
244 {
245   if (!MAY_HAVE_DEBUG_BIND_STMTS)
246     return NULL_TREE;
247 
248   if (TREE_CODE (var) == SSA_NAME)
249     {
250       var = SSA_NAME_VAR (var);
251       if (var == NULL_TREE)
252 	return NULL_TREE;
253     }
254 
255   if ((!VAR_P (var) || VAR_DECL_IS_VIRTUAL_OPERAND (var))
256       && TREE_CODE (var) != PARM_DECL)
257     return NULL_TREE;
258 
259   if (DECL_HAS_VALUE_EXPR_P (var))
260     return target_for_debug_bind (DECL_VALUE_EXPR (var));
261 
262   if (DECL_IGNORED_P (var))
263     return NULL_TREE;
264 
265   /* var-tracking only tracks registers.  */
266   if (!is_gimple_reg_type (TREE_TYPE (var)))
267     return NULL_TREE;
268 
269   return var;
270 }
271 
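/* Illustrative behavior of target_for_debug_bind, using hypothetical
   trees: for an SSA name x_3 whose SSA_NAME_VAR is a user-visible,
   register-type local `x', the function returns `x'; for an anonymous
   SSA name, a DECL_IGNORED_P temporary, a virtual operand or an
   aggregate-typed variable it returns NULL_TREE, meaning no debug
   bind should be emitted for it.  */
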
272 /* Called via walk_tree, look for SSA_NAMEs that have already been
273    released.  */
274 
275 static tree
276 find_released_ssa_name (tree *tp, int *walk_subtrees, void *data_)
277 {
278   struct walk_stmt_info *wi = (struct walk_stmt_info *) data_;
279 
280   if (wi && wi->is_lhs)
281     return NULL_TREE;
282 
283   if (TREE_CODE (*tp) == SSA_NAME)
284     {
285       if (SSA_NAME_IN_FREE_LIST (*tp))
286 	return *tp;
287 
288       *walk_subtrees = 0;
289     }
290   else if (IS_TYPE_OR_DECL_P (*tp))
291     *walk_subtrees = 0;
292 
293   return NULL_TREE;
294 }
295 
296 /* Insert a DEBUG BIND stmt before the DEF of VAR if VAR is referenced
297    by other DEBUG stmts, and replace uses of the DEF with the
298    newly-created debug temp.  */
299 
300 void
301 insert_debug_temp_for_var_def (gimple_stmt_iterator *gsi, tree var)
302 {
303   imm_use_iterator imm_iter;
304   use_operand_p use_p;
305   gimple *stmt;
306   gimple *def_stmt = NULL;
307   int usecount = 0;
308   tree value = NULL;
309 
310   if (!MAY_HAVE_DEBUG_BIND_STMTS)
311     return;
312 
313   /* If this name has already been registered for replacement, do nothing
314      as anything that uses this name isn't in SSA form.  */
315   if (name_registered_for_update_p (var))
316     return;
317 
318   /* Check whether there are debug stmts that reference this variable and,
319      if there are, decide whether we should use a debug temp.  */
320   FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
321     {
322       stmt = USE_STMT (use_p);
323 
324       if (!gimple_debug_bind_p (stmt))
325 	continue;
326 
327       if (usecount++)
328 	break;
329 
330       if (gimple_debug_bind_get_value (stmt) != var)
331 	{
332 	  /* Count this as an additional use, so as to make sure we
333 	     use a temp unless VAR's definition has a SINGLE_RHS that
334 	     can be shared.  */
335 	  usecount++;
336 	  break;
337 	}
338     }
339 
340   if (!usecount)
341     return;
342 
343   if (gsi)
344     def_stmt = gsi_stmt (*gsi);
345   else
346     def_stmt = SSA_NAME_DEF_STMT (var);
347 
348   /* If we didn't get an insertion point, and the stmt has already
349      been removed, we won't be able to insert the debug bind stmt, so
350      we'll have to drop debug information.  */
351   if (gimple_code (def_stmt) == GIMPLE_PHI)
352     {
353       value = degenerate_phi_result (as_a <gphi *> (def_stmt));
354       if (value && walk_tree (&value, find_released_ssa_name, NULL, NULL))
355 	value = NULL;
356       /* error_mark_node is what fixup_noreturn_call changes PHI arguments
357 	 to.  */
358       else if (value == error_mark_node)
359 	value = NULL;
360     }
361   else if (is_gimple_assign (def_stmt))
362     {
363       bool no_value = false;
364 
365       if (!dom_info_available_p (CDI_DOMINATORS))
366 	{
367 	  struct walk_stmt_info wi;
368 
369 	  memset (&wi, 0, sizeof (wi));
370 
371 	  /* When removing blocks without following reverse dominance
372 	     order, we may sometimes encounter SSA_NAMEs that have
373 	     already been released, referenced in other SSA_DEFs that
374 	     we're about to release.  Consider:
375 
376 	     <bb X>:
377 	     v_1 = foo;
378 
379 	     <bb Y>:
380 	     w_2 = v_1 + bar;
381 	     # DEBUG w => w_2
382 
383 	     If we deleted BB X first, propagating the value of w_2
384 	     won't do us any good.  It's too late to recover the
385 	     original definition of v_1: when it was deleted, it was
386 	     only referenced in other DEFs, it couldn't possibly know
387 	     it should have been retained, and propagating every
388 	     single DEF just in case it might have to be propagated
389 	     into a DEBUG STMT would probably be too wasteful.
390 
391 	     When dominator information is not readily available, we
392 	     check for and accept some loss of debug information.  But
393 	     if it is available, there's no excuse for us to remove
394 	     blocks in the wrong order, so we don't even check for
395 	     dead SSA NAMEs.  SSA verification shall catch any
396 	     errors.  */
397 	  if ((!gsi && !gimple_bb (def_stmt))
398 	      || walk_gimple_op (def_stmt, find_released_ssa_name, &wi))
399 	    no_value = true;
400 	}
401 
402       if (!no_value)
403 	value = gimple_assign_rhs_to_tree (def_stmt);
404     }
405 
406   if (value)
407     {
408       /* If there's a single use of VAR, and VAR is the entire debug
409 	 expression (usecount would have been incremented again
410 	 otherwise), and the definition involves only constants and
411 	 SSA names, then we can propagate VALUE into this single use,
412 	 avoiding the temp.
413 
414 	 We can also avoid using a temp if VALUE can be shared and
415 	 propagated into all uses, without generating expressions that
416 	 wouldn't be valid gimple RHSs.
417 
418 	 Other cases that would require unsharing or non-gimple RHSs
419 	 are deferred to a debug temp, although we could avoid temps
420 	 at the expense of duplication of expressions.  */
421 
422       if (CONSTANT_CLASS_P (value)
423 	  || gimple_code (def_stmt) == GIMPLE_PHI
424 	  || (usecount == 1
425 	      && (!gimple_assign_single_p (def_stmt)
426 		  || is_gimple_min_invariant (value)))
427 	  || is_gimple_reg (value))
428 	;
429       else
430 	{
431 	  gdebug *def_temp;
432 	  tree vexpr = make_node (DEBUG_EXPR_DECL);
433 
434 	  def_temp = gimple_build_debug_bind (vexpr,
435 					      unshare_expr (value),
436 					      def_stmt);
437 
438 	  DECL_ARTIFICIAL (vexpr) = 1;
439 	  TREE_TYPE (vexpr) = TREE_TYPE (value);
440 	  if (DECL_P (value))
441 	    SET_DECL_MODE (vexpr, DECL_MODE (value));
442 	  else
443 	    SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (value)));
444 
445 	  if (gsi)
446 	    gsi_insert_before (gsi, def_temp, GSI_SAME_STMT);
447 	  else
448 	    {
449 	      gimple_stmt_iterator ngsi = gsi_for_stmt (def_stmt);
450 	      gsi_insert_before (&ngsi, def_temp, GSI_SAME_STMT);
451 	    }
452 
453 	  value = vexpr;
454 	}
455     }
456 
457   FOR_EACH_IMM_USE_STMT (stmt, imm_iter, var)
458     {
459       if (!gimple_debug_bind_p (stmt))
460 	continue;
461 
462       if (value)
463 	{
464 	  FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
465 	    /* VALUE is either a SINGLE_RHS that can be safely shared,
466 	       some other RHS that was unshared when we found it had a
467 	       single debug use, or a DEBUG_EXPR_DECL; unshare_expr
468 	       gives each debug use its own copy.  */
470 	    SET_USE (use_p, unshare_expr (value));
471 	  /* If we didn't replace uses with a debug decl fold the
472 	     resulting expression.  Otherwise we end up with invalid IL.  */
473 	  if (TREE_CODE (value) != DEBUG_EXPR_DECL)
474 	    {
475 	      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
476 	      fold_stmt_inplace (&gsi);
477 	    }
478 	}
479       else
480 	gimple_debug_bind_reset_value (stmt);
481 
482       update_stmt (stmt);
483     }
484 }
485 
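/* A hypothetical before/after sketch of the transformation above, for
   the case where a debug temp is needed.  Given IL like

       x_3 = a_1 + b_2;
       # DEBUG y => x_3 * 2

   and a request to replace the definition of x_3, the function emits

       # DEBUG D#1 => a_1 + b_2
       x_3 = a_1 + b_2;
       # DEBUG y => D#1 * 2

   so that removing or rewriting the assignment to x_3 afterwards does
   not lose the debug value of y.  In the simpler cases listed above
   (constant, register, or shareable single-use VALUE) the RHS is
   propagated directly into the debug stmt instead.  */
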
486 
487 /* Insert a DEBUG BIND stmt before STMT for each DEF referenced by
488    other DEBUG stmts, and replace uses of the DEF with the
489    newly-created debug temp.  */
490 
491 void
492 insert_debug_temps_for_defs (gimple_stmt_iterator *gsi)
493 {
494   gimple *stmt;
495   ssa_op_iter op_iter;
496   def_operand_p def_p;
497 
498   if (!MAY_HAVE_DEBUG_BIND_STMTS)
499     return;
500 
501   stmt = gsi_stmt (*gsi);
502 
503   FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
504     {
505       tree var = DEF_FROM_PTR (def_p);
506 
507       if (TREE_CODE (var) != SSA_NAME)
508 	continue;
509 
510       insert_debug_temp_for_var_def (gsi, var);
511     }
512 }
513 
514 /* Reset all debug stmts that use SSA_NAME(s) defined in STMT.  */
515 
516 void
517 reset_debug_uses (gimple *stmt)
518 {
519   ssa_op_iter op_iter;
520   def_operand_p def_p;
521   imm_use_iterator imm_iter;
522   gimple *use_stmt;
523 
524   if (!MAY_HAVE_DEBUG_BIND_STMTS)
525     return;
526 
527   FOR_EACH_PHI_OR_STMT_DEF (def_p, stmt, op_iter, SSA_OP_DEF)
528     {
529       tree var = DEF_FROM_PTR (def_p);
530 
531       if (TREE_CODE (var) != SSA_NAME)
532 	continue;
533 
534       FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, var)
535 	{
536 	  if (!gimple_debug_bind_p (use_stmt))
537 	    continue;
538 
539 	  gimple_debug_bind_reset_value (use_stmt);
540 	  update_stmt (use_stmt);
541 	}
542     }
543 }
544 
545 /* Delete SSA DEFs for SSA versions in the TOREMOVE bitmap, removing
546    dominated stmts before their dominators, so that release_ssa_defs
547    stands a chance of propagating DEFs into debug bind stmts.  */
548 
549 void
550 release_defs_bitset (bitmap toremove)
551 {
552   unsigned j;
553   bitmap_iterator bi;
554 
555   /* Performing a topological sort is probably overkill; this will
556      most likely run in slightly superlinear time, rather than the
557      pathological quadratic worst case.
558      But iterate from the maximum SSA name version to the minimum one,
559      because that best mimics the allocation order during code generation.
560      Use an array for this which we compact on-the-fly with a NULL
561      marker moving towards the end of the vector.  */
562   auto_vec<tree, 16> names;
563   names.reserve (bitmap_count_bits (toremove) + 1);
564   names.quick_push (NULL_TREE);
565   EXECUTE_IF_SET_IN_BITMAP (toremove, 0, j, bi)
566     names.quick_push (ssa_name (j));
567 
568   bitmap_tree_view (toremove);
569   while (!bitmap_empty_p (toremove))
570     {
571       j = names.length () - 1;
572       for (unsigned i = names.length () - 1; names[i];)
573 	{
574 	  bool remove_now = true;
575 	  tree var = names[i];
576 	  gimple *stmt;
577 	  imm_use_iterator uit;
578 
579 	  FOR_EACH_IMM_USE_STMT (stmt, uit, var)
580 	    {
581 	      ssa_op_iter dit;
582 	      def_operand_p def_p;
583 
584 	      /* We can't propagate PHI nodes into debug stmts.  */
585 	      if (gimple_code (stmt) == GIMPLE_PHI
586 		  || is_gimple_debug (stmt))
587 		continue;
588 
589 	      /* If we find another definition to remove that uses
590 		 the one we're looking at, defer the removal of this
591 		 one, so that it can be propagated into debug stmts
592 		 after the other is.  */
593 	      FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, dit, SSA_OP_DEF)
594 		{
595 		  tree odef = DEF_FROM_PTR (def_p);
596 
597 		  if (bitmap_bit_p (toremove, SSA_NAME_VERSION (odef)))
598 		    {
599 		      remove_now = false;
600 		      break;
601 		    }
602 		}
603 
604 	      if (!remove_now)
605 		BREAK_FROM_IMM_USE_STMT (uit);
606 	    }
607 
608 	  if (remove_now)
609 	    {
610 	      gimple *def = SSA_NAME_DEF_STMT (var);
611 	      gimple_stmt_iterator gsi = gsi_for_stmt (def);
612 
613 	      if (gimple_code (def) == GIMPLE_PHI)
614 		remove_phi_node (&gsi, true);
615 	      else
616 		{
617 		  gsi_remove (&gsi, true);
618 		  release_defs (def);
619 		}
620 	      bitmap_clear_bit (toremove, SSA_NAME_VERSION (var));
621 	    }
622 	  else
623 	    --i;
624 	  if (--j != i)
625 	    names[i] = names[j];
626 	}
627     }
628   bitmap_list_view (toremove);
629 }
630 
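/* A small hypothetical example of the deferral logic above: if both
   x_2 and y_3 are in TOREMOVE and the IL contains

       x_2 = ...;
       y_3 = x_2 + 1;
       # DEBUG d => y_3

   then y_3 is removed first (its uses contain no other name from
   TOREMOVE), giving the debug stmt a chance to capture x_2 + 1;
   only afterwards is x_2 removed and propagated in turn.  */
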
631 /* Verify virtual SSA form.  */
632 
633 bool
634 verify_vssa (basic_block bb, tree current_vdef, sbitmap visited)
635 {
636   bool err = false;
637 
638   if (bitmap_bit_p (visited, bb->index))
639     return false;
640 
641   bitmap_set_bit (visited, bb->index);
642 
643   /* Pick up the single virtual PHI def.  */
644   gphi *phi = NULL;
645   for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);
646        gsi_next (&si))
647     {
648       tree res = gimple_phi_result (si.phi ());
649       if (virtual_operand_p (res))
650 	{
651 	  if (phi)
652 	    {
653 	      error ("multiple virtual PHI nodes in BB %d", bb->index);
654 	      print_gimple_stmt (stderr, phi, 0);
655 	      print_gimple_stmt (stderr, si.phi (), 0);
656 	      err = true;
657 	    }
658 	  else
659 	    phi = si.phi ();
660 	}
661     }
662   if (phi)
663     {
664       current_vdef = gimple_phi_result (phi);
665       if (TREE_CODE (current_vdef) != SSA_NAME)
666 	{
667 	  error ("virtual definition is not an SSA name");
668 	  print_gimple_stmt (stderr, phi, 0);
669 	  err = true;
670 	}
671     }
672 
673   /* Verify stmts.  */
674   for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
675        gsi_next (&gsi))
676     {
677       gimple *stmt = gsi_stmt (gsi);
678       tree vuse = gimple_vuse (stmt);
679       if (vuse)
680 	{
681 	  if (vuse != current_vdef)
682 	    {
683 	      error ("stmt with wrong VUSE");
684 	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
685 	      fprintf (stderr, "expected ");
686 	      print_generic_expr (stderr, current_vdef);
687 	      fprintf (stderr, "\n");
688 	      err = true;
689 	    }
690 	  tree vdef = gimple_vdef (stmt);
691 	  if (vdef)
692 	    {
693 	      current_vdef = vdef;
694 	      if (TREE_CODE (current_vdef) != SSA_NAME)
695 		{
696 		  error ("virtual definition is not an SSA name");
697 		  print_gimple_stmt (stderr, stmt, 0);
698 		  err = true;
699 		}
700 	    }
701 	}
702     }
703 
704   /* Verify destination PHI uses and recurse.  */
705   edge_iterator ei;
706   edge e;
707   FOR_EACH_EDGE (e, ei, bb->succs)
708     {
709       gphi *phi = get_virtual_phi (e->dest);
710       if (phi
711 	  && PHI_ARG_DEF_FROM_EDGE (phi, e) != current_vdef)
712 	{
713 	  error ("PHI node with wrong VUSE on edge from BB %d",
714 		 e->src->index);
715 	  print_gimple_stmt (stderr, phi, 0, TDF_VOPS);
716 	  fprintf (stderr, "expected ");
717 	  print_generic_expr (stderr, current_vdef);
718 	  fprintf (stderr, "\n");
719 	  err = true;
720 	}
721 
722       /* Recurse.  */
723       err |= verify_vssa (e->dest, current_vdef, visited);
724     }
725 
726   return err;
727 }
728 
729 /* Return true if SSA_NAME is malformed.
730 
731    IS_VIRTUAL is true if this SSA_NAME was found inside a virtual
732       operand.  */
733 
734 static bool
735 verify_ssa_name (tree ssa_name, bool is_virtual)
736 {
737   if (TREE_CODE (ssa_name) != SSA_NAME)
738     {
739       error ("expected an SSA_NAME object");
740       return true;
741     }
742 
743   if (SSA_NAME_IN_FREE_LIST (ssa_name))
744     {
745       error ("found an SSA_NAME that had been released into the free pool");
746       return true;
747     }
748 
749   if (SSA_NAME_VAR (ssa_name) != NULL_TREE
750       && TREE_TYPE (ssa_name) != TREE_TYPE (SSA_NAME_VAR (ssa_name)))
751     {
752       error ("type mismatch between an SSA_NAME and its symbol");
753       return true;
754     }
755 
756   if (is_virtual && !virtual_operand_p (ssa_name))
757     {
758       error ("found a virtual definition for a GIMPLE register");
759       return true;
760     }
761 
762   if (is_virtual && SSA_NAME_VAR (ssa_name) != gimple_vop (cfun))
763     {
764       error ("virtual SSA name for non-VOP decl");
765       return true;
766     }
767 
768   if (!is_virtual && virtual_operand_p (ssa_name))
769     {
770       error ("found a real definition for a non-register");
771       return true;
772     }
773 
774   if (SSA_NAME_IS_DEFAULT_DEF (ssa_name)
775       && !gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name)))
776     {
777       error ("found a default name with a non-empty defining statement");
778       return true;
779     }
780 
781   return false;
782 }
783 
784 
785 /* Return true if the definition of SSA_NAME at block BB is malformed.
786 
787    STMT is the statement where SSA_NAME is created.
788 
789    DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
790       version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
791       it means that the block in that array slot contains the
792       definition of SSA_NAME.
793 
794    IS_VIRTUAL is true if SSA_NAME is created by a VDEF.  */
795 
796 static bool
797 verify_def (basic_block bb, basic_block *definition_block, tree ssa_name,
798 	    gimple *stmt, bool is_virtual)
799 {
800   if (verify_ssa_name (ssa_name, is_virtual))
801     goto err;
802 
803   if (SSA_NAME_VAR (ssa_name)
804       && TREE_CODE (SSA_NAME_VAR (ssa_name)) == RESULT_DECL
805       && DECL_BY_REFERENCE (SSA_NAME_VAR (ssa_name)))
806     {
807       error ("RESULT_DECL should be read only when DECL_BY_REFERENCE is set");
808       goto err;
809     }
810 
811   if (definition_block[SSA_NAME_VERSION (ssa_name)])
812     {
813       error ("SSA_NAME created in two different blocks %i and %i",
814 	     definition_block[SSA_NAME_VERSION (ssa_name)]->index, bb->index);
815       goto err;
816     }
817 
818   definition_block[SSA_NAME_VERSION (ssa_name)] = bb;
819 
820   if (SSA_NAME_DEF_STMT (ssa_name) != stmt)
821     {
822       error ("SSA_NAME_DEF_STMT is wrong");
823       fprintf (stderr, "Expected definition statement:\n");
824       print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (ssa_name), 4, TDF_VOPS);
825       fprintf (stderr, "\nActual definition statement:\n");
826       print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
827       goto err;
828     }
829 
830   return false;
831 
832 err:
833   fprintf (stderr, "while verifying SSA_NAME ");
834   print_generic_expr (stderr, ssa_name);
835   fprintf (stderr, " in statement\n");
836   print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
837 
838   return true;
839 }
840 
841 
842 /* Return true if the use of SSA_NAME at statement STMT in block BB is
843    malformed.
844 
845    DEF_BB is the block where SSA_NAME was found to be created.
846 
847    IDOM contains immediate dominator information for the flowgraph.
848 
849    CHECK_ABNORMAL is true if the caller wants to check whether this use
850       is flowing through an abnormal edge (only used when checking PHI
851       arguments).
852 
853    If NAMES_DEFINED_IN_BB is not NULL, it contains a bitmap of ssa names
854      that are defined before STMT in basic block BB.  */
855 
856 static bool
857 verify_use (basic_block bb, basic_block def_bb, use_operand_p use_p,
858 	    gimple *stmt, bool check_abnormal, bitmap names_defined_in_bb)
859 {
860   bool err = false;
861   tree ssa_name = USE_FROM_PTR (use_p);
862 
863   if (!TREE_VISITED (ssa_name))
864     if (verify_imm_links (stderr, ssa_name))
865       err = true;
866 
867   TREE_VISITED (ssa_name) = 1;
868 
869   if (gimple_nop_p (SSA_NAME_DEF_STMT (ssa_name))
870       && SSA_NAME_IS_DEFAULT_DEF (ssa_name))
871     ; /* Default definitions have empty statements.  Nothing to do.  */
872   else if (!def_bb)
873     {
874       error ("missing definition");
875       err = true;
876     }
877   else if (bb != def_bb
878 	   && !dominated_by_p (CDI_DOMINATORS, bb, def_bb))
879     {
880       error ("definition in block %i does not dominate use in block %i",
881 	     def_bb->index, bb->index);
882       err = true;
883     }
884   else if (bb == def_bb
885 	   && names_defined_in_bb != NULL
886 	   && !bitmap_bit_p (names_defined_in_bb, SSA_NAME_VERSION (ssa_name)))
887     {
888       error ("definition in block %i follows the use", def_bb->index);
889       err = true;
890     }
891 
892   if (check_abnormal
893       && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ssa_name))
894     {
895       error ("SSA_NAME_OCCURS_IN_ABNORMAL_PHI should be set");
896       err = true;
897     }
898 
899   /* Make sure the use is on an appropriate list by checking that the
900      previous element refers to the same SSA name.  */
901   if (use_p->prev == NULL)
902     {
903       error ("no immediate_use list");
904       err = true;
905     }
906   else
907     {
908       tree listvar;
909       if (use_p->prev->use == NULL)
910 	listvar = use_p->prev->loc.ssa_name;
911       else
912 	listvar = USE_FROM_PTR (use_p->prev);
913       if (listvar != ssa_name)
914         {
915 	  error ("wrong immediate use list");
916 	  err = true;
917 	}
918     }
919 
920   if (err)
921     {
922       fprintf (stderr, "for SSA_NAME: ");
923       print_generic_expr (stderr, ssa_name, TDF_VOPS);
924       fprintf (stderr, " in statement:\n");
925       print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
926     }
927 
928   return err;
929 }
930 
931 
932 /* Return true if any of the arguments for PHI node PHI at block BB is
933    malformed.
934 
935    DEFINITION_BLOCK is an array of basic blocks indexed by SSA_NAME
936       version numbers.  If DEFINITION_BLOCK[SSA_NAME_VERSION] is set,
937       it means that the block in that array slot contains the
938       definition of SSA_NAME.  */
939 
940 static bool
941 verify_phi_args (gphi *phi, basic_block bb, basic_block *definition_block)
942 {
943   edge e;
944   bool err = false;
945   size_t i, phi_num_args = gimple_phi_num_args (phi);
946 
947   if (EDGE_COUNT (bb->preds) != phi_num_args)
948     {
949       error ("incoming edge count does not match number of PHI arguments");
950       err = true;
951       goto error;
952     }
953 
954   for (i = 0; i < phi_num_args; i++)
955     {
956       use_operand_p op_p = gimple_phi_arg_imm_use_ptr (phi, i);
957       tree op = USE_FROM_PTR (op_p);
958 
959       e = EDGE_PRED (bb, i);
960 
961       if (op == NULL_TREE)
962 	{
963 	  error ("PHI argument is missing for edge %d->%d",
964 	         e->src->index,
965 		 e->dest->index);
966 	  err = true;
967 	  goto error;
968 	}
969 
970       if (TREE_CODE (op) != SSA_NAME && !is_gimple_min_invariant (op))
971 	{
972 	  error ("PHI argument is not SSA_NAME, or invariant");
973 	  err = true;
974 	}
975 
976       if (TREE_CODE (op) == SSA_NAME)
977 	{
978 	  err = verify_ssa_name (op, virtual_operand_p (gimple_phi_result (phi)));
979 	  err |= verify_use (e->src, definition_block[SSA_NAME_VERSION (op)],
980 			     op_p, phi, e->flags & EDGE_ABNORMAL, NULL);
981 	}
982 
983       if (TREE_CODE (op) == ADDR_EXPR)
984 	{
985 	  tree base = TREE_OPERAND (op, 0);
986 	  while (handled_component_p (base))
987 	    base = TREE_OPERAND (base, 0);
988 	  if ((VAR_P (base)
989 	       || TREE_CODE (base) == PARM_DECL
990 	       || TREE_CODE (base) == RESULT_DECL)
991 	      && !TREE_ADDRESSABLE (base))
992 	    {
993 	      error ("address taken, but ADDRESSABLE bit not set");
994 	      err = true;
995 	    }
996 	}
997 
998       if (e->dest != bb)
999 	{
1000 	  error ("wrong edge %d->%d for PHI argument",
1001 	         e->src->index, e->dest->index);
1002 	  err = true;
1003 	}
1004 
1005       if (err)
1006 	{
1007 	  fprintf (stderr, "PHI argument\n");
1008 	  print_generic_stmt (stderr, op, TDF_VOPS);
1009 	  goto error;
1010 	}
1011     }
1012 
1013 error:
1014   if (err)
1015     {
1016       fprintf (stderr, "for PHI node\n");
1017       print_gimple_stmt (stderr, phi, 0, TDF_VOPS|TDF_MEMSYMS);
1018     }
1019 
1020 
1021   return err;
1022 }
1023 
1024 
1025 /* Verify common invariants in the SSA web.
1026    TODO: verify the variable annotations.  */
1027 
1028 DEBUG_FUNCTION void
1029 verify_ssa (bool check_modified_stmt, bool check_ssa_operands)
1030 {
1031   basic_block bb;
1032   basic_block *definition_block = XCNEWVEC (basic_block, num_ssa_names);
1033   ssa_op_iter iter;
1034   tree op;
1035   enum dom_state orig_dom_state = dom_info_state (CDI_DOMINATORS);
1036   auto_bitmap names_defined_in_bb;
1037 
1038   gcc_assert (!need_ssa_update_p (cfun));
1039 
1040   timevar_push (TV_TREE_SSA_VERIFY);
1041 
1042     {
1043       /* Keep track of SSA names present in the IL.  */
1044       size_t i;
1045       tree name;
1046       hash_map <void *, tree> ssa_info;
1047 
1048       FOR_EACH_SSA_NAME (i, name, cfun)
1049 	{
1050 	  gimple *stmt;
1051 	  TREE_VISITED (name) = 0;
1052 
1053 	  verify_ssa_name (name, virtual_operand_p (name));
1054 
1055 	  stmt = SSA_NAME_DEF_STMT (name);
1056 	  if (!gimple_nop_p (stmt))
1057 	    {
1058 	      basic_block bb = gimple_bb (stmt);
1059 	      if (verify_def (bb, definition_block,
1060 			      name, stmt, virtual_operand_p (name)))
1061 		goto err;
1062 	    }
1063 
1064 	  void *info = NULL;
1065 	  if (POINTER_TYPE_P (TREE_TYPE (name)))
1066 	    info = SSA_NAME_PTR_INFO (name);
1067 	  else if (INTEGRAL_TYPE_P (TREE_TYPE (name)))
1068 	    info = SSA_NAME_RANGE_INFO (name);
1069 	  if (info)
1070 	    {
1071 	      bool existed;
1072 	      tree &val = ssa_info.get_or_insert (info, &existed);
1073 	      if (existed)
1074 		{
1075 		  error ("shared SSA name info");
1076 		  print_generic_expr (stderr, val);
1077 		  fprintf (stderr, " and ");
1078 		  print_generic_expr (stderr, name);
1079 		  fprintf (stderr, "\n");
1080 		  goto err;
1081 		}
1082 	      else
1083 		val = name;
1084 	    }
1085 	}
1086     }
1087 
1088   calculate_dominance_info (CDI_DOMINATORS);
1089 
1090   /* Now verify all the uses and make sure they agree with the definitions
1091      found in the previous pass.  */
1092   FOR_EACH_BB_FN (bb, cfun)
1093     {
1094       edge e;
1095       edge_iterator ei;
1096 
1097       /* Make sure that all edges have a clear 'aux' field.  */
1098       FOR_EACH_EDGE (e, ei, bb->preds)
1099 	{
1100 	  if (e->aux)
1101 	    {
1102 	      error ("AUX pointer initialized for edge %d->%d", e->src->index,
1103 		      e->dest->index);
1104 	      goto err;
1105 	    }
1106 	}
1107 
1108       /* Verify the arguments for every PHI node in the block.  */
1109       for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1110 	{
1111 	  gphi *phi = gsi.phi ();
1112 	  if (verify_phi_args (phi, bb, definition_block))
1113 	    goto err;
1114 
1115 	  bitmap_set_bit (names_defined_in_bb,
1116 			  SSA_NAME_VERSION (gimple_phi_result (phi)));
1117 	}
1118 
1119       /* Now verify all the uses and vuses in every statement of the block.  */
1120       for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1121 	   gsi_next (&gsi))
1122 	{
1123 	  gimple *stmt = gsi_stmt (gsi);
1124 	  use_operand_p use_p;
1125 
1126 	  if (check_modified_stmt && gimple_modified_p (stmt))
1127 	    {
1128 	      error ("stmt (%p) marked modified after optimization pass: ",
1129 		     (void *)stmt);
1130 	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1131 	      goto err;
1132 	    }
1133 
1134 	  if (check_ssa_operands && verify_ssa_operands (cfun, stmt))
1135 	    {
1136 	      print_gimple_stmt (stderr, stmt, 0, TDF_VOPS);
1137 	      goto err;
1138 	    }
1139 
1140 	  if (gimple_debug_bind_p (stmt)
1141 	      && !gimple_debug_bind_has_value_p (stmt))
1142 	    continue;
1143 
1144 	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE|SSA_OP_VUSE)
1145 	    {
1146 	      op = USE_FROM_PTR (use_p);
1147 	      if (verify_use (bb, definition_block[SSA_NAME_VERSION (op)],
1148 			      use_p, stmt, false, names_defined_in_bb))
1149 		goto err;
1150 	    }
1151 
1152 	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_DEFS)
1153 	    {
1154 	      if (SSA_NAME_DEF_STMT (op) != stmt)
1155 		{
1156 		  error ("SSA_NAME_DEF_STMT is wrong");
1157 		  fprintf (stderr, "Expected definition statement:\n");
1158 		  print_gimple_stmt (stderr, stmt, 4, TDF_VOPS);
1159 		  fprintf (stderr, "\nActual definition statement:\n");
1160 		  print_gimple_stmt (stderr, SSA_NAME_DEF_STMT (op),
1161 				     4, TDF_VOPS);
1162 		  goto err;
1163 		}
1164 	      bitmap_set_bit (names_defined_in_bb, SSA_NAME_VERSION (op));
1165 	    }
1166 	}
1167 
1168       bitmap_clear (names_defined_in_bb);
1169     }
1170 
1171   free (definition_block);
1172 
1173   if (gimple_vop (cfun)
1174       && ssa_default_def (cfun, gimple_vop (cfun)))
1175     {
1176       auto_sbitmap visited (last_basic_block_for_fn (cfun) + 1);
1177       bitmap_clear (visited);
1178       if (verify_vssa (ENTRY_BLOCK_PTR_FOR_FN (cfun),
1179 		       ssa_default_def (cfun, gimple_vop (cfun)), visited))
1180 	goto err;
1181     }
1182 
1183   /* Restore the dominance information to its prior known state, so
1184      that we do not perturb the compiler's subsequent behavior.  */
1185   if (orig_dom_state == DOM_NONE)
1186     free_dominance_info (CDI_DOMINATORS);
1187   else
1188     set_dom_info_availability (CDI_DOMINATORS, orig_dom_state);
1189 
1190   timevar_pop (TV_TREE_SSA_VERIFY);
1191   return;
1192 
1193 err:
1194   internal_error ("verify_ssa failed");
1195 }
1196 
1197 
1198 /* Initialize global DFA and SSA structures.  */
1199 
1200 void
1201 init_tree_ssa (struct function *fn)
1202 {
1203   fn->gimple_df = ggc_cleared_alloc<gimple_df> ();
1204   fn->gimple_df->default_defs = hash_table<ssa_name_hasher>::create_ggc (20);
1205   pt_solution_reset (&fn->gimple_df->escaped);
1206   init_ssanames (fn, 0);
1207 }
1208 
1209 /* Deallocate memory associated with SSA data structures for FNDECL.  */
1210 
1211 void
1212 delete_tree_ssa (struct function *fn)
1213 {
1214   fini_ssanames (fn);
1215 
1216   /* We no longer maintain the SSA operand cache at this point.  */
1217   if (ssa_operands_active (fn))
1218     fini_ssa_operands (fn);
1219 
1220   fn->gimple_df->default_defs->empty ();
1221   fn->gimple_df->default_defs = NULL;
1222   pt_solution_reset (&fn->gimple_df->escaped);
1223   if (fn->gimple_df->decls_to_pointers != NULL)
1224     delete fn->gimple_df->decls_to_pointers;
1225   fn->gimple_df->decls_to_pointers = NULL;
1226   fn->gimple_df = NULL;
1227 
1228   /* We no longer need the edge variable maps.  */
1229   redirect_edge_var_map_empty ();
1230 }
1231 
1232 /* Return true if EXPR is a useless type conversion, otherwise return
1233    false.  */
1234 
1235 bool
1236 tree_ssa_useless_type_conversion (tree expr)
1237 {
1238   /* If we have an assignment that merely uses a NOP_EXPR to change
1239      the top of the RHS to the type of the LHS and the type conversion
1240      is "safe", then strip away the type conversion so that we can
1241      enter LHS = RHS into the const_and_copies table.  */
1242   if (CONVERT_EXPR_P (expr)
1243       || TREE_CODE (expr) == VIEW_CONVERT_EXPR
1244       || TREE_CODE (expr) == NON_LVALUE_EXPR)
1245     return useless_type_conversion_p
1246       (TREE_TYPE (expr),
1247        TREE_TYPE (TREE_OPERAND (expr, 0)));
1248 
1249   return false;
1250 }
1251 
1252 /* Strip conversions from EXP according to
1253    tree_ssa_useless_type_conversion and return the resulting
1254    expression.  */
1255 
1256 tree
1257 tree_ssa_strip_useless_type_conversions (tree exp)
1258 {
1259   while (tree_ssa_useless_type_conversion (exp))
1260     exp = TREE_OPERAND (exp, 0);
1261   return exp;
1262 }
1263 
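/* A minimal sketch of how a caller might use the two helpers above,
   with EXPR standing for a hypothetical GENERIC expression built by a
   pass:

     if (tree_ssa_useless_type_conversion (expr))
       expr = TREE_OPERAND (expr, 0);

   or, to strip a whole chain of such conversions at once:

     expr = tree_ssa_strip_useless_type_conversions (expr);

   For example, a NOP_EXPR between two integer types with the same
   precision and signedness is stripped, while a narrowing conversion
   is kept because useless_type_conversion_p rejects it.  */
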
1264 /* Return true if T, an SSA_NAME, has an implicitly defined default value.  */
1265 
1266 bool
1267 ssa_defined_default_def_p (tree t)
1268 {
1269   tree var = SSA_NAME_VAR (t);
1270 
1271   if (!var)
1272     ;
1273   /* Parameters get their initial value from the function entry.  */
1274   else if (TREE_CODE (var) == PARM_DECL)
1275     return true;
1276   /* When returning by reference the return address is actually a hidden
1277      parameter.  */
1278   else if (TREE_CODE (var) == RESULT_DECL && DECL_BY_REFERENCE (var))
1279     return true;
1280   /* Hard register variables get their initial value from the ether.  */
1281   else if (VAR_P (var) && DECL_HARD_REGISTER (var))
1282     return true;
1283 
1284   return false;
1285 }
1286 
1287 
1288 /* Return true if T, an SSA_NAME, has an undefined value.  PARTIAL is what
1289    should be returned if the value is only partially undefined.  */
1290 
1291 bool
1292 ssa_undefined_value_p (tree t, bool partial)
1293 {
1294   gimple *def_stmt;
1295 
1296   if (ssa_defined_default_def_p (t))
1297     return false;
1298 
1299   /* The value is undefined iff its definition statement is empty.  */
1300   def_stmt = SSA_NAME_DEF_STMT (t);
1301   if (gimple_nop_p (def_stmt))
1302     return true;
1303 
1304   /* Check whether the complex value is only partially defined.  */
1305   if (partial && is_gimple_assign (def_stmt)
1306       && gimple_assign_rhs_code (def_stmt) == COMPLEX_EXPR)
1307     {
1308       tree rhs1, rhs2;
1309 
1310       rhs1 = gimple_assign_rhs1 (def_stmt);
1311       rhs2 = gimple_assign_rhs2 (def_stmt);
1312       return (TREE_CODE (rhs1) == SSA_NAME && ssa_undefined_value_p (rhs1))
1313 	     || (TREE_CODE (rhs2) == SSA_NAME && ssa_undefined_value_p (rhs2));
1314     }
1315   return false;
1316 }
1317 
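/* A hypothetical example of the PARTIAL flag above: for

       x_2(D)				(default definition of a plain local)
       _1 = COMPLEX_EXPR <x_2(D), 1.0>;

   ssa_undefined_value_p (_1, true) returns true because the real part
   is undefined, while ssa_undefined_value_p (_1, false) returns false
   since _1 does have a defining statement.  */
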
1318 
1319 /* Return TRUE iff STMT, a gimple statement, references an undefined
1320    SSA name.  */
1321 
1322 bool
1323 gimple_uses_undefined_value_p (gimple *stmt)
1324 {
1325   ssa_op_iter iter;
1326   tree op;
1327 
1328   FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
1329     if (ssa_undefined_value_p (op))
1330       return true;
1331 
1332   return false;
1333 }
1334 
1335 
1336 
1337 /* If necessary, rewrite the base of the reference tree *TP from
1338    a MEM_REF to a plain or converted symbol.  */
1339 
1340 static void
1341 maybe_rewrite_mem_ref_base (tree *tp, bitmap suitable_for_renaming)
1342 {
1343   tree sym;
1344 
1345   while (handled_component_p (*tp))
1346     tp = &TREE_OPERAND (*tp, 0);
1347   if (TREE_CODE (*tp) == MEM_REF
1348       && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
1349       && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
1350       && DECL_P (sym)
1351       && !TREE_ADDRESSABLE (sym)
1352       && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
1353       && is_gimple_reg_type (TREE_TYPE (*tp))
1354       && ! VOID_TYPE_P (TREE_TYPE (*tp)))
1355     {
1356       if (TREE_CODE (TREE_TYPE (sym)) == VECTOR_TYPE
1357 	  && useless_type_conversion_p (TREE_TYPE (*tp),
1358 					TREE_TYPE (TREE_TYPE (sym)))
1359 	  && multiple_of_p (sizetype, TREE_OPERAND (*tp, 1),
1360 			    TYPE_SIZE_UNIT (TREE_TYPE (*tp))))
1361 	{
1362 	  *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
1363 			TYPE_SIZE (TREE_TYPE (*tp)),
1364 			int_const_binop (MULT_EXPR,
1365 					 bitsize_int (BITS_PER_UNIT),
1366 					 TREE_OPERAND (*tp, 1)));
1367 	}
1368       else if (TREE_CODE (TREE_TYPE (sym)) == COMPLEX_TYPE
1369 	       && useless_type_conversion_p (TREE_TYPE (*tp),
1370 					     TREE_TYPE (TREE_TYPE (sym))))
1371 	{
1372 	  *tp = build1 (integer_zerop (TREE_OPERAND (*tp, 1))
1373 			? REALPART_EXPR : IMAGPART_EXPR,
1374 			TREE_TYPE (*tp), sym);
1375 	}
1376       else if (integer_zerop (TREE_OPERAND (*tp, 1))
1377 	       && DECL_SIZE (sym) == TYPE_SIZE (TREE_TYPE (*tp)))
1378 	{
1379 	  if (!useless_type_conversion_p (TREE_TYPE (*tp),
1380 					  TREE_TYPE (sym)))
1381 	    *tp = build1 (VIEW_CONVERT_EXPR,
1382 			  TREE_TYPE (*tp), sym);
1383 	  else
1384 	    *tp = sym;
1385 	}
1386       else if (DECL_SIZE (sym)
1387 	       && TREE_CODE (DECL_SIZE (sym)) == INTEGER_CST
1388 	       && (known_subrange_p
1389 		   (mem_ref_offset (*tp),
1390 		    wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (*tp))),
1391 		    0, wi::to_offset (DECL_SIZE_UNIT (sym))))
1392 	       && (! INTEGRAL_TYPE_P (TREE_TYPE (*tp))
1393 		   || (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp)))
1394 		       == TYPE_PRECISION (TREE_TYPE (*tp))))
1395 	       && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (*tp))),
1396 				  BITS_PER_UNIT) == 0)
1397 	{
1398 	  *tp = build3 (BIT_FIELD_REF, TREE_TYPE (*tp), sym,
1399 			TYPE_SIZE (TREE_TYPE (*tp)),
1400 			wide_int_to_tree (bitsizetype,
1401 					  mem_ref_offset (*tp)
1402 					  << LOG2_BITS_PER_UNIT));
1403 	}
1404     }
1405 }
1406 
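/* Hypothetical examples of the rewrites above, assuming SYM is a
   non-addressable local in SUITABLE_FOR_RENAMING:

     MEM[&sym, 0] of the component type of complex SYM
       -> REALPART_EXPR <sym>
     MEM[&sym, 4] of the element type of a 4 x 32-bit vector SYM
       -> BIT_FIELD_REF <sym, 32, 32>
     MEM[&sym, 0] of the same size as SYM
       -> sym, possibly wrapped in a VIEW_CONVERT_EXPR

   Exposing SYM as a bare symbol (or a component reference to it) is
   what later allows the SSA renamer to rewrite it into SSA form.  */
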
1407 /* For a tree REF return its base if it is the base of a MEM_REF
1408    that cannot be rewritten into SSA form.  Otherwise return NULL_TREE.  */
1409 
1410 static tree
1411 non_rewritable_mem_ref_base (tree ref)
1412 {
1413   tree base;
1414 
1415   /* A plain decl is never a non-rewritable MEM_REF base.  */
1416   if (DECL_P (ref))
1417     return NULL_TREE;
1418 
1419   if (! (base = CONST_CAST_TREE (strip_invariant_refs (ref))))
1420     {
1421       base = get_base_address (ref);
1422       if (DECL_P (base))
1423 	return base;
1424       return NULL_TREE;
1425     }
1426 
1427   /* But watch out for MEM_REFs we cannot lower to a
1428      VIEW_CONVERT_EXPR or a BIT_FIELD_REF.  */
1429   if (TREE_CODE (base) == MEM_REF
1430       && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
1431     {
1432       tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
1433       if (! DECL_P (decl))
1434 	return NULL_TREE;
1435       if (! is_gimple_reg_type (TREE_TYPE (base))
1436 	  || VOID_TYPE_P (TREE_TYPE (base))
1437 	  || TREE_THIS_VOLATILE (decl) != TREE_THIS_VOLATILE (base))
1438 	return decl;
1439       if ((TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE
1440 	   || TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE)
1441 	  && useless_type_conversion_p (TREE_TYPE (base),
1442 					TREE_TYPE (TREE_TYPE (decl)))
1443 	  && known_ge (mem_ref_offset (base), 0)
1444 	  && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
1445 		       mem_ref_offset (base))
1446 	  && multiple_of_p (sizetype, TREE_OPERAND (base, 1),
1447 			    TYPE_SIZE_UNIT (TREE_TYPE (base))))
1448 	return NULL_TREE;
1449       /* For same sizes and zero offset we can use a VIEW_CONVERT_EXPR.  */
1450       if (integer_zerop (TREE_OPERAND (base, 1))
1451 	  && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (base)))
1452 	return NULL_TREE;
1453       /* For integral typed extracts we can use a BIT_FIELD_REF.  */
1454       if (DECL_SIZE (decl)
1455 	  && TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST
1456 	  && (known_subrange_p
1457 	      (mem_ref_offset (base),
1458 	       wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (base))),
1459 	       0, wi::to_poly_offset (DECL_SIZE_UNIT (decl))))
1460 	  /* ???  We can't handle bitfield precision extracts without
1461 	     either using an alternate type for the BIT_FIELD_REF and
1462 	     then doing a conversion or possibly adjusting the offset
1463 	     according to endianness.  */
1464 	  && (! INTEGRAL_TYPE_P (TREE_TYPE (base))
1465 	      || (wi::to_offset (TYPE_SIZE (TREE_TYPE (base)))
1466 		  == TYPE_PRECISION (TREE_TYPE (base))))
1467 	  && wi::umod_trunc (wi::to_offset (TYPE_SIZE (TREE_TYPE (base))),
1468 			     BITS_PER_UNIT) == 0)
1469 	return NULL_TREE;
1470       return decl;
1471     }
1472 
1473   return NULL_TREE;
1474 }
1475 
1476 /* For an lvalue tree LHS return true if it cannot be rewritten into SSA form.
1477    Otherwise return false.  */
1478 
1479 static bool
1480 non_rewritable_lvalue_p (tree lhs)
1481 {
1482   /* A plain decl is always rewritable.  */
1483   if (DECL_P (lhs))
1484     return false;
1485 
1486   /* We can re-write REALPART_EXPR and IMAGPART_EXPR sets in
1487      a reasonably efficient manner... */
1488   if ((TREE_CODE (lhs) == REALPART_EXPR
1489        || TREE_CODE (lhs) == IMAGPART_EXPR)
1490       && DECL_P (TREE_OPERAND (lhs, 0)))
1491     return false;
1492 
1493   /* ???  The following could be relaxed allowing component
1494      references that do not change the access size.  */
1495   if (TREE_CODE (lhs) == MEM_REF
1496       && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR)
1497     {
1498       tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
1499 
1500       /* A decl that is wrapped inside a MEM_REF that covers
1501 	 it in full is also rewritable.  */
1502       if (integer_zerop (TREE_OPERAND (lhs, 1))
1503 	  && DECL_P (decl)
1504 	  && DECL_SIZE (decl) == TYPE_SIZE (TREE_TYPE (lhs))
1505 	  /* If the dynamic type of the decl has larger precision than
1506 	     the decl itself we can't use the decls type for SSA rewriting.  */
1507 	  && ((! INTEGRAL_TYPE_P (TREE_TYPE (decl))
1508 	       || compare_tree_int (DECL_SIZE (decl),
1509 				    TYPE_PRECISION (TREE_TYPE (decl))) == 0)
1510 	      || (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
1511 		  && (TYPE_PRECISION (TREE_TYPE (decl))
1512 		      >= TYPE_PRECISION (TREE_TYPE (lhs)))))
1513 	  /* Make sure we are not re-writing non-float copying into float
1514 	     copying as that can incur normalization.  */
1515 	  && (! FLOAT_TYPE_P (TREE_TYPE (decl))
1516 	      || types_compatible_p (TREE_TYPE (lhs), TREE_TYPE (decl)))
1517 	  && (TREE_THIS_VOLATILE (decl) == TREE_THIS_VOLATILE (lhs)))
1518 	return false;
1519 
1520       /* A vector-insert using a MEM_REF or ARRAY_REF is rewritable
1521 	 using a BIT_INSERT_EXPR.  */
1522       if (DECL_P (decl)
1523 	  && VECTOR_TYPE_P (TREE_TYPE (decl))
1524 	  && TYPE_MODE (TREE_TYPE (decl)) != BLKmode
1525 	  && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
1526 			      TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))), 0)
1527 	  && known_ge (mem_ref_offset (lhs), 0)
1528 	  && known_gt (wi::to_poly_offset (TYPE_SIZE_UNIT (TREE_TYPE (decl))),
1529 		       mem_ref_offset (lhs))
1530 	  && multiple_of_p (sizetype, TREE_OPERAND (lhs, 1),
1531 			    TYPE_SIZE_UNIT (TREE_TYPE (lhs))))
1532 	return false;
1533     }
1534 
1535   /* A vector-insert using a BIT_FIELD_REF is rewritable using
1536      BIT_INSERT_EXPR.  */
1537   if (TREE_CODE (lhs) == BIT_FIELD_REF
1538       && DECL_P (TREE_OPERAND (lhs, 0))
1539       && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
1540       && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
1541       && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
1542 			  TYPE_SIZE_UNIT
1543 			    (TREE_TYPE (TREE_TYPE (TREE_OPERAND (lhs, 0)))), 0)
1544       && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
1545 	  % tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs)))) == 0)
1546     return false;
1547 
1548   return true;
1549 }
1550 
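/* Hypothetical examples for the predicate above: a store to
   REALPART_EXPR <c> of a complex decl C, a full-size
   MEM[&d, 0] = ... store, or an element-aligned
   BIT_FIELD_REF <v, 32, 64> = ... store into a vector decl V are all
   rewritable (return false); a store to a misaligned or partial piece
   of a scalar decl is not (return true).  */
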
1551 /* When possible, clear the TREE_ADDRESSABLE bit or set the DECL_GIMPLE_REG_P
1552    bit and mark the variable VAR for conversion into SSA.  Record VAR in
1553    SUITABLE_FOR_RENAMING when updating stmts is required.  */
1554 
1555 static void
1556 maybe_optimize_var (tree var, bitmap addresses_taken, bitmap not_reg_needs,
1557 		    bitmap suitable_for_renaming)
1558 {
1559   /* Global Variables, result decls cannot be changed.  */
1560   if (is_global_var (var)
1561       || TREE_CODE (var) == RESULT_DECL
1562       || bitmap_bit_p (addresses_taken, DECL_UID (var)))
1563     return;
1564 
1565   if (TREE_ADDRESSABLE (var)
1566       /* Do not change TREE_ADDRESSABLE if we need to preserve var as
1567 	 a non-register.  Otherwise we are confused and forget to
1568 	 add virtual operands for it.  */
1569       && (!is_gimple_reg_type (TREE_TYPE (var))
1570 	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
1571 	  || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1572 	  || !bitmap_bit_p (not_reg_needs, DECL_UID (var))))
1573     {
1574       TREE_ADDRESSABLE (var) = 0;
1575       /* If we cleared TREE_ADDRESSABLE make sure DECL_GIMPLE_REG_P
1576          is unset if we cannot rewrite the var into SSA.  */
1577       if ((TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
1578 	   || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
1579 	  && bitmap_bit_p (not_reg_needs, DECL_UID (var)))
1580 	DECL_GIMPLE_REG_P (var) = 0;
1581       if (is_gimple_reg (var))
1582 	bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1583       if (dump_file)
1584 	{
1585 	  fprintf (dump_file, "No longer having address taken: ");
1586 	  print_generic_expr (dump_file, var);
1587 	  fprintf (dump_file, "\n");
1588 	}
1589     }
1590 
1591   if (!DECL_GIMPLE_REG_P (var)
1592       && !bitmap_bit_p (not_reg_needs, DECL_UID (var))
1593       && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
1594 	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
1595       && !TREE_THIS_VOLATILE (var)
1596       && (!VAR_P (var) || !DECL_HARD_REGISTER (var)))
1597     {
1598       DECL_GIMPLE_REG_P (var) = 1;
1599       bitmap_set_bit (suitable_for_renaming, DECL_UID (var));
1600       if (dump_file)
1601 	{
1602 	  fprintf (dump_file, "Now a gimple register: ");
1603 	  print_generic_expr (dump_file, var);
1604 	  fprintf (dump_file, "\n");
1605 	}
1606     }
1607 }
1608 
1609 /* Return true when STMT is an ASAN_MARK call whose second argument is the
1610    address of a local variable.  */
1611 
1612 static bool
1613 is_asan_mark_p (gimple *stmt)
1614 {
1615   if (!gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1616     return false;
1617 
1618   tree addr = get_base_address (gimple_call_arg (stmt, 1));
1619   if (TREE_CODE (addr) == ADDR_EXPR
1620       && VAR_P (TREE_OPERAND (addr, 0)))
1621     {
1622       tree var = TREE_OPERAND (addr, 0);
1623       if (lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1624 			    DECL_ATTRIBUTES (var)))
1625 	return false;
1626 
1627       unsigned addressable = TREE_ADDRESSABLE (var);
1628       TREE_ADDRESSABLE (var) = 0;
1629       bool r = is_gimple_reg (var);
1630       TREE_ADDRESSABLE (var) = addressable;
1631       return r;
1632     }
1633 
1634   return false;
1635 }
1636 
1637 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.  */
1638 
1639 void
1640 execute_update_addresses_taken (void)
1641 {
1642   basic_block bb;
1643   auto_bitmap addresses_taken;
1644   auto_bitmap not_reg_needs;
1645   auto_bitmap suitable_for_renaming;
1646   tree var;
1647   unsigned i;
1648 
1649   timevar_push (TV_ADDRESS_TAKEN);
1650 
1651   /* Collect into ADDRESSES_TAKEN all variables whose address is taken within
1652      the function body.  */
1653   FOR_EACH_BB_FN (bb, cfun)
1654     {
1655       for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
1656 	   gsi_next (&gsi))
1657 	{
1658 	  gimple *stmt = gsi_stmt (gsi);
1659 	  enum gimple_code code = gimple_code (stmt);
1660 	  tree decl;
1661 
1662 	  if (code == GIMPLE_CALL)
1663 	    {
1664 	      if (optimize_atomic_compare_exchange_p (stmt))
1665 		{
1666 		  /* For __atomic_compare_exchange_N, if the second argument
1667 		     is &var, don't mark var addressable; if it becomes
1668 		     non-addressable, we'll rewrite the call into an internal
1669 		     ATOMIC_COMPARE_EXCHANGE call.  */
1670 		  tree arg = gimple_call_arg (stmt, 1);
1671 		  gimple_call_set_arg (stmt, 1, null_pointer_node);
1672 		  gimple_ior_addresses_taken (addresses_taken, stmt);
1673 		  gimple_call_set_arg (stmt, 1, arg);
1674 		}
1675 	      else if (is_asan_mark_p (stmt)
1676 		       || gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER))
1677 		;
1678 	      else
1679 		gimple_ior_addresses_taken (addresses_taken, stmt);
1680 	    }
1681 	  else
1682 	    /* Note all addresses taken by the stmt.  */
1683 	    gimple_ior_addresses_taken (addresses_taken, stmt);
1684 
1685 	  /* If we have a call or an assignment, see if the lhs contains
1686 	     a local decl that must not become a gimple register.  */
1687 	  if (code == GIMPLE_ASSIGN || code == GIMPLE_CALL)
1688 	    {
1689               tree lhs = gimple_get_lhs (stmt);
1690               if (lhs
1691 		  && TREE_CODE (lhs) != SSA_NAME
1692 		  && ((code == GIMPLE_CALL && ! DECL_P (lhs))
1693 		      || non_rewritable_lvalue_p (lhs)))
1694 		{
1695 		  decl = get_base_address (lhs);
1696 		  if (DECL_P (decl))
1697 		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1698                 }
1699 	    }
1700 
1701 	  if (gimple_assign_single_p (stmt))
1702 	    {
1703 	      tree rhs = gimple_assign_rhs1 (stmt);
1704 	      if ((decl = non_rewritable_mem_ref_base (rhs)))
1705 		bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1706 	    }
1707 
1708 	  else if (code == GIMPLE_CALL)
1709 	    {
1710 	      for (i = 0; i < gimple_call_num_args (stmt); ++i)
1711 		{
1712 		  tree arg = gimple_call_arg (stmt, i);
1713 		  if ((decl = non_rewritable_mem_ref_base (arg)))
1714 		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1715 		}
1716 	    }
1717 
1718 	  else if (code == GIMPLE_ASM)
1719 	    {
1720 	      gasm *asm_stmt = as_a <gasm *> (stmt);
1721 	      for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
1722 		{
1723 		  tree link = gimple_asm_output_op (asm_stmt, i);
1724 		  tree lhs = TREE_VALUE (link);
1725 		  if (TREE_CODE (lhs) != SSA_NAME)
1726 		    {
1727 		      decl = get_base_address (lhs);
1728 		      if (DECL_P (decl)
1729 			  && (non_rewritable_lvalue_p (lhs)
1730 			      /* We cannot move required conversions from
1731 				 the lhs to the rhs in asm statements, so
1732 				 require we do not need any.  */
1733 			      || !useless_type_conversion_p
1734 			            (TREE_TYPE (lhs), TREE_TYPE (decl))))
1735 			bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1736 		    }
1737 		}
1738 	      for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
1739 		{
1740 		  tree link = gimple_asm_input_op (asm_stmt, i);
1741 		  if ((decl = non_rewritable_mem_ref_base (TREE_VALUE (link))))
1742 		    bitmap_set_bit (not_reg_needs, DECL_UID (decl));
1743 		}
1744 	    }
1745 	}
1746 
1747       for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
1748 	   gsi_next (&gsi))
1749 	{
1750 	  size_t i;
1751 	  gphi *phi = gsi.phi ();
1752 
1753 	  for (i = 0; i < gimple_phi_num_args (phi); i++)
1754 	    {
1755 	      tree op = PHI_ARG_DEF (phi, i), var;
1756 	      if (TREE_CODE (op) == ADDR_EXPR
1757 		  && (var = get_base_address (TREE_OPERAND (op, 0))) != NULL
1758 		  && DECL_P (var))
1759 		bitmap_set_bit (addresses_taken, DECL_UID (var));
1760 	    }
1761 	}
1762     }
1763 
1764   /* We cannot iterate over all referenced vars because that set can
1765      contain unused vars from BLOCK trees, which would cause code
1766      generation differences between -g and -g0.  */
1767   for (var = DECL_ARGUMENTS (cfun->decl); var; var = DECL_CHAIN (var))
1768     maybe_optimize_var (var, addresses_taken, not_reg_needs,
1769 			suitable_for_renaming);
1770 
1771   FOR_EACH_VEC_SAFE_ELT (cfun->local_decls, i, var)
1772     maybe_optimize_var (var, addresses_taken, not_reg_needs,
1773 			suitable_for_renaming);
1774 
1775   /* Operand caches need to be recomputed for operands referencing the updated
1776      variables and operands need to be rewritten to expose bare symbols.  */
1777   if (!bitmap_empty_p (suitable_for_renaming))
1778     {
1779       FOR_EACH_BB_FN (bb, cfun)
1780 	for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
1781 	  {
1782 	    gimple *stmt = gsi_stmt (gsi);
1783 
1784 	    /* Rewrite memory references of symbols we want to
1785 	       rewrite into SSA form.  */
1786 	    if (gimple_assign_single_p (stmt))
1787 	      {
1788 		tree lhs = gimple_assign_lhs (stmt);
1789 		tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
1790 		tree sym;
1791 
1792 		/* Rewrite LHS IMAG/REALPART_EXPR similar to
1793 		   gimplify_modify_expr_complex_part.  */
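		/* E.g. for a complex variable 'c' that becomes a register,
		     IMAGPART_EXPR <c> = x_1;
		   is turned into
		     tmp_2 = REALPART_EXPR <c>;
		     c = COMPLEX_EXPR <tmp_2, x_1>;
		   so the whole of 'c' is written at once.  */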
1794 		if ((TREE_CODE (lhs) == IMAGPART_EXPR
1795 		     || TREE_CODE (lhs) == REALPART_EXPR)
1796 		    && DECL_P (TREE_OPERAND (lhs, 0))
1797 		    && bitmap_bit_p (suitable_for_renaming,
1798 				     DECL_UID (TREE_OPERAND (lhs, 0))))
1799 		  {
1800 		    tree other = make_ssa_name (TREE_TYPE (lhs));
1801 		    tree lrhs = build1 (TREE_CODE (lhs) == IMAGPART_EXPR
1802 					? REALPART_EXPR : IMAGPART_EXPR,
1803 					TREE_TYPE (other),
1804 					TREE_OPERAND (lhs, 0));
1805 		    gimple *load = gimple_build_assign (other, lrhs);
1806 		    location_t loc = gimple_location (stmt);
1807 		    gimple_set_location (load, loc);
1808 		    gimple_set_vuse (load, gimple_vuse (stmt));
1809 		    gsi_insert_before (&gsi, load, GSI_SAME_STMT);
1810 		    gimple_assign_set_lhs (stmt, TREE_OPERAND (lhs, 0));
1811 		    gimple_assign_set_rhs_with_ops
1812 		      (&gsi, COMPLEX_EXPR,
1813 		       TREE_CODE (lhs) == IMAGPART_EXPR
1814 		       ? other : gimple_assign_rhs1 (stmt),
1815 		       TREE_CODE (lhs) == IMAGPART_EXPR
1816 		       ? gimple_assign_rhs1 (stmt) : other, NULL_TREE);
1817 		    stmt = gsi_stmt (gsi);
1818 		    unlink_stmt_vdef (stmt);
1819 		    update_stmt (stmt);
1820 		    continue;
1821 		  }
1822 
1823 		/* Rewrite a vector insert via a BIT_FIELD_REF on the LHS
1824 		   into a BIT_INSERT_EXPR.  */
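		/* E.g. for a vector 'v' of four 32-bit elements this turns
		     BIT_FIELD_REF <v, 32, 64> = x_1;
		   into
		     v = BIT_INSERT_EXPR <v, x_1, 64>;
		   (with a VIEW_CONVERT_EXPR of x_1 inserted first if the
		   element type does not match).  */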
1825 		if (TREE_CODE (lhs) == BIT_FIELD_REF
1826 		    && DECL_P (TREE_OPERAND (lhs, 0))
1827 		    && bitmap_bit_p (suitable_for_renaming,
1828 				     DECL_UID (TREE_OPERAND (lhs, 0)))
1829 		    && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (lhs, 0)))
1830 		    && TYPE_MODE (TREE_TYPE (TREE_OPERAND (lhs, 0))) != BLKmode
1831 		    && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
1832 					TYPE_SIZE_UNIT (TREE_TYPE
1833 					  (TREE_TYPE (TREE_OPERAND (lhs, 0)))),
1834 					0)
1835 		    && (tree_to_uhwi (TREE_OPERAND (lhs, 2))
1836 			% tree_to_uhwi (TYPE_SIZE (TREE_TYPE (lhs))) == 0))
1837 		  {
1838 		    tree var = TREE_OPERAND (lhs, 0);
1839 		    tree val = gimple_assign_rhs1 (stmt);
1840 		    if (! types_compatible_p (TREE_TYPE (TREE_TYPE (var)),
1841 					      TREE_TYPE (val)))
1842 		      {
1843 			tree tem = make_ssa_name (TREE_TYPE (TREE_TYPE (var)));
1844 			gimple *pun
1845 			  = gimple_build_assign (tem,
1846 						 build1 (VIEW_CONVERT_EXPR,
1847 							 TREE_TYPE (tem), val));
1848 			gsi_insert_before (&gsi, pun, GSI_SAME_STMT);
1849 			val = tem;
1850 		      }
1851 		    tree bitpos = TREE_OPERAND (lhs, 2);
1852 		    gimple_assign_set_lhs (stmt, var);
1853 		    gimple_assign_set_rhs_with_ops
1854 		      (&gsi, BIT_INSERT_EXPR, var, val, bitpos);
1855 		    stmt = gsi_stmt (gsi);
1856 		    unlink_stmt_vdef (stmt);
1857 		    update_stmt (stmt);
1858 		    continue;
1859 		  }
1860 
1861 		/* Rewrite a vector insert using a MEM_REF on the LHS
1862 		   into a BIT_INSERT_EXPR.  */
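		/* E.g. for a vector 'v' of four 32-bit elements this turns
		     MEM[(int *)&v + 8B] = x_1;
		   into
		     v = BIT_INSERT_EXPR <v, x_1, 64>;
		   with the bit position computed from the byte offset.  */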
1863 		if (TREE_CODE (lhs) == MEM_REF
1864 		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1865 		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1866 		    && DECL_P (sym)
1867 		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym))
1868 		    && VECTOR_TYPE_P (TREE_TYPE (sym))
1869 		    && TYPE_MODE (TREE_TYPE (sym)) != BLKmode
1870 		    && operand_equal_p (TYPE_SIZE_UNIT (TREE_TYPE (lhs)),
1871 					TYPE_SIZE_UNIT
1872 					  (TREE_TYPE (TREE_TYPE (sym))), 0)
1873 		    && tree_fits_uhwi_p (TREE_OPERAND (lhs, 1))
1874 		    && tree_int_cst_lt (TREE_OPERAND (lhs, 1),
1875 					TYPE_SIZE_UNIT (TREE_TYPE (sym)))
1876 		    && (tree_to_uhwi (TREE_OPERAND (lhs, 1))
1877 			% tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (lhs)))) == 0)
1878 		  {
1879 		    tree val = gimple_assign_rhs1 (stmt);
1880 		    if (! types_compatible_p (TREE_TYPE (val),
1881 					      TREE_TYPE (TREE_TYPE (sym))))
1882 		      {
1883 			tree tem = make_ssa_name (TREE_TYPE (TREE_TYPE (sym)));
1884 			gimple *pun
1885 			  = gimple_build_assign (tem,
1886 						 build1 (VIEW_CONVERT_EXPR,
1887 							 TREE_TYPE (tem), val));
1888 			gsi_insert_before (&gsi, pun, GSI_SAME_STMT);
1889 			val = tem;
1890 		      }
1891 		    tree bitpos
1892 		      = wide_int_to_tree (bitsizetype,
1893 					  mem_ref_offset (lhs) * BITS_PER_UNIT);
1894 		    gimple_assign_set_lhs (stmt, sym);
1895 		    gimple_assign_set_rhs_with_ops
1896 		      (&gsi, BIT_INSERT_EXPR, sym, val, bitpos);
1897 		    stmt = gsi_stmt (gsi);
1898 		    unlink_stmt_vdef (stmt);
1899 		    update_stmt (stmt);
1900 		    continue;
1901 		  }
1902 
1903 		/* We shouldn't have any fancy wrapping of
1904 		   component-refs on the LHS, but look through
1905 		   VIEW_CONVERT_EXPRs as that is easy.  */
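		/* E.g. a store MEM[(int *)&f] = x_1 to a float variable 'f'
		   that becomes a register is rewritten below into
		     f = VIEW_CONVERT_EXPR<float>(x_1);
		   while clobbers are rebuilt with the type of the new lhs.  */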
1906 		while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
1907 		  lhs = TREE_OPERAND (lhs, 0);
1908 		if (TREE_CODE (lhs) == MEM_REF
1909 		    && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
1910 		    && integer_zerop (TREE_OPERAND (lhs, 1))
1911 		    && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
1912 		    && DECL_P (sym)
1913 		    && !TREE_ADDRESSABLE (sym)
1914 		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (sym)))
1915 		  lhs = sym;
1916 		else
1917 		  lhs = gimple_assign_lhs (stmt);
1918 
1919 		/* Rewrite the RHS and make sure the resulting assignment
1920 		   is validly typed.  */
1921 		maybe_rewrite_mem_ref_base (rhsp, suitable_for_renaming);
1922 		rhs = gimple_assign_rhs1 (stmt);
1923 		if (gimple_assign_lhs (stmt) != lhs
1924 		    && !useless_type_conversion_p (TREE_TYPE (lhs),
1925 						   TREE_TYPE (rhs)))
1926 		  {
1927 		    if (gimple_clobber_p (stmt))
1928 		      {
1929 			rhs = build_constructor (TREE_TYPE (lhs), NULL);
1930 			TREE_THIS_VOLATILE (rhs) = 1;
1931 		      }
1932 		    else
1933 		      rhs = fold_build1 (VIEW_CONVERT_EXPR,
1934 					 TREE_TYPE (lhs), rhs);
1935 		  }
1936 		if (gimple_assign_lhs (stmt) != lhs)
1937 		  gimple_assign_set_lhs (stmt, lhs);
1938 
1939 		if (gimple_assign_rhs1 (stmt) != rhs)
1940 		  {
1941 		    gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
1942 		    gimple_assign_set_rhs_from_tree (&gsi, rhs);
1943 		  }
1944 	      }
1945 
1946 	    else if (gimple_code (stmt) == GIMPLE_CALL)
1947 	      {
1948 		unsigned i;
1949 		if (optimize_atomic_compare_exchange_p (stmt))
1950 		  {
1951 		    tree expected = gimple_call_arg (stmt, 1);
1952 		    if (bitmap_bit_p (suitable_for_renaming,
1953 				      DECL_UID (TREE_OPERAND (expected, 0))))
1954 		      {
1955 			fold_builtin_atomic_compare_exchange (&gsi);
1956 			continue;
1957 		      }
1958 		  }
1959 		else if (is_asan_mark_p (stmt))
1960 		  {
1961 		    tree var = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
1962 		    if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
1963 		      {
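			/* Once the variable is a register, ASAN_MARK (POISON,
			   &var, ...) becomes var = ASAN_POISON () and
			   ASAN_MARK (UNPOISON, &var, ...) becomes a clobber
			   of the variable.  */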
1964 			unlink_stmt_vdef (stmt);
1965 			if (asan_mark_p (stmt, ASAN_MARK_POISON))
1966 			  {
1967 			    gcall *call
1968 			      = gimple_build_call_internal (IFN_ASAN_POISON, 0);
1969 			    gimple_call_set_lhs (call, var);
1970 			    gsi_replace (&gsi, call, GSI_SAME_STMT);
1971 			  }
1972 			else
1973 			  {
1974 			    /* In ASAN_MARK (UNPOISON, &b, ...) the variable
1975 			       is uninitialized.  Avoid dependencies on
1976 			       its previous, out-of-scope value.  */
1977 			    tree clobber
1978 			      = build_constructor (TREE_TYPE (var), NULL);
1979 			    TREE_THIS_VOLATILE (clobber) = 1;
1980 			    gimple *g = gimple_build_assign (var, clobber);
1981 			    gsi_replace (&gsi, g, GSI_SAME_STMT);
1982 			  }
1983 			continue;
1984 		      }
1985 		  }
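		/* For IFN_GOMP_SIMT_ENTER, arguments past the first are
		   addresses of local variables (see the assert below); those
		   whose variable is rewritten into SSA form are replaced by
		   null pointers, matching the address-taken scan above.  */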
1986 		else if (gimple_call_internal_p (stmt, IFN_GOMP_SIMT_ENTER))
1987 		  for (i = 1; i < gimple_call_num_args (stmt); i++)
1988 		    {
1989 		      tree *argp = gimple_call_arg_ptr (stmt, i);
1990 		      if (*argp == null_pointer_node)
1991 			continue;
1992 		      gcc_assert (TREE_CODE (*argp) == ADDR_EXPR
1993 				  && VAR_P (TREE_OPERAND (*argp, 0)));
1994 		      tree var = TREE_OPERAND (*argp, 0);
1995 		      if (bitmap_bit_p (suitable_for_renaming, DECL_UID (var)))
1996 			*argp = null_pointer_node;
1997 		    }
1998 		for (i = 0; i < gimple_call_num_args (stmt); ++i)
1999 		  {
2000 		    tree *argp = gimple_call_arg_ptr (stmt, i);
2001 		    maybe_rewrite_mem_ref_base (argp, suitable_for_renaming);
2002 		  }
2003 	      }
2004 
2005 	    else if (gimple_code (stmt) == GIMPLE_ASM)
2006 	      {
2007 		gasm *asm_stmt = as_a <gasm *> (stmt);
2008 		unsigned i;
2009 		for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
2010 		  {
2011 		    tree link = gimple_asm_output_op (asm_stmt, i);
2012 		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
2013 						suitable_for_renaming);
2014 		  }
2015 		for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
2016 		  {
2017 		    tree link = gimple_asm_input_op (asm_stmt, i);
2018 		    maybe_rewrite_mem_ref_base (&TREE_VALUE (link),
2019 						suitable_for_renaming);
2020 		  }
2021 	      }
2022 
2023 	    else if (gimple_debug_bind_p (stmt)
2024 		     && gimple_debug_bind_has_value_p (stmt))
2025 	      {
2026 		tree *valuep = gimple_debug_bind_get_value_ptr (stmt);
2027 		tree decl;
2028 		maybe_rewrite_mem_ref_base (valuep, suitable_for_renaming);
2029 		decl = non_rewritable_mem_ref_base (*valuep);
2030 		if (decl
2031 		    && bitmap_bit_p (suitable_for_renaming, DECL_UID (decl)))
2032 		  gimple_debug_bind_reset_value (stmt);
2033 	      }
2034 
2035 	    if (gimple_references_memory_p (stmt)
2036 		|| is_gimple_debug (stmt))
2037 	      update_stmt (stmt);
2038 
2039 	    gsi_next (&gsi);
2040 	  }
2041 
2042       /* Update SSA form here; we are also called outside of a pass.  */
2043       if (number_of_loops (cfun) > 1
2044 	  && loops_state_satisfies_p (LOOP_CLOSED_SSA))
2045 	rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
2046       else
2047 	update_ssa (TODO_update_ssa);
2048     }
2049 
2050   timevar_pop (TV_ADDRESS_TAKEN);
2051 }
2052 
2053 namespace {
2054 
2055 const pass_data pass_data_update_address_taken =
2056 {
2057   GIMPLE_PASS, /* type */
2058   "addressables", /* name */
2059   OPTGROUP_NONE, /* optinfo_flags */
2060   TV_ADDRESS_TAKEN, /* tv_id */
2061   PROP_ssa, /* properties_required */
2062   0, /* properties_provided */
2063   0, /* properties_destroyed */
2064   0, /* todo_flags_start */
2065   TODO_update_address_taken, /* todo_flags_finish */
2066 };
2067 
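/* The pass has no execute method; the work is done by the pass manager
   handling TODO_update_address_taken in todo_flags_finish, which invokes
   execute_update_addresses_taken above.  */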
2068 class pass_update_address_taken : public gimple_opt_pass
2069 {
2070 public:
2071   pass_update_address_taken (gcc::context *ctxt)
2072     : gimple_opt_pass (pass_data_update_address_taken, ctxt)
2073   {}
2074 
2075   /* opt_pass methods: */
2076 
2077 }; // class pass_update_address_taken
2078 
2079 } // anon namespace
2080 
2081 gimple_opt_pass *
2082 make_pass_update_address_taken (gcc::context *ctxt)
2083 {
2084   return new pass_update_address_taken (ctxt);
2085 }
2086