xref: /netbsd-src/external/gpl3/gcc/dist/gcc/tree-ssa-operands.cc (revision b1e838363e3c6fc78a55519254d99869742dd33c)
1 /* SSA operands management for trees.
2    Copyright (C) 2003-2022 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
10 
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14 GNU General Public License for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "gimple.h"
26 #include "timevar.h"
27 #include "ssa.h"
28 #include "gimple-pretty-print.h"
29 #include "diagnostic-core.h"
30 #include "stmt.h"
31 #include "print-tree.h"
32 #include "dumpfile.h"
33 
34 
35 /* This file contains the code required to manage the operands cache of the
36    SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
37    annotation.  This cache contains operands that will be of interest to
38    optimizers and other passes wishing to manipulate the IL.
39 
   The operand types are broken up into REAL and VIRTUAL operands.  The real
41    operands are represented as pointers into the stmt's operand tree.  Thus
42    any manipulation of the real operands will be reflected in the actual tree.
43    Virtual operands are represented solely in the cache, although the base
44    variable for the SSA_NAME may, or may not occur in the stmt's tree.
45    Manipulation of the virtual operands will not be reflected in the stmt tree.
46 
47    The routines in this file are concerned with creating this operand cache
48    from a stmt tree.
49 
   The operand tree is then parsed by the various get_* routines which look
51    through the stmt tree for the occurrence of operands which may be of
52    interest, and calls are made to the append_* routines whenever one is
53    found.  There are 4 of these routines, each representing one of the
54    4 types of operands. Defs, Uses, Virtual Uses, and Virtual May Defs.
55 
56    The append_* routines check for duplication, and simply keep a list of
57    unique objects for each operand type in the build_* extendable vectors.
58 
59    Once the stmt tree is completely parsed, the finalize_ssa_operands()
60    routine is called, which proceeds to perform the finalization routine
61    on each of the 4 operand vectors which have been built up.
62 
63    If the stmt had a previous operand cache, the finalization routines
64    attempt to match up the new operands with the old ones.  If it's a perfect
65    match, the old vector is simply reused.  If it isn't a perfect match, then
66    a new vector is created and the new operands are placed there.  For
67    virtual operands, if the previous cache had SSA_NAME version of a
68    variable, and that same variable occurs in the same operands cache, then
69    the new cache vector will also get the same SSA_NAME.
70 
71    i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
72    operand vector for VUSE, then the new vector will also be modified
73    such that it contains 'a_5' rather than 'a'.  */
74 
75 
76 /* Flags to describe operand properties in helpers.  */
77 
78 /* By default, operands are loaded.  */
79 #define opf_use		0
80 
81 /* Operand is the target of an assignment expression or a
82    call-clobbered variable.  */
83 #define opf_def 	(1 << 0)
84 
85 /* No virtual operands should be created in the expression.  This is used
86    when traversing ADDR_EXPR nodes which have different semantics than
87    other expressions.  Inside an ADDR_EXPR node, the only operands that we
88    need to consider are indices into arrays.  For instance, &a.b[i] should
89    generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
90    VUSE for 'b'.  */
91 #define opf_no_vops 	(1 << 1)
92 
93 /* Operand is in a place where address-taken does not imply addressable.  */
94 #define opf_non_addressable (1 << 3)
95 
96 /* Operand is in a place where opf_non_addressable does not apply.  */
97 #define opf_not_non_addressable (1 << 4)
98 
99 /* Operand is having its address taken.  */
100 #define opf_address_taken (1 << 5)
101 
102 /* Class containing temporary per-stmt state.  */
103 
class operands_scanner
{
  public:
    /* Prepare to scan STATEMENT, which belongs to function FUN.  The
       build_* accumulators start out empty; build_uses is default
       constructed empty.  */
    operands_scanner (struct function *fun, gimple *statement)
      {
	build_vuse = NULL_TREE;
	build_vdef = NULL_TREE;
	fn = fun;
	stmt = statement;
      }

    /* Create an operands cache for STMT.  */
    void build_ssa_operands ();

    /* Verifies SSA statement operands.  */
    DEBUG_FUNCTION bool verify_ssa_operands ();

  private:
    /* Disable copy and assign of this class, as it may have problems with
       build_uses vec.  */
    DISABLE_COPY_AND_ASSIGN (operands_scanner);

    /* Array for building all the use operands.  Each entry points at the
       slot in the stmt's tree that holds the use.  */
    auto_vec<tree *, 16> build_uses;

    /* The built VDEF operand (NULL_TREE while none has been found).  */
    tree build_vdef;

    /* The built VUSE operand (NULL_TREE while none has been found).  */
    tree build_vuse;

    /* Function which STMT belongs to.  */
    struct function *fn;

    /* Statement to work on.  */
    gimple *stmt;

    /* Takes elements from build_uses and turns them into use operands of STMT.  */
    void finalize_ssa_uses ();

    /* Clear the in_list bits and empty the build array for VDEFs and
       VUSEs.  */
    void cleanup_build_arrays ();

    /* Finalize all the build vectors, fill the new ones into INFO.  */
    void finalize_ssa_stmt_operands ();

    /* Start the process of building up operands vectors in INFO.  */
    void start_ssa_stmt_operands ();

    /* Add USE_P to the list of pointers to operands.  */
    void append_use (tree *use_p);

    /* Add VAR to the set of variables that require a VDEF operator.  */
    void append_vdef (tree var);

    /* Add VAR to the set of variables that require a VUSE operator.  */
    void append_vuse (tree var);

    /* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */
    void add_virtual_operand (int flags);


    /* Add *VAR_P to the appropriate operand array for statement STMT.
       FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
       it will be added to the statement's real operands, otherwise it is
       added to virtual operands.  */
    void add_stmt_operand (tree *var_p, int flags);

    /* A subroutine of get_expr_operands to handle MEM_REF.

       STMT is the statement being processed, EXPR is the MEM_REF
	  that got us here.

       FLAGS is as in get_expr_operands.  */
    void get_mem_ref_operands (tree expr, int flags);

    /* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */
    void get_tmr_operands (tree expr, int flags);


    /* If STMT is a call that may clobber globals and other symbols that
       escape, add them to the VDEF/VUSE lists for it.  */
    void maybe_add_call_vops (gcall *stmt);

    /* Scan operands in the ASM_EXPR stmt referred to in INFO.  */
    void get_asm_stmt_operands (gasm *stmt);


    /* Recursively scan the expression pointed to by EXPR_P in statement
       STMT.  FLAGS is one of the OPF_* constants modifying how to
       interpret the operands found.  */
    void get_expr_operands (tree *expr_p, int flags);

    /* Parse STMT looking for operands.  When finished, the various
       build_* operand vectors will have potential operands in them.  */
    void parse_ssa_operands ();


    /* Takes elements from build_defs and turns them into def operands of STMT.
       TODO -- Make build_defs vec of tree *.  */
    void finalize_ssa_defs ();
};
207 
208 /* Accessor to tree-ssa-operands.cc caches.  */
209 static inline struct ssa_operands *
gimple_ssa_operands(const struct function * fun)210 gimple_ssa_operands (const struct function *fun)
211 {
212   return &fun->gimple_df->ssa_operands;
213 }
214 
215 
216 /*  Return true if the SSA operands cache is active.  */
217 
218 bool
ssa_operands_active(struct function * fun)219 ssa_operands_active (struct function *fun)
220 {
221   if (fun == NULL)
222     return false;
223 
224   return fun->gimple_df && gimple_ssa_operands (fun)->ops_active;
225 }
226 
227 
228 /* Create the VOP variable, an artificial global variable to act as a
229    representative of all of the virtual operands FUD chain.  */
230 
231 static void
create_vop_var(struct function * fn)232 create_vop_var (struct function *fn)
233 {
234   tree global_var;
235 
236   gcc_assert (fn->gimple_df->vop == NULL_TREE);
237 
238   global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
239 			   get_identifier (".MEM"),
240 			   void_type_node);
241   DECL_ARTIFICIAL (global_var) = 1;
242   DECL_IGNORED_P (global_var) = 1;
243   TREE_READONLY (global_var) = 0;
244   DECL_EXTERNAL (global_var) = 1;
245   TREE_STATIC (global_var) = 1;
246   TREE_USED (global_var) = 1;
247   DECL_CONTEXT (global_var) = NULL_TREE;
248   TREE_THIS_VOLATILE (global_var) = 0;
249   TREE_ADDRESSABLE (global_var) = 0;
250   VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1;
251 
252   fn->gimple_df->vop = global_var;
253 }
254 
255 /* These are the sizes of the operand memory buffer in bytes which gets
256    allocated each time more operands space is required.  The final value is
257    the amount that is allocated every time after that.
258    In 1k we can fit 25 use operands (or 63 def operands) on a host with
259    8 byte pointers, that would be 10 statements each with 1 def and 2
260    uses.  */
261 
262 #define OP_SIZE_INIT	0
263 #define OP_SIZE_1	(1024 - sizeof (void *))
264 #define OP_SIZE_2	(1024 * 4 - sizeof (void *))
265 #define OP_SIZE_3	(1024 * 16 - sizeof (void *))
266 
267 /* Initialize the operand cache routines.  */
268 
269 void
init_ssa_operands(struct function * fn)270 init_ssa_operands (struct function *fn)
271 {
272   gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
273   gimple_ssa_operands (fn)->operand_memory_index
274      = gimple_ssa_operands (fn)->ssa_operand_mem_size;
275   gimple_ssa_operands (fn)->ops_active = true;
276   gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
277   create_vop_var (fn);
278 }
279 
280 
281 /* Dispose of anything required by the operand routines.  */
282 
283 void
fini_ssa_operands(struct function * fn)284 fini_ssa_operands (struct function *fn)
285 {
286   struct ssa_operand_memory_d *ptr;
287 
288   gimple_ssa_operands (fn)->free_uses = NULL;
289 
290   while ((ptr = gimple_ssa_operands (fn)->operand_memory) != NULL)
291     {
292       gimple_ssa_operands (fn)->operand_memory
293 	= gimple_ssa_operands (fn)->operand_memory->next;
294       ggc_free (ptr);
295     }
296 
297   gimple_ssa_operands (fn)->ops_active = false;
298 
299   fn->gimple_df->vop = NULL_TREE;
300 }
301 
302 
303 /* Return memory for an operand of size SIZE.  */
304 
305 static inline void *
ssa_operand_alloc(struct function * fn,unsigned size)306 ssa_operand_alloc (struct function *fn, unsigned size)
307 {
308   char *ptr;
309 
310   gcc_assert (size == sizeof (struct use_optype_d));
311 
312   if (gimple_ssa_operands (fn)->operand_memory_index + size
313       >= gimple_ssa_operands (fn)->ssa_operand_mem_size)
314     {
315       struct ssa_operand_memory_d *ptr;
316 
317       switch (gimple_ssa_operands (fn)->ssa_operand_mem_size)
318 	{
319 	case OP_SIZE_INIT:
320 	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_1;
321 	  break;
322 	case OP_SIZE_1:
323 	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_2;
324 	  break;
325 	case OP_SIZE_2:
326 	case OP_SIZE_3:
327 	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_3;
328 	  break;
329 	default:
330 	  gcc_unreachable ();
331 	}
332 
333 
334       ptr = (ssa_operand_memory_d *) ggc_internal_alloc
335 	(sizeof (void *) + gimple_ssa_operands (fn)->ssa_operand_mem_size);
336 
337       ptr->next = gimple_ssa_operands (fn)->operand_memory;
338       gimple_ssa_operands (fn)->operand_memory = ptr;
339       gimple_ssa_operands (fn)->operand_memory_index = 0;
340     }
341 
342   ptr = &(gimple_ssa_operands (fn)->operand_memory
343 	  ->mem[gimple_ssa_operands (fn)->operand_memory_index]);
344   gimple_ssa_operands (fn)->operand_memory_index += size;
345   return ptr;
346 }
347 
348 
349 /* Allocate a USE operand.  */
350 
351 static inline struct use_optype_d *
alloc_use(struct function * fn)352 alloc_use (struct function *fn)
353 {
354   struct use_optype_d *ret;
355   if (gimple_ssa_operands (fn)->free_uses)
356     {
357       ret = gimple_ssa_operands (fn)->free_uses;
358       gimple_ssa_operands (fn)->free_uses
359 	= gimple_ssa_operands (fn)->free_uses->next;
360     }
361   else
362     ret = (struct use_optype_d *)
363           ssa_operand_alloc (fn, sizeof (struct use_optype_d));
364   return ret;
365 }
366 
367 
368 /* Adds OP to the list of uses of statement STMT after LAST.  */
369 
370 static inline use_optype_p
add_use_op(struct function * fn,gimple * stmt,tree * op,use_optype_p last)371 add_use_op (struct function *fn, gimple *stmt, tree *op, use_optype_p last)
372 {
373   use_optype_p new_use;
374 
375   new_use = alloc_use (fn);
376   USE_OP_PTR (new_use)->use = op;
377   link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
378   last->next = new_use;
379   new_use->next = NULL;
380   return new_use;
381 }
382 
383 
384 
/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs vec of tree *.  */

inline void
operands_scanner::finalize_ssa_defs ()
{
  /* Pre-pend the vdef we may have built.  If the statement already has
     an SSA_NAME VDEF whose underlying variable matches, keep it.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
    }

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  /* Unlink before releasing: unlink_stmt_vdef still needs the
	     statement's VDEF/VUSE intact.  */
	  unlink_stmt_vdef (stmt);
	  release_ssa_name_fn (fn, gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    {
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }
}
422 
423 
/* Takes elements from build_uses and turns them into use operands of STMT.  */

inline void
operands_scanner::finalize_ssa_uses ()
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      /* Keep an existing SSA_NAME VUSE when its underlying variable
	 matches what we built; otherwise drop it so it is replaced.  */
      tree oldvuse = gimple_vuse (stmt);
      if (oldvuse
	  && TREE_CODE (oldvuse) == SSA_NAME)
	oldvuse = SSA_NAME_VAR (oldvuse);
      if (oldvuse != (build_vuse != NULL_TREE
		      ? build_vuse : build_vdef))
	gimple_set_vuse (stmt, NULL_TREE);
      /* The VUSE slot becomes entry 0 of the use list.  */
      build_uses.safe_insert (0, gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      /* Delink every old use from its immediate-use chain, then hand
	 the whole list to the free list for reuse.  */
      for (ptr = old_ops; ptr->next; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      delink_imm_use (USE_OP_PTR (ptr));
      ptr->next = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (fn));
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < build_uses.length (); new_i++)
    {
      tree *op = build_uses[new_i];
      last = add_use_op (fn, stmt, op, last);
    }

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}
486 
487 
488 /* Clear the in_list bits and empty the build array for VDEFs and
489    VUSEs.  */
490 
491 inline void
cleanup_build_arrays()492 operands_scanner::cleanup_build_arrays ()
493 {
494   build_vdef = NULL_TREE;
495   build_vuse = NULL_TREE;
496   build_uses.truncate (0);
497 }
498 
499 
/* Finalize all the build vectors, fill the new ones into INFO:
   flush defs, then uses, then reset the accumulators for the next
   statement.  */

inline void
operands_scanner::finalize_ssa_stmt_operands ()
{
  finalize_ssa_defs ();
  finalize_ssa_uses ();
  cleanup_build_arrays ();
}
509 
510 
/* Start the process of building up operands vectors in INFO.  The
   accumulators must be empty — either freshly constructed or reset
   by cleanup_build_arrays after the previous statement.  */

inline void
operands_scanner::start_ssa_stmt_operands ()
{
  gcc_assert (build_uses.length () == 0);
  gcc_assert (build_vuse == NULL_TREE);
  gcc_assert (build_vdef == NULL_TREE);
}
520 
521 
/* Add USE_P to the list of pointers to operands.  USE_P points at the
   slot inside STMT's tree that holds the use, so later updates through
   it are reflected in the statement.  */

inline void
operands_scanner::append_use (tree *use_p)
{
  build_uses.safe_push (use_p);
}
529 
530 
531 /* Add VAR to the set of variables that require a VDEF operator.  */
532 
533 inline void
append_vdef(tree var)534 operands_scanner::append_vdef (tree var)
535 {
536   gcc_assert ((build_vdef == NULL_TREE
537 	       || build_vdef == var)
538 	      && (build_vuse == NULL_TREE
539 		  || build_vuse == var));
540 
541   build_vdef = var;
542   build_vuse = var;
543 }
544 
545 
546 /* Add VAR to the set of variables that require a VUSE operator.  */
547 
548 inline void
append_vuse(tree var)549 operands_scanner::append_vuse (tree var)
550 {
551   gcc_assert (build_vuse == NULL_TREE
552 	      || build_vuse == var);
553 
554   build_vuse = var;
555 }
556 
557 /* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */
558 
559 void
add_virtual_operand(int flags)560 operands_scanner::add_virtual_operand (int flags)
561 {
562   /* Add virtual operands to the stmt, unless the caller has specifically
563      requested not to do that (used when adding operands inside an
564      ADDR_EXPR expression).  */
565   if (flags & opf_no_vops)
566     return;
567 
568   gcc_assert (!is_gimple_debug (stmt));
569 
570   if (flags & opf_def)
571     append_vdef (gimple_vop (fn));
572   else
573     append_vuse (gimple_vop (fn));
574 }
575 
576 
577 /* Add *VAR_P to the appropriate operand array for statement STMT.
578    FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
579    it will be added to the statement's real operands, otherwise it is
580    added to virtual operands.  */
581 
582 void
add_stmt_operand(tree * var_p,int flags)583 operands_scanner::add_stmt_operand (tree *var_p, int flags)
584 {
585   tree var = *var_p;
586 
587   gcc_assert (SSA_VAR_P (*var_p)
588 	      || TREE_CODE (*var_p) == STRING_CST
589 	      || TREE_CODE (*var_p) == CONST_DECL);
590 
591   if (is_gimple_reg (var))
592     {
593       /* The variable is a GIMPLE register.  Add it to real operands.  */
594       if (flags & opf_def)
595 	;
596       else
597 	append_use (var_p);
598       if (DECL_P (*var_p))
599 	fn->gimple_df->ssa_renaming_needed = 1;
600     }
601   else
602     {
603       /* Mark statements with volatile operands.  */
604       if (!(flags & opf_no_vops)
605 	  && TREE_THIS_VOLATILE (var))
606 	gimple_set_has_volatile_ops (stmt, true);
607 
608       /* The variable is a memory access.  Add virtual operands.  */
609       add_virtual_operand (flags);
610     }
611 }
612 
613 /* Mark the base address of REF as having its address taken.
614    REF may be a single variable whose address has been taken or any
615    other valid GIMPLE memory reference (structure reference, array,
616    etc).  */
617 
618 static void
mark_address_taken(tree ref)619 mark_address_taken (tree ref)
620 {
621   tree var;
622 
623   /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
624      as the only thing we take the address of.  If VAR is a structure,
625      taking the address of a field means that the whole structure may
626      be referenced using pointer arithmetic.  See PR 21407 and the
627      ensuing mailing list discussion.  */
628   var = get_base_address (ref);
629   if (VAR_P (var)
630       || TREE_CODE (var) == RESULT_DECL
631       || TREE_CODE (var) == PARM_DECL)
632     TREE_ADDRESSABLE (var) = 1;
633 }
634 
635 
636 /* A subroutine of get_expr_operands to handle MEM_REF.
637 
638    STMT is the statement being processed, EXPR is the MEM_REF
639       that got us here.
640 
641    FLAGS is as in get_expr_operands.  */
642 
643 void
get_mem_ref_operands(tree expr,int flags)644 operands_scanner::get_mem_ref_operands (tree expr, int flags)
645 {
646   tree *pptr = &TREE_OPERAND (expr, 0);
647 
648   if (!(flags & opf_no_vops)
649       && TREE_THIS_VOLATILE (expr))
650     gimple_set_has_volatile_ops (stmt, true);
651 
652   /* Add the VOP.  */
653   add_virtual_operand (flags);
654 
655   /* If requested, add a USE operand for the base pointer.  */
656   get_expr_operands (pptr,
657 		     opf_non_addressable | opf_use
658 		     | (flags & (opf_no_vops|opf_not_non_addressable)));
659 }
660 
661 
662 /* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */
663 
664 void
get_tmr_operands(tree expr,int flags)665 operands_scanner::get_tmr_operands(tree expr, int flags)
666 {
667   if (!(flags & opf_no_vops)
668       && TREE_THIS_VOLATILE (expr))
669     gimple_set_has_volatile_ops (stmt, true);
670 
671   /* First record the real operands.  */
672   get_expr_operands (&TMR_BASE (expr),
673 		     opf_non_addressable | opf_use
674 		     | (flags & (opf_no_vops|opf_not_non_addressable)));
675   get_expr_operands (&TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
676   get_expr_operands (&TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));
677 
678   add_virtual_operand (flags);
679 }
680 
681 
682 /* If STMT is a call that may clobber globals and other symbols that
683    escape, add them to the VDEF/VUSE lists for it.  */
684 
685 void
maybe_add_call_vops(gcall * stmt)686 operands_scanner::maybe_add_call_vops (gcall *stmt)
687 {
688   int call_flags = gimple_call_flags (stmt);
689 
690   /* If aliases have been computed already, add VDEF or VUSE
691      operands for all the symbols that have been found to be
692      call-clobbered.  */
693   if (!(call_flags & ECF_NOVOPS))
694     {
695       /* A 'pure' or a 'const' function never call-clobbers anything.  */
696       if (!(call_flags & (ECF_PURE | ECF_CONST)))
697 	add_virtual_operand (opf_def);
698       else if (!(call_flags & ECF_CONST))
699 	add_virtual_operand (opf_use);
700     }
701 }
702 
703 
/* Scan operands in the ASM_EXPR stmt referred to in INFO.  Output
   operands are scanned as defs, input operands as uses, and a memory
   clobber adds a virtual definition.  */

void
operands_scanner::get_asm_stmt_operands (gasm *stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  /* Output constraints are collected because parsing an input
     constraint may refer back to them (matching constraints).  */
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
	                       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (&TREE_VALUE (link), opf_def | opf_not_non_addressable);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
	                      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (&TREE_VALUE (link), opf_not_non_addressable);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  if (gimple_asm_clobbers_memory_p (stmt))
    add_virtual_operand (opf_def);
}
757 
758 
/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  Every use/def discovered is routed
   to the append_* accumulators via add_stmt_operand or
   add_virtual_operand.  */

void
operands_scanner::get_expr_operands (tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  int uflags = opf_use;

  if (expr == NULL)
    return;

  /* Debug binds never create virtual operands, so propagate the
     suppression flag into sub-expressions.  */
  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      if ((!(flags & opf_non_addressable)
	   || (flags & opf_not_non_addressable))
	  && !is_gimple_debug (stmt))
	mark_address_taken (TREE_OPERAND (expr, 0));

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (&TREE_OPERAND (expr, 0),
			 flags | opf_not_non_addressable | opf_address_taken);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case STRING_CST:
    case CONST_DECL:
      /* Leaf operands: record unless this is merely the object of an
	 address-taking operation.  */
      if (!(flags & opf_address_taken))
	add_stmt_operand (expr_p, flags);
      return;

    case DEBUG_EXPR_DECL:
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MEM_REF:
      get_mem_ref_operands (expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	get_expr_operands (&TREE_OPERAND (expr, 0), flags);

	/* The extra reference operands (field offset, array index and
	   bounds) are always rvalue uses.  */
	if (code == COMPONENT_REF)
	  get_expr_operands (&TREE_OPERAND (expr, 2), uflags);
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    get_expr_operands (&TREE_OPERAND (expr, 1), uflags);
	    get_expr_operands (&TREE_OPERAND (expr, 2), uflags);
	    get_expr_operands (&TREE_OPERAND (expr, 3), uflags);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (&TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (&TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
      get_expr_operands (&TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (&TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (&TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	/* A volatile constructor is actually TREE_CLOBBER_P, transfer
	   the volatility to the statement, don't use TREE_CLOBBER_P for
	   mirroring the other uses of THIS_VOLATILE in this file.  */
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	for (idx = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce);
	     idx++)
	  get_expr_operands (&ce->value, uflags);

	return;
      }

    case BIT_FIELD_REF:
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (expr))
	gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case VIEW_CONVERT_EXPR:
    do_unary:
      get_expr_operands (&TREE_OPERAND (expr, 0), flags);
      return;

    case BIT_INSERT_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      {
	get_expr_operands (&TREE_OPERAND (expr, 0), flags);
	get_expr_operands (&TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case SAD_EXPR:
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      {
	get_expr_operands (&TREE_OPERAND (expr, 0), flags);
	get_expr_operands (&TREE_OPERAND (expr, 1), flags);
	get_expr_operands (&TREE_OPERAND (expr, 2), flags);
	return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      /* Generic unary/binary/comparison nodes are handled by operand
	 position; constants and types contribute nothing.  */
      if (codeclass == tcc_unary)
	goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
	goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
  if (flag_checking)
    {
      fprintf (stderr, "unhandled expression in get_expr_operands():\n");
      debug_tree (expr);
      fputs ("\n", stderr);
      gcc_unreachable ();
    }
}
943 
944 
/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.
   The caller is expected to have set up the build arrays (via
   start_ssa_stmt_operands) beforehand and to finalize or discard
   them afterwards.  */

void
operands_scanner::parse_ssa_operands ()
{
  enum gimple_code code = gimple_code (stmt);
  size_t i, n, start = 0;

  switch (code)
    {
    case GIMPLE_ASM:
      /* Asm statements get specialized handling of their inputs,
	 outputs and clobbers.  */
      get_asm_stmt_operands (as_a <gasm *> (stmt));
      break;

    case GIMPLE_TRANSACTION:
      /* The start of a transaction is a memory barrier.  */
      add_virtual_operand (opf_def | opf_use);
      break;

    case GIMPLE_DEBUG:
      /* A debug bind value is scanned as a use only, and must not
	 create virtual operands (opf_no_vops).  */
      if (gimple_debug_bind_p (stmt)
	  && gimple_debug_bind_has_value_p (stmt))
	get_expr_operands (gimple_debug_bind_get_value_ptr (stmt),
			   opf_use | opf_no_vops);
      break;

    case GIMPLE_RETURN:
      /* A return uses the virtual operand of the function, then its
	 explicit operands are scanned as uses in the default case.  */
      append_vuse (gimple_vop (fn));
      goto do_default;

    case GIMPLE_CALL:
      /* Add call-clobbered operands, if needed.  */
      maybe_add_call_vops (as_a <gcall *> (stmt));
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      /* Operand 0 (the LHS for both calls and assignments) is scanned
	 as a definition; the rest are scanned as uses below.  */
      get_expr_operands (gimple_op_ptr (stmt, 0), opf_def);
      start = 1;
      /* FALLTHRU */

    default:
    do_default:
      /* Scan all (remaining) operands of STMT as uses.  */
      n = gimple_num_ops (stmt);
      for (i = start; i < n; i++)
	get_expr_operands (gimple_op_ptr (stmt, i), opf_use);
      break;
    }
}
994 
995 
/* Create an operands cache for STMT: scan its operands and install
   the results on the statement.  */

void
operands_scanner::build_ssa_operands ()
{
  /* Initially assume that the statement has no volatile operands;
     scanning (get_expr_operands) sets the flag again when a volatile
     operand is encountered.  */
  gimple_set_has_volatile_ops (stmt, false);

  start_ssa_stmt_operands ();
  parse_ssa_operands ();
  finalize_ssa_stmt_operands ();
}
1008 
/* Verifies SSA statement operands by rebuilding them from scratch
   (without finalizing) and comparing against the operand cache
   currently recorded on STMT.  Returns true (after emitting an error)
   if a problem was found, false if the cache is up to date.  */

DEBUG_FUNCTION bool
operands_scanner::verify_ssa_operands ()
{
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  unsigned i;
  tree def;
  /* Remember the volatility flag so the rescan below can be checked
     against it.  */
  bool volatile_p = gimple_has_volatile_ops (stmt);

  /* build_ssa_operands w/o finalizing them.  */
  gimple_set_has_volatile_ops (stmt, false);
  start_ssa_stmt_operands ();
  parse_ssa_operands ();

  /* Now verify the built operands are the same as present in STMT.  */
  def = gimple_vdef (stmt);
  if (def
      && TREE_CODE (def) == SSA_NAME)
    def = SSA_NAME_VAR (def);
  if (build_vdef != def)
    {
      error ("virtual definition of statement not up to date");
      return true;
    }
  if (gimple_vdef (stmt)
      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
	  || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
    {
      error ("virtual def operand missing for statement");
      return true;
    }

  /* Likewise for the virtual use.  */
  tree use = gimple_vuse (stmt);
  if (use
      && TREE_CODE (use) == SSA_NAME)
    use = SSA_NAME_VAR (use);
  if (build_vuse != use)
    {
      error ("virtual use of statement not up to date");
      return true;
    }
  if (gimple_vuse (stmt)
      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
	  || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
    {
      error ("virtual use operand missing for statement");
      return true;
    }

  /* Match each use operand recorded on STMT against the freshly built
     uses, clearing each build entry as it is matched so that excess
     and missing uses can both be detected.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree *op;
      FOR_EACH_VEC_ELT (build_uses, i, op)
	{
	  if (use_p->use == op)
	    {
	      build_uses[i] = NULL;
	      break;
	    }
	}
      /* If the inner loop ran off the end, this use on STMT was not
	 among the rebuilt uses.  */
      if (i == build_uses.length ())
	{
	  error ("excess use operand for statement");
	  debug_generic_expr (USE_FROM_PTR (use_p));
	  return true;
	}
    }

  /* Any build entry still set was not found on STMT.  */
  tree *op;
  FOR_EACH_VEC_ELT (build_uses, i, op)
    if (op != NULL)
      {
	error ("use operand missing for statement");
	debug_generic_expr (*op);
	return true;
      }

  /* Rescanning must arrive at the same volatility that was recorded
     on STMT before we cleared it above.  */
  if (gimple_has_volatile_ops (stmt) != volatile_p)
    {
      error ("statement volatile flag not up to date");
      return true;
    }

  cleanup_build_arrays ();
  return false;
}
1098 
1099 /* Interface for external use.  */
1100 
1101 DEBUG_FUNCTION bool
verify_ssa_operands(struct function * fn,gimple * stmt)1102 verify_ssa_operands (struct function *fn, gimple *stmt)
1103 {
1104   return operands_scanner (fn, stmt).verify_ssa_operands ();
1105 }
1106 
1107 
1108 /* Releases the operands of STMT back to their freelists, and clears
1109    the stmt operand lists.  */
1110 
1111 void
free_stmt_operands(struct function * fn,gimple * stmt)1112 free_stmt_operands (struct function *fn, gimple *stmt)
1113 {
1114   use_optype_p uses = gimple_use_ops (stmt), last_use;
1115 
1116   if (uses)
1117     {
1118       for (last_use = uses; last_use->next; last_use = last_use->next)
1119 	delink_imm_use (USE_OP_PTR (last_use));
1120       delink_imm_use (USE_OP_PTR (last_use));
1121       last_use->next = gimple_ssa_operands (fn)->free_uses;
1122       gimple_ssa_operands (fn)->free_uses = uses;
1123       gimple_set_use_ops (stmt, NULL);
1124     }
1125 
1126   if (gimple_has_mem_ops (stmt))
1127     {
1128       gimple_set_vuse (stmt, NULL_TREE);
1129       gimple_set_vdef (stmt, NULL_TREE);
1130     }
1131 }
1132 
1133 
/* Rebuild the operand cache of statement STMT in function FN.  */

void
update_stmt_operands (struct function *fn, gimple *stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active (fn))
    return;

  timevar_push (TV_TREE_OPS);

  /* Only statements marked modified may have a stale operand cache.  */
  gcc_assert (gimple_modified_p (stmt));
  operands_scanner (fn, stmt).build_ssa_operands ();
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}
1152 
1153 
1154 /* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is done
1155    to test the validity of the swap operation.  */
1156 
1157 void
swap_ssa_operands(gimple * stmt,tree * exp0,tree * exp1)1158 swap_ssa_operands (gimple *stmt, tree *exp0, tree *exp1)
1159 {
1160   tree op0, op1;
1161   op0 = *exp0;
1162   op1 = *exp1;
1163 
1164   if (op0 != op1)
1165     {
1166       /* Attempt to preserve the relative positions of these two operands in
1167 	 their * respective immediate use lists by adjusting their use pointer
1168 	 to point to the new operand position.  */
1169       use_optype_p use0, use1, ptr;
1170       use0 = use1 = NULL;
1171 
1172       /* Find the 2 operands in the cache, if they are there.  */
1173       for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
1174 	if (USE_OP_PTR (ptr)->use == exp0)
1175 	  {
1176 	    use0 = ptr;
1177 	    break;
1178 	  }
1179 
1180       for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
1181 	if (USE_OP_PTR (ptr)->use == exp1)
1182 	  {
1183 	    use1 = ptr;
1184 	    break;
1185 	  }
1186 
1187       /* And adjust their location to point to the new position of the
1188          operand.  */
1189       if (use0)
1190 	USE_OP_PTR (use0)->use = exp1;
1191       if (use1)
1192 	USE_OP_PTR (use1)->use = exp0;
1193 
1194       /* Now swap the data.  */
1195       *exp0 = op1;
1196       *exp1 = op0;
1197     }
1198 }
1199 
1200 
/* Scan the immediate_use list for VAR making sure its linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  unsigned int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  /* LIST is the root node of the circular doubly-linked list of
     immediate uses; it does not represent a real use, so its use
     pointer must be NULL.  */
  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      /* A root with no prev must be fully unlinked (empty list).  */
      gcc_assert (list->next == NULL);
      return false;
    }

  /* Walk forward, checking back-links and use slots, and counting the
     elements.  */
  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	{
	  fprintf (f, "prev != ptr->prev\n");
	  goto error;
	}

      if (ptr->use == NULL)
	{
	  fprintf (f, "ptr->use == NULL\n");
	  goto error; /* 2 roots, or SAFE guard node.  */
	}
      else if (*(ptr->use) != var)
	{
	  fprintf (f, "*(ptr->use) != var\n");
	  goto error;
	}

      prev = ptr;
      ptr = ptr->next;

      count++;
      /* Guard against the counter wrapping around, which would mean
	 far more uses than can be represented (or a corrupt cycle).  */
      if (count == 0)
	{
	  fprintf (f, "number of immediate uses doesn't fit unsigned int\n");
	  goto error;
	}
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	{
	  fprintf (f, "prev != ptr->next\n");
	  goto error;
	}
      prev = ptr;
      ptr = ptr->prev;
      /* The backward walk must not see more nodes than the forward
	 walk counted.  */
      if (count == 0)
	{
	  fprintf (f, "count-- < 0\n");
	  goto error;
	}
      count--;
    }

  /* And it must not see fewer nodes either.  */
  if (count != 0)
    {
      fprintf (f, "count != 0\n");
      goto error;
    }

  return false;

 error:
  /* Dump diagnostics for the offending node PTR.  */
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
1292 
1293 
1294 /* Dump all the immediate uses to FILE.  */
1295 
1296 void
dump_immediate_uses_for(FILE * file,tree var)1297 dump_immediate_uses_for (FILE *file, tree var)
1298 {
1299   imm_use_iterator iter;
1300   use_operand_p use_p;
1301 
1302   gcc_assert (var && TREE_CODE (var) == SSA_NAME);
1303 
1304   print_generic_expr (file, var, TDF_SLIM);
1305   fprintf (file, " : -->");
1306   if (has_zero_uses (var))
1307     fprintf (file, " no uses.\n");
1308   else
1309     if (has_single_use (var))
1310       fprintf (file, " single use.\n");
1311     else
1312       fprintf (file, "%d uses.\n", num_imm_uses (var));
1313 
1314   FOR_EACH_IMM_USE_FAST (use_p, iter, var)
1315     {
1316       if (use_p->loc.stmt == NULL && use_p->use == NULL)
1317         fprintf (file, "***end of stmt iterator marker***\n");
1318       else
1319 	if (!is_gimple_reg (USE_FROM_PTR (use_p)))
1320 	  print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
1321 	else
1322 	  print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
1323     }
1324   fprintf (file, "\n");
1325 }
1326 
1327 
1328 /* Dump all the immediate uses to FILE.  */
1329 
1330 void
dump_immediate_uses(FILE * file)1331 dump_immediate_uses (FILE *file)
1332 {
1333   tree var;
1334   unsigned int x;
1335 
1336   fprintf (file, "Immediate_uses: \n\n");
1337   FOR_EACH_SSA_NAME (x, var, cfun)
1338     {
1339       dump_immediate_uses_for (file, var);
1340     }
1341 }
1342 
1343 
/* Dump the immediate uses of all SSA names in the current function on
   stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}
1351 
1352 
/* Dump the immediate uses of VAR on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}
1360 
1361 
1362 /* Unlink STMTs virtual definition from the IL by propagating its use.  */
1363 
1364 void
unlink_stmt_vdef(gimple * stmt)1365 unlink_stmt_vdef (gimple *stmt)
1366 {
1367   use_operand_p use_p;
1368   imm_use_iterator iter;
1369   gimple *use_stmt;
1370   tree vdef = gimple_vdef (stmt);
1371   tree vuse = gimple_vuse (stmt);
1372 
1373   if (!vdef
1374       || TREE_CODE (vdef) != SSA_NAME)
1375     return;
1376 
1377   FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
1378     {
1379       FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
1380 	SET_USE (use_p, vuse);
1381     }
1382 
1383   if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
1384     SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
1385 }
1386 
1387 /* Return true if the var whose chain of uses starts at PTR has a
1388    single nondebug use.  Set USE_P and STMT to that single nondebug
1389    use, if so, or to NULL otherwise.  */
1390 bool
single_imm_use_1(const ssa_use_operand_t * head,use_operand_p * use_p,gimple ** stmt)1391 single_imm_use_1 (const ssa_use_operand_t *head,
1392 		  use_operand_p *use_p, gimple **stmt)
1393 {
1394   ssa_use_operand_t *ptr, *single_use = 0;
1395 
1396   for (ptr = head->next; ptr != head; ptr = ptr->next)
1397     if (USE_STMT(ptr) && !is_gimple_debug (USE_STMT (ptr)))
1398       {
1399 	if (single_use)
1400 	  {
1401 	    single_use = NULL;
1402 	    break;
1403 	  }
1404 	single_use = ptr;
1405       }
1406 
1407   if (use_p)
1408     *use_p = single_use;
1409 
1410   if (stmt)
1411     *stmt = single_use ? single_use->loc.stmt : NULL;
1412 
1413   return single_use;
1414 }
1415 
1416