xref: /dflybsd-src/contrib/gcc-8.0/gcc/tree-ssa-operands.c (revision 38fd149817dfbff97799f62fcb70be98c4e32523)
1*38fd1498Szrj /* SSA operands management for trees.
2*38fd1498Szrj    Copyright (C) 2003-2018 Free Software Foundation, Inc.
3*38fd1498Szrj 
4*38fd1498Szrj This file is part of GCC.
5*38fd1498Szrj 
6*38fd1498Szrj GCC is free software; you can redistribute it and/or modify
7*38fd1498Szrj it under the terms of the GNU General Public License as published by
8*38fd1498Szrj the Free Software Foundation; either version 3, or (at your option)
9*38fd1498Szrj any later version.
10*38fd1498Szrj 
11*38fd1498Szrj GCC is distributed in the hope that it will be useful,
12*38fd1498Szrj but WITHOUT ANY WARRANTY; without even the implied warranty of
13*38fd1498Szrj MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
14*38fd1498Szrj GNU General Public License for more details.
15*38fd1498Szrj 
16*38fd1498Szrj You should have received a copy of the GNU General Public License
17*38fd1498Szrj along with GCC; see the file COPYING3.  If not see
18*38fd1498Szrj <http://www.gnu.org/licenses/>.  */
19*38fd1498Szrj 
20*38fd1498Szrj #include "config.h"
21*38fd1498Szrj #include "system.h"
22*38fd1498Szrj #include "coretypes.h"
23*38fd1498Szrj #include "backend.h"
24*38fd1498Szrj #include "tree.h"
25*38fd1498Szrj #include "gimple.h"
26*38fd1498Szrj #include "timevar.h"
27*38fd1498Szrj #include "ssa.h"
28*38fd1498Szrj #include "gimple-pretty-print.h"
29*38fd1498Szrj #include "diagnostic-core.h"
30*38fd1498Szrj #include "stmt.h"
31*38fd1498Szrj #include "print-tree.h"
32*38fd1498Szrj #include "dumpfile.h"
33*38fd1498Szrj 
34*38fd1498Szrj 
35*38fd1498Szrj /* This file contains the code required to manage the operands cache of the
36*38fd1498Szrj    SSA optimizer.  For every stmt, we maintain an operand cache in the stmt
37*38fd1498Szrj    annotation.  This cache contains operands that will be of interest to
38*38fd1498Szrj    optimizers and other passes wishing to manipulate the IL.
39*38fd1498Szrj 
40*38fd1498Szrj    The operand type are broken up into REAL and VIRTUAL operands.  The real
41*38fd1498Szrj    operands are represented as pointers into the stmt's operand tree.  Thus
42*38fd1498Szrj    any manipulation of the real operands will be reflected in the actual tree.
43*38fd1498Szrj    Virtual operands are represented solely in the cache, although the base
44*38fd1498Szrj    variable for the SSA_NAME may, or may not occur in the stmt's tree.
45*38fd1498Szrj    Manipulation of the virtual operands will not be reflected in the stmt tree.
46*38fd1498Szrj 
47*38fd1498Szrj    The routines in this file are concerned with creating this operand cache
48*38fd1498Szrj    from a stmt tree.
49*38fd1498Szrj 
50*38fd1498Szrj    The operand tree is the parsed by the various get_* routines which look
51*38fd1498Szrj    through the stmt tree for the occurrence of operands which may be of
52*38fd1498Szrj    interest, and calls are made to the append_* routines whenever one is
53*38fd1498Szrj    found.  There are 4 of these routines, each representing one of the
54*38fd1498Szrj    4 types of operands. Defs, Uses, Virtual Uses, and Virtual May Defs.
55*38fd1498Szrj 
56*38fd1498Szrj    The append_* routines check for duplication, and simply keep a list of
57*38fd1498Szrj    unique objects for each operand type in the build_* extendable vectors.
58*38fd1498Szrj 
59*38fd1498Szrj    Once the stmt tree is completely parsed, the finalize_ssa_operands()
60*38fd1498Szrj    routine is called, which proceeds to perform the finalization routine
61*38fd1498Szrj    on each of the 4 operand vectors which have been built up.
62*38fd1498Szrj 
63*38fd1498Szrj    If the stmt had a previous operand cache, the finalization routines
64*38fd1498Szrj    attempt to match up the new operands with the old ones.  If it's a perfect
65*38fd1498Szrj    match, the old vector is simply reused.  If it isn't a perfect match, then
66*38fd1498Szrj    a new vector is created and the new operands are placed there.  For
67*38fd1498Szrj    virtual operands, if the previous cache had SSA_NAME version of a
68*38fd1498Szrj    variable, and that same variable occurs in the same operands cache, then
69*38fd1498Szrj    the new cache vector will also get the same SSA_NAME.
70*38fd1498Szrj 
71*38fd1498Szrj    i.e., if a stmt had a VUSE of 'a_5', and 'a' occurs in the new
72*38fd1498Szrj    operand vector for VUSE, then the new vector will also be modified
73*38fd1498Szrj    such that it contains 'a_5' rather than 'a'.  */
74*38fd1498Szrj 
75*38fd1498Szrj 
/* Flags to describe operand properties in helpers.  */

/* By default, operands are loaded.  */
#define opf_use		0

/* Operand is the target of an assignment expression or a
   call-clobbered variable.  */
#define opf_def 	(1 << 0)

/* No virtual operands should be created in the expression.  This is used
   when traversing ADDR_EXPR nodes which have different semantics than
   other expressions.  Inside an ADDR_EXPR node, the only operands that we
   need to consider are indices into arrays.  For instance, &a.b[i] should
   generate a USE of 'i' but it should not generate a VUSE for 'a' nor a
   VUSE for 'b'.  */
#define opf_no_vops 	(1 << 1)

/* NOTE(review): bit (1 << 2) is unused in this file -- presumably left
   free by a historical flag; confirm before reusing it.  */

/* Operand is in a place where address-taken does not imply addressable.  */
#define opf_non_addressable (1 << 3)

/* Operand is in a place where opf_non_addressable does not apply.  */
#define opf_not_non_addressable (1 << 4)

/* Operand is having its address taken.  */
#define opf_address_taken (1 << 5)

/* Array for building all the use operands.  Each element points at the
   operand slot inside a statement, so updates through the pointer are
   reflected in the IL directly.  */
static vec<tree *> build_uses;

/* The built VDEF operand.  */
static tree build_vdef;

/* The built VUSE operand.  */
static tree build_vuse;

/* Bitmap obstack for our datastructures that needs to survive across
   compilations of multiple functions.  */
static bitmap_obstack operands_bitmap_obstack;

/* Forward declaration: the get_*_operands helpers below call the
   expression scanner before its definition.  */
static void get_expr_operands (struct function *, gimple *, tree *, int);

/* Number of functions with initialized ssa_operands.  */
static int n_initialized = 0;
119*38fd1498Szrj 
120*38fd1498Szrj /* Accessor to tree-ssa-operands.c caches.  */
121*38fd1498Szrj static inline struct ssa_operands *
gimple_ssa_operands(const struct function * fun)122*38fd1498Szrj gimple_ssa_operands (const struct function *fun)
123*38fd1498Szrj {
124*38fd1498Szrj   return &fun->gimple_df->ssa_operands;
125*38fd1498Szrj }
126*38fd1498Szrj 
127*38fd1498Szrj 
128*38fd1498Szrj /*  Return true if the SSA operands cache is active.  */
129*38fd1498Szrj 
130*38fd1498Szrj bool
ssa_operands_active(struct function * fun)131*38fd1498Szrj ssa_operands_active (struct function *fun)
132*38fd1498Szrj {
133*38fd1498Szrj   if (fun == NULL)
134*38fd1498Szrj     return false;
135*38fd1498Szrj 
136*38fd1498Szrj   return fun->gimple_df && gimple_ssa_operands (fun)->ops_active;
137*38fd1498Szrj }
138*38fd1498Szrj 
139*38fd1498Szrj 
140*38fd1498Szrj /* Create the VOP variable, an artificial global variable to act as a
141*38fd1498Szrj    representative of all of the virtual operands FUD chain.  */
142*38fd1498Szrj 
143*38fd1498Szrj static void
create_vop_var(struct function * fn)144*38fd1498Szrj create_vop_var (struct function *fn)
145*38fd1498Szrj {
146*38fd1498Szrj   tree global_var;
147*38fd1498Szrj 
148*38fd1498Szrj   gcc_assert (fn->gimple_df->vop == NULL_TREE);
149*38fd1498Szrj 
150*38fd1498Szrj   global_var = build_decl (BUILTINS_LOCATION, VAR_DECL,
151*38fd1498Szrj 			   get_identifier (".MEM"),
152*38fd1498Szrj 			   void_type_node);
153*38fd1498Szrj   DECL_ARTIFICIAL (global_var) = 1;
154*38fd1498Szrj   DECL_IGNORED_P (global_var) = 1;
155*38fd1498Szrj   TREE_READONLY (global_var) = 0;
156*38fd1498Szrj   DECL_EXTERNAL (global_var) = 1;
157*38fd1498Szrj   TREE_STATIC (global_var) = 1;
158*38fd1498Szrj   TREE_USED (global_var) = 1;
159*38fd1498Szrj   DECL_CONTEXT (global_var) = NULL_TREE;
160*38fd1498Szrj   TREE_THIS_VOLATILE (global_var) = 0;
161*38fd1498Szrj   TREE_ADDRESSABLE (global_var) = 0;
162*38fd1498Szrj   VAR_DECL_IS_VIRTUAL_OPERAND (global_var) = 1;
163*38fd1498Szrj 
164*38fd1498Szrj   fn->gimple_df->vop = global_var;
165*38fd1498Szrj }
166*38fd1498Szrj 
/* These are the sizes of the operand memory buffer in bytes which gets
   allocated each time more operands space is required.  The final value is
   the amount that is allocated every time after that.
   In 1k we can fit 25 use operands (or 63 def operands) on a host with
   8 byte pointers, that would be 10 statements each with 1 def and 2
   uses.  */

#define OP_SIZE_INIT	0
#define OP_SIZE_1	(1024 - sizeof (void *))
#define OP_SIZE_2	(1024 * 4 - sizeof (void *))
#define OP_SIZE_3	(1024 * 16 - sizeof (void *))

/* Initialize the operand cache routines.  */

void
init_ssa_operands (struct function *fn)
{
  /* The build arrays and the bitmap obstack are shared by all functions;
     set them up only for the first caller.  */
  if (!n_initialized++)
    {
      build_uses.create (10);
      build_vuse = NULL_TREE;
      build_vdef = NULL_TREE;
      bitmap_obstack_initialize (&operands_bitmap_obstack);
    }

  gcc_assert (gimple_ssa_operands (fn)->operand_memory == NULL);
  /* Setting the index equal to the current chunk size makes the very
     first ssa_operand_alloc request overflow the (nonexistent) chunk
     and thus allocate fresh memory.  */
  gimple_ssa_operands (fn)->operand_memory_index
     = gimple_ssa_operands (fn)->ssa_operand_mem_size;
  gimple_ssa_operands (fn)->ops_active = true;
  /* Restart the chunk-size growth schedule from OP_SIZE_INIT.  */
  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_INIT;
  create_vop_var (fn);
}
199*38fd1498Szrj 
200*38fd1498Szrj 
201*38fd1498Szrj /* Dispose of anything required by the operand routines.  */
202*38fd1498Szrj 
203*38fd1498Szrj void
fini_ssa_operands(struct function * fn)204*38fd1498Szrj fini_ssa_operands (struct function *fn)
205*38fd1498Szrj {
206*38fd1498Szrj   struct ssa_operand_memory_d *ptr;
207*38fd1498Szrj 
208*38fd1498Szrj   if (!--n_initialized)
209*38fd1498Szrj     {
210*38fd1498Szrj       build_uses.release ();
211*38fd1498Szrj       build_vdef = NULL_TREE;
212*38fd1498Szrj       build_vuse = NULL_TREE;
213*38fd1498Szrj     }
214*38fd1498Szrj 
215*38fd1498Szrj   gimple_ssa_operands (fn)->free_uses = NULL;
216*38fd1498Szrj 
217*38fd1498Szrj   while ((ptr = gimple_ssa_operands (fn)->operand_memory) != NULL)
218*38fd1498Szrj     {
219*38fd1498Szrj       gimple_ssa_operands (fn)->operand_memory
220*38fd1498Szrj 	= gimple_ssa_operands (fn)->operand_memory->next;
221*38fd1498Szrj       ggc_free (ptr);
222*38fd1498Szrj     }
223*38fd1498Szrj 
224*38fd1498Szrj   gimple_ssa_operands (fn)->ops_active = false;
225*38fd1498Szrj 
226*38fd1498Szrj   if (!n_initialized)
227*38fd1498Szrj     bitmap_obstack_release (&operands_bitmap_obstack);
228*38fd1498Szrj 
229*38fd1498Szrj   fn->gimple_df->vop = NULL_TREE;
230*38fd1498Szrj }
231*38fd1498Szrj 
232*38fd1498Szrj 
/* Return memory for an operand of size SIZE.  */

static inline void *
ssa_operand_alloc (struct function *fn, unsigned size)
{
  char *ptr;

  /* Only use-operand nodes are carved out of this chunked allocator.  */
  gcc_assert (size == sizeof (struct use_optype_d));

  if (gimple_ssa_operands (fn)->operand_memory_index + size
      >= gimple_ssa_operands (fn)->ssa_operand_mem_size)
    {
      struct ssa_operand_memory_d *ptr;

      /* Grow the chunk size with each new chunk until it reaches
	 OP_SIZE_3, which is then kept for all further chunks.  */
      switch (gimple_ssa_operands (fn)->ssa_operand_mem_size)
	{
	case OP_SIZE_INIT:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_1;
	  break;
	case OP_SIZE_1:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_2;
	  break;
	case OP_SIZE_2:
	case OP_SIZE_3:
	  gimple_ssa_operands (fn)->ssa_operand_mem_size = OP_SIZE_3;
	  break;
	default:
	  gcc_unreachable ();
	}

      /* Allocate the new chunk ('next' pointer plus payload) and push it
	 onto the per-function chunk list; reset the carve-out index.  */
      ptr = (ssa_operand_memory_d *) ggc_internal_alloc
	(sizeof (void *) + gimple_ssa_operands (fn)->ssa_operand_mem_size);

      ptr->next = gimple_ssa_operands (fn)->operand_memory;
      gimple_ssa_operands (fn)->operand_memory = ptr;
      gimple_ssa_operands (fn)->operand_memory_index = 0;
    }

  /* Carve SIZE bytes out of the current chunk.  */
  ptr = &(gimple_ssa_operands (fn)->operand_memory
	  ->mem[gimple_ssa_operands (fn)->operand_memory_index]);
  gimple_ssa_operands (fn)->operand_memory_index += size;
  return ptr;
}
277*38fd1498Szrj 
278*38fd1498Szrj 
279*38fd1498Szrj /* Allocate a USE operand.  */
280*38fd1498Szrj 
281*38fd1498Szrj static inline struct use_optype_d *
alloc_use(struct function * fn)282*38fd1498Szrj alloc_use (struct function *fn)
283*38fd1498Szrj {
284*38fd1498Szrj   struct use_optype_d *ret;
285*38fd1498Szrj   if (gimple_ssa_operands (fn)->free_uses)
286*38fd1498Szrj     {
287*38fd1498Szrj       ret = gimple_ssa_operands (fn)->free_uses;
288*38fd1498Szrj       gimple_ssa_operands (fn)->free_uses
289*38fd1498Szrj 	= gimple_ssa_operands (fn)->free_uses->next;
290*38fd1498Szrj     }
291*38fd1498Szrj   else
292*38fd1498Szrj     ret = (struct use_optype_d *)
293*38fd1498Szrj           ssa_operand_alloc (fn, sizeof (struct use_optype_d));
294*38fd1498Szrj   return ret;
295*38fd1498Szrj }
296*38fd1498Szrj 
297*38fd1498Szrj 
298*38fd1498Szrj /* Adds OP to the list of uses of statement STMT after LAST.  */
299*38fd1498Szrj 
300*38fd1498Szrj static inline use_optype_p
add_use_op(struct function * fn,gimple * stmt,tree * op,use_optype_p last)301*38fd1498Szrj add_use_op (struct function *fn, gimple *stmt, tree *op, use_optype_p last)
302*38fd1498Szrj {
303*38fd1498Szrj   use_optype_p new_use;
304*38fd1498Szrj 
305*38fd1498Szrj   new_use = alloc_use (fn);
306*38fd1498Szrj   USE_OP_PTR (new_use)->use = op;
307*38fd1498Szrj   link_imm_use_stmt (USE_OP_PTR (new_use), *op, stmt);
308*38fd1498Szrj   last->next = new_use;
309*38fd1498Szrj   new_use->next = NULL;
310*38fd1498Szrj   return new_use;
311*38fd1498Szrj }
312*38fd1498Szrj 
313*38fd1498Szrj 
314*38fd1498Szrj 
/* Takes elements from build_defs and turns them into def operands of STMT.
   TODO -- Make build_defs vec of tree *.  */

static inline void
finalize_ssa_defs (struct function *fn, gimple *stmt)
{
  /* Pre-pend the vdef we may have built.  */
  if (build_vdef != NULL_TREE)
    {
      tree oldvdef = gimple_vdef (stmt);
      /* Compare against the base variable: if the statement already has
	 an SSA name for the same variable, keep it as-is.  */
      if (oldvdef
	  && TREE_CODE (oldvdef) == SSA_NAME)
	oldvdef = SSA_NAME_VAR (oldvdef);
      if (oldvdef != build_vdef)
	gimple_set_vdef (stmt, build_vdef);
    }

  /* Clear and unlink a no longer necessary VDEF.  */
  if (build_vdef == NULL_TREE
      && gimple_vdef (stmt) != NULL_TREE)
    {
      if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	{
	  /* Detach the old VDEF from the use-def web before releasing
	     its SSA name back to the function.  */
	  unlink_stmt_vdef (stmt);
	  release_ssa_name_fn (fn, gimple_vdef (stmt));
	}
      gimple_set_vdef (stmt, NULL_TREE);
    }

  /* If we have a non-SSA_NAME VDEF, mark it for renaming.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) != SSA_NAME)
    {
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }
}
352*38fd1498Szrj 
353*38fd1498Szrj 
/* Takes elements from build_uses and turns them into use operands of STMT.  */

static inline void
finalize_ssa_uses (struct function *fn, gimple *stmt)
{
  unsigned new_i;
  struct use_optype_d new_list;
  use_optype_p old_ops, ptr, last;

  /* Pre-pend the VUSE we may have built.  */
  if (build_vuse != NULL_TREE)
    {
      tree oldvuse = gimple_vuse (stmt);
      /* Compare against the base variable; an SSA name for the same
	 variable is preserved.  */
      if (oldvuse
	  && TREE_CODE (oldvuse) == SSA_NAME)
	oldvuse = SSA_NAME_VAR (oldvuse);
      /* NOTE(review): build_vuse is known non-NULL in this branch, so
	 the conditional below always selects build_vuse; the build_vdef
	 arm is dead here.  */
      if (oldvuse != (build_vuse != NULL_TREE
		      ? build_vuse : build_vdef))
	gimple_set_vuse (stmt, NULL_TREE);
      /* The VUSE slot is threaded through the ordinary use list.  */
      build_uses.safe_insert (0, gimple_vuse_ptr (stmt));
    }

  new_list.next = NULL;
  last = &new_list;

  old_ops = gimple_use_ops (stmt);

  /* Clear a no longer necessary VUSE.  */
  if (build_vuse == NULL_TREE
      && gimple_vuse (stmt) != NULL_TREE)
    gimple_set_vuse (stmt, NULL_TREE);

  /* If there is anything in the old list, free it.  */
  if (old_ops)
    {
      /* Delink every old use from its immediate-uses chain, then move
	 the whole node list onto the per-function free list for reuse.  */
      for (ptr = old_ops; ptr->next; ptr = ptr->next)
	delink_imm_use (USE_OP_PTR (ptr));
      delink_imm_use (USE_OP_PTR (ptr));
      ptr->next = gimple_ssa_operands (fn)->free_uses;
      gimple_ssa_operands (fn)->free_uses = old_ops;
    }

  /* If we added a VUSE, make sure to set the operand if it is not already
     present and mark it for renaming.  */
  if (build_vuse != NULL_TREE
      && gimple_vuse (stmt) == NULL_TREE)
    {
      gimple_set_vuse (stmt, gimple_vop (fn));
      fn->gimple_df->rename_vops = 1;
      fn->gimple_df->ssa_renaming_needed = 1;
    }

  /* Now create nodes for all the new nodes.  */
  for (new_i = 0; new_i < build_uses.length (); new_i++)
    {
      tree *op = build_uses[new_i];
      last = add_use_op (fn, stmt, op, last);
    }

  /* Now set the stmt's operands.  */
  gimple_set_use_ops (stmt, new_list.next);
}
416*38fd1498Szrj 
417*38fd1498Szrj 
418*38fd1498Szrj /* Clear the in_list bits and empty the build array for VDEFs and
419*38fd1498Szrj    VUSEs.  */
420*38fd1498Szrj 
421*38fd1498Szrj static inline void
cleanup_build_arrays(void)422*38fd1498Szrj cleanup_build_arrays (void)
423*38fd1498Szrj {
424*38fd1498Szrj   build_vdef = NULL_TREE;
425*38fd1498Szrj   build_vuse = NULL_TREE;
426*38fd1498Szrj   build_uses.truncate (0);
427*38fd1498Szrj }
428*38fd1498Szrj 
429*38fd1498Szrj 
/* Finalize all the build vectors, fill the new ones into INFO.  */

static inline void
finalize_ssa_stmt_operands (struct function *fn, gimple *stmt)
{
  /* Install the virtual def first, then the uses (which also handle
     the VUSE), and finally drain the shared build arrays.  */
  finalize_ssa_defs (fn, stmt);
  finalize_ssa_uses (fn, stmt);
  cleanup_build_arrays ();
}
439*38fd1498Szrj 
440*38fd1498Szrj 
441*38fd1498Szrj /* Start the process of building up operands vectors in INFO.  */
442*38fd1498Szrj 
443*38fd1498Szrj static inline void
start_ssa_stmt_operands(void)444*38fd1498Szrj start_ssa_stmt_operands (void)
445*38fd1498Szrj {
446*38fd1498Szrj   gcc_assert (build_uses.length () == 0);
447*38fd1498Szrj   gcc_assert (build_vuse == NULL_TREE);
448*38fd1498Szrj   gcc_assert (build_vdef == NULL_TREE);
449*38fd1498Szrj }
450*38fd1498Szrj 
451*38fd1498Szrj 
/* Add USE_P to the list of pointers to operands.  */

static inline void
append_use (tree *use_p)
{
  /* USE_P points at the operand slot inside the statement, so updates
     made through it later are reflected directly in the IL.  */
  build_uses.safe_push (use_p);
}
459*38fd1498Szrj 
460*38fd1498Szrj 
461*38fd1498Szrj /* Add VAR to the set of variables that require a VDEF operator.  */
462*38fd1498Szrj 
463*38fd1498Szrj static inline void
append_vdef(tree var)464*38fd1498Szrj append_vdef (tree var)
465*38fd1498Szrj {
466*38fd1498Szrj   gcc_assert ((build_vdef == NULL_TREE
467*38fd1498Szrj 	       || build_vdef == var)
468*38fd1498Szrj 	      && (build_vuse == NULL_TREE
469*38fd1498Szrj 		  || build_vuse == var));
470*38fd1498Szrj 
471*38fd1498Szrj   build_vdef = var;
472*38fd1498Szrj   build_vuse = var;
473*38fd1498Szrj }
474*38fd1498Szrj 
475*38fd1498Szrj 
476*38fd1498Szrj /* Add VAR to the set of variables that require a VUSE operator.  */
477*38fd1498Szrj 
478*38fd1498Szrj static inline void
append_vuse(tree var)479*38fd1498Szrj append_vuse (tree var)
480*38fd1498Szrj {
481*38fd1498Szrj   gcc_assert (build_vuse == NULL_TREE
482*38fd1498Szrj 	      || build_vuse == var);
483*38fd1498Szrj 
484*38fd1498Szrj   build_vuse = var;
485*38fd1498Szrj }
486*38fd1498Szrj 
487*38fd1498Szrj /* Add virtual operands for STMT.  FLAGS is as in get_expr_operands.  */
488*38fd1498Szrj 
489*38fd1498Szrj static void
add_virtual_operand(struct function * fn,gimple * stmt ATTRIBUTE_UNUSED,int flags)490*38fd1498Szrj add_virtual_operand (struct function *fn,
491*38fd1498Szrj 		     gimple *stmt ATTRIBUTE_UNUSED, int flags)
492*38fd1498Szrj {
493*38fd1498Szrj   /* Add virtual operands to the stmt, unless the caller has specifically
494*38fd1498Szrj      requested not to do that (used when adding operands inside an
495*38fd1498Szrj      ADDR_EXPR expression).  */
496*38fd1498Szrj   if (flags & opf_no_vops)
497*38fd1498Szrj     return;
498*38fd1498Szrj 
499*38fd1498Szrj   gcc_assert (!is_gimple_debug (stmt));
500*38fd1498Szrj 
501*38fd1498Szrj   if (flags & opf_def)
502*38fd1498Szrj     append_vdef (gimple_vop (fn));
503*38fd1498Szrj   else
504*38fd1498Szrj     append_vuse (gimple_vop (fn));
505*38fd1498Szrj }
506*38fd1498Szrj 
507*38fd1498Szrj 
508*38fd1498Szrj /* Add *VAR_P to the appropriate operand array for statement STMT.
509*38fd1498Szrj    FLAGS is as in get_expr_operands.  If *VAR_P is a GIMPLE register,
510*38fd1498Szrj    it will be added to the statement's real operands, otherwise it is
511*38fd1498Szrj    added to virtual operands.  */
512*38fd1498Szrj 
513*38fd1498Szrj static void
add_stmt_operand(struct function * fn,tree * var_p,gimple * stmt,int flags)514*38fd1498Szrj add_stmt_operand (struct function *fn, tree *var_p, gimple *stmt, int flags)
515*38fd1498Szrj {
516*38fd1498Szrj   tree var = *var_p;
517*38fd1498Szrj 
518*38fd1498Szrj   gcc_assert (SSA_VAR_P (*var_p));
519*38fd1498Szrj 
520*38fd1498Szrj   if (is_gimple_reg (var))
521*38fd1498Szrj     {
522*38fd1498Szrj       /* The variable is a GIMPLE register.  Add it to real operands.  */
523*38fd1498Szrj       if (flags & opf_def)
524*38fd1498Szrj 	;
525*38fd1498Szrj       else
526*38fd1498Szrj 	append_use (var_p);
527*38fd1498Szrj       if (DECL_P (*var_p))
528*38fd1498Szrj 	fn->gimple_df->ssa_renaming_needed = 1;
529*38fd1498Szrj     }
530*38fd1498Szrj   else
531*38fd1498Szrj     {
532*38fd1498Szrj       /* Mark statements with volatile operands.  */
533*38fd1498Szrj       if (!(flags & opf_no_vops)
534*38fd1498Szrj 	  && TREE_THIS_VOLATILE (var))
535*38fd1498Szrj 	gimple_set_has_volatile_ops (stmt, true);
536*38fd1498Szrj 
537*38fd1498Szrj       /* The variable is a memory access.  Add virtual operands.  */
538*38fd1498Szrj       add_virtual_operand (fn, stmt, flags);
539*38fd1498Szrj     }
540*38fd1498Szrj }
541*38fd1498Szrj 
542*38fd1498Szrj /* Mark the base address of REF as having its address taken.
543*38fd1498Szrj    REF may be a single variable whose address has been taken or any
544*38fd1498Szrj    other valid GIMPLE memory reference (structure reference, array,
545*38fd1498Szrj    etc).  */
546*38fd1498Szrj 
547*38fd1498Szrj static void
mark_address_taken(tree ref)548*38fd1498Szrj mark_address_taken (tree ref)
549*38fd1498Szrj {
550*38fd1498Szrj   tree var;
551*38fd1498Szrj 
552*38fd1498Szrj   /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
553*38fd1498Szrj      as the only thing we take the address of.  If VAR is a structure,
554*38fd1498Szrj      taking the address of a field means that the whole structure may
555*38fd1498Szrj      be referenced using pointer arithmetic.  See PR 21407 and the
556*38fd1498Szrj      ensuing mailing list discussion.  */
557*38fd1498Szrj   var = get_base_address (ref);
558*38fd1498Szrj   if (var)
559*38fd1498Szrj     {
560*38fd1498Szrj       if (DECL_P (var))
561*38fd1498Szrj 	TREE_ADDRESSABLE (var) = 1;
562*38fd1498Szrj       else if (TREE_CODE (var) == MEM_REF
563*38fd1498Szrj 	       && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
564*38fd1498Szrj 	       && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
565*38fd1498Szrj 	TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
566*38fd1498Szrj     }
567*38fd1498Szrj }
568*38fd1498Szrj 
569*38fd1498Szrj 
570*38fd1498Szrj /* A subroutine of get_expr_operands to handle MEM_REF.
571*38fd1498Szrj 
572*38fd1498Szrj    STMT is the statement being processed, EXPR is the MEM_REF
573*38fd1498Szrj       that got us here.
574*38fd1498Szrj 
575*38fd1498Szrj    FLAGS is as in get_expr_operands.  */
576*38fd1498Szrj 
577*38fd1498Szrj static void
get_mem_ref_operands(struct function * fn,gimple * stmt,tree expr,int flags)578*38fd1498Szrj get_mem_ref_operands (struct function *fn,
579*38fd1498Szrj 		      gimple *stmt, tree expr, int flags)
580*38fd1498Szrj {
581*38fd1498Szrj   tree *pptr = &TREE_OPERAND (expr, 0);
582*38fd1498Szrj 
583*38fd1498Szrj   if (!(flags & opf_no_vops)
584*38fd1498Szrj       && TREE_THIS_VOLATILE (expr))
585*38fd1498Szrj     gimple_set_has_volatile_ops (stmt, true);
586*38fd1498Szrj 
587*38fd1498Szrj   /* Add the VOP.  */
588*38fd1498Szrj   add_virtual_operand (fn, stmt, flags);
589*38fd1498Szrj 
590*38fd1498Szrj   /* If requested, add a USE operand for the base pointer.  */
591*38fd1498Szrj   get_expr_operands (fn, stmt, pptr,
592*38fd1498Szrj 		     opf_non_addressable | opf_use
593*38fd1498Szrj 		     | (flags & (opf_no_vops|opf_not_non_addressable)));
594*38fd1498Szrj }
595*38fd1498Szrj 
596*38fd1498Szrj 
597*38fd1498Szrj /* A subroutine of get_expr_operands to handle TARGET_MEM_REF.  */
598*38fd1498Szrj 
599*38fd1498Szrj static void
get_tmr_operands(struct function * fn,gimple * stmt,tree expr,int flags)600*38fd1498Szrj get_tmr_operands (struct function *fn, gimple *stmt, tree expr, int flags)
601*38fd1498Szrj {
602*38fd1498Szrj   if (!(flags & opf_no_vops)
603*38fd1498Szrj       && TREE_THIS_VOLATILE (expr))
604*38fd1498Szrj     gimple_set_has_volatile_ops (stmt, true);
605*38fd1498Szrj 
606*38fd1498Szrj   /* First record the real operands.  */
607*38fd1498Szrj   get_expr_operands (fn, stmt,
608*38fd1498Szrj 		     &TMR_BASE (expr), opf_use | (flags & opf_no_vops));
609*38fd1498Szrj   get_expr_operands (fn, stmt,
610*38fd1498Szrj 		     &TMR_INDEX (expr), opf_use | (flags & opf_no_vops));
611*38fd1498Szrj   get_expr_operands (fn, stmt,
612*38fd1498Szrj 		     &TMR_INDEX2 (expr), opf_use | (flags & opf_no_vops));
613*38fd1498Szrj 
614*38fd1498Szrj   add_virtual_operand (fn, stmt, flags);
615*38fd1498Szrj }
616*38fd1498Szrj 
617*38fd1498Szrj 
618*38fd1498Szrj /* If STMT is a call that may clobber globals and other symbols that
619*38fd1498Szrj    escape, add them to the VDEF/VUSE lists for it.  */
620*38fd1498Szrj 
621*38fd1498Szrj static void
maybe_add_call_vops(struct function * fn,gcall * stmt)622*38fd1498Szrj maybe_add_call_vops (struct function *fn, gcall *stmt)
623*38fd1498Szrj {
624*38fd1498Szrj   int call_flags = gimple_call_flags (stmt);
625*38fd1498Szrj 
626*38fd1498Szrj   /* If aliases have been computed already, add VDEF or VUSE
627*38fd1498Szrj      operands for all the symbols that have been found to be
628*38fd1498Szrj      call-clobbered.  */
629*38fd1498Szrj   if (!(call_flags & ECF_NOVOPS))
630*38fd1498Szrj     {
631*38fd1498Szrj       /* A 'pure' or a 'const' function never call-clobbers anything.  */
632*38fd1498Szrj       if (!(call_flags & (ECF_PURE | ECF_CONST)))
633*38fd1498Szrj 	add_virtual_operand (fn, stmt, opf_def);
634*38fd1498Szrj       else if (!(call_flags & ECF_CONST))
635*38fd1498Szrj 	add_virtual_operand (fn, stmt, opf_use);
636*38fd1498Szrj     }
637*38fd1498Szrj }
638*38fd1498Szrj 
639*38fd1498Szrj 
/* Scan operands in the ASM_EXPR stmt referred to in INFO.  */

static void
get_asm_stmt_operands (struct function *fn, gasm *stmt)
{
  size_t i, noutputs;
  const char **oconstraints;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;

  noutputs = gimple_asm_noutputs (stmt);
  /* Stack-allocated scratch; holds the output constraint strings that
     parse_input_constraint needs for matching constraints.  */
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  /* Gather all output operands.  */
  for (i = 0; i < gimple_asm_noutputs (stmt); i++)
    {
      tree link = gimple_asm_output_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      oconstraints[i] = constraint;
      parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
	                       &allows_reg, &is_inout);

      /* This should have been split in gimplify_asm_expr.  */
      gcc_assert (!allows_reg || !is_inout);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      /* Outputs are defs; address-taken here does imply addressable.  */
      get_expr_operands (fn, stmt,
			 &TREE_VALUE (link), opf_def | opf_not_non_addressable);
    }

  /* Gather all input operands.  */
  for (i = 0; i < gimple_asm_ninputs (stmt); i++)
    {
      tree link = gimple_asm_input_op (stmt, i);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
	                      &allows_mem, &allows_reg);

      /* Memory operands are addressable.  Note that STMT needs the
	 address of this operand.  */
      if (!allows_reg && allows_mem)
	mark_address_taken (TREE_VALUE (link));

      get_expr_operands (fn, stmt, &TREE_VALUE (link), opf_not_non_addressable);
    }

  /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
  if (gimple_asm_clobbers_memory_p (stmt))
    add_virtual_operand (fn, stmt, opf_def);
}
694*38fd1498Szrj 
695*38fd1498Szrj 
/* Recursively scan the expression pointed to by EXPR_P in statement
   STMT.  FLAGS is one of the OPF_* constants modifying how to
   interpret the operands found.  */

static void
get_expr_operands (struct function *fn, gimple *stmt, tree *expr_p, int flags)
{
  enum tree_code code;
  enum tree_code_class codeclass;
  tree expr = *expr_p;
  /* Sub-expressions that are plain rvalue uses (e.g. array indices)
     are scanned with these flags instead of the incoming FLAGS.  */
  int uflags = opf_use;

  if (expr == NULL)
    return;

  /* For debug statements, propagate the no-vops restriction down to
     sub-expression scans so they never create virtual operands.  */
  if (is_gimple_debug (stmt))
    uflags |= (flags & opf_no_vops);

  code = TREE_CODE (expr);
  codeclass = TREE_CODE_CLASS (code);

  switch (code)
    {
    case ADDR_EXPR:
      /* Taking the address of a variable does not represent a
	 reference to it, but the fact that the statement takes its
	 address will be of interest to some passes (e.g. alias
	 resolution).  */
      if ((!(flags & opf_non_addressable)
	   || (flags & opf_not_non_addressable))
	  && !is_gimple_debug (stmt))
	mark_address_taken (TREE_OPERAND (expr, 0));

      /* Otherwise, there may be variables referenced inside but there
	 should be no VUSEs created, since the referenced objects are
	 not really accessed.  The only operands that we should find
	 here are ARRAY_REF indices which will always be real operands
	 (GIMPLE does not allow non-registers as array indices).  */
      flags |= opf_no_vops;
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0),
			 flags | opf_not_non_addressable | opf_address_taken);
      return;

    case SSA_NAME:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* When reached through an ADDR_EXPR (opf_address_taken), the
	 base was already recorded via mark_address_taken above and is
	 not a real operand.  */
      if (!(flags & opf_address_taken))
	add_stmt_operand (fn, expr_p, stmt, flags);
      return;

    case DEBUG_EXPR_DECL:
      /* DEBUG_EXPR_DECLs may only appear in debug bind statements and
	 contribute no operands.  */
      gcc_assert (gimple_debug_bind_p (stmt));
      return;

    case MEM_REF:
      get_mem_ref_operands (fn, stmt, expr, flags);
      return;

    case TARGET_MEM_REF:
      get_tmr_operands (fn, stmt, expr, flags);
      return;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Transfer volatility of the reference to the statement,
	   unless virtual operands are suppressed.  */
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	/* Scan the base of the reference with the incoming FLAGS.  */
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);

	if (code == COMPONENT_REF)
	  {
	    if (!(flags & opf_no_vops)
		&& TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
	      gimple_set_has_volatile_ops (stmt, true);
	    /* Operand 2 may be NULL; the NULL check at function entry
	       handles that.  */
	    get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
	  }
	else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
	  {
	    /* Index and the (possibly NULL) bound/element-size
	       operands are plain uses.  */
            get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
            get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
            get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 3), uflags);
	  }

	return;
      }

    case WITH_SIZE_EXPR:
      /* WITH_SIZE_EXPR is a pass-through reference to its first argument,
	 and an rvalue reference to its second argument.  */
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case COND_EXPR:
    case VEC_COND_EXPR:
    case VEC_PERM_EXPR:
      /* All three operands are rvalue uses.  */
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), uflags);
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), uflags);
      return;

    case CONSTRUCTOR:
      {
	/* General aggregate CONSTRUCTORs have been decomposed, but they
	   are still in use as the COMPLEX_EXPR equivalent for vectors.  */
	constructor_elt *ce;
	unsigned HOST_WIDE_INT idx;

	/* A volatile constructor is actually TREE_CLOBBER_P, transfer
	   the volatility to the statement, don't use TREE_CLOBBER_P for
	   mirroring the other uses of THIS_VOLATILE in this file.  */
	if (!(flags & opf_no_vops)
	    && TREE_THIS_VOLATILE (expr))
	  gimple_set_has_volatile_ops (stmt, true);

	for (idx = 0;
	     vec_safe_iterate (CONSTRUCTOR_ELTS (expr), idx, &ce);
	     idx++)
	  get_expr_operands (fn, stmt, &ce->value, uflags);

	return;
      }

    case BIT_FIELD_REF:
      if (!(flags & opf_no_vops)
	  && TREE_THIS_VOLATILE (expr))
	gimple_set_has_volatile_ops (stmt, true);
      /* FALLTHRU */

    case VIEW_CONVERT_EXPR:
    do_unary:
      /* Single-operand expressions: scan the operand with the
	 incoming FLAGS.  */
      get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
      return;

    case BIT_INSERT_EXPR:
    case COMPOUND_EXPR:
    case OBJ_TYPE_REF:
    case ASSERT_EXPR:
    do_binary:
      /* Two-operand expressions: scan both with the incoming FLAGS.  */
      {
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
	return;
      }

    case DOT_PROD_EXPR:
    case SAD_EXPR:
    case REALIGN_LOAD_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case FMA_EXPR:
      /* Ternary expressions: scan all three operands.  */
      {
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 0), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 1), flags);
	get_expr_operands (fn, stmt, &TREE_OPERAND (expr, 2), flags);
	return;
      }

    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case CASE_LABEL_EXPR:
      /* Expressions that make no memory references.  */
      return;

    default:
      /* Dispatch remaining codes by class: generic unary and binary
	 scans, and nothing at all for constants and types.  */
      if (codeclass == tcc_unary)
	goto do_unary;
      if (codeclass == tcc_binary || codeclass == tcc_comparison)
	goto do_binary;
      if (codeclass == tcc_constant || codeclass == tcc_type)
	return;
    }

  /* If we get here, something has gone wrong.  */
  if (flag_checking)
    {
      fprintf (stderr, "unhandled expression in get_expr_operands():\n");
      debug_tree (expr);
      fputs ("\n", stderr);
      gcc_unreachable ();
    }
}
885*38fd1498Szrj 
886*38fd1498Szrj 
/* Parse STMT looking for operands.  When finished, the various
   build_* operand vectors will have potential operands in them.  */

static void
parse_ssa_operands (struct function *fn, gimple *stmt)
{
  enum gimple_code code = gimple_code (stmt);
  size_t i, n, start = 0;

  switch (code)
    {
    case GIMPLE_ASM:
      get_asm_stmt_operands (fn, as_a <gasm *> (stmt));
      break;

    case GIMPLE_TRANSACTION:
      /* The start of a transaction is a memory barrier.  */
      add_virtual_operand (fn, stmt, opf_def | opf_use);
      break;

    case GIMPLE_DEBUG:
      /* Debug binds contribute only uses and never virtual operands.  */
      if (gimple_debug_bind_p (stmt)
	  && gimple_debug_bind_has_value_p (stmt))
	get_expr_operands (fn, stmt, gimple_debug_bind_get_value_ptr (stmt),
			   opf_use | opf_no_vops);
      break;

    case GIMPLE_RETURN:
      /* A return uses memory, so append a virtual use before scanning
	 the remaining operands as plain uses.  */
      append_vuse (gimple_vop (fn));
      goto do_default;

    case GIMPLE_CALL:
      /* Add call-clobbered operands, if needed.  */
      maybe_add_call_vops (fn, as_a <gcall *> (stmt));
      /* FALLTHRU */

    case GIMPLE_ASSIGN:
      /* Operand 0 is the definition; the rest are uses, handled by
	 the default case with START skipping the def.  */
      get_expr_operands (fn, stmt, gimple_op_ptr (stmt, 0), opf_def);
      start = 1;
      /* FALLTHRU */

    default:
    do_default:
      /* Scan operands START .. N-1 as plain uses.  */
      n = gimple_num_ops (stmt);
      for (i = start; i < n; i++)
	get_expr_operands (fn, stmt, gimple_op_ptr (stmt, i), opf_use);
      break;
    }
}
936*38fd1498Szrj 
937*38fd1498Szrj 
/* Create an operands cache for STMT.  */

static void
build_ssa_operands (struct function *fn, gimple *stmt)
{
  /* Initially assume that the statement has no volatile operands.
     Parsing below sets the flag again if any are found.  */
  gimple_set_has_volatile_ops (stmt, false);

  /* Collect operands into the build_* vectors, then commit them to
     STMT's operand cache.  */
  start_ssa_stmt_operands ();
  parse_ssa_operands (fn, stmt);
  finalize_ssa_stmt_operands (fn, stmt);
}
950*38fd1498Szrj 
/* Verifies SSA statement operands.  Re-parses STMT's operands and
   compares them against the cached ones; returns true and reports via
   error () when the cache is out of date.  */

DEBUG_FUNCTION bool
verify_ssa_operands (struct function *fn, gimple *stmt)
{
  use_operand_p use_p;
  def_operand_p def_p;
  ssa_op_iter iter;
  unsigned i;
  tree def;
  /* Remember the cached volatility flag; parsing recomputes it.  */
  bool volatile_p = gimple_has_volatile_ops (stmt);

  /* build_ssa_operands w/o finalizing them.  */
  gimple_set_has_volatile_ops (stmt, false);
  start_ssa_stmt_operands ();
  parse_ssa_operands (fn, stmt);

  /* Now verify the built operands are the same as present in STMT.  */
  def = gimple_vdef (stmt);
  if (def
      && TREE_CODE (def) == SSA_NAME)
    def = SSA_NAME_VAR (def);
  if (build_vdef != def)
    {
      error ("virtual definition of statement not up-to-date");
      return true;
    }
  if (gimple_vdef (stmt)
      && ((def_p = gimple_vdef_op (stmt)) == NULL_DEF_OPERAND_P
	  || DEF_FROM_PTR (def_p) != gimple_vdef (stmt)))
    {
      error ("virtual def operand missing for stmt");
      return true;
    }

  tree use = gimple_vuse (stmt);
  if (use
      && TREE_CODE (use) == SSA_NAME)
    use = SSA_NAME_VAR (use);
  if (build_vuse != use)
    {
      error ("virtual use of statement not up-to-date");
      return true;
    }
  if (gimple_vuse (stmt)
      && ((use_p = gimple_vuse_op (stmt)) == NULL_USE_OPERAND_P
	  || USE_FROM_PTR (use_p) != gimple_vuse (stmt)))
    {
      error ("virtual use operand missing for stmt");
      return true;
    }

  /* Match each cached use against the freshly built ones, clearing
     matched entries so excess cached uses are detected.  */
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree *op;
      FOR_EACH_VEC_ELT (build_uses, i, op)
	{
	  if (use_p->use == op)
	    {
	      build_uses[i] = NULL;
	      break;
	    }
	}
      if (i == build_uses.length ())
	{
	  error ("excess use operand for stmt");
	  debug_generic_expr (USE_FROM_PTR (use_p));
	  return true;
	}
    }

  /* Any entry not cleared above was missing from STMT's cache.  */
  tree *op;
  FOR_EACH_VEC_ELT (build_uses, i, op)
    if (op != NULL)
      {
	error ("use operand missing for stmt");
	debug_generic_expr (*op);
	return true;
      }

  if (gimple_has_volatile_ops (stmt) != volatile_p)
    {
      error ("stmt volatile flag not up-to-date");
      return true;
    }

  cleanup_build_arrays ();
  return false;
}
1040*38fd1498Szrj 
1041*38fd1498Szrj 
1042*38fd1498Szrj /* Releases the operands of STMT back to their freelists, and clears
1043*38fd1498Szrj    the stmt operand lists.  */
1044*38fd1498Szrj 
1045*38fd1498Szrj void
free_stmt_operands(struct function * fn,gimple * stmt)1046*38fd1498Szrj free_stmt_operands (struct function *fn, gimple *stmt)
1047*38fd1498Szrj {
1048*38fd1498Szrj   use_optype_p uses = gimple_use_ops (stmt), last_use;
1049*38fd1498Szrj 
1050*38fd1498Szrj   if (uses)
1051*38fd1498Szrj     {
1052*38fd1498Szrj       for (last_use = uses; last_use->next; last_use = last_use->next)
1053*38fd1498Szrj 	delink_imm_use (USE_OP_PTR (last_use));
1054*38fd1498Szrj       delink_imm_use (USE_OP_PTR (last_use));
1055*38fd1498Szrj       last_use->next = gimple_ssa_operands (fn)->free_uses;
1056*38fd1498Szrj       gimple_ssa_operands (fn)->free_uses = uses;
1057*38fd1498Szrj       gimple_set_use_ops (stmt, NULL);
1058*38fd1498Szrj     }
1059*38fd1498Szrj 
1060*38fd1498Szrj   if (gimple_has_mem_ops (stmt))
1061*38fd1498Szrj     {
1062*38fd1498Szrj       gimple_set_vuse (stmt, NULL_TREE);
1063*38fd1498Szrj       gimple_set_vdef (stmt, NULL_TREE);
1064*38fd1498Szrj     }
1065*38fd1498Szrj }
1066*38fd1498Szrj 
1067*38fd1498Szrj 
/* Get the operands of statement STMT.  */

void
update_stmt_operands (struct function *fn, gimple *stmt)
{
  /* If update_stmt_operands is called before SSA is initialized, do
     nothing.  */
  if (!ssa_operands_active (fn))
    return;

  timevar_push (TV_TREE_OPS);

  /* Callers must have marked the statement modified before asking for
     an operand re-scan.  */
  gcc_assert (gimple_modified_p (stmt));
  build_ssa_operands (fn, stmt);
  gimple_set_modified (stmt, false);

  timevar_pop (TV_TREE_OPS);
}
1086*38fd1498Szrj 
1087*38fd1498Szrj 
1088*38fd1498Szrj /* Swap operands EXP0 and EXP1 in statement STMT.  No attempt is done
1089*38fd1498Szrj    to test the validity of the swap operation.  */
1090*38fd1498Szrj 
1091*38fd1498Szrj void
swap_ssa_operands(gimple * stmt,tree * exp0,tree * exp1)1092*38fd1498Szrj swap_ssa_operands (gimple *stmt, tree *exp0, tree *exp1)
1093*38fd1498Szrj {
1094*38fd1498Szrj   tree op0, op1;
1095*38fd1498Szrj   op0 = *exp0;
1096*38fd1498Szrj   op1 = *exp1;
1097*38fd1498Szrj 
1098*38fd1498Szrj   if (op0 != op1)
1099*38fd1498Szrj     {
1100*38fd1498Szrj       /* Attempt to preserve the relative positions of these two operands in
1101*38fd1498Szrj 	 their * respective immediate use lists by adjusting their use pointer
1102*38fd1498Szrj 	 to point to the new operand position.  */
1103*38fd1498Szrj       use_optype_p use0, use1, ptr;
1104*38fd1498Szrj       use0 = use1 = NULL;
1105*38fd1498Szrj 
1106*38fd1498Szrj       /* Find the 2 operands in the cache, if they are there.  */
1107*38fd1498Szrj       for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
1108*38fd1498Szrj 	if (USE_OP_PTR (ptr)->use == exp0)
1109*38fd1498Szrj 	  {
1110*38fd1498Szrj 	    use0 = ptr;
1111*38fd1498Szrj 	    break;
1112*38fd1498Szrj 	  }
1113*38fd1498Szrj 
1114*38fd1498Szrj       for (ptr = gimple_use_ops (stmt); ptr; ptr = ptr->next)
1115*38fd1498Szrj 	if (USE_OP_PTR (ptr)->use == exp1)
1116*38fd1498Szrj 	  {
1117*38fd1498Szrj 	    use1 = ptr;
1118*38fd1498Szrj 	    break;
1119*38fd1498Szrj 	  }
1120*38fd1498Szrj 
1121*38fd1498Szrj       /* And adjust their location to point to the new position of the
1122*38fd1498Szrj          operand.  */
1123*38fd1498Szrj       if (use0)
1124*38fd1498Szrj 	USE_OP_PTR (use0)->use = exp1;
1125*38fd1498Szrj       if (use1)
1126*38fd1498Szrj 	USE_OP_PTR (use1)->use = exp0;
1127*38fd1498Szrj 
1128*38fd1498Szrj       /* Now swap the data.  */
1129*38fd1498Szrj       *exp0 = op1;
1130*38fd1498Szrj       *exp1 = op0;
1131*38fd1498Szrj     }
1132*38fd1498Szrj }
1133*38fd1498Szrj 
1134*38fd1498Szrj 
/* Scan the immediate_use list for VAR making sure its linked properly.
   Return TRUE if there is a problem and emit an error message to F.  */

DEBUG_FUNCTION bool
verify_imm_links (FILE *f, tree var)
{
  use_operand_p ptr, prev, list;
  unsigned int count;

  gcc_assert (TREE_CODE (var) == SSA_NAME);

  /* LIST is the root node of VAR's circular doubly-linked use list;
     the root itself never carries a use.  */
  list = &(SSA_NAME_IMM_USE_NODE (var));
  gcc_assert (list->use == NULL);

  if (list->prev == NULL)
    {
      /* An empty list must have both links NULL.  */
      gcc_assert (list->next == NULL);
      return false;
    }

  /* Walk the list forward, checking each back-link and that every
     node really is a use of VAR.  */
  prev = list;
  count = 0;
  for (ptr = list->next; ptr != list; )
    {
      if (prev != ptr->prev)
	{
	  fprintf (f, "prev != ptr->prev\n");
	  goto error;
	}

      if (ptr->use == NULL)
	{
	  fprintf (f, "ptr->use == NULL\n");
	  goto error; /* 2 roots, or SAFE guard node.  */
	}
      else if (*(ptr->use) != var)
	{
	  fprintf (f, "*(ptr->use) != var\n");
	  goto error;
	}

      prev = ptr;
      ptr = ptr->next;

      count++;
      /* COUNT wrapping to zero means the walk visited more nodes than
	 fit in an unsigned int — almost certainly a cycle that does
	 not pass through LIST.  */
      if (count == 0)
	{
	  fprintf (f, "number of immediate uses doesn't fit unsigned int\n");
	  goto error;
	}
    }

  /* Verify list in the other direction.  */
  prev = list;
  for (ptr = list->prev; ptr != list; )
    {
      if (prev != ptr->next)
	{
	  fprintf (f, "prev != ptr->next\n");
	  goto error;
	}
      prev = ptr;
      ptr = ptr->prev;
      /* The backward walk must not visit more nodes than the forward
	 walk counted.  */
      if (count == 0)
	{
	  fprintf (f, "count-- < 0\n");
	  goto error;
	}
      count--;
    }

  /* Both directions must agree on the number of nodes.  */
  if (count != 0)
    {
      fprintf (f, "count != 0\n");
      goto error;
    }

  return false;

 error:
  /* Report the offending node, plus its statement when available.  */
  if (ptr->loc.stmt && gimple_modified_p (ptr->loc.stmt))
    {
      fprintf (f, " STMT MODIFIED. - <%p> ", (void *)ptr->loc.stmt);
      print_gimple_stmt (f, ptr->loc.stmt, 0, TDF_SLIM);
    }
  fprintf (f, " IMM ERROR : (use_p : tree - %p:%p)", (void *)ptr,
	   (void *)ptr->use);
  print_generic_expr (f, USE_FROM_PTR (ptr), TDF_SLIM);
  fprintf (f, "\n");
  return true;
}
1226*38fd1498Szrj 
1227*38fd1498Szrj 
1228*38fd1498Szrj /* Dump all the immediate uses to FILE.  */
1229*38fd1498Szrj 
1230*38fd1498Szrj void
dump_immediate_uses_for(FILE * file,tree var)1231*38fd1498Szrj dump_immediate_uses_for (FILE *file, tree var)
1232*38fd1498Szrj {
1233*38fd1498Szrj   imm_use_iterator iter;
1234*38fd1498Szrj   use_operand_p use_p;
1235*38fd1498Szrj 
1236*38fd1498Szrj   gcc_assert (var && TREE_CODE (var) == SSA_NAME);
1237*38fd1498Szrj 
1238*38fd1498Szrj   print_generic_expr (file, var, TDF_SLIM);
1239*38fd1498Szrj   fprintf (file, " : -->");
1240*38fd1498Szrj   if (has_zero_uses (var))
1241*38fd1498Szrj     fprintf (file, " no uses.\n");
1242*38fd1498Szrj   else
1243*38fd1498Szrj     if (has_single_use (var))
1244*38fd1498Szrj       fprintf (file, " single use.\n");
1245*38fd1498Szrj     else
1246*38fd1498Szrj       fprintf (file, "%d uses.\n", num_imm_uses (var));
1247*38fd1498Szrj 
1248*38fd1498Szrj   FOR_EACH_IMM_USE_FAST (use_p, iter, var)
1249*38fd1498Szrj     {
1250*38fd1498Szrj       if (use_p->loc.stmt == NULL && use_p->use == NULL)
1251*38fd1498Szrj         fprintf (file, "***end of stmt iterator marker***\n");
1252*38fd1498Szrj       else
1253*38fd1498Szrj 	if (!is_gimple_reg (USE_FROM_PTR (use_p)))
1254*38fd1498Szrj 	  print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_VOPS|TDF_MEMSYMS);
1255*38fd1498Szrj 	else
1256*38fd1498Szrj 	  print_gimple_stmt (file, USE_STMT (use_p), 0, TDF_SLIM);
1257*38fd1498Szrj     }
1258*38fd1498Szrj   fprintf (file, "\n");
1259*38fd1498Szrj }
1260*38fd1498Szrj 
1261*38fd1498Szrj 
1262*38fd1498Szrj /* Dump all the immediate uses to FILE.  */
1263*38fd1498Szrj 
1264*38fd1498Szrj void
dump_immediate_uses(FILE * file)1265*38fd1498Szrj dump_immediate_uses (FILE *file)
1266*38fd1498Szrj {
1267*38fd1498Szrj   tree var;
1268*38fd1498Szrj   unsigned int x;
1269*38fd1498Szrj 
1270*38fd1498Szrj   fprintf (file, "Immediate_uses: \n\n");
1271*38fd1498Szrj   FOR_EACH_SSA_NAME (x, var, cfun)
1272*38fd1498Szrj     {
1273*38fd1498Szrj       dump_immediate_uses_for (file, var);
1274*38fd1498Szrj     }
1275*38fd1498Szrj }
1276*38fd1498Szrj 
1277*38fd1498Szrj 
/* Dump the def-use edges of every SSA name on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses (void)
{
  dump_immediate_uses (stderr);
}
1285*38fd1498Szrj 
1286*38fd1498Szrj 
/* Dump the def-use edges of SSA name VAR on stderr.  */

DEBUG_FUNCTION void
debug_immediate_uses_for (tree var)
{
  dump_immediate_uses_for (stderr, var);
}
1294*38fd1498Szrj 
1295*38fd1498Szrj 
/* Unlink STMTs virtual definition from the IL by propagating its use.  */

void
unlink_stmt_vdef (gimple *stmt)
{
  use_operand_p use_p;
  imm_use_iterator iter;
  gimple *use_stmt;
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  /* Nothing to do unless STMT has an SSA virtual definition.  */
  if (!vdef
      || TREE_CODE (vdef) != SSA_NAME)
    return;

  /* Replace every use of VDEF with STMT's incoming virtual use,
     effectively removing STMT from the virtual def-use chain.  */
  FOR_EACH_IMM_USE_STMT (use_stmt, iter, vdef)
    {
      FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
	SET_USE (use_p, vuse);
    }

  /* Keep the abnormal-PHI marker consistent on the replacement name.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vdef))
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI (vuse) = 1;
}
1320*38fd1498Szrj 
1321*38fd1498Szrj /* Return true if the var whose chain of uses starts at PTR has a
1322*38fd1498Szrj    single nondebug use.  Set USE_P and STMT to that single nondebug
1323*38fd1498Szrj    use, if so, or to NULL otherwise.  */
1324*38fd1498Szrj bool
single_imm_use_1(const ssa_use_operand_t * head,use_operand_p * use_p,gimple ** stmt)1325*38fd1498Szrj single_imm_use_1 (const ssa_use_operand_t *head,
1326*38fd1498Szrj 		  use_operand_p *use_p, gimple **stmt)
1327*38fd1498Szrj {
1328*38fd1498Szrj   ssa_use_operand_t *ptr, *single_use = 0;
1329*38fd1498Szrj 
1330*38fd1498Szrj   for (ptr = head->next; ptr != head; ptr = ptr->next)
1331*38fd1498Szrj     if (USE_STMT(ptr) && !is_gimple_debug (USE_STMT (ptr)))
1332*38fd1498Szrj       {
1333*38fd1498Szrj 	if (single_use)
1334*38fd1498Szrj 	  {
1335*38fd1498Szrj 	    single_use = NULL;
1336*38fd1498Szrj 	    break;
1337*38fd1498Szrj 	  }
1338*38fd1498Szrj 	single_use = ptr;
1339*38fd1498Szrj       }
1340*38fd1498Szrj 
1341*38fd1498Szrj   if (use_p)
1342*38fd1498Szrj     *use_p = single_use;
1343*38fd1498Szrj 
1344*38fd1498Szrj   if (stmt)
1345*38fd1498Szrj     *stmt = single_use ? single_use->loc.stmt : NULL;
1346*38fd1498Szrj 
1347*38fd1498Szrj   return single_use;
1348*38fd1498Szrj }
1349*38fd1498Szrj 
1350