1 /* SCC value numbering for trees
2 Copyright (C) 2006-2022 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "splay-tree.h"
25 #include "backend.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-cfg.h"
58 #include "domwalk.h"
59 #include "gimple-iterator.h"
60 #include "gimple-match.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "tree-pass.h"
64 #include "statistics.h"
65 #include "langhooks.h"
66 #include "ipa-utils.h"
67 #include "dbgcnt.h"
68 #include "tree-cfgcleanup.h"
69 #include "tree-ssa-loop.h"
70 #include "tree-scalar-evolution.h"
71 #include "tree-ssa-loop-niter.h"
72 #include "builtins.h"
73 #include "fold-const-call.h"
74 #include "ipa-modref-tree.h"
75 #include "ipa-modref.h"
76 #include "tree-ssa-sccvn.h"
77
78 /* This algorithm is based on the SCC algorithm presented by Keith
79 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
80 (http://citeseer.ist.psu.edu/41805.html). In
81 straight line code, it is equivalent to a regular hash based value
82 numbering that is performed in reverse postorder.
83
84 For code with cycles, there are two alternatives, both of which
85 require keeping the hashtables separate from the actual list of
86 value numbers for SSA names.
87
88 1. Iterate value numbering in an RPO walk of the blocks, removing
89 all the entries from the hashtable after each iteration (but
90 keeping the SSA name->value number mapping between iterations).
91 Iterate until it does not change.
92
93 2. Perform value numbering as part of an SCC walk on the SSA graph,
94 iterating only the cycles in the SSA graph until they do not change
95 (using a separate, optimistic hashtable for value numbering the SCC
96 operands).
97
98 The second is not just faster in practice (because most SSA graph
99 cycles do not involve all the variables in the graph), it also has
100 some nice properties.
101
102 One of these nice properties is that when we pop an SCC off the
103 stack, we are guaranteed to have processed all the operands coming from
104 *outside of that SCC*, so we do not need to do anything special to
105 ensure they have value numbers.
106
107 Another nice property is that the SCC walk is done as part of a DFS
108 of the SSA graph, which makes it easy to perform combining and
109 simplifying operations at the same time.
110
111 The code below is deliberately written in a way that makes it easy
112 to separate the SCC walk from the other work it does.
113
114 In order to propagate constants through the code, we track which
115 expressions contain constants, and use those while folding. In
116 theory, we could also track expressions whose value numbers are
117 replaced, in case we end up folding based on expression
118 identities.
119
120 In order to value number memory, we assign value numbers to vuses.
121 This enables us to note that, for example, stores to the same
122 address of the same value from the same starting memory states are
123 equivalent.
124 TODO:
125
126 1. We can iterate only the changing portions of the SCC's, but
127 I have not seen an SCC big enough for this to be a win.
128 2. If you differentiate between phi nodes for loops and phi nodes
129 for if-then-else, you can properly consider phi nodes in different
130 blocks for equivalence.
131 3. We could value number vuses in more cases, particularly, whole
132 structure copies.
133 */
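
/* As a rough illustration of alternative 2 above, value numbering a
   single SSA cycle optimistically boils down to the following sketch
   (hypothetical pseudo-code, not the actual driver in this file):

     bool changed;
     do
       {
	 changed = false;
	 for (each SSA name N in the SCC, in DFS order)
	   {
	     tree old = VN_INFO (N)->valnum;
	     simplify N's defining statement with the current value
	     numbers and look it up in the optimistic tables;
	     if (old != VN_INFO (N)->valnum)
	       changed = true;
	   }
       }
     while (changed);

   Operands defined outside the SCC already have their final value
   numbers when the SCC is processed, so only the cycle members need
   to be revisited.  */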
134
135 /* There's no BB_EXECUTABLE but we can use BB_VISITED. */
136 #define BB_EXECUTABLE BB_VISITED
137
138 static vn_lookup_kind default_vn_walk_kind;
139
140 /* vn_nary_op hashtable helpers. */
141
142 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
143 {
144 typedef vn_nary_op_s *compare_type;
145 static inline hashval_t hash (const vn_nary_op_s *);
146 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
147 };
148
149 /* Return the computed hashcode for nary operation P1. */
150
151 inline hashval_t
152 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
153 {
154 return vno1->hashcode;
155 }
156
157 /* Compare nary operations P1 and P2 and return true if they are
158 equivalent. */
159
160 inline bool
161 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
162 {
163 return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
164 }
165
166 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
167 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
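
/* The hasher above feeds hash_table<>: lookups pass a vn_nary_op_s with
   its hashcode already computed and let equal () resolve collisions
   structurally via vn_nary_op_eq.  A minimal usage sketch (hypothetical
   table and entry, mirroring the insert helpers used later):

     vn_nary_op_table_type *table = new vn_nary_op_table_type (23);
     vn_nary_op_s **slot
       = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
     if (*slot)
       ; // an equivalent operation is already value-numbered
     else
       *slot = vno;  */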
168
169
170 /* vn_phi hashtable helpers. */
171
172 static int
173 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
174
175 struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
176 {
177 static inline hashval_t hash (const vn_phi_s *);
178 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
179 };
180
181 /* Return the computed hashcode for phi operation P1. */
182
183 inline hashval_t
184 vn_phi_hasher::hash (const vn_phi_s *vp1)
185 {
186 return vp1->hashcode;
187 }
188
189 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
190
191 inline bool
192 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
193 {
194 return vp1 == vp2 || vn_phi_eq (vp1, vp2);
195 }
196
197 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
198 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
199
200
201 /* Compare two reference operands P1 and P2 for equality. Return true if
202 they are equal, and false otherwise. */
203
204 static int
205 vn_reference_op_eq (const void *p1, const void *p2)
206 {
207 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
208 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
209
210 return (vro1->opcode == vro2->opcode
211 /* We do not care for differences in type qualification. */
212 && (vro1->type == vro2->type
213 || (vro1->type && vro2->type
214 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
215 TYPE_MAIN_VARIANT (vro2->type))))
216 && expressions_equal_p (vro1->op0, vro2->op0)
217 && expressions_equal_p (vro1->op1, vro2->op1)
218 && expressions_equal_p (vro1->op2, vro2->op2)
219 && (vro1->opcode != CALL_EXPR || vro1->clique == vro2->clique));
220 }
221
222 /* Free a reference operation structure VP. */
223
224 static inline void
225 free_reference (vn_reference_s *vr)
226 {
227 vr->operands.release ();
228 }
229
230
231 /* vn_reference hashtable helpers. */
232
233 struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
234 {
235 static inline hashval_t hash (const vn_reference_s *);
236 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
237 };
238
239 /* Return the hashcode for a given reference operation P1. */
240
241 inline hashval_t
242 vn_reference_hasher::hash (const vn_reference_s *vr1)
243 {
244 return vr1->hashcode;
245 }
246
247 inline bool
248 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
249 {
250 return v == c || vn_reference_eq (v, c);
251 }
252
253 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
254 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
255
256 /* Pretty-print OPS to OUTFILE. */
257
258 void
259 print_vn_reference_ops (FILE *outfile, const vec<vn_reference_op_s> ops)
260 {
261 vn_reference_op_t vro;
262 unsigned int i;
263 fprintf (outfile, "{");
264 for (i = 0; ops.iterate (i, &vro); i++)
265 {
266 bool closebrace = false;
267 if (vro->opcode != SSA_NAME
268 && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
269 {
270 fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
271 if (vro->op0 || vro->opcode == CALL_EXPR)
272 {
273 fprintf (outfile, "<");
274 closebrace = true;
275 }
276 }
277 if (vro->op0 || vro->opcode == CALL_EXPR)
278 {
279 if (!vro->op0)
280 fprintf (outfile, internal_fn_name ((internal_fn)vro->clique));
281 else
282 print_generic_expr (outfile, vro->op0);
283 if (vro->op1)
284 {
285 fprintf (outfile, ",");
286 print_generic_expr (outfile, vro->op1);
287 }
288 if (vro->op2)
289 {
290 fprintf (outfile, ",");
291 print_generic_expr (outfile, vro->op2);
292 }
293 }
294 if (closebrace)
295 fprintf (outfile, ">");
296 if (i != ops.length () - 1)
297 fprintf (outfile, ",");
298 }
299 fprintf (outfile, "}");
300 }
301
302 DEBUG_FUNCTION void
303 debug_vn_reference_ops (const vec<vn_reference_op_s> ops)
304 {
305 print_vn_reference_ops (stderr, ops);
306 fputc ('\n', stderr);
307 }
308
309 /* The set of VN hashtables. */
310
311 typedef struct vn_tables_s
312 {
313 vn_nary_op_table_type *nary;
314 vn_phi_table_type *phis;
315 vn_reference_table_type *references;
316 } *vn_tables_t;
317
318
319 /* vn_constant hashtable helpers. */
320
321 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
322 {
323 static inline hashval_t hash (const vn_constant_s *);
324 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
325 };
326
327 /* Hash table hash function for vn_constant_t. */
328
329 inline hashval_t
330 vn_constant_hasher::hash (const vn_constant_s *vc1)
331 {
332 return vc1->hashcode;
333 }
334
335 /* Hash table equality function for vn_constant_t. */
336
337 inline bool
338 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
339 {
340 if (vc1->hashcode != vc2->hashcode)
341 return false;
342
343 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
344 }
345
346 static hash_table<vn_constant_hasher> *constant_to_value_id;
347
348
349 /* Obstack we allocate the vn-tables elements from. */
350 static obstack vn_tables_obstack;
351 /* Special obstack we never unwind. */
352 static obstack vn_tables_insert_obstack;
353
354 static vn_reference_t last_inserted_ref;
355 static vn_phi_t last_inserted_phi;
356 static vn_nary_op_t last_inserted_nary;
357 static vn_ssa_aux_t last_pushed_avail;
358
359 /* Valid hashtables storing information we have proven to be
360 correct. */
361 static vn_tables_t valid_info;
362
363
364 /* Valueization hook for simplify_replace_tree. Valueize NAME if it is
365 an SSA name, otherwise just return it. */
366 tree (*vn_valueize) (tree);
367 static tree
368 vn_valueize_for_srt (tree t, void* context ATTRIBUTE_UNUSED)
369 {
370 basic_block saved_vn_context_bb = vn_context_bb;
371 /* Look for something available at the definition block of the argument.
372 This avoids inconsistencies between availability there which
373 decides if the stmt can be removed and availability at the
374 use site. The SSA property ensures that things available
375 at the definition are also available at uses. */
376 if (!SSA_NAME_IS_DEFAULT_DEF (t))
377 vn_context_bb = gimple_bb (SSA_NAME_DEF_STMT (t));
378 tree res = vn_valueize (t);
379 vn_context_bb = saved_vn_context_bb;
380 return res;
381 }
382
383
384 /* This represents the top of the VN lattice, which is the universal
385 value. */
386
387 tree VN_TOP;
388
389 /* Unique counter for our value ids. */
390
391 static unsigned int next_value_id;
392 static int next_constant_value_id;
393
394
395 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
396 are allocated on an obstack for locality reasons, and to free them
397 without looping over the vec. */
398
399 struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
400 {
401 typedef vn_ssa_aux_t value_type;
402 typedef tree compare_type;
403 static inline hashval_t hash (const value_type &);
404 static inline bool equal (const value_type &, const compare_type &);
405 static inline void mark_deleted (value_type &) {}
406 static const bool empty_zero_p = true;
407 static inline void mark_empty (value_type &e) { e = NULL; }
408 static inline bool is_deleted (value_type &) { return false; }
409 static inline bool is_empty (value_type &e) { return e == NULL; }
410 };
411
412 hashval_t
413 vn_ssa_aux_hasher::hash (const value_type &entry)
414 {
415 return SSA_NAME_VERSION (entry->name);
416 }
417
418 bool
419 vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
420 {
421 return name == entry->name;
422 }
423
424 static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
425 typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
426 static struct obstack vn_ssa_aux_obstack;
427
428 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
429 static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
430 vn_nary_op_table_type *);
431 static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
432 enum tree_code, tree, tree *);
433 static tree vn_lookup_simplify_result (gimple_match_op *);
434 static vn_reference_t vn_reference_lookup_or_insert_for_pieces
435 (tree, alias_set_type, alias_set_type, tree,
436 vec<vn_reference_op_s, va_heap>, tree);
437
438 /* Return whether there is value numbering information for a given SSA name. */
439
440 bool
441 has_VN_INFO (tree name)
442 {
443 return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
444 }
445
446 vn_ssa_aux_t
447 VN_INFO (tree name)
448 {
449 vn_ssa_aux_t *res
450 = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
451 INSERT);
452 if (*res != NULL)
453 return *res;
454
455 vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
456 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
457 newinfo->name = name;
458 newinfo->valnum = VN_TOP;
459 /* We are using the visited flag to handle uses with defs not within the
460 region being value-numbered. */
461 newinfo->visited = false;
462
463 /* Given we now create the VN_INFOs on demand, we have to do initialization
464 different from VN_TOP here. */
465 if (SSA_NAME_IS_DEFAULT_DEF (name))
466 switch (TREE_CODE (SSA_NAME_VAR (name)))
467 {
468 case VAR_DECL:
469 /* All undefined vars are VARYING. */
470 newinfo->valnum = name;
471 newinfo->visited = true;
472 break;
473
474 case PARM_DECL:
475 /* Parameters are VARYING but we can record a condition
476 if we know it is a non-NULL pointer. */
477 newinfo->visited = true;
478 newinfo->valnum = name;
479 if (POINTER_TYPE_P (TREE_TYPE (name))
480 && nonnull_arg_p (SSA_NAME_VAR (name)))
481 {
482 tree ops[2];
483 ops[0] = name;
484 ops[1] = build_int_cst (TREE_TYPE (name), 0);
485 vn_nary_op_t nary;
486 /* Allocate from non-unwinding stack. */
487 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
488 init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
489 boolean_type_node, ops);
490 nary->predicated_values = 0;
491 nary->u.result = boolean_true_node;
492 vn_nary_op_insert_into (nary, valid_info->nary);
493 gcc_assert (nary->unwind_to == NULL);
494 /* Also do not link it into the undo chain. */
495 last_inserted_nary = nary->next;
496 nary->next = (vn_nary_op_t)(void *)-1;
497 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
498 init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
499 boolean_type_node, ops);
500 nary->predicated_values = 0;
501 nary->u.result = boolean_false_node;
502 vn_nary_op_insert_into (nary, valid_info->nary);
503 gcc_assert (nary->unwind_to == NULL);
504 last_inserted_nary = nary->next;
505 nary->next = (vn_nary_op_t)(void *)-1;
506 if (dump_file && (dump_flags & TDF_DETAILS))
507 {
508 fprintf (dump_file, "Recording ");
509 print_generic_expr (dump_file, name, TDF_SLIM);
510 fprintf (dump_file, " != 0\n");
511 }
512 }
513 break;
514
515 case RESULT_DECL:
516 /* If the result is passed by invisible reference the default
517 def is initialized, otherwise it's uninitialized. Still
518 undefined is varying. */
519 newinfo->visited = true;
520 newinfo->valnum = name;
521 break;
522
523 default:
524 gcc_unreachable ();
525 }
526 return newinfo;
527 }
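
/* For example (hypothetical declaration), given

     void f (void *p) __attribute__ ((nonnull));

   the PARM_DECL case above pre-seeds the nary table with
   p_1(D) != 0 == true and p_1(D) == 0 == false, so later lookups of
   those comparisons fold to constants without visiting any defining
   statement.  */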
528
529 /* Return the SSA value of X. */
530
531 inline tree
532 SSA_VAL (tree x, bool *visited = NULL)
533 {
534 vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
535 if (visited)
536 *visited = tem && tem->visited;
537 return tem && tem->visited ? tem->valnum : x;
538 }
539
540 /* Return the SSA value of the VUSE x, supporting released VDEFs
541 during elimination which will value-number the VDEF to the
542 associated VUSE (but not substitute in the whole lattice). */
543
544 static inline tree
545 vuse_ssa_val (tree x)
546 {
547 if (!x)
548 return NULL_TREE;
549
550 do
551 {
552 x = SSA_VAL (x);
553 gcc_assert (x != VN_TOP);
554 }
555 while (SSA_NAME_IN_FREE_LIST (x));
556
557 return x;
558 }
559
560 /* Similar to the above but used as callback for walk_non_aliased_vuses
561 and thus should stop at unvisited VUSE to not walk across region
562 boundaries. */
563
564 static tree
565 vuse_valueize (tree vuse)
566 {
567 do
568 {
569 bool visited;
570 vuse = SSA_VAL (vuse, &visited);
571 if (!visited)
572 return NULL_TREE;
573 gcc_assert (vuse != VN_TOP);
574 }
575 while (SSA_NAME_IN_FREE_LIST (vuse));
576 return vuse;
577 }
578
579
580 /* Return the vn_kind the expression computed by the stmt should be
581 associated with. */
582
583 enum vn_kind
584 vn_get_stmt_kind (gimple *stmt)
585 {
586 switch (gimple_code (stmt))
587 {
588 case GIMPLE_CALL:
589 return VN_REFERENCE;
590 case GIMPLE_PHI:
591 return VN_PHI;
592 case GIMPLE_ASSIGN:
593 {
594 enum tree_code code = gimple_assign_rhs_code (stmt);
595 tree rhs1 = gimple_assign_rhs1 (stmt);
596 switch (get_gimple_rhs_class (code))
597 {
598 case GIMPLE_UNARY_RHS:
599 case GIMPLE_BINARY_RHS:
600 case GIMPLE_TERNARY_RHS:
601 return VN_NARY;
602 case GIMPLE_SINGLE_RHS:
603 switch (TREE_CODE_CLASS (code))
604 {
605 case tcc_reference:
606 /* VOP-less references can go through unary case. */
607 if ((code == REALPART_EXPR
608 || code == IMAGPART_EXPR
609 || code == VIEW_CONVERT_EXPR
610 || code == BIT_FIELD_REF)
611 && (TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME
612 || is_gimple_min_invariant (TREE_OPERAND (rhs1, 0))))
613 return VN_NARY;
614
615 /* Fallthrough. */
616 case tcc_declaration:
617 return VN_REFERENCE;
618
619 case tcc_constant:
620 return VN_CONSTANT;
621
622 default:
623 if (code == ADDR_EXPR)
624 return (is_gimple_min_invariant (rhs1)
625 ? VN_CONSTANT : VN_REFERENCE);
626 else if (code == CONSTRUCTOR)
627 return VN_NARY;
628 return VN_NONE;
629 }
630 default:
631 return VN_NONE;
632 }
633 }
634 default:
635 return VN_NONE;
636 }
637 }
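
/* Illustrative mapping (made-up GIMPLE statements):

     x_1 = a_2 + b_3;            -> VN_NARY
     x_1 = REALPART_EXPR <c_2>;  -> VN_NARY (VOP-less reference)
     x_1 = MEM[p_2];             -> VN_REFERENCE
     x_1 = foo (a_2);            -> VN_REFERENCE (GIMPLE_CALL)
     x_1 = PHI <a_2, b_3>;       -> VN_PHI  */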
638
639 /* Lookup a value id for CONSTANT and return it. If it does not
640 exist, return 0. */
641
642 unsigned int
643 get_constant_value_id (tree constant)
644 {
645 vn_constant_s **slot;
646 struct vn_constant_s vc;
647
648 vc.hashcode = vn_hash_constant_with_type (constant);
649 vc.constant = constant;
650 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
651 if (slot)
652 return (*slot)->value_id;
653 return 0;
654 }
655
656 /* Lookup a value id for CONSTANT, and if it does not exist, create a
657 new one and return it. If it does exist, return it. */
658
659 unsigned int
660 get_or_alloc_constant_value_id (tree constant)
661 {
662 vn_constant_s **slot;
663 struct vn_constant_s vc;
664 vn_constant_t vcp;
665
666 /* If the hashtable isn't initialized we're not running from PRE and thus
667 do not need value-ids. */
668 if (!constant_to_value_id)
669 return 0;
670
671 vc.hashcode = vn_hash_constant_with_type (constant);
672 vc.constant = constant;
673 slot = constant_to_value_id->find_slot (&vc, INSERT);
674 if (*slot)
675 return (*slot)->value_id;
676
677 vcp = XNEW (struct vn_constant_s);
678 vcp->hashcode = vc.hashcode;
679 vcp->constant = constant;
680 vcp->value_id = get_next_constant_value_id ();
681 *slot = vcp;
682 return vcp->value_id;
683 }
684
685 /* Compute the hash for a reference operand VRO1. */
686
687 static void
688 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
689 {
690 hstate.add_int (vro1->opcode);
691 if (vro1->opcode == CALL_EXPR && !vro1->op0)
692 hstate.add_int (vro1->clique);
693 if (vro1->op0)
694 inchash::add_expr (vro1->op0, hstate);
695 if (vro1->op1)
696 inchash::add_expr (vro1->op1, hstate);
697 if (vro1->op2)
698 inchash::add_expr (vro1->op2, hstate);
699 }
700
701 /* Compute a hash for the reference operation VR1 and return it. */
702
703 static hashval_t
704 vn_reference_compute_hash (const vn_reference_t vr1)
705 {
706 inchash::hash hstate;
707 hashval_t result;
708 int i;
709 vn_reference_op_t vro;
710 poly_int64 off = -1;
711 bool deref = false;
712
713 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
714 {
715 if (vro->opcode == MEM_REF)
716 deref = true;
717 else if (vro->opcode != ADDR_EXPR)
718 deref = false;
719 if (maybe_ne (vro->off, -1))
720 {
721 if (known_eq (off, -1))
722 off = 0;
723 off += vro->off;
724 }
725 else
726 {
727 if (maybe_ne (off, -1)
728 && maybe_ne (off, 0))
729 hstate.add_poly_int (off);
730 off = -1;
731 if (deref
732 && vro->opcode == ADDR_EXPR)
733 {
734 if (vro->op0)
735 {
736 tree op = TREE_OPERAND (vro->op0, 0);
737 hstate.add_int (TREE_CODE (op));
738 inchash::add_expr (op, hstate);
739 }
740 }
741 else
742 vn_reference_op_compute_hash (vro, hstate);
743 }
744 }
745 result = hstate.end ();
746 /* ??? We would ICE later if we hash instead of adding that in. */
747 if (vr1->vuse)
748 result += SSA_NAME_VERSION (vr1->vuse);
749
750 return result;
751 }
752
753 /* Return true if reference operations VR1 and VR2 are equivalent. This
754 means they have the same set of operands and vuses. */
755
756 bool
757 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
758 {
759 unsigned i, j;
760
761 /* Early out if this is not a hash collision. */
762 if (vr1->hashcode != vr2->hashcode)
763 return false;
764
765 /* The VOP needs to be the same. */
766 if (vr1->vuse != vr2->vuse)
767 return false;
768
769 /* If the operands are the same we are done. */
770 if (vr1->operands == vr2->operands)
771 return true;
772
773 if (!vr1->type || !vr2->type)
774 {
775 if (vr1->type != vr2->type)
776 return false;
777 }
778 else if (vr1->type == vr2->type)
779 ;
780 else if (COMPLETE_TYPE_P (vr1->type) != COMPLETE_TYPE_P (vr2->type)
781 || (COMPLETE_TYPE_P (vr1->type)
782 && !expressions_equal_p (TYPE_SIZE (vr1->type),
783 TYPE_SIZE (vr2->type))))
784 return false;
785 else if (vr1->operands[0].opcode == CALL_EXPR
786 && !types_compatible_p (vr1->type, vr2->type))
787 return false;
788 else if (INTEGRAL_TYPE_P (vr1->type)
789 && INTEGRAL_TYPE_P (vr2->type))
790 {
791 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
792 return false;
793 }
794 else if (INTEGRAL_TYPE_P (vr1->type)
795 && (TYPE_PRECISION (vr1->type)
796 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
797 return false;
798 else if (INTEGRAL_TYPE_P (vr2->type)
799 && (TYPE_PRECISION (vr2->type)
800 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
801 return false;
802
803 i = 0;
804 j = 0;
805 do
806 {
807 poly_int64 off1 = 0, off2 = 0;
808 vn_reference_op_t vro1, vro2;
809 vn_reference_op_s tem1, tem2;
810 bool deref1 = false, deref2 = false;
811 bool reverse1 = false, reverse2 = false;
812 for (; vr1->operands.iterate (i, &vro1); i++)
813 {
814 if (vro1->opcode == MEM_REF)
815 deref1 = true;
816 /* Do not look through a storage order barrier. */
817 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
818 return false;
819 reverse1 |= vro1->reverse;
820 if (known_eq (vro1->off, -1))
821 break;
822 off1 += vro1->off;
823 }
824 for (; vr2->operands.iterate (j, &vro2); j++)
825 {
826 if (vro2->opcode == MEM_REF)
827 deref2 = true;
828 /* Do not look through a storage order barrier. */
829 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
830 return false;
831 reverse2 |= vro2->reverse;
832 if (known_eq (vro2->off, -1))
833 break;
834 off2 += vro2->off;
835 }
836 if (maybe_ne (off1, off2) || reverse1 != reverse2)
837 return false;
838 if (deref1 && vro1->opcode == ADDR_EXPR)
839 {
840 memset (&tem1, 0, sizeof (tem1));
841 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
842 tem1.type = TREE_TYPE (tem1.op0);
843 tem1.opcode = TREE_CODE (tem1.op0);
844 vro1 = &tem1;
845 deref1 = false;
846 }
847 if (deref2 && vro2->opcode == ADDR_EXPR)
848 {
849 memset (&tem2, 0, sizeof (tem2));
850 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
851 tem2.type = TREE_TYPE (tem2.op0);
852 tem2.opcode = TREE_CODE (tem2.op0);
853 vro2 = &tem2;
854 deref2 = false;
855 }
856 if (deref1 != deref2)
857 return false;
858 if (!vn_reference_op_eq (vro1, vro2))
859 return false;
860 ++j;
861 ++i;
862 }
863 while (vr1->operands.length () != i
864 || vr2->operands.length () != j);
865
866 return true;
867 }
868
869 /* Copy the operations present in load/store REF into RESULT, a vector of
870 vn_reference_op_s's. */
871
872 static void
873 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
874 {
875 /* For non-calls, store the information that makes up the address. */
876 tree orig = ref;
877 while (ref)
878 {
879 vn_reference_op_s temp;
880
881 memset (&temp, 0, sizeof (temp));
882 temp.type = TREE_TYPE (ref);
883 temp.opcode = TREE_CODE (ref);
884 temp.off = -1;
885
886 switch (temp.opcode)
887 {
888 case MODIFY_EXPR:
889 temp.op0 = TREE_OPERAND (ref, 1);
890 break;
891 case WITH_SIZE_EXPR:
892 temp.op0 = TREE_OPERAND (ref, 1);
893 temp.off = 0;
894 break;
895 case MEM_REF:
896 /* The base address gets its own vn_reference_op_s structure. */
897 temp.op0 = TREE_OPERAND (ref, 1);
898 if (!mem_ref_offset (ref).to_shwi (&temp.off))
899 temp.off = -1;
900 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
901 temp.base = MR_DEPENDENCE_BASE (ref);
902 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
903 break;
904 case TARGET_MEM_REF:
905 /* The base address gets its own vn_reference_op_s structure. */
906 temp.op0 = TMR_INDEX (ref);
907 temp.op1 = TMR_STEP (ref);
908 temp.op2 = TMR_OFFSET (ref);
909 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
910 temp.base = MR_DEPENDENCE_BASE (ref);
911 result->safe_push (temp);
912 memset (&temp, 0, sizeof (temp));
913 temp.type = NULL_TREE;
914 temp.opcode = ERROR_MARK;
915 temp.op0 = TMR_INDEX2 (ref);
916 temp.off = -1;
917 break;
918 case BIT_FIELD_REF:
919 /* Record bits, position and storage order. */
920 temp.op0 = TREE_OPERAND (ref, 1);
921 temp.op1 = TREE_OPERAND (ref, 2);
922 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
923 temp.off = -1;
924 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
925 break;
926 case COMPONENT_REF:
927 /* The field decl is enough to unambiguously specify the field,
928 so use its type here. */
929 temp.type = TREE_TYPE (TREE_OPERAND (ref, 1));
930 temp.op0 = TREE_OPERAND (ref, 1);
931 temp.op1 = TREE_OPERAND (ref, 2);
932 temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
933 && TYPE_REVERSE_STORAGE_ORDER
934 (TREE_TYPE (TREE_OPERAND (ref, 0))));
935 {
936 tree this_offset = component_ref_field_offset (ref);
937 if (this_offset
938 && poly_int_tree_p (this_offset))
939 {
940 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
941 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
942 {
943 poly_offset_int off
944 = (wi::to_poly_offset (this_offset)
945 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
946 /* Prohibit value-numbering zero offset components
947 of addresses the same before the pass folding
948 __builtin_object_size had a chance to run. */
949 if (TREE_CODE (orig) != ADDR_EXPR
950 || maybe_ne (off, 0)
951 || (cfun->curr_properties & PROP_objsz))
952 off.to_shwi (&temp.off);
953 }
954 }
955 }
956 break;
957 case ARRAY_RANGE_REF:
958 case ARRAY_REF:
959 {
960 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
961 /* Record index as operand. */
962 temp.op0 = TREE_OPERAND (ref, 1);
963 /* Always record lower bounds and element size. */
964 temp.op1 = array_ref_low_bound (ref);
965 /* But record element size in units of the type alignment. */
966 temp.op2 = TREE_OPERAND (ref, 3);
967 temp.align = eltype->type_common.align;
968 if (! temp.op2)
969 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
970 size_int (TYPE_ALIGN_UNIT (eltype)));
971 if (poly_int_tree_p (temp.op0)
972 && poly_int_tree_p (temp.op1)
973 && TREE_CODE (temp.op2) == INTEGER_CST)
974 {
975 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
976 - wi::to_poly_offset (temp.op1))
977 * wi::to_offset (temp.op2)
978 * vn_ref_op_align_unit (&temp));
979 off.to_shwi (&temp.off);
980 }
981 temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
982 && TYPE_REVERSE_STORAGE_ORDER
983 (TREE_TYPE (TREE_OPERAND (ref, 0))));
984 }
985 break;
986 case VAR_DECL:
987 if (DECL_HARD_REGISTER (ref))
988 {
989 temp.op0 = ref;
990 break;
991 }
992 /* Fallthru. */
993 case PARM_DECL:
994 case CONST_DECL:
995 case RESULT_DECL:
996 /* Canonicalize decls to MEM[&decl] which is what we end up with
997 when valueizing MEM[ptr] with ptr = &decl. */
998 temp.opcode = MEM_REF;
999 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
1000 temp.off = 0;
1001 result->safe_push (temp);
1002 temp.opcode = ADDR_EXPR;
1003 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
1004 temp.type = TREE_TYPE (temp.op0);
1005 temp.off = -1;
1006 break;
1007 case STRING_CST:
1008 case INTEGER_CST:
1009 case POLY_INT_CST:
1010 case COMPLEX_CST:
1011 case VECTOR_CST:
1012 case REAL_CST:
1013 case FIXED_CST:
1014 case CONSTRUCTOR:
1015 case SSA_NAME:
1016 temp.op0 = ref;
1017 break;
1018 case ADDR_EXPR:
1019 if (is_gimple_min_invariant (ref))
1020 {
1021 temp.op0 = ref;
1022 break;
1023 }
1024 break;
1025 /* These are only interesting for their operands, their
1026 existence, and their type. They will never be the last
1027 ref in the chain of references (i.e. they require an
1028 operand), so we don't have to put anything
1029 for op* as it will be handled by the iteration. */
1030 case REALPART_EXPR:
1031 temp.off = 0;
1032 break;
1033 case VIEW_CONVERT_EXPR:
1034 temp.off = 0;
1035 temp.reverse = storage_order_barrier_p (ref);
1036 break;
1037 case IMAGPART_EXPR:
1038 /* This is only interesting for its constant offset. */
1039 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
1040 break;
1041 default:
1042 gcc_unreachable ();
1043 }
1044 result->safe_push (temp);
1045
1046 if (REFERENCE_CLASS_P (ref)
1047 || TREE_CODE (ref) == MODIFY_EXPR
1048 || TREE_CODE (ref) == WITH_SIZE_EXPR
1049 || (TREE_CODE (ref) == ADDR_EXPR
1050 && !is_gimple_min_invariant (ref)))
1051 ref = TREE_OPERAND (ref, 0);
1052 else
1053 ref = NULL_TREE;
1054 }
1055 }
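
/* As an illustration (hypothetical variable a with field f), the
   reference a.f[i_1] is decomposed outermost-first into roughly

     { ARRAY_REF <i_1, low-bound, elt-size>, COMPONENT_REF <f>,
       MEM_REF <0>, ADDR_EXPR <&a> }

   where the decl itself is canonicalized to MEM[&a] by the VAR_DECL
   case above so that it value-numbers the same as MEM[p_2] with
   p_2 == &a.  */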
1056
1057 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
1058 operands in *OPS, the reference alias set SET and the reference type TYPE.
1059 Return true if something useful was produced. */
1060
1061 bool
1062 ao_ref_init_from_vn_reference (ao_ref *ref,
1063 alias_set_type set, alias_set_type base_set,
1064 tree type, const vec<vn_reference_op_s> &ops)
1065 {
1066 unsigned i;
1067 tree base = NULL_TREE;
1068 tree *op0_p = &base;
1069 poly_offset_int offset = 0;
1070 poly_offset_int max_size;
1071 poly_offset_int size = -1;
1072 tree size_tree = NULL_TREE;
1073
1074 /* We don't handle calls. */
1075 if (!type)
1076 return false;
1077
1078 machine_mode mode = TYPE_MODE (type);
1079 if (mode == BLKmode)
1080 size_tree = TYPE_SIZE (type);
1081 else
1082 size = GET_MODE_BITSIZE (mode);
1083 if (size_tree != NULL_TREE
1084 && poly_int_tree_p (size_tree))
1085 size = wi::to_poly_offset (size_tree);
1086
1087 /* Lower the final access size from the outermost expression. */
1088 const_vn_reference_op_t cst_op = &ops[0];
1089 /* Cast away constness for the sake of the const-unsafe
1090 FOR_EACH_VEC_ELT(). */
1091 vn_reference_op_t op = const_cast<vn_reference_op_t>(cst_op);
1092 size_tree = NULL_TREE;
1093 if (op->opcode == COMPONENT_REF)
1094 size_tree = DECL_SIZE (op->op0);
1095 else if (op->opcode == BIT_FIELD_REF)
1096 size_tree = op->op0;
1097 if (size_tree != NULL_TREE
1098 && poly_int_tree_p (size_tree)
1099 && (!known_size_p (size)
1100 || known_lt (wi::to_poly_offset (size_tree), size)))
1101 size = wi::to_poly_offset (size_tree);
1102
1103 /* Initially, maxsize is the same as the accessed element size.
1104 In the following it will only grow (or become -1). */
1105 max_size = size;
1106
1107 /* Compute cumulative bit-offset for nested component-refs and array-refs,
1108 and find the ultimate containing object. */
1109 FOR_EACH_VEC_ELT (ops, i, op)
1110 {
1111 switch (op->opcode)
1112 {
1113 /* These may be in the reference ops, but we cannot do anything
1114 sensible with them here. */
1115 case ADDR_EXPR:
1116 /* Apart from ADDR_EXPR arguments to MEM_REF. */
1117 if (base != NULL_TREE
1118 && TREE_CODE (base) == MEM_REF
1119 && op->op0
1120 && DECL_P (TREE_OPERAND (op->op0, 0)))
1121 {
1122 const_vn_reference_op_t pop = &ops[i-1];
1123 base = TREE_OPERAND (op->op0, 0);
1124 if (known_eq (pop->off, -1))
1125 {
1126 max_size = -1;
1127 offset = 0;
1128 }
1129 else
1130 offset += pop->off * BITS_PER_UNIT;
1131 op0_p = NULL;
1132 break;
1133 }
1134 /* Fallthru. */
1135 case CALL_EXPR:
1136 return false;
1137
1138 /* Record the base objects. */
1139 case MEM_REF:
1140 *op0_p = build2 (MEM_REF, op->type,
1141 NULL_TREE, op->op0);
1142 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
1143 MR_DEPENDENCE_BASE (*op0_p) = op->base;
1144 op0_p = &TREE_OPERAND (*op0_p, 0);
1145 break;
1146
1147 case VAR_DECL:
1148 case PARM_DECL:
1149 case RESULT_DECL:
1150 case SSA_NAME:
1151 *op0_p = op->op0;
1152 op0_p = NULL;
1153 break;
1154
1155 /* And now the usual component-reference style ops. */
1156 case BIT_FIELD_REF:
1157 offset += wi::to_poly_offset (op->op1);
1158 break;
1159
1160 case COMPONENT_REF:
1161 {
1162 tree field = op->op0;
1163 /* We do not have a complete COMPONENT_REF tree here so we
1164 cannot use component_ref_field_offset. Do the interesting
1165 parts manually. */
1166 tree this_offset = DECL_FIELD_OFFSET (field);
1167
1168 if (op->op1 || !poly_int_tree_p (this_offset))
1169 max_size = -1;
1170 else
1171 {
1172 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1173 << LOG2_BITS_PER_UNIT);
1174 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1175 offset += woffset;
1176 }
1177 break;
1178 }
1179
1180 case ARRAY_RANGE_REF:
1181 case ARRAY_REF:
1182 /* We recorded the lower bound and the element size. */
1183 if (!poly_int_tree_p (op->op0)
1184 || !poly_int_tree_p (op->op1)
1185 || TREE_CODE (op->op2) != INTEGER_CST)
1186 max_size = -1;
1187 else
1188 {
1189 poly_offset_int woffset
1190 = wi::sext (wi::to_poly_offset (op->op0)
1191 - wi::to_poly_offset (op->op1),
1192 TYPE_PRECISION (sizetype));
1193 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1194 woffset <<= LOG2_BITS_PER_UNIT;
1195 offset += woffset;
1196 }
1197 break;
1198
1199 case REALPART_EXPR:
1200 break;
1201
1202 case IMAGPART_EXPR:
1203 offset += size;
1204 break;
1205
1206 case VIEW_CONVERT_EXPR:
1207 break;
1208
1209 case STRING_CST:
1210 case INTEGER_CST:
1211 case COMPLEX_CST:
1212 case VECTOR_CST:
1213 case REAL_CST:
1214 case CONSTRUCTOR:
1215 case CONST_DECL:
1216 return false;
1217
1218 default:
1219 return false;
1220 }
1221 }
1222
1223 if (base == NULL_TREE)
1224 return false;
1225
1226 ref->ref = NULL_TREE;
1227 ref->base = base;
1228 ref->ref_alias_set = set;
1229 ref->base_alias_set = base_set;
1230 /* We discount volatiles from value-numbering elsewhere. */
1231 ref->volatile_p = false;
1232
1233 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1234 {
1235 ref->offset = 0;
1236 ref->size = -1;
1237 ref->max_size = -1;
1238 return true;
1239 }
1240
1241 if (!offset.to_shwi (&ref->offset))
1242 {
1243 ref->offset = 0;
1244 ref->max_size = -1;
1245 return true;
1246 }
1247
1248 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1249 ref->max_size = -1;
1250
1251 return true;
1252 }
1253
1254 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1255 vn_reference_op_s's. */
1256
1257 static void
1258 copy_reference_ops_from_call (gcall *call,
1259 vec<vn_reference_op_s> *result)
1260 {
1261 vn_reference_op_s temp;
1262 unsigned i;
1263 tree lhs = gimple_call_lhs (call);
1264 int lr;
1265
1266 /* If two calls have a different non-SSA LHS, their VDEF value numbers should
1267 be different. By adding the LHS to the vector here, we ensure that the
1268 hashcode is different, guaranteeing a different value number. */
1269 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1270 {
1271 memset (&temp, 0, sizeof (temp));
1272 temp.opcode = MODIFY_EXPR;
1273 temp.type = TREE_TYPE (lhs);
1274 temp.op0 = lhs;
1275 temp.off = -1;
1276 result->safe_push (temp);
1277 }
1278
1279 /* Copy the type, opcode, function, static chain and EH region, if any. */
1280 memset (&temp, 0, sizeof (temp));
1281 temp.type = gimple_call_fntype (call);
1282 temp.opcode = CALL_EXPR;
1283 temp.op0 = gimple_call_fn (call);
1284 if (gimple_call_internal_p (call))
1285 temp.clique = gimple_call_internal_fn (call);
1286 temp.op1 = gimple_call_chain (call);
1287 if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1288 temp.op2 = size_int (lr);
1289 temp.off = -1;
1290 result->safe_push (temp);
1291
1292 /* Copy the call arguments. As they can be references as well,
1293 just chain them together. */
1294 for (i = 0; i < gimple_call_num_args (call); ++i)
1295 {
1296 tree callarg = gimple_call_arg (call, i);
1297 copy_reference_ops_from_ref (callarg, result);
1298 }
1299 }
1300
1301 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1302 *I_P to point to the last element of the replacement. */
1303 static bool
1304 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1305 unsigned int *i_p)
1306 {
1307 unsigned int i = *i_p;
1308 vn_reference_op_t op = &(*ops)[i];
1309 vn_reference_op_t mem_op = &(*ops)[i - 1];
1310 tree addr_base;
1311 poly_int64 addr_offset = 0;
1312
1313 /* The only thing we have to do is, starting from &OBJ.foo.bar, add the
1314 offset of .foo.bar to the preceding MEM_REF offset and replace the
1315 address with &OBJ. */
1316 addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (op->op0, 0),
1317 &addr_offset, vn_valueize);
1318 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1319 if (addr_base != TREE_OPERAND (op->op0, 0))
1320 {
1321 poly_offset_int off
1322 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1323 SIGNED)
1324 + addr_offset);
1325 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1326 op->op0 = build_fold_addr_expr (addr_base);
1327 if (tree_fits_shwi_p (mem_op->op0))
1328 mem_op->off = tree_to_shwi (mem_op->op0);
1329 else
1330 mem_op->off = -1;
1331 return true;
1332 }
1333 return false;
1334 }
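
/* A small example of the folding above (hypothetical offsets): the
   operand pair { MEM_REF <16>, ADDR_EXPR <&s.a.b> }, with .a.b at byte
   offset 8 within s, becomes { MEM_REF <24>, ADDR_EXPR <&s> }; the
   component offset is folded into the MEM_REF offset and the address
   operand is replaced by the base object.  */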
1335
1336 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1337 *I_P to point to the last element of the replacement. */
1338 static bool
1339 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1340 unsigned int *i_p)
1341 {
1342 bool changed = false;
1343 vn_reference_op_t op;
1344
1345 do
1346 {
1347 unsigned int i = *i_p;
1348 op = &(*ops)[i];
1349 vn_reference_op_t mem_op = &(*ops)[i - 1];
1350 gimple *def_stmt;
1351 enum tree_code code;
1352 poly_offset_int off;
1353
1354 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1355 if (!is_gimple_assign (def_stmt))
1356 return changed;
1357
1358 code = gimple_assign_rhs_code (def_stmt);
1359 if (code != ADDR_EXPR
1360 && code != POINTER_PLUS_EXPR)
1361 return changed;
1362
1363 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1364
1365 /* The only thing we have to do is, starting from &OBJ.foo.bar, add the
1366 offset of .foo.bar to the preceding MEM_REF offset and replace the
1367 address with &OBJ. */
1368 if (code == ADDR_EXPR)
1369 {
1370 tree addr, addr_base;
1371 poly_int64 addr_offset;
1372
1373 addr = gimple_assign_rhs1 (def_stmt);
1374 addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (addr, 0),
1375 &addr_offset,
1376 vn_valueize);
1377 /* If that didn't work because the address isn't invariant, propagate
1378 the reference tree from the address operation in case the current
1379 dereference isn't offsetted. */
1380 if (!addr_base
1381 && *i_p == ops->length () - 1
1382 && known_eq (off, 0)
1383 /* This makes us disable this transform for PRE where the
1384 reference ops might be also used for code insertion which
1385 is invalid. */
1386 && default_vn_walk_kind == VN_WALKREWRITE)
1387 {
1388 auto_vec<vn_reference_op_s, 32> tem;
1389 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1390 /* Make sure to preserve TBAA info. The only objects not
1391 wrapped in MEM_REFs that can have their address taken are
1392 STRING_CSTs. */
1393 if (tem.length () >= 2
1394 && tem[tem.length () - 2].opcode == MEM_REF)
1395 {
1396 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1397 new_mem_op->op0
1398 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1399 wi::to_poly_wide (new_mem_op->op0));
1400 }
1401 else
1402 gcc_assert (tem.last ().opcode == STRING_CST);
1403 ops->pop ();
1404 ops->pop ();
1405 ops->safe_splice (tem);
1406 --*i_p;
1407 return true;
1408 }
1409 if (!addr_base
1410 || TREE_CODE (addr_base) != MEM_REF
1411 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1412 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
1413 0))))
1414 return changed;
1415
1416 off += addr_offset;
1417 off += mem_ref_offset (addr_base);
1418 op->op0 = TREE_OPERAND (addr_base, 0);
1419 }
1420 else
1421 {
1422 tree ptr, ptroff;
1423 ptr = gimple_assign_rhs1 (def_stmt);
1424 ptroff = gimple_assign_rhs2 (def_stmt);
1425 if (TREE_CODE (ptr) != SSA_NAME
1426 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1427 /* Make sure to not endlessly recurse.
1428 See gcc.dg/tree-ssa/20040408-1.c for an example. Can easily
1429 happen when we value-number a PHI to its backedge value. */
1430 || SSA_VAL (ptr) == op->op0
1431 || !poly_int_tree_p (ptroff))
1432 return changed;
1433
1434 off += wi::to_poly_offset (ptroff);
1435 op->op0 = ptr;
1436 }
1437
1438 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1439 if (tree_fits_shwi_p (mem_op->op0))
1440 mem_op->off = tree_to_shwi (mem_op->op0);
1441 else
1442 mem_op->off = -1;
1443 /* ??? Can end up with endless recursion here!?
1444 gcc.c-torture/execute/strcmp-1.c */
1445 if (TREE_CODE (op->op0) == SSA_NAME)
1446 op->op0 = SSA_VAL (op->op0);
1447 if (TREE_CODE (op->op0) != SSA_NAME)
1448 op->opcode = TREE_CODE (op->op0);
1449
1450 changed = true;
1451 }
1452 /* Tail-recurse. */
1453 while (TREE_CODE (op->op0) == SSA_NAME);
1454
1455 /* Fold a remaining *&. */
1456 if (TREE_CODE (op->op0) == ADDR_EXPR)
1457 vn_reference_fold_indirect (ops, i_p);
1458
1459 return changed;
1460 }
1461
1462 /* Optimize the reference REF to a constant if possible or return
1463 NULL_TREE if not. */
1464
1465 tree
1466 fully_constant_vn_reference_p (vn_reference_t ref)
1467 {
1468 vec<vn_reference_op_s> operands = ref->operands;
1469 vn_reference_op_t op;
1470
1471 /* Try to simplify the translated expression if it is
1472 a call to a builtin function with at most two arguments. */
1473 op = &operands[0];
1474 if (op->opcode == CALL_EXPR
1475 && (!op->op0
1476 || (TREE_CODE (op->op0) == ADDR_EXPR
1477 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1478 && fndecl_built_in_p (TREE_OPERAND (op->op0, 0),
1479 BUILT_IN_NORMAL)))
1480 && operands.length () >= 2
1481 && operands.length () <= 3)
1482 {
1483 vn_reference_op_t arg0, arg1 = NULL;
1484 bool anyconst = false;
1485 arg0 = &operands[1];
1486 if (operands.length () > 2)
1487 arg1 = &operands[2];
1488 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1489 || (arg0->opcode == ADDR_EXPR
1490 && is_gimple_min_invariant (arg0->op0)))
1491 anyconst = true;
1492 if (arg1
1493 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1494 || (arg1->opcode == ADDR_EXPR
1495 && is_gimple_min_invariant (arg1->op0))))
1496 anyconst = true;
1497 if (anyconst)
1498 {
1499 combined_fn fn;
1500 if (op->op0)
1501 fn = as_combined_fn (DECL_FUNCTION_CODE
1502 (TREE_OPERAND (op->op0, 0)));
1503 else
1504 fn = as_combined_fn ((internal_fn) op->clique);
1505 tree folded;
1506 if (arg1)
1507 folded = fold_const_call (fn, ref->type, arg0->op0, arg1->op0);
1508 else
1509 folded = fold_const_call (fn, ref->type, arg0->op0);
1510 if (folded
1511 && is_gimple_min_invariant (folded))
1512 return folded;
1513 }
1514 }
1515
1516 /* Simplify reads from constants or constant initializers. */
1517 else if (BITS_PER_UNIT == 8
1518 && ref->type
1519 && COMPLETE_TYPE_P (ref->type)
1520 && is_gimple_reg_type (ref->type))
1521 {
1522 poly_int64 off = 0;
1523 HOST_WIDE_INT size;
1524 if (INTEGRAL_TYPE_P (ref->type))
1525 size = TYPE_PRECISION (ref->type);
1526 else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
1527 size = tree_to_shwi (TYPE_SIZE (ref->type));
1528 else
1529 return NULL_TREE;
1530 if (size % BITS_PER_UNIT != 0
1531 || size > MAX_BITSIZE_MODE_ANY_MODE)
1532 return NULL_TREE;
1533 size /= BITS_PER_UNIT;
1534 unsigned i;
1535 for (i = 0; i < operands.length (); ++i)
1536 {
1537 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1538 {
1539 ++i;
1540 break;
1541 }
1542 if (known_eq (operands[i].off, -1))
1543 return NULL_TREE;
1544 off += operands[i].off;
1545 if (operands[i].opcode == MEM_REF)
1546 {
1547 ++i;
1548 break;
1549 }
1550 }
1551 vn_reference_op_t base = &operands[--i];
1552 tree ctor = error_mark_node;
1553 tree decl = NULL_TREE;
1554 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1555 ctor = base->op0;
1556 else if (base->opcode == MEM_REF
1557 && base[1].opcode == ADDR_EXPR
1558 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1559 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1560 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1561 {
1562 decl = TREE_OPERAND (base[1].op0, 0);
1563 if (TREE_CODE (decl) == STRING_CST)
1564 ctor = decl;
1565 else
1566 ctor = ctor_for_folding (decl);
1567 }
1568 if (ctor == NULL_TREE)
1569 return build_zero_cst (ref->type);
1570 else if (ctor != error_mark_node)
1571 {
1572 HOST_WIDE_INT const_off;
1573 if (decl)
1574 {
1575 tree res = fold_ctor_reference (ref->type, ctor,
1576 off * BITS_PER_UNIT,
1577 size * BITS_PER_UNIT, decl);
1578 if (res)
1579 {
1580 STRIP_USELESS_TYPE_CONVERSION (res);
1581 if (is_gimple_min_invariant (res))
1582 return res;
1583 }
1584 }
1585 else if (off.is_constant (&const_off))
1586 {
1587 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1588 int len = native_encode_expr (ctor, buf, size, const_off);
1589 if (len > 0)
1590 return native_interpret_expr (ref->type, buf, len);
1591 }
1592 }
1593 }
1594
1595 return NULL_TREE;
1596 }
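
/* Two illustrative cases handled above (hypothetical sources): a call
   like __builtin_popcount (0xff) with all-constant arguments folds via
   fold_const_call, while a load such as

     static const int tab[2] = { 1, 2 };
     ... = tab[1];

   whose operands have a known constant offset into a constant
   initializer folds via fold_ctor_reference or native_encode_expr.  */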
1597
1598 /* Return true if OPS contain a storage order barrier. */
1599
1600 static bool
1601 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1602 {
1603 vn_reference_op_t op;
1604 unsigned i;
1605
1606 FOR_EACH_VEC_ELT (ops, i, op)
1607 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1608 return true;
1609
1610 return false;
1611 }
1612
1613 /* Return true if OPS represent an access with reverse storage order. */
1614
1615 static bool
1616 reverse_storage_order_for_component_p (vec<vn_reference_op_s> ops)
1617 {
1618 unsigned i = 0;
1619 if (ops[i].opcode == REALPART_EXPR || ops[i].opcode == IMAGPART_EXPR)
1620 ++i;
1621 switch (ops[i].opcode)
1622 {
1623 case ARRAY_REF:
1624 case COMPONENT_REF:
1625 case BIT_FIELD_REF:
1626 case MEM_REF:
1627 return ops[i].reverse;
1628 default:
1629 return false;
1630 }
1631 }
1632
1633 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1634 structures into their value numbers. This is done in-place, and
1635 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1636 whether any operands were valueized. */
1637
1638 static void
1639 valueize_refs_1 (vec<vn_reference_op_s> *orig, bool *valueized_anything,
1640 bool with_avail = false)
1641 {
1642 *valueized_anything = false;
1643
1644 for (unsigned i = 0; i < orig->length (); ++i)
1645 {
1646 re_valueize:
1647 vn_reference_op_t vro = &(*orig)[i];
1648 if (vro->opcode == SSA_NAME
1649 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1650 {
1651 tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
1652 if (tem != vro->op0)
1653 {
1654 *valueized_anything = true;
1655 vro->op0 = tem;
1656 }
1657 /* If it transforms from an SSA_NAME to a constant, update
1658 the opcode. */
1659 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1660 vro->opcode = TREE_CODE (vro->op0);
1661 }
1662 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1663 {
1664 tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
1665 if (tem != vro->op1)
1666 {
1667 *valueized_anything = true;
1668 vro->op1 = tem;
1669 }
1670 }
1671 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1672 {
1673 tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
1674 if (tem != vro->op2)
1675 {
1676 *valueized_anything = true;
1677 vro->op2 = tem;
1678 }
1679 }
1680 /* If it transforms from an SSA_NAME to an address, fold with
1681 a preceding indirect reference. */
1682 if (i > 0
1683 && vro->op0
1684 && TREE_CODE (vro->op0) == ADDR_EXPR
1685 && (*orig)[i - 1].opcode == MEM_REF)
1686 {
1687 if (vn_reference_fold_indirect (orig, &i))
1688 *valueized_anything = true;
1689 }
1690 else if (i > 0
1691 && vro->opcode == SSA_NAME
1692 && (*orig)[i - 1].opcode == MEM_REF)
1693 {
1694 if (vn_reference_maybe_forwprop_address (orig, &i))
1695 {
1696 *valueized_anything = true;
1697 /* Re-valueize the current operand. */
1698 goto re_valueize;
1699 }
1700 }
1701 /* If it transforms a non-constant ARRAY_REF into a constant
1702 one, adjust the constant offset. */
1703 else if (vro->opcode == ARRAY_REF
1704 && known_eq (vro->off, -1)
1705 && poly_int_tree_p (vro->op0)
1706 && poly_int_tree_p (vro->op1)
1707 && TREE_CODE (vro->op2) == INTEGER_CST)
1708 {
1709 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1710 - wi::to_poly_offset (vro->op1))
1711 * wi::to_offset (vro->op2)
1712 * vn_ref_op_align_unit (vro));
1713 off.to_shwi (&vro->off);
1714 }
1715 }
1716 }
1717
1718 static void
1719 valueize_refs (vec<vn_reference_op_s> *orig)
1720 {
1721 bool tem;
1722 valueize_refs_1 (orig, &tem);
1723 }
1724
1725 static vec<vn_reference_op_s> shared_lookup_references;
1726
1727 /* Create a vector of vn_reference_op_s structures from REF, a
1728 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1729 this function. *VALUEIZED_ANYTHING will specify whether any
1730 operands were valueized. */
1731
1732 static vec<vn_reference_op_s>
1733 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1734 {
1735 if (!ref)
1736 return vNULL;
1737 shared_lookup_references.truncate (0);
1738 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1739 valueize_refs_1 (&shared_lookup_references, valueized_anything);
1740 return shared_lookup_references;
1741 }
1742
1743 /* Create a vector of vn_reference_op_s structures from CALL, a
1744 call statement. The vector is shared among all callers of
1745 this function. */
1746
1747 static vec<vn_reference_op_s>
1748 valueize_shared_reference_ops_from_call (gcall *call)
1749 {
1750 if (!call)
1751 return vNULL;
1752 shared_lookup_references.truncate (0);
1753 copy_reference_ops_from_call (call, &shared_lookup_references);
1754 valueize_refs (&shared_lookup_references);
1755 return shared_lookup_references;
1756 }
1757
1758 /* Lookup a SCCVN reference operation VR in the current hash table.
1759 Returns the resulting value number if it exists in the hash table,
1760 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1761 vn_reference_t stored in the hashtable if something is found. */
1762
1763 static tree
1764 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1765 {
1766 vn_reference_s **slot;
1767 hashval_t hash;
1768
1769 hash = vr->hashcode;
1770 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1771 if (slot)
1772 {
1773 if (vnresult)
1774 *vnresult = (vn_reference_t)*slot;
1775 return ((vn_reference_t)*slot)->result;
1776 }
1777
1778 return NULL_TREE;
1779 }
1780
1781
1782 /* Partial definition tracking support. */
1783
1784 struct pd_range
1785 {
1786 HOST_WIDE_INT offset;
1787 HOST_WIDE_INT size;
1788 };
1789
1790 struct pd_data
1791 {
1792 tree rhs;
1793 HOST_WIDE_INT offset;
1794 HOST_WIDE_INT size;
1795 };
1796
1797 /* Context for alias walking. */
1798
1799 struct vn_walk_cb_data
1800 {
1801 vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
1802 vn_lookup_kind vn_walk_kind_, bool tbaa_p_, tree mask_,
1803 bool redundant_store_removal_p_)
1804 : vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
1805 mask (mask_), masked_result (NULL_TREE), same_val (NULL_TREE),
1806 vn_walk_kind (vn_walk_kind_),
1807 tbaa_p (tbaa_p_), redundant_store_removal_p (redundant_store_removal_p_),
1808 saved_operands (vNULL), first_set (-2), first_base_set (-2),
1809 known_ranges (NULL)
1810 {
1811 if (!last_vuse_ptr)
1812 last_vuse_ptr = &last_vuse;
1813 ao_ref_init (&orig_ref, orig_ref_);
1814 if (mask)
1815 {
1816 wide_int w = wi::to_wide (mask);
1817 unsigned int pos = 0, prec = w.get_precision ();
1818 pd_data pd;
1819 pd.rhs = build_constructor (NULL_TREE, NULL);
1820 /* When bitwise and with a constant is done on a memory load,
1821 we don't really need all the bits to be defined or defined
1822 to constants, we don't really care what is in the position
1823 corresponding to 0 bits in the mask.
1824 So, push the ranges of those 0 bits in the mask as artificial
1825 zero stores and let the partial def handling code do the
1826 rest. */
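/* An illustrative example (not from the original sources): with
BITS_PER_UNIT == 8, little-endian and a 16-bit mask of 0x0ff0 the zero
runs are bits [0, 4) and [12, 16), so two artificial zero partial defs
{ offset 0, size 4 } and { offset 12, size 4 } are pushed before any
real defs are walked.  */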
1827 while (pos < prec)
1828 {
1829 int tz = wi::ctz (w);
1830 if (pos + tz > prec)
1831 tz = prec - pos;
1832 if (tz)
1833 {
1834 if (BYTES_BIG_ENDIAN)
1835 pd.offset = prec - pos - tz;
1836 else
1837 pd.offset = pos;
1838 pd.size = tz;
1839 void *r = push_partial_def (pd, 0, 0, 0, prec);
1840 gcc_assert (r == NULL_TREE);
1841 }
1842 pos += tz;
1843 if (pos == prec)
1844 break;
1845 w = wi::lrshift (w, tz);
1846 tz = wi::ctz (wi::bit_not (w));
1847 if (pos + tz > prec)
1848 tz = prec - pos;
1849 pos += tz;
1850 w = wi::lrshift (w, tz);
1851 }
1852 }
1853 }
1854 ~vn_walk_cb_data ();
1855 void *finish (alias_set_type, alias_set_type, tree);
1856 void *push_partial_def (pd_data pd,
1857 alias_set_type, alias_set_type, HOST_WIDE_INT,
1858 HOST_WIDE_INT);
1859
1860 vn_reference_t vr;
1861 ao_ref orig_ref;
1862 tree *last_vuse_ptr;
1863 tree last_vuse;
1864 tree mask;
1865 tree masked_result;
1866 tree same_val;
1867 vn_lookup_kind vn_walk_kind;
1868 bool tbaa_p;
1869 bool redundant_store_removal_p;
1870 vec<vn_reference_op_s> saved_operands;
1871
1872 /* The VDEFs of partial defs we come along. */
1873 auto_vec<pd_data, 2> partial_defs;
1874 /* The first defs range to avoid splay tree setup in most cases. */
1875 pd_range first_range;
1876 alias_set_type first_set;
1877 alias_set_type first_base_set;
1878 splay_tree known_ranges;
1879 obstack ranges_obstack;
1880 };
1881
1882 vn_walk_cb_data::~vn_walk_cb_data ()
1883 {
1884 if (known_ranges)
1885 {
1886 splay_tree_delete (known_ranges);
1887 obstack_free (&ranges_obstack, NULL);
1888 }
1889 saved_operands.release ();
1890 }
1891
1892 void *
1893 vn_walk_cb_data::finish (alias_set_type set, alias_set_type base_set, tree val)
1894 {
1895 if (first_set != -2)
1896 {
1897 set = first_set;
1898 base_set = first_base_set;
1899 }
1900 if (mask)
1901 {
1902 masked_result = val;
1903 return (void *) -1;
1904 }
1905 if (same_val && !operand_equal_p (val, same_val))
1906 return (void *) -1;
1907 vec<vn_reference_op_s> &operands
1908 = saved_operands.exists () ? saved_operands : vr->operands;
1909 return vn_reference_lookup_or_insert_for_pieces (last_vuse, set, base_set,
1910 vr->type, operands, val);
1911 }
1912
1913 /* pd_range splay-tree helpers. */
1914
1915 static int
1916 pd_range_compare (splay_tree_key offset1p, splay_tree_key offset2p)
1917 {
1918 HOST_WIDE_INT offset1 = *(HOST_WIDE_INT *)offset1p;
1919 HOST_WIDE_INT offset2 = *(HOST_WIDE_INT *)offset2p;
1920 if (offset1 < offset2)
1921 return -1;
1922 else if (offset1 > offset2)
1923 return 1;
1924 return 0;
1925 }
1926
1927 static void *
1928 pd_tree_alloc (int size, void *data_)
1929 {
1930 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
1931 return obstack_alloc (&data->ranges_obstack, size);
1932 }
1933
1934 static void
1935 pd_tree_dealloc (void *, void *)
1936 {
1937 }
1938
1939 /* Push PD to the vector of partial definitions.  Returns a value
1940 when we are ready to combine things with VUSE, SET and MAXSIZEI,
1941 NULL when we want to continue looking for partial defs, or -1
1942 on failure. */
1943
1944 void *
1945 vn_walk_cb_data::push_partial_def (pd_data pd,
1946 alias_set_type set, alias_set_type base_set,
1947 HOST_WIDE_INT offseti,
1948 HOST_WIDE_INT maxsizei)
1949 {
1950 const HOST_WIDE_INT bufsize = 64;
1951 /* We're using a fixed buffer for encoding so fail early if the object
1952 we want to interpret is bigger. */
1953 if (maxsizei > bufsize * BITS_PER_UNIT
1954 || CHAR_BIT != 8
1955 || BITS_PER_UNIT != 8
1956 /* Not prepared to handle PDP endian. */
1957 || BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
1958 return (void *)-1;
1959
1960 /* Turn too large constant stores into non-constant stores. */
1961 if (CONSTANT_CLASS_P (pd.rhs) && pd.size > bufsize * BITS_PER_UNIT)
1962 pd.rhs = error_mark_node;
1963
1964 /* And for non-constant or CONSTRUCTOR stores shrink them to only keep at
1965 most a partial byte before and/or after the region. */
1966 if (!CONSTANT_CLASS_P (pd.rhs))
1967 {
1968 if (pd.offset < offseti)
1969 {
1970 HOST_WIDE_INT o = ROUND_DOWN (offseti - pd.offset, BITS_PER_UNIT);
1971 gcc_assert (pd.size > o);
1972 pd.size -= o;
1973 pd.offset += o;
1974 }
1975 if (pd.size > maxsizei)
1976 pd.size = maxsizei + ((pd.size - maxsizei) % BITS_PER_UNIT);
1977 }
1978
1979 pd.offset -= offseti;
1980
1981 bool pd_constant_p = (TREE_CODE (pd.rhs) == CONSTRUCTOR
1982 || CONSTANT_CLASS_P (pd.rhs));
1983 if (partial_defs.is_empty ())
1984 {
1985 /* If we get a clobber upfront, fail. */
1986 if (TREE_CLOBBER_P (pd.rhs))
1987 return (void *)-1;
1988 if (!pd_constant_p)
1989 return (void *)-1;
1990 partial_defs.safe_push (pd);
1991 first_range.offset = pd.offset;
1992 first_range.size = pd.size;
1993 first_set = set;
1994 first_base_set = base_set;
1995 last_vuse_ptr = NULL;
1996 /* Continue looking for partial defs. */
1997 return NULL;
1998 }
1999
2000 if (!known_ranges)
2001 {
2002 /* ??? Optimize the case where the 2nd partial def completes things. */
2003 gcc_obstack_init (&ranges_obstack);
2004 known_ranges = splay_tree_new_with_allocator (pd_range_compare, 0, 0,
2005 pd_tree_alloc,
2006 pd_tree_dealloc, this);
2007 splay_tree_insert (known_ranges,
2008 (splay_tree_key)&first_range.offset,
2009 (splay_tree_value)&first_range);
2010 }
2011
2012 pd_range newr = { pd.offset, pd.size };
2013 splay_tree_node n;
2014 pd_range *r;
2015 /* Lookup the predecessor of offset + 1 and see if we need to merge. */
2016 HOST_WIDE_INT loffset = newr.offset + 1;
2017 if ((n = splay_tree_predecessor (known_ranges, (splay_tree_key)&loffset))
2018 && ((r = (pd_range *)n->value), true)
2019 && ranges_known_overlap_p (r->offset, r->size + 1,
2020 newr.offset, newr.size))
2021 {
2022 /* Ignore partial defs already covered.  This also silently drops
2023 shadowed clobbers that arrive here. */
2024 if (known_subrange_p (newr.offset, newr.size, r->offset, r->size))
2025 return NULL;
2026 r->size = MAX (r->offset + r->size, newr.offset + newr.size) - r->offset;
2027 }
2028 else
2029 {
2030 /* newr.offset wasn't covered yet, insert the range. */
2031 r = XOBNEW (&ranges_obstack, pd_range);
2032 *r = newr;
2033 splay_tree_insert (known_ranges, (splay_tree_key)&r->offset,
2034 (splay_tree_value)r);
2035 }
2036 /* Merge r which now contains newr and is a member of the splay tree with
2037 adjacent overlapping ranges. */
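/* For instance (illustrative only): if the tree holds { offset 0,
size 16 } and { offset 24, size 8 } and the new range is { offset 16,
size 8 }, the predecessor check above grows the first node to
{ offset 0, size 24 } and this loop then absorbs the successor,
leaving a single { offset 0, size 32 } node.  */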
2038 pd_range *rafter;
2039 while ((n = splay_tree_successor (known_ranges, (splay_tree_key)&r->offset))
2040 && ((rafter = (pd_range *)n->value), true)
2041 && ranges_known_overlap_p (r->offset, r->size + 1,
2042 rafter->offset, rafter->size))
2043 {
2044 r->size = MAX (r->offset + r->size,
2045 rafter->offset + rafter->size) - r->offset;
2046 splay_tree_remove (known_ranges, (splay_tree_key)&rafter->offset);
2047 }
2048 /* If we get a clobber, fail. */
2049 if (TREE_CLOBBER_P (pd.rhs))
2050 return (void *)-1;
2051 /* Non-constants are OK as long as they are shadowed by a constant. */
2052 if (!pd_constant_p)
2053 return (void *)-1;
2054 partial_defs.safe_push (pd);
2055
2056 /* Now we have merged newr into the range tree. When we have covered
2057 [0, maxsizei] then the tree will contain exactly one node which has
2058 the desired properties and it will be 'r'. */
2059 if (!known_subrange_p (0, maxsizei, r->offset, r->size))
2060 /* Continue looking for partial defs. */
2061 return NULL;
2062
2063 /* Now simply native encode all partial defs in reverse order. */
2064 unsigned ndefs = partial_defs.length ();
2065 /* We support up to 512-bit values (for V8DFmode). */
2066 unsigned char buffer[bufsize + 1];
2067 unsigned char this_buffer[bufsize + 1];
2068 int len;
2069
2070 memset (buffer, 0, bufsize + 1);
2071 unsigned needed_len = ROUND_UP (maxsizei, BITS_PER_UNIT) / BITS_PER_UNIT;
2072 while (!partial_defs.is_empty ())
2073 {
2074 pd_data pd = partial_defs.pop ();
2075 unsigned int amnt;
2076 if (TREE_CODE (pd.rhs) == CONSTRUCTOR)
2077 {
2078 /* Empty CONSTRUCTOR. */
2079 if (pd.size >= needed_len * BITS_PER_UNIT)
2080 len = needed_len;
2081 else
2082 len = ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT;
2083 memset (this_buffer, 0, len);
2084 }
2085 else
2086 {
2087 len = native_encode_expr (pd.rhs, this_buffer, bufsize,
2088 MAX (0, -pd.offset) / BITS_PER_UNIT);
2089 if (len <= 0
2090 || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
2091 - MAX (0, -pd.offset) / BITS_PER_UNIT))
2092 {
2093 if (dump_file && (dump_flags & TDF_DETAILS))
2094 fprintf (dump_file, "Failed to encode %u "
2095 "partial definitions\n", ndefs);
2096 return (void *)-1;
2097 }
2098 }
2099
2100 unsigned char *p = buffer;
2101 HOST_WIDE_INT size = pd.size;
2102 if (pd.offset < 0)
2103 size -= ROUND_DOWN (-pd.offset, BITS_PER_UNIT);
2104 this_buffer[len] = 0;
2105 if (BYTES_BIG_ENDIAN)
2106 {
2107 /* LSB of this_buffer[len - 1] byte should be at
2108 pd.offset + pd.size - 1 bits in buffer. */
2109 amnt = ((unsigned HOST_WIDE_INT) pd.offset
2110 + pd.size) % BITS_PER_UNIT;
2111 if (amnt)
2112 shift_bytes_in_array_right (this_buffer, len + 1, amnt);
2113 unsigned char *q = this_buffer;
2114 unsigned int off = 0;
2115 if (pd.offset >= 0)
2116 {
2117 unsigned int msk;
2118 off = pd.offset / BITS_PER_UNIT;
2119 gcc_assert (off < needed_len);
2120 p = buffer + off;
2121 if (size <= amnt)
2122 {
2123 msk = ((1 << size) - 1) << (BITS_PER_UNIT - amnt);
2124 *p = (*p & ~msk) | (this_buffer[len] & msk);
2125 size = 0;
2126 }
2127 else
2128 {
2129 if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2130 q = (this_buffer + len
2131 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2132 / BITS_PER_UNIT));
2133 if (pd.offset % BITS_PER_UNIT)
2134 {
2135 msk = -1U << (BITS_PER_UNIT
2136 - (pd.offset % BITS_PER_UNIT));
2137 *p = (*p & msk) | (*q & ~msk);
2138 p++;
2139 q++;
2140 off++;
2141 size -= BITS_PER_UNIT - (pd.offset % BITS_PER_UNIT);
2142 gcc_assert (size >= 0);
2143 }
2144 }
2145 }
2146 else if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2147 {
2148 q = (this_buffer + len
2149 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2150 / BITS_PER_UNIT));
2151 if (pd.offset % BITS_PER_UNIT)
2152 {
2153 q++;
2154 size -= BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) pd.offset
2155 % BITS_PER_UNIT);
2156 gcc_assert (size >= 0);
2157 }
2158 }
2159 if ((unsigned HOST_WIDE_INT) size / BITS_PER_UNIT + off
2160 > needed_len)
2161 size = (needed_len - off) * BITS_PER_UNIT;
2162 memcpy (p, q, size / BITS_PER_UNIT);
2163 if (size % BITS_PER_UNIT)
2164 {
2165 unsigned int msk
2166 = -1U << (BITS_PER_UNIT - (size % BITS_PER_UNIT));
2167 p += size / BITS_PER_UNIT;
2168 q += size / BITS_PER_UNIT;
2169 *p = (*q & msk) | (*p & ~msk);
2170 }
2171 }
2172 else
2173 {
2174 if (pd.offset >= 0)
2175 {
2176 /* LSB of this_buffer[0] byte should be at pd.offset bits
2177 in buffer. */
2178 unsigned int msk;
2179 size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2180 amnt = pd.offset % BITS_PER_UNIT;
2181 if (amnt)
2182 shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2183 unsigned int off = pd.offset / BITS_PER_UNIT;
2184 gcc_assert (off < needed_len);
2185 size = MIN (size,
2186 (HOST_WIDE_INT) (needed_len - off) * BITS_PER_UNIT);
2187 p = buffer + off;
2188 if (amnt + size < BITS_PER_UNIT)
2189 {
2190 /* Low amnt bits come from *p, then size bits
2191 from this_buffer[0] and the remaining again from
2192 *p. */
2193 msk = ((1 << size) - 1) << amnt;
2194 *p = (*p & ~msk) | (this_buffer[0] & msk);
2195 size = 0;
2196 }
2197 else if (amnt)
2198 {
2199 msk = -1U << amnt;
2200 *p = (*p & ~msk) | (this_buffer[0] & msk);
2201 p++;
2202 size -= (BITS_PER_UNIT - amnt);
2203 }
2204 }
2205 else
2206 {
2207 amnt = (unsigned HOST_WIDE_INT) pd.offset % BITS_PER_UNIT;
2208 if (amnt)
2209 size -= BITS_PER_UNIT - amnt;
2210 size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2211 if (amnt)
2212 shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2213 }
2214 memcpy (p, this_buffer + (amnt != 0), size / BITS_PER_UNIT);
2215 p += size / BITS_PER_UNIT;
2216 if (size % BITS_PER_UNIT)
2217 {
2218 unsigned int msk = -1U << (size % BITS_PER_UNIT);
2219 *p = (this_buffer[(amnt != 0) + size / BITS_PER_UNIT]
2220 & ~msk) | (*p & msk);
2221 }
2222 }
2223 }
2224
2225 tree type = vr->type;
2226 /* Make sure to interpret in a type that has a range covering the whole
2227 access size. */
2228 if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
2229 type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
2230 tree val;
2231 if (BYTES_BIG_ENDIAN)
2232 {
2233 unsigned sz = needed_len;
2234 if (maxsizei % BITS_PER_UNIT)
2235 shift_bytes_in_array_right (buffer, needed_len,
2236 BITS_PER_UNIT
2237 - (maxsizei % BITS_PER_UNIT));
2238 if (INTEGRAL_TYPE_P (type))
2239 sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
2240 if (sz > needed_len)
2241 {
2242 memcpy (this_buffer + (sz - needed_len), buffer, needed_len);
2243 val = native_interpret_expr (type, this_buffer, sz);
2244 }
2245 else
2246 val = native_interpret_expr (type, buffer, needed_len);
2247 }
2248 else
2249 val = native_interpret_expr (type, buffer, bufsize);
2250 /* If we chop off bits because the type's precision doesn't match the memory
2251 access size, this is ok when optimizing reads but not when called from
2252 the DSE code during elimination. */
2253 if (val && type != vr->type)
2254 {
2255 if (! int_fits_type_p (val, vr->type))
2256 val = NULL_TREE;
2257 else
2258 val = fold_convert (vr->type, val);
2259 }
2260
2261 if (val)
2262 {
2263 if (dump_file && (dump_flags & TDF_DETAILS))
2264 fprintf (dump_file,
2265 "Successfully combined %u partial definitions\n", ndefs);
2266 /* We are using the alias-set of the first store we encounter which
2267 should be appropriate here. */
2268 return finish (first_set, first_base_set, val);
2269 }
2270 else
2271 {
2272 if (dump_file && (dump_flags & TDF_DETAILS))
2273 fprintf (dump_file,
2274 "Failed to interpret %u encoded partial definitions\n", ndefs);
2275 return (void *)-1;
2276 }
2277 }
2278
2279 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
2280 with the current VUSE and performs the expression lookup. */
2281
2282 static void *
2283 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
2284 {
2285 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2286 vn_reference_t vr = data->vr;
2287 vn_reference_s **slot;
2288 hashval_t hash;
2289
2290 /* If we have partial definitions recorded we have to go through
2291 vn_reference_lookup_3. */
2292 if (!data->partial_defs.is_empty ())
2293 return NULL;
2294
2295 if (data->last_vuse_ptr)
2296 {
2297 *data->last_vuse_ptr = vuse;
2298 data->last_vuse = vuse;
2299 }
2300
2301 /* Fixup vuse and hash. */
2302 if (vr->vuse)
2303 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
2304 vr->vuse = vuse_ssa_val (vuse);
2305 if (vr->vuse)
2306 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
2307
2308 hash = vr->hashcode;
2309 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
2310 if (slot)
2311 {
2312 if ((*slot)->result && data->saved_operands.exists ())
2313 return data->finish (vr->set, vr->base_set, (*slot)->result);
2314 return *slot;
2315 }
2316
2317 return NULL;
2318 }
2319
2320 /* Lookup an existing or insert a new vn_reference entry into the
2321 value table for the VUSE, SET, TYPE, OPERANDS reference which
2322 has the value VALUE which is either a constant or an SSA name. */
2323
2324 static vn_reference_t
2325 vn_reference_lookup_or_insert_for_pieces (tree vuse,
2326 alias_set_type set,
2327 alias_set_type base_set,
2328 tree type,
2329 vec<vn_reference_op_s,
2330 va_heap> operands,
2331 tree value)
2332 {
2333 vn_reference_s vr1;
2334 vn_reference_t result;
2335 unsigned value_id;
2336 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2337 vr1.operands = operands;
2338 vr1.type = type;
2339 vr1.set = set;
2340 vr1.base_set = base_set;
2341 vr1.hashcode = vn_reference_compute_hash (&vr1);
2342 if (vn_reference_lookup_1 (&vr1, &result))
2343 return result;
2344 if (TREE_CODE (value) == SSA_NAME)
2345 value_id = VN_INFO (value)->value_id;
2346 else
2347 value_id = get_or_alloc_constant_value_id (value);
2348 return vn_reference_insert_pieces (vuse, set, base_set, type,
2349 operands.copy (), value, value_id);
2350 }
2351
2352 /* Return a value-number for RCODE OPS... either by looking up an existing
2353 value-number for the possibly simplified result or by inserting the
2354 operation if INSERT is true. If SIMPLIFY is false, return a value
2355 number for the unsimplified expression. */
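/* As an illustration (not part of the original sources): for a res_op
describing plus_expr <x_1, 0> the resimplification below yields x_1
directly and its value number is returned without inserting anything;
for plus_expr <x_1, y_2> with no existing entry and INSERT true a new
SSA name is created, value numbered to itself and recorded in the
valid table.  */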
2356
2357 static tree
2358 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert,
2359 bool simplify)
2360 {
2361 tree result = NULL_TREE;
2362 /* We will be creating a value number for
2363 RCODE (OPS...).
2364 So first simplify and lookup this expression to see if it
2365 is already available. */
2366 /* For simplification valueize. */
2367 unsigned i = 0;
2368 if (simplify)
2369 for (i = 0; i < res_op->num_ops; ++i)
2370 if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
2371 {
2372 tree tem = vn_valueize (res_op->ops[i]);
2373 if (!tem)
2374 break;
2375 res_op->ops[i] = tem;
2376 }
2377 /* If valueization of an operand fails (it is not available), skip
2378 simplification. */
2379 bool res = false;
2380 if (i == res_op->num_ops)
2381 {
2382 mprts_hook = vn_lookup_simplify_result;
2383 res = res_op->resimplify (NULL, vn_valueize);
2384 mprts_hook = NULL;
2385 }
2386 gimple *new_stmt = NULL;
2387 if (res
2388 && gimple_simplified_result_is_gimple_val (res_op))
2389 {
2390 /* The expression is already available. */
2391 result = res_op->ops[0];
2392 /* Valueize it, simplification returns something in AVAIL only. */
2393 if (TREE_CODE (result) == SSA_NAME)
2394 result = SSA_VAL (result);
2395 }
2396 else
2397 {
2398 tree val = vn_lookup_simplify_result (res_op);
2399 if (!val && insert)
2400 {
2401 gimple_seq stmts = NULL;
2402 result = maybe_push_res_to_seq (res_op, &stmts);
2403 if (result)
2404 {
2405 gcc_assert (gimple_seq_singleton_p (stmts));
2406 new_stmt = gimple_seq_first_stmt (stmts);
2407 }
2408 }
2409 else
2410 /* The expression is already available. */
2411 result = val;
2412 }
2413 if (new_stmt)
2414 {
2415 /* The expression is not yet available, value-number lhs to
2416 the new SSA_NAME we created. */
2417 /* Initialize value-number information properly. */
2418 vn_ssa_aux_t result_info = VN_INFO (result);
2419 result_info->valnum = result;
2420 result_info->value_id = get_next_value_id ();
2421 result_info->visited = 1;
2422 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
2423 new_stmt);
2424 result_info->needs_insertion = true;
2425 /* ??? PRE phi-translation inserts NARYs without corresponding
2426 SSA name result. Re-use those but set their result according
2427 to the stmt we just built. */
2428 vn_nary_op_t nary = NULL;
2429 vn_nary_op_lookup_stmt (new_stmt, &nary);
2430 if (nary)
2431 {
2432 gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
2433 nary->u.result = gimple_assign_lhs (new_stmt);
2434 }
2435 /* As all "inserted" statements are singleton SCCs, insert
2436 to the valid table. This is strictly needed to
2437 avoid re-generating new value SSA_NAMEs for the same
2438 expression during SCC iteration over and over (the
2439 optimistic table gets cleared after each iteration).
2440 We do not need to insert into the optimistic table, as
2441 lookups there will fall back to the valid table. */
2442 else
2443 {
2444 unsigned int length = vn_nary_length_from_stmt (new_stmt);
2445 vn_nary_op_t vno1
2446 = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
2447 vno1->value_id = result_info->value_id;
2448 vno1->length = length;
2449 vno1->predicated_values = 0;
2450 vno1->u.result = result;
2451 init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (new_stmt));
2452 vn_nary_op_insert_into (vno1, valid_info->nary);
2453 /* Also do not link it into the undo chain. */
2454 last_inserted_nary = vno1->next;
2455 vno1->next = (vn_nary_op_t)(void *)-1;
2456 }
2457 if (dump_file && (dump_flags & TDF_DETAILS))
2458 {
2459 fprintf (dump_file, "Inserting name ");
2460 print_generic_expr (dump_file, result);
2461 fprintf (dump_file, " for expression ");
2462 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
2463 fprintf (dump_file, "\n");
2464 }
2465 }
2466 return result;
2467 }
2468
2469 /* Return a value-number for RCODE OPS... either by looking up an existing
2470 value-number for the simplified result or by inserting the operation. */
2471
2472 static tree
2473 vn_nary_build_or_lookup (gimple_match_op *res_op)
2474 {
2475 return vn_nary_build_or_lookup_1 (res_op, true, true);
2476 }
2477
2478 /* Try to simplify the expression RCODE OPS... of type TYPE and return
2479 its value if present. */
2480
2481 tree
2482 vn_nary_simplify (vn_nary_op_t nary)
2483 {
2484 if (nary->length > gimple_match_op::MAX_NUM_OPS)
2485 return NULL_TREE;
2486 gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
2487 nary->type, nary->length);
2488 memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
2489 return vn_nary_build_or_lookup_1 (&op, false, true);
2490 }
2491
2492 /* Elimination engine. */
2493
2494 class eliminate_dom_walker : public dom_walker
2495 {
2496 public:
2497 eliminate_dom_walker (cdi_direction, bitmap);
2498 ~eliminate_dom_walker ();
2499
2500 virtual edge before_dom_children (basic_block);
2501 virtual void after_dom_children (basic_block);
2502
2503 virtual tree eliminate_avail (basic_block, tree op);
2504 virtual void eliminate_push_avail (basic_block, tree op);
2505 tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
2506
2507 void eliminate_stmt (basic_block, gimple_stmt_iterator *);
2508
2509 unsigned eliminate_cleanup (bool region_p = false);
2510
2511 bool do_pre;
2512 unsigned int el_todo;
2513 unsigned int eliminations;
2514 unsigned int insertions;
2515
2516 /* SSA names that had their defs inserted by PRE if do_pre. */
2517 bitmap inserted_exprs;
2518
2519 /* Blocks with statements that have had their EH properties changed. */
2520 bitmap need_eh_cleanup;
2521
2522 /* Blocks with statements that have had their AB properties changed. */
2523 bitmap need_ab_cleanup;
2524
2525 /* Local state for the eliminate domwalk. */
2526 auto_vec<gimple *> to_remove;
2527 auto_vec<gimple *> to_fixup;
2528 auto_vec<tree> avail;
2529 auto_vec<tree> avail_stack;
2530 };
2531
2532 /* Adaptor to the elimination engine using RPO availability. */
2533
2534 class rpo_elim : public eliminate_dom_walker
2535 {
2536 public:
2537 rpo_elim (basic_block entry_)
2538 : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
2539 m_avail_freelist (NULL) {}
2540
2541 virtual tree eliminate_avail (basic_block, tree op);
2542
2543 virtual void eliminate_push_avail (basic_block, tree);
2544
2545 basic_block entry;
2546 /* Freelist of avail entries which are allocated from the vn_ssa_aux
2547 obstack. */
2548 vn_avail *m_avail_freelist;
2549 };
2550
2551 /* Global RPO state for access from hooks. */
2552 static eliminate_dom_walker *rpo_avail;
2553 basic_block vn_context_bb;
2554
2555 /* Return true if BASE1 and BASE2 can be adjusted so they have the
2556 same address and adjust *OFFSET1 and *OFFSET2 accordingly.
2557 Otherwise return false. */
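/* For example (illustrative only, assuming BITS_PER_UNIT == 8): for
bases MEM[p_1 + 4] at bit offset 0 and MEM[p_1 + 8] at bit offset 16
both bases are stripped to p_1 and the offsets become 32 and 80, so
the caller can compare the two accesses relative to a common base.  */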
2558
2559 static bool
2560 adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
2561 tree base2, poly_int64 *offset2)
2562 {
2563 poly_int64 soff;
2564 if (TREE_CODE (base1) == MEM_REF
2565 && TREE_CODE (base2) == MEM_REF)
2566 {
2567 if (mem_ref_offset (base1).to_shwi (&soff))
2568 {
2569 base1 = TREE_OPERAND (base1, 0);
2570 *offset1 += soff * BITS_PER_UNIT;
2571 }
2572 if (mem_ref_offset (base2).to_shwi (&soff))
2573 {
2574 base2 = TREE_OPERAND (base2, 0);
2575 *offset2 += soff * BITS_PER_UNIT;
2576 }
2577 return operand_equal_p (base1, base2, 0);
2578 }
2579 return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
2580 }
2581
2582 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
2583 from the statement defining VUSE and if not successful tries to
2584 translate *REFP and VR_ through an aggregate copy at the definition
2585 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
2586 of *REF and *VR. If only disambiguation was performed then
2587 *DISAMBIGUATE_ONLY is set to true. */
2588
2589 static void *
2590 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
2591 translate_flags *disambiguate_only)
2592 {
2593 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2594 vn_reference_t vr = data->vr;
2595 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2596 tree base = ao_ref_base (ref);
2597 HOST_WIDE_INT offseti = 0, maxsizei, sizei = 0;
2598 static vec<vn_reference_op_s> lhs_ops;
2599 ao_ref lhs_ref;
2600 bool lhs_ref_ok = false;
2601 poly_int64 copy_size;
2602
2603 /* First try to disambiguate after value-replacing in the definitions LHS. */
2604 if (is_gimple_assign (def_stmt))
2605 {
2606 tree lhs = gimple_assign_lhs (def_stmt);
2607 bool valueized_anything = false;
2608 /* Avoid re-allocation overhead. */
2609 lhs_ops.truncate (0);
2610 basic_block saved_rpo_bb = vn_context_bb;
2611 vn_context_bb = gimple_bb (def_stmt);
2612 if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
2613 {
2614 copy_reference_ops_from_ref (lhs, &lhs_ops);
2615 valueize_refs_1 (&lhs_ops, &valueized_anything, true);
2616 }
2617 vn_context_bb = saved_rpo_bb;
2618 ao_ref_init (&lhs_ref, lhs);
2619 lhs_ref_ok = true;
2620 if (valueized_anything
2621 && ao_ref_init_from_vn_reference
2622 (&lhs_ref, ao_ref_alias_set (&lhs_ref),
2623 ao_ref_base_alias_set (&lhs_ref), TREE_TYPE (lhs), lhs_ops)
2624 && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
2625 {
2626 *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2627 return NULL;
2628 }
2629
2630 /* When the def is a CLOBBER we can optimistically disambiguate
2631 against it since any overlap would be undefined behavior.
2632 Avoid this for obvious must aliases to save compile-time though.
2633 We also may not do this when the query is used for redundant
2634 store removal. */
2635 if (!data->redundant_store_removal_p
2636 && gimple_clobber_p (def_stmt)
2637 && !operand_equal_p (ao_ref_base (&lhs_ref), base, OEP_ADDRESS_OF))
2638 {
2639 *disambiguate_only = TR_DISAMBIGUATE;
2640 return NULL;
2641 }
2642
2643 /* Besides valueizing the LHS we can also use access-path based
2644 disambiguation on the original non-valueized ref. */
2645 if (!ref->ref
2646 && lhs_ref_ok
2647 && data->orig_ref.ref)
2648 {
2649 /* We want to use the non-valueized LHS for this, but avoid redundant
2650 work. */
2651 ao_ref *lref = &lhs_ref;
2652 ao_ref lref_alt;
2653 if (valueized_anything)
2654 {
2655 ao_ref_init (&lref_alt, lhs);
2656 lref = &lref_alt;
2657 }
2658 if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
2659 {
2660 *disambiguate_only = (valueized_anything
2661 ? TR_VALUEIZE_AND_DISAMBIGUATE
2662 : TR_DISAMBIGUATE);
2663 return NULL;
2664 }
2665 }
2666
2667 /* If we reach a clobbering statement try to skip it and see if
2668 we find a VN result with exactly the same value as the
2669 possible clobber. In this case we can ignore the clobber
2670 and return the found value. */
2671 if (is_gimple_reg_type (TREE_TYPE (lhs))
2672 && types_compatible_p (TREE_TYPE (lhs), vr->type)
2673 && (ref->ref || data->orig_ref.ref)
2674 && !data->mask
2675 && data->partial_defs.is_empty ()
2676 && multiple_p (get_object_alignment
2677 (ref->ref ? ref->ref : data->orig_ref.ref),
2678 ref->size)
2679 && multiple_p (get_object_alignment (lhs), ref->size))
2680 {
2681 tree rhs = gimple_assign_rhs1 (def_stmt);
2682 /* ??? We may not compare to values ahead in the walk which might be
2683 from a different loop iteration; only loop invariants are safe. Use
2684 CONSTANT_CLASS_P (unvalueized!) as conservative approximation.
2685 The one-hop lookup below doesn't have this issue since there's
2686 a virtual PHI before we ever reach a backedge to cross.
2687 We can skip multiple defs as long as they are from the same
2688 value though. */
2689 if (data->same_val
2690 && !operand_equal_p (data->same_val, rhs))
2691 ;
2692 else if (CONSTANT_CLASS_P (rhs))
2693 {
2694 if (dump_file && (dump_flags & TDF_DETAILS))
2695 {
2696 fprintf (dump_file,
2697 "Skipping possible redundant definition ");
2698 print_gimple_stmt (dump_file, def_stmt, 0);
2699 }
2700 /* Delay the actual compare of the values to the end of the walk
2701 but do not update last_vuse from here. */
2702 data->last_vuse_ptr = NULL;
2703 data->same_val = rhs;
2704 return NULL;
2705 }
2706 else
2707 {
2708 tree *saved_last_vuse_ptr = data->last_vuse_ptr;
2709 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
2710 data->last_vuse_ptr = NULL;
2711 tree saved_vuse = vr->vuse;
2712 hashval_t saved_hashcode = vr->hashcode;
2713 void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt),
2714 data);
2715 /* Need to restore vr->vuse and vr->hashcode. */
2716 vr->vuse = saved_vuse;
2717 vr->hashcode = saved_hashcode;
2718 data->last_vuse_ptr = saved_last_vuse_ptr;
2719 if (res && res != (void *)-1)
2720 {
2721 vn_reference_t vnresult = (vn_reference_t) res;
2722 if (TREE_CODE (rhs) == SSA_NAME)
2723 rhs = SSA_VAL (rhs);
2724 if (vnresult->result
2725 && operand_equal_p (vnresult->result, rhs, 0))
2726 return res;
2727 }
2728 }
2729 }
2730 }
2731 else if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE
2732 && gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
2733 && gimple_call_num_args (def_stmt) <= 4)
2734 {
2735 /* For builtin calls valueize its arguments and call the
2736 alias oracle again. Valueization may improve points-to
2737 info of pointers and constify size and position arguments.
2738 Originally this was motivated by PR61034 which has
2739 conditional calls to free falsely clobbering ref because
2740 of imprecise points-to info of the argument. */
2741 tree oldargs[4];
2742 bool valueized_anything = false;
2743 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2744 {
2745 oldargs[i] = gimple_call_arg (def_stmt, i);
2746 tree val = vn_valueize (oldargs[i]);
2747 if (val != oldargs[i])
2748 {
2749 gimple_call_set_arg (def_stmt, i, val);
2750 valueized_anything = true;
2751 }
2752 }
2753 if (valueized_anything)
2754 {
2755 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
2756 ref, data->tbaa_p);
2757 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2758 gimple_call_set_arg (def_stmt, i, oldargs[i]);
2759 if (!res)
2760 {
2761 *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2762 return NULL;
2763 }
2764 }
2765 }
2766
2767 if (*disambiguate_only > TR_TRANSLATE)
2768 return (void *)-1;
2769
2770 /* If we cannot constrain the size of the reference we cannot
2771 test if anything kills it. */
2772 if (!ref->max_size_known_p ())
2773 return (void *)-1;
2774
2775 poly_int64 offset = ref->offset;
2776 poly_int64 maxsize = ref->max_size;
2777
2778 /* def_stmt may-defs *ref. See if we can derive a value for *ref
2779 from that definition.
2780 1) Memset. */
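/* A sketch of the memset case (illustrative only):
memset (&a, 0, sizeof (a));
... = a.i;
where the load is fully covered by the memset the value is simply
build_zero_cst of the loaded type; for a non-zero constant byte it is
reconstructed below via native_interpret_expr.  */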
2781 if (is_gimple_reg_type (vr->type)
2782 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
2783 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET_CHK))
2784 && (integer_zerop (gimple_call_arg (def_stmt, 1))
2785 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
2786 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
2787 && CHAR_BIT == 8
2788 && BITS_PER_UNIT == 8
2789 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
2790 && offset.is_constant (&offseti)
2791 && ref->size.is_constant (&sizei)
2792 && (offseti % BITS_PER_UNIT == 0
2793 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST)))
2794 && (poly_int_tree_p (gimple_call_arg (def_stmt, 2))
2795 || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
2796 && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)))))
2797 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2798 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
2799 {
2800 tree base2;
2801 poly_int64 offset2, size2, maxsize2;
2802 bool reverse;
2803 tree ref2 = gimple_call_arg (def_stmt, 0);
2804 if (TREE_CODE (ref2) == SSA_NAME)
2805 {
2806 ref2 = SSA_VAL (ref2);
2807 if (TREE_CODE (ref2) == SSA_NAME
2808 && (TREE_CODE (base) != MEM_REF
2809 || TREE_OPERAND (base, 0) != ref2))
2810 {
2811 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2812 if (gimple_assign_single_p (def_stmt)
2813 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2814 ref2 = gimple_assign_rhs1 (def_stmt);
2815 }
2816 }
2817 if (TREE_CODE (ref2) == ADDR_EXPR)
2818 {
2819 ref2 = TREE_OPERAND (ref2, 0);
2820 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
2821 &reverse);
2822 if (!known_size_p (maxsize2)
2823 || !known_eq (maxsize2, size2)
2824 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
2825 return (void *)-1;
2826 }
2827 else if (TREE_CODE (ref2) == SSA_NAME)
2828 {
2829 poly_int64 soff;
2830 if (TREE_CODE (base) != MEM_REF
2831 || !(mem_ref_offset (base)
2832 << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2833 return (void *)-1;
2834 offset += soff;
2835 offset2 = 0;
2836 if (TREE_OPERAND (base, 0) != ref2)
2837 {
2838 gimple *def = SSA_NAME_DEF_STMT (ref2);
2839 if (is_gimple_assign (def)
2840 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2841 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2842 && poly_int_tree_p (gimple_assign_rhs2 (def)))
2843 {
2844 tree rhs2 = gimple_assign_rhs2 (def);
2845 if (!(poly_offset_int::from (wi::to_poly_wide (rhs2),
2846 SIGNED)
2847 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2848 return (void *)-1;
2849 ref2 = gimple_assign_rhs1 (def);
2850 if (TREE_CODE (ref2) == SSA_NAME)
2851 ref2 = SSA_VAL (ref2);
2852 }
2853 else
2854 return (void *)-1;
2855 }
2856 }
2857 else
2858 return (void *)-1;
2859 tree len = gimple_call_arg (def_stmt, 2);
2860 HOST_WIDE_INT leni, offset2i;
2861 if (TREE_CODE (len) == SSA_NAME)
2862 len = SSA_VAL (len);
2863 /* Sometimes the above trickery is smarter than alias analysis. Take
2864 advantage of that. */
2865 if (!ranges_maybe_overlap_p (offset, maxsize, offset2,
2866 (wi::to_poly_offset (len)
2867 << LOG2_BITS_PER_UNIT)))
2868 return NULL;
2869 if (data->partial_defs.is_empty ()
2870 && known_subrange_p (offset, maxsize, offset2,
2871 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2872 {
2873 tree val;
2874 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2875 val = build_zero_cst (vr->type);
2876 else if (INTEGRAL_TYPE_P (vr->type)
2877 && known_eq (ref->size, 8)
2878 && offseti % BITS_PER_UNIT == 0)
2879 {
2880 gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2881 vr->type, gimple_call_arg (def_stmt, 1));
2882 val = vn_nary_build_or_lookup (&res_op);
2883 if (!val
2884 || (TREE_CODE (val) == SSA_NAME
2885 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2886 return (void *)-1;
2887 }
2888 else
2889 {
2890 unsigned buflen = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type)) + 1;
2891 if (INTEGRAL_TYPE_P (vr->type))
2892 buflen = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (vr->type)) + 1;
2893 unsigned char *buf = XALLOCAVEC (unsigned char, buflen);
2894 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2895 buflen);
2896 if (BYTES_BIG_ENDIAN)
2897 {
2898 unsigned int amnt
2899 = (((unsigned HOST_WIDE_INT) offseti + sizei)
2900 % BITS_PER_UNIT);
2901 if (amnt)
2902 {
2903 shift_bytes_in_array_right (buf, buflen,
2904 BITS_PER_UNIT - amnt);
2905 buf++;
2906 buflen--;
2907 }
2908 }
2909 else if (offseti % BITS_PER_UNIT != 0)
2910 {
2911 unsigned int amnt
2912 = BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) offseti
2913 % BITS_PER_UNIT);
2914 shift_bytes_in_array_left (buf, buflen, amnt);
2915 buf++;
2916 buflen--;
2917 }
2918 val = native_interpret_expr (vr->type, buf, buflen);
2919 if (!val)
2920 return (void *)-1;
2921 }
2922 return data->finish (0, 0, val);
2923 }
2924 /* For now handle clearing memory with partial defs. */
2925 else if (known_eq (ref->size, maxsize)
2926 && integer_zerop (gimple_call_arg (def_stmt, 1))
2927 && tree_fits_poly_int64_p (len)
2928 && tree_to_poly_int64 (len).is_constant (&leni)
2929 && leni <= INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT
2930 && offset.is_constant (&offseti)
2931 && offset2.is_constant (&offset2i)
2932 && maxsize.is_constant (&maxsizei)
2933 && ranges_known_overlap_p (offseti, maxsizei, offset2i,
2934 leni << LOG2_BITS_PER_UNIT))
2935 {
2936 pd_data pd;
2937 pd.rhs = build_constructor (NULL_TREE, NULL);
2938 pd.offset = offset2i;
2939 pd.size = leni << LOG2_BITS_PER_UNIT;
2940 return data->push_partial_def (pd, 0, 0, offseti, maxsizei);
2941 }
2942 }
2943
2944 /* 2) Assignment from an empty CONSTRUCTOR. */
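/* Illustrative case (not from the original sources): after
a = (struct S){};
a fully covered read of a.f yields build_zero_cst while a partially
covering read is handed to the partial-def tracker as a zero-filled
range.  */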
2945 else if (is_gimple_reg_type (vr->type)
2946 && gimple_assign_single_p (def_stmt)
2947 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2948 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2949 {
2950 tree base2;
2951 poly_int64 offset2, size2, maxsize2;
2952 HOST_WIDE_INT offset2i, size2i;
2953 gcc_assert (lhs_ref_ok);
2954 base2 = ao_ref_base (&lhs_ref);
2955 offset2 = lhs_ref.offset;
2956 size2 = lhs_ref.size;
2957 maxsize2 = lhs_ref.max_size;
2958 if (known_size_p (maxsize2)
2959 && known_eq (maxsize2, size2)
2960 && adjust_offsets_for_equal_base_address (base, &offset,
2961 base2, &offset2))
2962 {
2963 if (data->partial_defs.is_empty ()
2964 && known_subrange_p (offset, maxsize, offset2, size2))
2965 {
2966 /* While technically undefined behavior, do not optimize
2967 a full read from a clobber. */
2968 if (gimple_clobber_p (def_stmt))
2969 return (void *)-1;
2970 tree val = build_zero_cst (vr->type);
2971 return data->finish (ao_ref_alias_set (&lhs_ref),
2972 ao_ref_base_alias_set (&lhs_ref), val);
2973 }
2974 else if (known_eq (ref->size, maxsize)
2975 && maxsize.is_constant (&maxsizei)
2976 && offset.is_constant (&offseti)
2977 && offset2.is_constant (&offset2i)
2978 && size2.is_constant (&size2i)
2979 && ranges_known_overlap_p (offseti, maxsizei,
2980 offset2i, size2i))
2981 {
2982 /* Let clobbers be consumed by the partial-def tracker
2983 which can choose to ignore them if they are shadowed
2984 by a later def. */
2985 pd_data pd;
2986 pd.rhs = gimple_assign_rhs1 (def_stmt);
2987 pd.offset = offset2i;
2988 pd.size = size2i;
2989 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
2990 ao_ref_base_alias_set (&lhs_ref),
2991 offseti, maxsizei);
2992 }
2993 }
2994 }
2995
2996 /* 3) Assignment from a constant. We can use fold's native encode/interpret
2997 routines to extract the assigned bits. */
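/* Illustrative case (not from the original sources): for
x = 0x11223344;
... = ((unsigned char *)&x)[1];
the constant RHS is native-encoded into a byte buffer and the
addressed byte is re-interpreted in the type of the read.  */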
2998 else if (known_eq (ref->size, maxsize)
2999 && is_gimple_reg_type (vr->type)
3000 && !reverse_storage_order_for_component_p (vr->operands)
3001 && !contains_storage_order_barrier_p (vr->operands)
3002 && gimple_assign_single_p (def_stmt)
3003 && CHAR_BIT == 8
3004 && BITS_PER_UNIT == 8
3005 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
3006 /* native_encode and native_decode operate on arrays of bytes
3007 and so fundamentally need a compile-time size and offset. */
3008 && maxsize.is_constant (&maxsizei)
3009 && offset.is_constant (&offseti)
3010 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
3011 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
3012 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
3013 {
3014 tree lhs = gimple_assign_lhs (def_stmt);
3015 tree base2;
3016 poly_int64 offset2, size2, maxsize2;
3017 HOST_WIDE_INT offset2i, size2i;
3018 bool reverse;
3019 gcc_assert (lhs_ref_ok);
3020 base2 = ao_ref_base (&lhs_ref);
3021 offset2 = lhs_ref.offset;
3022 size2 = lhs_ref.size;
3023 maxsize2 = lhs_ref.max_size;
3024 reverse = reverse_storage_order_for_component_p (lhs);
3025 if (base2
3026 && !reverse
3027 && !storage_order_barrier_p (lhs)
3028 && known_eq (maxsize2, size2)
3029 && adjust_offsets_for_equal_base_address (base, &offset,
3030 base2, &offset2)
3031 && offset.is_constant (&offseti)
3032 && offset2.is_constant (&offset2i)
3033 && size2.is_constant (&size2i))
3034 {
3035 if (data->partial_defs.is_empty ()
3036 && known_subrange_p (offseti, maxsizei, offset2, size2))
3037 {
3038 /* We support up to 512-bit values (for V8DFmode). */
3039 unsigned char buffer[65];
3040 int len;
3041
3042 tree rhs = gimple_assign_rhs1 (def_stmt);
3043 if (TREE_CODE (rhs) == SSA_NAME)
3044 rhs = SSA_VAL (rhs);
3045 len = native_encode_expr (rhs,
3046 buffer, sizeof (buffer) - 1,
3047 (offseti - offset2i) / BITS_PER_UNIT);
3048 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
3049 {
3050 tree type = vr->type;
3051 unsigned char *buf = buffer;
3052 unsigned int amnt = 0;
3053 /* Make sure to interpret in a type that has a range
3054 covering the whole access size. */
3055 if (INTEGRAL_TYPE_P (vr->type)
3056 && maxsizei != TYPE_PRECISION (vr->type))
3057 type = build_nonstandard_integer_type (maxsizei,
3058 TYPE_UNSIGNED (type));
3059 if (BYTES_BIG_ENDIAN)
3060 {
3061 /* For big-endian native_encode_expr stored the rhs
3062 such that the LSB of it is the LSB of buffer[len - 1].
3063 That bit is stored into memory at position
3064 offset2 + size2 - 1, i.e. in byte
3065 base + (offset2 + size2 - 1) / BITS_PER_UNIT.
3066 E.g. for offset2 1 and size2 14, rhs -1 and memory
3067 previously cleared that is:
3068 0 1
3069 01111111|11111110
3070 Now, if we want to extract offset 2 and size 12 from
3071 it using native_interpret_expr (which actually works
3072 for integral bitfield types in terms of byte size of
3073 the mode), the native_encode_expr stored the value
3074 into buffer as
3075 XX111111|11111111
3076 and returned len 2 (the X bits are outside of
3077 precision).
3078 Let sz be maxsize / BITS_PER_UNIT if not extracting
3079 a bitfield, and GET_MODE_SIZE otherwise.
3080 We need to align the LSB of the value we want to
3081 extract as the LSB of buf[sz - 1].
3082 The LSB from memory we need to read is at position
3083 offset + maxsize - 1. */
3084 HOST_WIDE_INT sz = maxsizei / BITS_PER_UNIT;
3085 if (INTEGRAL_TYPE_P (type))
3086 sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
3087 amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
3088 - offseti - maxsizei) % BITS_PER_UNIT;
3089 if (amnt)
3090 shift_bytes_in_array_right (buffer, len, amnt);
3091 amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
3092 - offseti - maxsizei - amnt) / BITS_PER_UNIT;
3093 if ((unsigned HOST_WIDE_INT) sz + amnt > (unsigned) len)
3094 len = 0;
3095 else
3096 {
3097 buf = buffer + len - sz - amnt;
3098 len -= (buf - buffer);
3099 }
3100 }
3101 else
3102 {
3103 amnt = ((unsigned HOST_WIDE_INT) offset2i
3104 - offseti) % BITS_PER_UNIT;
3105 if (amnt)
3106 {
3107 buffer[len] = 0;
3108 shift_bytes_in_array_left (buffer, len + 1, amnt);
3109 buf = buffer + 1;
3110 }
3111 }
3112 tree val = native_interpret_expr (type, buf, len);
3113 /* If we chop off bits because the type's precision doesn't
3114 match the memory access size, this is ok when optimizing
3115 reads but not when called from the DSE code during
3116 elimination. */
3117 if (val
3118 && type != vr->type)
3119 {
3120 if (! int_fits_type_p (val, vr->type))
3121 val = NULL_TREE;
3122 else
3123 val = fold_convert (vr->type, val);
3124 }
3125
3126 if (val)
3127 return data->finish (ao_ref_alias_set (&lhs_ref),
3128 ao_ref_base_alias_set (&lhs_ref), val);
3129 }
3130 }
3131 else if (ranges_known_overlap_p (offseti, maxsizei, offset2i,
3132 size2i))
3133 {
3134 pd_data pd;
3135 tree rhs = gimple_assign_rhs1 (def_stmt);
3136 if (TREE_CODE (rhs) == SSA_NAME)
3137 rhs = SSA_VAL (rhs);
3138 pd.rhs = rhs;
3139 pd.offset = offset2i;
3140 pd.size = size2i;
3141 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3142 ao_ref_base_alias_set (&lhs_ref),
3143 offseti, maxsizei);
3144 }
3145 }
3146 }
3147
3148 /* 4) Assignment from an SSA name whose definition we may be able
3149 to access pieces from or which we can combine to a larger entity. */
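/* Illustrative case (not from the original sources): for
u.d = d_1;
... = u.parts.lo;
the piece read is rewritten as BIT_FIELD_REF <d_1, size, position>
(or a VIEW_CONVERT_EXPR when the sizes match) and value numbered
through vn_nary_build_or_lookup.  */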
3150 else if (known_eq (ref->size, maxsize)
3151 && is_gimple_reg_type (vr->type)
3152 && !reverse_storage_order_for_component_p (vr->operands)
3153 && !contains_storage_order_barrier_p (vr->operands)
3154 && gimple_assign_single_p (def_stmt)
3155 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
3156 {
3157 tree lhs = gimple_assign_lhs (def_stmt);
3158 tree base2;
3159 poly_int64 offset2, size2, maxsize2;
3160 HOST_WIDE_INT offset2i, size2i, offseti;
3161 bool reverse;
3162 gcc_assert (lhs_ref_ok);
3163 base2 = ao_ref_base (&lhs_ref);
3164 offset2 = lhs_ref.offset;
3165 size2 = lhs_ref.size;
3166 maxsize2 = lhs_ref.max_size;
3167 reverse = reverse_storage_order_for_component_p (lhs);
3168 tree def_rhs = gimple_assign_rhs1 (def_stmt);
3169 if (!reverse
3170 && !storage_order_barrier_p (lhs)
3171 && known_size_p (maxsize2)
3172 && known_eq (maxsize2, size2)
3173 && adjust_offsets_for_equal_base_address (base, &offset,
3174 base2, &offset2))
3175 {
3176 if (data->partial_defs.is_empty ()
3177 && known_subrange_p (offset, maxsize, offset2, size2)
3178 /* ??? We can't handle bitfield precision extracts without
3179 either using an alternate type for the BIT_FIELD_REF and
3180 then doing a conversion or possibly adjusting the offset
3181 according to endianness. */
3182 && (! INTEGRAL_TYPE_P (vr->type)
3183 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
3184 && multiple_p (ref->size, BITS_PER_UNIT))
3185 {
3186 tree val = NULL_TREE;
3187 if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
3188 || type_has_mode_precision_p (TREE_TYPE (def_rhs)))
3189 {
3190 gimple_match_op op (gimple_match_cond::UNCOND,
3191 BIT_FIELD_REF, vr->type,
3192 SSA_VAL (def_rhs),
3193 bitsize_int (ref->size),
3194 bitsize_int (offset - offset2));
3195 val = vn_nary_build_or_lookup (&op);
3196 }
3197 else if (known_eq (ref->size, size2))
3198 {
3199 gimple_match_op op (gimple_match_cond::UNCOND,
3200 VIEW_CONVERT_EXPR, vr->type,
3201 SSA_VAL (def_rhs));
3202 val = vn_nary_build_or_lookup (&op);
3203 }
3204 if (val
3205 && (TREE_CODE (val) != SSA_NAME
3206 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
3207 return data->finish (ao_ref_alias_set (&lhs_ref),
3208 ao_ref_base_alias_set (&lhs_ref), val);
3209 }
3210 else if (maxsize.is_constant (&maxsizei)
3211 && offset.is_constant (&offseti)
3212 && offset2.is_constant (&offset2i)
3213 && size2.is_constant (&size2i)
3214 && ranges_known_overlap_p (offset, maxsize, offset2, size2))
3215 {
3216 pd_data pd;
3217 pd.rhs = SSA_VAL (def_rhs);
3218 pd.offset = offset2i;
3219 pd.size = size2i;
3220 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3221 ao_ref_base_alias_set (&lhs_ref),
3222 offseti, maxsizei);
3223 }
3224 }
3225 }
3226
3227 /* 5) For aggregate copies translate the reference through them if
3228 the copy kills ref. */
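/* Illustrative case (not from the original sources): for
b = a;
... = b.x;
the reference to b.x is rewritten in terms of a and the walk continues
with the translated reference.  */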
3229 else if (data->vn_walk_kind == VN_WALKREWRITE
3230 && gimple_assign_single_p (def_stmt)
3231 && (DECL_P (gimple_assign_rhs1 (def_stmt))
3232 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
3233 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
3234 {
3235 tree base2;
3236 int i, j, k;
3237 auto_vec<vn_reference_op_s> rhs;
3238 vn_reference_op_t vro;
3239 ao_ref r;
3240
3241 gcc_assert (lhs_ref_ok);
3242
3243 /* See if the assignment kills REF. */
3244 base2 = ao_ref_base (&lhs_ref);
3245 if (!lhs_ref.max_size_known_p ()
3246 || (base != base2
3247 && (TREE_CODE (base) != MEM_REF
3248 || TREE_CODE (base2) != MEM_REF
3249 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
3250 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
3251 TREE_OPERAND (base2, 1))))
3252 || !stmt_kills_ref_p (def_stmt, ref))
3253 return (void *)-1;
3254
3255 /* Find the common base of ref and the lhs. lhs_ops already
3256 contains valueized operands for the lhs. */
3257 i = vr->operands.length () - 1;
3258 j = lhs_ops.length () - 1;
3259 while (j >= 0 && i >= 0
3260 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
3261 {
3262 i--;
3263 j--;
3264 }
3265
3266 /* ??? The innermost op should always be a MEM_REF and we already
3267 checked that the assignment to the lhs kills vr. Thus for
3268 aggregate copies using char[] types the vn_reference_op_eq
3269 may fail when comparing types for compatibility. But we really
3270 don't care here - further lookups with the rewritten operands
3271 will simply fail if we messed up types too badly. */
3272 poly_int64 extra_off = 0;
3273 if (j == 0 && i >= 0
3274 && lhs_ops[0].opcode == MEM_REF
3275 && known_ne (lhs_ops[0].off, -1))
3276 {
3277 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
3278 i--, j--;
3279 else if (vr->operands[i].opcode == MEM_REF
3280 && known_ne (vr->operands[i].off, -1))
3281 {
3282 extra_off = vr->operands[i].off - lhs_ops[0].off;
3283 i--, j--;
3284 }
3285 }
3286
3287 /* i now points to the first additional op.
3288 ??? LHS may not be completely contained in VR, one or more
3289 VIEW_CONVERT_EXPRs could be in its way. We could at least
3290 try handling outermost VIEW_CONVERT_EXPRs. */
3291 if (j != -1)
3292 return (void *)-1;
3293
3294 /* Punt if the additional ops contain a storage order barrier. */
3295 for (k = i; k >= 0; k--)
3296 {
3297 vro = &vr->operands[k];
3298 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
3299 return (void *)-1;
3300 }
3301
3302 /* Now re-write REF to be based on the rhs of the assignment. */
3303 tree rhs1 = gimple_assign_rhs1 (def_stmt);
3304 copy_reference_ops_from_ref (rhs1, &rhs);
3305
3306 /* Apply an extra offset to the inner MEM_REF of the RHS. */
3307 bool force_no_tbaa = false;
3308 if (maybe_ne (extra_off, 0))
3309 {
3310 if (rhs.length () < 2)
3311 return (void *)-1;
3312 int ix = rhs.length () - 2;
3313 if (rhs[ix].opcode != MEM_REF
3314 || known_eq (rhs[ix].off, -1))
3315 return (void *)-1;
3316 rhs[ix].off += extra_off;
3317 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
3318 build_int_cst (TREE_TYPE (rhs[ix].op0),
3319 extra_off));
3320 /* When we have offsetted the RHS, reading only parts of it,
3321 we can no longer use the original TBAA type, force alias-set
3322 zero. */
3323 force_no_tbaa = true;
3324 }
3325
3326 /* Save the operands since we need to use the original ones for
3327 the hash entry we use. */
3328 if (!data->saved_operands.exists ())
3329 data->saved_operands = vr->operands.copy ();
3330
3331 /* We need to pre-pend vr->operands[0..i] to rhs. */
3332 vec<vn_reference_op_s> old = vr->operands;
3333 if (i + 1 + rhs.length () > vr->operands.length ())
3334 vr->operands.safe_grow (i + 1 + rhs.length (), true);
3335 else
3336 vr->operands.truncate (i + 1 + rhs.length ());
3337 FOR_EACH_VEC_ELT (rhs, j, vro)
3338 vr->operands[i + 1 + j] = *vro;
3339 valueize_refs (&vr->operands);
3340 if (old == shared_lookup_references)
3341 shared_lookup_references = vr->operands;
3342 vr->hashcode = vn_reference_compute_hash (vr);
3343
3344 /* Try folding the new reference to a constant. */
3345 tree val = fully_constant_vn_reference_p (vr);
3346 if (val)
3347 {
3348 if (data->partial_defs.is_empty ())
3349 return data->finish (ao_ref_alias_set (&lhs_ref),
3350 ao_ref_base_alias_set (&lhs_ref), val);
3351 /* This is the only interesting case for partial-def handling
3352 coming from targets that like to gimplify init-ctors as
3353 aggregate copies from constant data like aarch64 for
3354 PR83518. */
3355 if (maxsize.is_constant (&maxsizei) && known_eq (ref->size, maxsize))
3356 {
3357 pd_data pd;
3358 pd.rhs = val;
3359 pd.offset = 0;
3360 pd.size = maxsizei;
3361 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3362 ao_ref_base_alias_set (&lhs_ref),
3363 0, maxsizei);
3364 }
3365 }
3366
3367 /* Continuing with partial defs isn't easily possible here, we
3368 have to find a full def from further lookups from here. Probably
3369 not worth the special-casing everywhere. */
3370 if (!data->partial_defs.is_empty ())
3371 return (void *)-1;
3372
3373 /* Adjust *ref from the new operands. */
3374 ao_ref rhs1_ref;
3375 ao_ref_init (&rhs1_ref, rhs1);
3376 if (!ao_ref_init_from_vn_reference (&r,
3377 force_no_tbaa ? 0
3378 : ao_ref_alias_set (&rhs1_ref),
3379 force_no_tbaa ? 0
3380 : ao_ref_base_alias_set (&rhs1_ref),
3381 vr->type, vr->operands))
3382 return (void *)-1;
3383 /* This can happen with bitfields. */
3384 if (maybe_ne (ref->size, r.size))
3385 {
3386 /* If the access lacks some subsetting simply apply that by
3387 shortening it. That in the end can only be successful
3388 if we can pun the lookup result which in turn requires
3389 exact offsets. */
3390 if (known_eq (r.size, r.max_size)
3391 && known_lt (ref->size, r.size))
3392 r.size = r.max_size = ref->size;
3393 else
3394 return (void *)-1;
3395 }
3396 *ref = r;
3397
3398 /* Do not update last seen VUSE after translating. */
3399 data->last_vuse_ptr = NULL;
3400 /* Invalidate the original access path since it now contains
3401 the wrong base. */
3402 data->orig_ref.ref = NULL_TREE;
3403 /* Use the alias-set of this LHS for recording an eventual result. */
3404 if (data->first_set == -2)
3405 {
3406 data->first_set = ao_ref_alias_set (&lhs_ref);
3407 data->first_base_set = ao_ref_base_alias_set (&lhs_ref);
3408 }
3409
3410 /* Keep looking for the adjusted *REF / VR pair. */
3411 return NULL;
3412 }
3413
3414 /* 6) For memcpy copies translate the reference through them if the copy
3415 kills ref. But we cannot (easily) do this translation if the memcpy is
3416 a storage order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that
3417 can modify the storage order of objects (see storage_order_barrier_p). */
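/* Illustrative case (not from the original sources): for
memcpy (&b, &a, sizeof (b));
... = b.x;
the read of b.x is rewritten as a MEM_REF based on &a at the
corresponding offset and the walk continues with that reference.  */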
3418 else if (data->vn_walk_kind == VN_WALKREWRITE
3419 && is_gimple_reg_type (vr->type)
3420 /* ??? Handle BCOPY as well. */
3421 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
3422 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY_CHK)
3423 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
3424 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY_CHK)
3425 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE)
3426 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE_CHK))
3427 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
3428 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
3429 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
3430 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
3431 && (poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
3432 || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
3433 && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)),
3434 &copy_size)))
3435 /* Handling this is more complicated, give up for now. */
3436 && data->partial_defs.is_empty ())
3437 {
3438 tree lhs, rhs;
3439 ao_ref r;
3440 poly_int64 rhs_offset, lhs_offset;
3441 vn_reference_op_s op;
3442 poly_uint64 mem_offset;
3443 poly_int64 at, byte_maxsize;
3444
3445 /* Only handle non-variable, addressable refs. */
3446 if (maybe_ne (ref->size, maxsize)
3447 || !multiple_p (offset, BITS_PER_UNIT, &at)
3448 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
3449 return (void *)-1;
3450
3451 /* Extract a pointer base and an offset for the destination. */
3452 lhs = gimple_call_arg (def_stmt, 0);
3453 lhs_offset = 0;
3454 if (TREE_CODE (lhs) == SSA_NAME)
3455 {
3456 lhs = vn_valueize (lhs);
3457 if (TREE_CODE (lhs) == SSA_NAME)
3458 {
3459 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
3460 if (gimple_assign_single_p (def_stmt)
3461 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
3462 lhs = gimple_assign_rhs1 (def_stmt);
3463 }
3464 }
3465 if (TREE_CODE (lhs) == ADDR_EXPR)
3466 {
3467 if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (lhs)))
3468 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (lhs))))
3469 return (void *)-1;
3470 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
3471 &lhs_offset);
3472 if (!tem)
3473 return (void *)-1;
3474 if (TREE_CODE (tem) == MEM_REF
3475 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3476 {
3477 lhs = TREE_OPERAND (tem, 0);
3478 if (TREE_CODE (lhs) == SSA_NAME)
3479 lhs = vn_valueize (lhs);
3480 lhs_offset += mem_offset;
3481 }
3482 else if (DECL_P (tem))
3483 lhs = build_fold_addr_expr (tem);
3484 else
3485 return (void *)-1;
3486 }
3487 if (TREE_CODE (lhs) != SSA_NAME
3488 && TREE_CODE (lhs) != ADDR_EXPR)
3489 return (void *)-1;
3490
3491 /* Extract a pointer base and an offset for the source. */
3492 rhs = gimple_call_arg (def_stmt, 1);
3493 rhs_offset = 0;
3494 if (TREE_CODE (rhs) == SSA_NAME)
3495 rhs = vn_valueize (rhs);
3496 if (TREE_CODE (rhs) == ADDR_EXPR)
3497 {
3498 if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (rhs)))
3499 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (rhs))))
3500 return (void *)-1;
3501 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
3502 &rhs_offset);
3503 if (!tem)
3504 return (void *)-1;
3505 if (TREE_CODE (tem) == MEM_REF
3506 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3507 {
3508 rhs = TREE_OPERAND (tem, 0);
3509 rhs_offset += mem_offset;
3510 }
3511 else if (DECL_P (tem)
3512 || TREE_CODE (tem) == STRING_CST)
3513 rhs = build_fold_addr_expr (tem);
3514 else
3515 return (void *)-1;
3516 }
3517 if (TREE_CODE (rhs) == SSA_NAME)
3518 rhs = SSA_VAL (rhs);
3519 else if (TREE_CODE (rhs) != ADDR_EXPR)
3520 return (void *)-1;
3521
3522 /* The bases of the destination and the references have to agree. */
3523 if (TREE_CODE (base) == MEM_REF)
3524 {
3525 if (TREE_OPERAND (base, 0) != lhs
3526 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
3527 return (void *) -1;
3528 at += mem_offset;
3529 }
3530 else if (!DECL_P (base)
3531 || TREE_CODE (lhs) != ADDR_EXPR
3532 || TREE_OPERAND (lhs, 0) != base)
3533 return (void *)-1;
3534
3535 /* If the access is completely outside of the memcpy destination
3536 area there is no aliasing. */
3537 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
3538 return NULL;
3539 /* And the access has to be contained within the memcpy destination. */
3540 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
3541 return (void *)-1;
3542
3543 /* Save the operands since we need to use the original ones for
3544 the hash entry we use. */
3545 if (!data->saved_operands.exists ())
3546 data->saved_operands = vr->operands.copy ();
3547
3548 /* Make room for 2 operands in the new reference. */
3549 if (vr->operands.length () < 2)
3550 {
3551 vec<vn_reference_op_s> old = vr->operands;
3552 vr->operands.safe_grow_cleared (2, true);
3553 if (old == shared_lookup_references)
3554 shared_lookup_references = vr->operands;
3555 }
3556 else
3557 vr->operands.truncate (2);
3558
3559 /* The looked-through reference is a simple MEM_REF. */
3560 memset (&op, 0, sizeof (op));
3561 op.type = vr->type;
3562 op.opcode = MEM_REF;
3563 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
3564 op.off = at - lhs_offset + rhs_offset;
3565 vr->operands[0] = op;
3566 op.type = TREE_TYPE (rhs);
3567 op.opcode = TREE_CODE (rhs);
3568 op.op0 = rhs;
3569 op.off = -1;
3570 vr->operands[1] = op;
3571 vr->hashcode = vn_reference_compute_hash (vr);
3572
3573 /* Try folding the new reference to a constant. */
3574 tree val = fully_constant_vn_reference_p (vr);
3575 if (val)
3576 return data->finish (0, 0, val);
3577
3578 /* Adjust *ref from the new operands. */
3579 if (!ao_ref_init_from_vn_reference (&r, 0, 0, vr->type, vr->operands))
3580 return (void *)-1;
3581 /* This can happen with bitfields. */
3582 if (maybe_ne (ref->size, r.size))
3583 return (void *)-1;
3584 *ref = r;
3585
3586 /* Do not update last seen VUSE after translating. */
3587 data->last_vuse_ptr = NULL;
3588 /* Invalidate the original access path since it now contains
3589 the wrong base. */
3590 data->orig_ref.ref = NULL_TREE;
3591 /* Use the alias-set of this stmt for recording an eventual result. */
3592 if (data->first_set == -2)
3593 {
3594 data->first_set = 0;
3595 data->first_base_set = 0;
3596 }
3597
3598 /* Keep looking for the adjusted *REF / VR pair. */
3599 return NULL;
3600 }
3601
3602 /* Bail out and stop walking. */
3603 return (void *)-1;
3604 }
3605
3606 /* Return a reference op vector from OP that can be used for
3607 vn_reference_lookup_pieces. The caller is responsible for releasing
3608 the vector. */
3609
3610 vec<vn_reference_op_s>
3611 vn_reference_operands_for_lookup (tree op)
3612 {
3613 bool valueized;
3614 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
3615 }
3616
3617 /* Lookup a reference operation by its parts, in the current hash table.
3618 Returns the resulting value number if it exists in the hash table,
3619 NULL_TREE otherwise. VNRESULT will be filled in with the actual
3620 vn_reference_t stored in the hashtable if something is found. */
3621
3622 tree
3623 vn_reference_lookup_pieces (tree vuse, alias_set_type set,
3624 alias_set_type base_set, tree type,
3625 vec<vn_reference_op_s> operands,
3626 vn_reference_t *vnresult, vn_lookup_kind kind)
3627 {
3628 struct vn_reference_s vr1;
3629 vn_reference_t tmp;
3630 tree cst;
3631
3632 if (!vnresult)
3633 vnresult = &tmp;
3634 *vnresult = NULL;
3635
3636 vr1.vuse = vuse_ssa_val (vuse);
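  /* Copy the operands into the shared lookup vector and valueize them
     so hashing and comparison are done on value numbers.  */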
3637 shared_lookup_references.truncate (0);
3638 shared_lookup_references.safe_grow (operands.length (), true);
3639 memcpy (shared_lookup_references.address (),
3640 operands.address (),
3641 sizeof (vn_reference_op_s)
3642 * operands.length ());
3643 bool valueized_p;
3644 valueize_refs_1 (&shared_lookup_references, &valueized_p);
3645 vr1.operands = shared_lookup_references;
3646 vr1.type = type;
3647 vr1.set = set;
3648 vr1.base_set = base_set;
3649 vr1.hashcode = vn_reference_compute_hash (&vr1);
3650 if ((cst = fully_constant_vn_reference_p (&vr1)))
3651 return cst;
3652
3653 vn_reference_lookup_1 (&vr1, vnresult);
3654 if (!*vnresult
3655 && kind != VN_NOWALK
3656 && vr1.vuse)
3657 {
3658 ao_ref r;
3659 unsigned limit = param_sccvn_max_alias_queries_per_access;
3660 vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE,
3661 false);
3662 vec<vn_reference_op_s> ops_for_ref;
3663 if (!valueized_p)
3664 ops_for_ref = vr1.operands;
3665 else
3666 {
3667 /* For ao_ref_from_mem we have to ensure only available SSA names
3668 end up in base and the only convenient way to make this work
3669 for PRE is to re-valueize with that in mind. */
3670 ops_for_ref.create (operands.length ());
3671 ops_for_ref.quick_grow (operands.length ());
3672 memcpy (ops_for_ref.address (),
3673 operands.address (),
3674 sizeof (vn_reference_op_s)
3675 * operands.length ());
3676 valueize_refs_1 (&ops_for_ref, &valueized_p, true);
3677 }
3678 if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
3679 ops_for_ref))
3680 *vnresult
3681 = ((vn_reference_t)
3682 walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
3683 vn_reference_lookup_3, vuse_valueize,
3684 limit, &data));
3685 if (ops_for_ref != shared_lookup_references)
3686 ops_for_ref.release ();
3687 gcc_checking_assert (vr1.operands == shared_lookup_references);
3688 if (*vnresult
3689 && data.same_val
3690 && (!(*vnresult)->result
3691 || !operand_equal_p ((*vnresult)->result, data.same_val)))
3692 {
3693 *vnresult = NULL;
3694 return NULL_TREE;
3695 }
3696 }
3697
3698 if (*vnresult)
3699 return (*vnresult)->result;
3700
3701 return NULL_TREE;
3702 }
3703
3704 /* Lookup OP in the current hash table, and return the resulting value
3705 number if it exists in the hash table. Return NULL_TREE if it does
3706 not exist in the hash table or if the result field of the structure
3707 was NULL. VNRESULT will be filled in with the vn_reference_t
3708 stored in the hashtable if one exists. When TBAA_P is false assume
3709 we are looking up a store and treat it as having alias-set zero.
3710 *LAST_VUSE_PTR will be updated with the VUSE with which the value
3711 lookup succeeded. MASK is either NULL_TREE, or an INTEGER_CST if
3712 the result of the load is bitwise anded with MASK and so we are only
3713 interested in a subset of the bits and can ignore whether the other
3714 bits are uninitialized or not initialized with constants. When doing
3715 redundant store removal the caller has to set REDUNDANT_STORE_REMOVAL_P. */
3716
3717 tree
3718 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
3719 vn_reference_t *vnresult, bool tbaa_p,
3720 tree *last_vuse_ptr, tree mask,
3721 bool redundant_store_removal_p)
3722 {
3723 vec<vn_reference_op_s> operands;
3724 struct vn_reference_s vr1;
3725 bool valueized_anything;
3726
3727 if (vnresult)
3728 *vnresult = NULL;
3729
3730 vr1.vuse = vuse_ssa_val (vuse);
3731 vr1.operands = operands
3732 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
3733 vr1.type = TREE_TYPE (op);
3734 ao_ref op_ref;
3735 ao_ref_init (&op_ref, op);
3736 vr1.set = ao_ref_alias_set (&op_ref);
3737 vr1.base_set = ao_ref_base_alias_set (&op_ref);
3738 vr1.hashcode = vn_reference_compute_hash (&vr1);
3739 if (mask == NULL_TREE)
3740 if (tree cst = fully_constant_vn_reference_p (&vr1))
3741 return cst;
3742
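  /* If allowed, walk the virtual use-def chain trying to find an earlier
     store or load that provides the value of this reference.  */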
3743 if (kind != VN_NOWALK && vr1.vuse)
3744 {
3745 vn_reference_t wvnresult;
3746 ao_ref r;
3747 unsigned limit = param_sccvn_max_alias_queries_per_access;
3748 auto_vec<vn_reference_op_s> ops_for_ref;
3749 if (valueized_anything)
3750 {
3751 copy_reference_ops_from_ref (op, &ops_for_ref);
3752 bool tem;
3753 valueize_refs_1 (&ops_for_ref, &tem, true);
3754 }
3755 /* Make sure to use a valueized reference if we valueized anything.
3756 Otherwise preserve the full reference for advanced TBAA. */
3757 if (!valueized_anything
3758 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
3759 vr1.type, ops_for_ref))
3760 ao_ref_init (&r, op);
3761 vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
3762 last_vuse_ptr, kind, tbaa_p, mask,
3763 redundant_store_removal_p);
3764
3765 wvnresult
3766 = ((vn_reference_t)
3767 walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p, vn_reference_lookup_2,
3768 vn_reference_lookup_3, vuse_valueize, limit,
3769 &data));
3770 gcc_checking_assert (vr1.operands == shared_lookup_references);
3771 if (wvnresult)
3772 {
3773 gcc_assert (mask == NULL_TREE);
3774 if (data.same_val
3775 && (!wvnresult->result
3776 || !operand_equal_p (wvnresult->result, data.same_val)))
3777 return NULL_TREE;
3778 if (vnresult)
3779 *vnresult = wvnresult;
3780 return wvnresult->result;
3781 }
3782 else if (mask)
3783 return data.masked_result;
3784
3785 return NULL_TREE;
3786 }
3787
3788 if (last_vuse_ptr)
3789 *last_vuse_ptr = vr1.vuse;
3790 if (mask)
3791 return NULL_TREE;
3792 return vn_reference_lookup_1 (&vr1, vnresult);
3793 }
3794
3795 /* Lookup CALL in the current hash table and return the entry in
3796 *VNRESULT if found. Populates *VR for the hashtable lookup. */
3797
3798 void
3799 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
3800 vn_reference_t vr)
3801 {
3802 if (vnresult)
3803 *vnresult = NULL;
3804
3805 tree vuse = gimple_vuse (call);
3806
3807 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
3808 vr->operands = valueize_shared_reference_ops_from_call (call);
3809 tree lhs = gimple_call_lhs (call);
3810 /* For non-SSA return values the reference ops contain the LHS. */
3811 vr->type = ((lhs && TREE_CODE (lhs) == SSA_NAME)
3812 ? TREE_TYPE (lhs) : NULL_TREE);
3813 vr->punned = false;
3814 vr->set = 0;
3815 vr->base_set = 0;
3816 vr->hashcode = vn_reference_compute_hash (vr);
3817 vn_reference_lookup_1 (vr, vnresult);
3818 }
3819
3820 /* Insert OP into the current hash table with a value number of RESULT. */
3821
3822 static void
3823 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
3824 {
3825 vn_reference_s **slot;
3826 vn_reference_t vr1;
3827 bool tem;
3828
3829 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3830 if (TREE_CODE (result) == SSA_NAME)
3831 vr1->value_id = VN_INFO (result)->value_id;
3832 else
3833 vr1->value_id = get_or_alloc_constant_value_id (result);
3834 vr1->vuse = vuse_ssa_val (vuse);
3835 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
3836 vr1->type = TREE_TYPE (op);
3837 vr1->punned = false;
3838 ao_ref op_ref;
3839 ao_ref_init (&op_ref, op);
3840 vr1->set = ao_ref_alias_set (&op_ref);
3841 vr1->base_set = ao_ref_base_alias_set (&op_ref);
3842 vr1->hashcode = vn_reference_compute_hash (vr1);
3843 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
3844 vr1->result_vdef = vdef;
3845
3846 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3847 INSERT);
3848
3849 /* Because IL walking on reference lookup can end up visiting
3850 a def that is only to be visited later in iteration order
3851 when we are about to make an irreducible region reducible,
3852 the def can be effectively processed and its ref inserted
3853 by vn_reference_lookup_3 already. So we cannot assert (!*slot)
3854 but save a lookup if we deal with already inserted refs here. */
3855 if (*slot)
3856 {
3857 /* We cannot assert that we have the same value either because
3858 when disentangling an irreducible region we may end up visiting
3859 a use before the corresponding def. That's a missed optimization
3860 only though. See gcc.dg/tree-ssa/pr87126.c for example. */
3861 if (dump_file && (dump_flags & TDF_DETAILS)
3862 && !operand_equal_p ((*slot)->result, vr1->result, 0))
3863 {
3864 fprintf (dump_file, "Keeping old value ");
3865 print_generic_expr (dump_file, (*slot)->result);
3866 fprintf (dump_file, " because of collision\n");
3867 }
3868 free_reference (vr1);
3869 obstack_free (&vn_tables_obstack, vr1);
3870 return;
3871 }
3872
3873 *slot = vr1;
3874 vr1->next = last_inserted_ref;
3875 last_inserted_ref = vr1;
3876 }
3877
3878 /* Insert a reference by its pieces into the current hash table with
3879 a value number of RESULT. Return the resulting reference
3880 structure we created. */
3881
3882 vn_reference_t
3883 vn_reference_insert_pieces (tree vuse, alias_set_type set,
3884 alias_set_type base_set, tree type,
3885 vec<vn_reference_op_s> operands,
3886 tree result, unsigned int value_id)
3887
3888 {
3889 vn_reference_s **slot;
3890 vn_reference_t vr1;
3891
3892 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3893 vr1->value_id = value_id;
3894 vr1->vuse = vuse_ssa_val (vuse);
3895 vr1->operands = operands;
3896 valueize_refs (&vr1->operands);
3897 vr1->type = type;
3898 vr1->punned = false;
3899 vr1->set = set;
3900 vr1->base_set = base_set;
3901 vr1->hashcode = vn_reference_compute_hash (vr1);
3902 if (result && TREE_CODE (result) == SSA_NAME)
3903 result = SSA_VAL (result);
3904 vr1->result = result;
3905 vr1->result_vdef = NULL_TREE;
3906
3907 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3908 INSERT);
3909
3910 /* At this point we should have all the things inserted that we have
3911 seen before, and we should never try inserting something that
3912 already exists. */
3913 gcc_assert (!*slot);
3914
3915 *slot = vr1;
3916 vr1->next = last_inserted_ref;
3917 last_inserted_ref = vr1;
3918 return vr1;
3919 }
3920
3921 /* Compute and return the hash value for nary operation VNO1. */
3922
3923 hashval_t
3924 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
3925 {
3926 inchash::hash hstate;
3927 unsigned i;
3928
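  /* Canonicalize operand order for commutative operations and for
     comparisons so equivalent expressions compute the same hash.  */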
3929 if (((vno1->length == 2
3930 && commutative_tree_code (vno1->opcode))
3931 || (vno1->length == 3
3932 && commutative_ternary_tree_code (vno1->opcode)))
3933 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3934 std::swap (vno1->op[0], vno1->op[1]);
3935 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
3936 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3937 {
3938 std::swap (vno1->op[0], vno1->op[1]);
3939 vno1->opcode = swap_tree_comparison (vno1->opcode);
3940 }
3941
3942 hstate.add_int (vno1->opcode);
3943 for (i = 0; i < vno1->length; ++i)
3944 inchash::add_expr (vno1->op[i], hstate);
3945
3946 return hstate.end ();
3947 }
3948
3949 /* Compare nary operations VNO1 and VNO2 and return true if they are
3950 equivalent. */
3951
3952 bool
3953 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
3954 {
3955 unsigned i;
3956
3957 if (vno1->hashcode != vno2->hashcode)
3958 return false;
3959
3960 if (vno1->length != vno2->length)
3961 return false;
3962
3963 if (vno1->opcode != vno2->opcode
3964 || !types_compatible_p (vno1->type, vno2->type))
3965 return false;
3966
3967 for (i = 0; i < vno1->length; ++i)
3968 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
3969 return false;
3970
3971 /* BIT_INSERT_EXPR has an implicit operand as the type precision
3972 of op1. Need to check to make sure they are the same. */
3973 if (vno1->opcode == BIT_INSERT_EXPR
3974 && TREE_CODE (vno1->op[1]) == INTEGER_CST
3975 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
3976 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
3977 return false;
3978
3979 return true;
3980 }
3981
3982 /* Initialize VNO from the pieces provided. */
3983
3984 static void
3985 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
3986 enum tree_code code, tree type, tree *ops)
3987 {
3988 vno->opcode = code;
3989 vno->length = length;
3990 vno->type = type;
3991 memcpy (&vno->op[0], ops, sizeof (tree) * length);
3992 }
3993
3994 /* Return the number of operands for a vn_nary ops structure from STMT. */
3995
3996 unsigned int
3997 vn_nary_length_from_stmt (gimple *stmt)
3998 {
3999 switch (gimple_assign_rhs_code (stmt))
4000 {
4001 case REALPART_EXPR:
4002 case IMAGPART_EXPR:
4003 case VIEW_CONVERT_EXPR:
4004 return 1;
4005
4006 case BIT_FIELD_REF:
4007 return 3;
4008
4009 case CONSTRUCTOR:
4010 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
4011
4012 default:
4013 return gimple_num_ops (stmt) - 1;
4014 }
4015 }
4016
4017 /* Initialize VNO from STMT. */
4018
4019 void
4020 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gassign *stmt)
4021 {
4022 unsigned i;
4023
4024 vno->opcode = gimple_assign_rhs_code (stmt);
4025 vno->type = TREE_TYPE (gimple_assign_lhs (stmt));
4026 switch (vno->opcode)
4027 {
4028 case REALPART_EXPR:
4029 case IMAGPART_EXPR:
4030 case VIEW_CONVERT_EXPR:
4031 vno->length = 1;
4032 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
4033 break;
4034
4035 case BIT_FIELD_REF:
4036 vno->length = 3;
4037 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
4038 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
4039 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
4040 break;
4041
4042 case CONSTRUCTOR:
4043 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
4044 for (i = 0; i < vno->length; ++i)
4045 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
4046 break;
4047
4048 default:
4049 gcc_checking_assert (!gimple_assign_single_p (stmt));
4050 vno->length = gimple_num_ops (stmt) - 1;
4051 for (i = 0; i < vno->length; ++i)
4052 vno->op[i] = gimple_op (stmt, i + 1);
4053 }
4054 }
4055
4056 /* Compute the hashcode for VNO and look for it in the hash table;
4057 return the resulting value number if it exists in the hash table.
4058 Return NULL_TREE if it does not exist in the hash table or if the
4059 result field of the operation is NULL. VNRESULT will contain the
4060 vn_nary_op_t from the hashtable if it exists. */
4061
4062 static tree
4063 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
4064 {
4065 vn_nary_op_s **slot;
4066
4067 if (vnresult)
4068 *vnresult = NULL;
4069
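  /* Valueize SSA name operands so the lookup is performed on value
     numbers.  */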
4070 for (unsigned i = 0; i < vno->length; ++i)
4071 if (TREE_CODE (vno->op[i]) == SSA_NAME)
4072 vno->op[i] = SSA_VAL (vno->op[i]);
4073
4074 vno->hashcode = vn_nary_op_compute_hash (vno);
4075 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
4076 if (!slot)
4077 return NULL_TREE;
4078 if (vnresult)
4079 *vnresult = *slot;
4080 return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
4081 }
4082
4083 /* Lookup an n-ary operation by its pieces and return the resulting value
4084 number if it exists in the hash table. Return NULL_TREE if it does
4085 not exist in the hash table or if the result field of the operation
4086 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
4087 if it exists. */
4088
4089 tree
4090 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
4091 tree type, tree *ops, vn_nary_op_t *vnresult)
4092 {
4093 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
4094 sizeof_vn_nary_op (length));
4095 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4096 return vn_nary_op_lookup_1 (vno1, vnresult);
4097 }
4098
4099 /* Lookup the rhs of STMT in the current hash table, and return the resulting
4100 value number if it exists in the hash table. Return NULL_TREE if
4101 it does not exist in the hash table. VNRESULT will contain the
4102 vn_nary_op_t from the hashtable if it exists. */
4103
4104 tree
4105 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
4106 {
4107 vn_nary_op_t vno1
4108 = XALLOCAVAR (struct vn_nary_op_s,
4109 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
4110 init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
4111 return vn_nary_op_lookup_1 (vno1, vnresult);
4112 }
4113
4114 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
4115
4116 vn_nary_op_t
4117 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
4118 {
4119 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
4120 }
4121
4122 /* Allocate and initialize a vn_nary_op_t on the vn_tables
4123 obstack. */
4124
4125 static vn_nary_op_t
4126 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
4127 {
4128 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
4129
4130 vno1->value_id = value_id;
4131 vno1->length = length;
4132 vno1->predicated_values = 0;
4133 vno1->u.result = result;
4134
4135 return vno1;
4136 }
4137
4138 /* Insert VNO into TABLE. */
4139
4140 static vn_nary_op_t
4141 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table)
4142 {
4143 vn_nary_op_s **slot;
4144
4145 gcc_assert (! vno->predicated_values
4146 || (! vno->u.values->next
4147 && vno->u.values->n == 1));
4148
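  /* Canonicalize SSA name operands to their value numbers before
     computing the hash and inserting.  */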
4149 for (unsigned i = 0; i < vno->length; ++i)
4150 if (TREE_CODE (vno->op[i]) == SSA_NAME)
4151 vno->op[i] = SSA_VAL (vno->op[i]);
4152
4153 vno->hashcode = vn_nary_op_compute_hash (vno);
4154 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
4155 vno->unwind_to = *slot;
4156 if (*slot)
4157 {
4158 /* Prefer non-predicated values.
4159 ??? Only if those are constant, otherwise, with constant predicated
4160 value, turn them into predicated values with entry-block validity
4161 (??? but we always find the first valid result currently). */
4162 if ((*slot)->predicated_values
4163 && ! vno->predicated_values)
4164 {
4165 /* ??? We cannot remove *slot from the unwind stack list.
4166 For the moment we deal with this by skipping not found
4167 entries but this isn't ideal ... */
4168 *slot = vno;
4169 /* ??? Maintain a stack of states we can unwind in
4170 vn_nary_op_s? But how far do we unwind? In reality
4171 we need to push change records somewhere... Or not
4172 unwind vn_nary_op_s and linking them but instead
4173 unwind the results "list", linking that, which also
4174 doesn't move on hashtable resize. */
4175 /* We can also have a ->unwind_to recording *slot there.
4176 That way we can make u.values a fixed size array with
4177 recording the number of entries but of course we then
4178 have always N copies for each unwind_to-state. Or we
4179 make sure to only ever append and each unwinding will
4180 pop off one entry (but how to deal with predicated
4181 replaced with non-predicated here?) */
4182 vno->next = last_inserted_nary;
4183 last_inserted_nary = vno;
4184 return vno;
4185 }
4186 else if (vno->predicated_values
4187 && ! (*slot)->predicated_values)
4188 return *slot;
4189 else if (vno->predicated_values
4190 && (*slot)->predicated_values)
4191 {
4192 /* ??? Factor this all into a insert_single_predicated_value
4193 routine. */
4194 gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
4195 basic_block vno_bb
4196 = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
4197 vn_pval *nval = vno->u.values;
4198 vn_pval **next = &vno->u.values;
4199 bool found = false;
4200 for (vn_pval *val = (*slot)->u.values; val; val = val->next)
4201 {
4202 if (expressions_equal_p (val->result, nval->result))
4203 {
4204 found = true;
4205 for (unsigned i = 0; i < val->n; ++i)
4206 {
4207 basic_block val_bb
4208 = BASIC_BLOCK_FOR_FN (cfun,
4209 val->valid_dominated_by_p[i]);
4210 if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
4211 /* Value registered with more generic predicate. */
4212 return *slot;
4213 else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
4214 /* Shouldn't happen, we insert in RPO order. */
4215 gcc_unreachable ();
4216 }
4217 /* Append value. */
4218 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4219 sizeof (vn_pval)
4220 + val->n * sizeof (int));
4221 (*next)->next = NULL;
4222 (*next)->result = val->result;
4223 (*next)->n = val->n + 1;
4224 memcpy ((*next)->valid_dominated_by_p,
4225 val->valid_dominated_by_p,
4226 val->n * sizeof (int));
4227 (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
4228 next = &(*next)->next;
4229 if (dump_file && (dump_flags & TDF_DETAILS))
4230 fprintf (dump_file, "Appending predicate to value.\n");
4231 continue;
4232 }
4233 /* Copy other predicated values. */
4234 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4235 sizeof (vn_pval)
4236 + (val->n-1) * sizeof (int));
4237 memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
4238 (*next)->next = NULL;
4239 next = &(*next)->next;
4240 }
4241 if (!found)
4242 *next = nval;
4243
4244 *slot = vno;
4245 vno->next = last_inserted_nary;
4246 last_inserted_nary = vno;
4247 return vno;
4248 }
4249
4250 /* While we do not want to insert things twice it's awkward to
4251 avoid it in the case where visit_nary_op pattern-matches stuff
4252 and ends up simplifying the replacement to itself. We then
4253 get two inserts, one from visit_nary_op and one from
4254 vn_nary_build_or_lookup.
4255 So allow inserts with the same value number. */
4256 if ((*slot)->u.result == vno->u.result)
4257 return *slot;
4258 }
4259
4260 /* ??? There's also optimistic vs. previous committed state merging
4261 that is problematic for the case of unwinding. */
4262
4263 /* ??? We should return NULL if we do not use 'vno' and have the
4264 caller release it. */
4265 gcc_assert (!*slot);
4266
4267 *slot = vno;
4268 vno->next = last_inserted_nary;
4269 last_inserted_nary = vno;
4270 return vno;
4271 }
4272
4273 /* Insert an n-ary operation into the current hash table using its
4274 pieces. Return the vn_nary_op_t structure we created and put in
4275 the hashtable. */
4276
4277 vn_nary_op_t
4278 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
4279 tree type, tree *ops,
4280 tree result, unsigned int value_id)
4281 {
4282 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
4283 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4284 return vn_nary_op_insert_into (vno1, valid_info->nary);
4285 }
4286
4287 static vn_nary_op_t
4288 vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
4289 tree type, tree *ops,
4290 tree result, unsigned int value_id,
4291 edge pred_e)
4292 {
4293 /* ??? Currently tracking BBs. */
4294 if (! single_pred_p (pred_e->dest))
4295 {
4296 /* Never record for backedges. */
4297 if (pred_e->flags & EDGE_DFS_BACK)
4298 return NULL;
4299 edge_iterator ei;
4300 edge e;
4301 int cnt = 0;
4302 /* Ignore backedges. */
4303 FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
4304 if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4305 cnt++;
4306 if (cnt != 1)
4307 return NULL;
4308 }
4309 if (dump_file && (dump_flags & TDF_DETAILS)
4310 /* ??? Fix dumping, but currently we only get comparisons. */
4311 && TREE_CODE_CLASS (code) == tcc_comparison)
4312 {
4313 fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
4314 pred_e->dest->index);
4315 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4316 fprintf (dump_file, " %s ", get_tree_code_name (code));
4317 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4318 fprintf (dump_file, " == %s\n",
4319 integer_zerop (result) ? "false" : "true");
4320 }
4321 vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
4322 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4323 vno1->predicated_values = 1;
4324 vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4325 sizeof (vn_pval));
4326 vno1->u.values->next = NULL;
4327 vno1->u.values->result = result;
4328 vno1->u.values->n = 1;
4329 vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
4330 return vn_nary_op_insert_into (vno1, valid_info->nary);
4331 }
4332
4333 static bool
4334 dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool);
4335
4336 static tree
4337 vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
4338 {
4339 if (! vno->predicated_values)
4340 return vno->u.result;
4341 for (vn_pval *val = vno->u.values; val; val = val->next)
4342 for (unsigned i = 0; i < val->n; ++i)
4343 /* Do not handle backedge executability optimistically since
4344 when figuring out whether to iterate we do not consider
4345 changed predication. */
4346 if (dominated_by_p_w_unex
4347 (bb, BASIC_BLOCK_FOR_FN (cfun, val->valid_dominated_by_p[i]),
4348 false))
4349 return val->result;
4350 return NULL_TREE;
4351 }
4352
4353 /* Insert the rhs of STMT into the current hash table with a value number of
4354 RESULT. */
4355
4356 static vn_nary_op_t
4357 vn_nary_op_insert_stmt (gimple *stmt, tree result)
4358 {
4359 vn_nary_op_t vno1
4360 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
4361 result, VN_INFO (result)->value_id);
4362 init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
4363 return vn_nary_op_insert_into (vno1, valid_info->nary);
4364 }
4365
4366 /* Compute a hashcode for PHI operation VP1 and return it. */
4367
4368 static inline hashval_t
4369 vn_phi_compute_hash (vn_phi_t vp1)
4370 {
4371 inchash::hash hstate;
4372 tree phi1op;
4373 tree type;
4374 edge e;
4375 edge_iterator ei;
4376
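  /* Hash the number of predecessor edges.  Only mix in the block index
     for PHIs we never try to match across different blocks, i.e. loop
     headers with two predecessors and blocks with more than two
     predecessors (see vn_phi_eq).  */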
4377 hstate.add_int (EDGE_COUNT (vp1->block->preds));
4378 switch (EDGE_COUNT (vp1->block->preds))
4379 {
4380 case 1:
4381 break;
4382 case 2:
4383 if (vp1->block->loop_father->header == vp1->block)
4384 ;
4385 else
4386 break;
4387 /* Fallthru. */
4388 default:
4389 hstate.add_int (vp1->block->index);
4390 }
4391
4392 /* If all PHI arguments are constants we need to distinguish
4393 the PHI node via its type. */
4394 type = vp1->type;
4395 hstate.merge_hash (vn_hash_type (type));
4396
4397 FOR_EACH_EDGE (e, ei, vp1->block->preds)
4398 {
4399 /* Don't hash backedge values, they need to be handled as VN_TOP
4400 for optimistic value-numbering. */
4401 if (e->flags & EDGE_DFS_BACK)
4402 continue;
4403
4404 phi1op = vp1->phiargs[e->dest_idx];
4405 if (phi1op == VN_TOP)
4406 continue;
4407 inchash::add_expr (phi1op, hstate);
4408 }
4409
4410 return hstate.end ();
4411 }
4412
4413
4414 /* Return true if COND1 and COND2 represent the same condition, set
4415 *INVERTED_P if one needs to be inverted to make it the same as
4416 the other. */
4417
4418 static bool
4419 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
4420 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
4421 {
4422 enum tree_code code1 = gimple_cond_code (cond1);
4423 enum tree_code code2 = gimple_cond_code (cond2);
4424
4425 *inverted_p = false;
4426 if (code1 == code2)
4427 ;
4428 else if (code1 == swap_tree_comparison (code2))
4429 std::swap (lhs2, rhs2);
4430 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
4431 *inverted_p = true;
4432 else if (code1 == invert_tree_comparison
4433 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
4434 {
4435 std::swap (lhs2, rhs2);
4436 *inverted_p = true;
4437 }
4438 else
4439 return false;
4440
4441 return ((expressions_equal_p (lhs1, lhs2)
4442 && expressions_equal_p (rhs1, rhs2))
4443 || (commutative_tree_code (code1)
4444 && expressions_equal_p (lhs1, rhs2)
4445 && expressions_equal_p (rhs1, lhs2)));
4446 }
4447
4448 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
4449
4450 static int
4451 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
4452 {
4453 if (vp1->hashcode != vp2->hashcode)
4454 return false;
4455
4456 if (vp1->block != vp2->block)
4457 {
4458 if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
4459 return false;
4460
4461 switch (EDGE_COUNT (vp1->block->preds))
4462 {
4463 case 1:
4464 /* Single-arg PHIs are just copies. */
4465 break;
4466
4467 case 2:
4468 {
4469 /* Rule out backedges into the PHI. */
4470 if (vp1->block->loop_father->header == vp1->block
4471 || vp2->block->loop_father->header == vp2->block)
4472 return false;
4473
4474 /* If the PHI nodes do not have compatible types
4475 they are not the same. */
4476 if (!types_compatible_p (vp1->type, vp2->type))
4477 return false;
4478
4479 basic_block idom1
4480 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4481 basic_block idom2
4482 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
4483 /* If the immediate dominators end in switch stmts multiple
4484 values may end up in the same PHI arg via intermediate
4485 CFG merges. */
4486 if (EDGE_COUNT (idom1->succs) != 2
4487 || EDGE_COUNT (idom2->succs) != 2)
4488 return false;
4489
4490 /* Verify the controlling stmt is the same. */
4491 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
4492 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
4493 if (! last1 || ! last2)
4494 return false;
4495 bool inverted_p;
4496 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
4497 last2, vp2->cclhs, vp2->ccrhs,
4498 &inverted_p))
4499 return false;
4500
4501 /* Get at true/false controlled edges into the PHI. */
4502 edge te1, te2, fe1, fe2;
4503 if (! extract_true_false_controlled_edges (idom1, vp1->block,
4504 &te1, &fe1)
4505 || ! extract_true_false_controlled_edges (idom2, vp2->block,
4506 &te2, &fe2))
4507 return false;
4508
4509 /* Swap edges if the second condition is the inverted of the
4510 first. */
4511 if (inverted_p)
4512 std::swap (te2, fe2);
4513
4514 /* Since we do not know which edge will be executed we have
4515 to be careful when matching VN_TOP. Be conservative and
4516 only match VN_TOP == VN_TOP for now, we could allow
4517 VN_TOP on the not prevailing PHI though. See for example
4518 PR102920. */
4519 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
4520 vp2->phiargs[te2->dest_idx], false)
4521 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
4522 vp2->phiargs[fe2->dest_idx], false))
4523 return false;
4524
4525 return true;
4526 }
4527
4528 default:
4529 return false;
4530 }
4531 }
4532
4533 /* If the PHI nodes do not have compatible types
4534 they are not the same. */
4535 if (!types_compatible_p (vp1->type, vp2->type))
4536 return false;
4537
4538 /* Any phi in the same block will have its arguments in the
4539 same edge order, because of how we store phi nodes. */
4540 unsigned nargs = EDGE_COUNT (vp1->block->preds);
4541 for (unsigned i = 0; i < nargs; ++i)
4542 {
4543 tree phi1op = vp1->phiargs[i];
4544 tree phi2op = vp2->phiargs[i];
4545 if (phi1op == phi2op)
4546 continue;
4547 if (!expressions_equal_p (phi1op, phi2op, false))
4548 return false;
4549 }
4550
4551 return true;
4552 }
4553
4554 /* Lookup PHI in the current hash table, and return the resulting
4555 value number if it exists in the hash table. Return NULL_TREE if
4556 it does not exist in the hash table. */
4557
4558 static tree
4559 vn_phi_lookup (gimple *phi, bool backedges_varying_p)
4560 {
4561 vn_phi_s **slot;
4562 struct vn_phi_s *vp1;
4563 edge e;
4564 edge_iterator ei;
4565
4566 vp1 = XALLOCAVAR (struct vn_phi_s,
4567 sizeof (struct vn_phi_s)
4568 + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
4569
4570 /* Canonicalize the SSA_NAME's to their value number. */
4571 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4572 {
4573 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4574 if (TREE_CODE (def) == SSA_NAME
4575 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4576 {
4577 if (ssa_undefined_value_p (def, false))
4578 def = VN_TOP;
4579 else
4580 def = SSA_VAL (def);
4581 }
4582 vp1->phiargs[e->dest_idx] = def;
4583 }
4584 vp1->type = TREE_TYPE (gimple_phi_result (phi));
4585 vp1->block = gimple_bb (phi);
4586 /* Extract values of the controlling condition. */
4587 vp1->cclhs = NULL_TREE;
4588 vp1->ccrhs = NULL_TREE;
4589 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4590 if (EDGE_COUNT (idom1->succs) == 2)
4591 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4592 {
4593 /* ??? We want to use SSA_VAL here. But possibly not
4594 allow VN_TOP. */
4595 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4596 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4597 }
4598 vp1->hashcode = vn_phi_compute_hash (vp1);
4599 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
4600 if (!slot)
4601 return NULL_TREE;
4602 return (*slot)->result;
4603 }
4604
4605 /* Insert PHI into the current hash table with a value number of
4606 RESULT. */
4607
4608 static vn_phi_t
4609 vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
4610 {
4611 vn_phi_s **slot;
4612 vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
4613 sizeof (vn_phi_s)
4614 + ((gimple_phi_num_args (phi) - 1)
4615 * sizeof (tree)));
4616 edge e;
4617 edge_iterator ei;
4618
4619 /* Canonicalize the SSA_NAME's to their value number. */
4620 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4621 {
4622 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4623 if (TREE_CODE (def) == SSA_NAME
4624 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4625 {
4626 if (ssa_undefined_value_p (def, false))
4627 def = VN_TOP;
4628 else
4629 def = SSA_VAL (def);
4630 }
4631 vp1->phiargs[e->dest_idx] = def;
4632 }
4633 vp1->value_id = VN_INFO (result)->value_id;
4634 vp1->type = TREE_TYPE (gimple_phi_result (phi));
4635 vp1->block = gimple_bb (phi);
4636 /* Extract values of the controlling condition. */
4637 vp1->cclhs = NULL_TREE;
4638 vp1->ccrhs = NULL_TREE;
4639 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4640 if (EDGE_COUNT (idom1->succs) == 2)
4641 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4642 {
4643 /* ??? We want to use SSA_VAL here. But possibly not
4644 allow VN_TOP. */
4645 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4646 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4647 }
4648 vp1->result = result;
4649 vp1->hashcode = vn_phi_compute_hash (vp1);
4650
4651 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
4652 gcc_assert (!*slot);
4653
4654 *slot = vp1;
4655 vp1->next = last_inserted_phi;
4656 last_inserted_phi = vp1;
4657 return vp1;
4658 }
4659
4660
4661 /* Return true if BB1 is dominated by BB2 taking into account edges
4662 that are not executable. When ALLOW_BACK is false consider not
4663 executable backedges as executable. */
4664
4665 static bool
4666 dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool allow_back)
4667 {
4668 edge_iterator ei;
4669 edge e;
4670
4671 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4672 return true;
4673
4674 /* Before iterating we'd like to know if there exists an
4675 (executable) path from bb2 to bb1 at all; if not we can
4676 directly return false. For now simply iterate once. */
4677
4678 /* Iterate to the single executable bb1 predecessor. */
4679 if (EDGE_COUNT (bb1->preds) > 1)
4680 {
4681 edge prede = NULL;
4682 FOR_EACH_EDGE (e, ei, bb1->preds)
4683 if ((e->flags & EDGE_EXECUTABLE)
4684 || (!allow_back && (e->flags & EDGE_DFS_BACK)))
4685 {
4686 if (prede)
4687 {
4688 prede = NULL;
4689 break;
4690 }
4691 prede = e;
4692 }
4693 if (prede)
4694 {
4695 bb1 = prede->src;
4696
4697 /* Re-do the dominance check with changed bb1. */
4698 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4699 return true;
4700 }
4701 }
4702
4703 /* Iterate to the single executable bb2 successor. */
4704 if (EDGE_COUNT (bb2->succs) > 1)
4705 {
4706 edge succe = NULL;
4707 FOR_EACH_EDGE (e, ei, bb2->succs)
4708 if ((e->flags & EDGE_EXECUTABLE)
4709 || (!allow_back && (e->flags & EDGE_DFS_BACK)))
4710 {
4711 if (succe)
4712 {
4713 succe = NULL;
4714 break;
4715 }
4716 succe = e;
4717 }
4718 if (succe)
4719 {
4720 /* Verify the reached block is only reached through succe.
4721 If there is only one edge we can spare us the dominator
4722 check and iterate directly. */
4723 if (EDGE_COUNT (succe->dest->preds) > 1)
4724 {
4725 FOR_EACH_EDGE (e, ei, succe->dest->preds)
4726 if (e != succe
4727 && ((e->flags & EDGE_EXECUTABLE)
4728 || (!allow_back && (e->flags & EDGE_DFS_BACK))))
4729 {
4730 succe = NULL;
4731 break;
4732 }
4733 }
4734 if (succe)
4735 {
4736 bb2 = succe->dest;
4737
4738 /* Re-do the dominance check with changed bb2. */
4739 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4740 return true;
4741 }
4742 }
4743 }
4744
4745 /* We could now iterate updating bb1 / bb2. */
4746 return false;
4747 }
4748
4749 /* Set the value number of FROM to TO, return true if it has changed
4750 as a result. */
4751
4752 static inline bool
4753 set_ssa_val_to (tree from, tree to)
4754 {
4755 vn_ssa_aux_t from_info = VN_INFO (from);
4756 tree currval = from_info->valnum; // SSA_VAL (from)
4757 poly_int64 toff, coff;
4758 bool curr_undefined = false;
4759 bool curr_invariant = false;
4760
4761 /* The only thing we allow as value numbers are ssa_names
4762 and invariants. So assert that here. We don't allow VN_TOP
4763 as visiting a stmt should produce a value-number other than
4764 that.
4765 ??? Still VN_TOP can happen for unreachable code, so force
4766 it to varying in that case. Not all code is prepared to
4767 get VN_TOP on valueization. */
4768 if (to == VN_TOP)
4769 {
4770 /* ??? When iterating and visiting PHI <undef, backedge-value>
4771 for the first time we rightfully get VN_TOP and we need to
4772 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
4773 With SCCVN we were simply lucky we iterated the other PHI
4774 cycles first and thus visited the backedge-value DEF. */
4775 if (currval == VN_TOP)
4776 goto set_and_exit;
4777 if (dump_file && (dump_flags & TDF_DETAILS))
4778 fprintf (dump_file, "Forcing value number to varying on "
4779 "receiving VN_TOP\n");
4780 to = from;
4781 }
4782
4783 gcc_checking_assert (to != NULL_TREE
4784 && ((TREE_CODE (to) == SSA_NAME
4785 && (to == from || SSA_VAL (to) == to))
4786 || is_gimple_min_invariant (to)));
4787
4788 if (from != to)
4789 {
4790 if (currval == from)
4791 {
4792 if (dump_file && (dump_flags & TDF_DETAILS))
4793 {
4794 fprintf (dump_file, "Not changing value number of ");
4795 print_generic_expr (dump_file, from);
4796 fprintf (dump_file, " from VARYING to ");
4797 print_generic_expr (dump_file, to);
4798 fprintf (dump_file, "\n");
4799 }
4800 return false;
4801 }
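      /* Remember whether the current value is an invariant or an
	 (effectively) undefined SSA name; the lattice transition
	 checks below depend on this.  */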
4802 curr_invariant = is_gimple_min_invariant (currval);
4803 curr_undefined = (TREE_CODE (currval) == SSA_NAME
4804 && ssa_undefined_value_p (currval, false));
4805 if (currval != VN_TOP
4806 && !curr_invariant
4807 && !curr_undefined
4808 && is_gimple_min_invariant (to))
4809 {
4810 if (dump_file && (dump_flags & TDF_DETAILS))
4811 {
4812 fprintf (dump_file, "Forcing VARYING instead of changing "
4813 "value number of ");
4814 print_generic_expr (dump_file, from);
4815 fprintf (dump_file, " from ");
4816 print_generic_expr (dump_file, currval);
4817 fprintf (dump_file, " (non-constant) to ");
4818 print_generic_expr (dump_file, to);
4819 fprintf (dump_file, " (constant)\n");
4820 }
4821 to = from;
4822 }
4823 else if (currval != VN_TOP
4824 && !curr_undefined
4825 && TREE_CODE (to) == SSA_NAME
4826 && ssa_undefined_value_p (to, false))
4827 {
4828 if (dump_file && (dump_flags & TDF_DETAILS))
4829 {
4830 fprintf (dump_file, "Forcing VARYING instead of changing "
4831 "value number of ");
4832 print_generic_expr (dump_file, from);
4833 fprintf (dump_file, " from ");
4834 print_generic_expr (dump_file, currval);
4835 fprintf (dump_file, " (non-undefined) to ");
4836 print_generic_expr (dump_file, to);
4837 fprintf (dump_file, " (undefined)\n");
4838 }
4839 to = from;
4840 }
4841 else if (TREE_CODE (to) == SSA_NAME
4842 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
4843 to = from;
4844 }
4845
4846 set_and_exit:
4847 if (dump_file && (dump_flags & TDF_DETAILS))
4848 {
4849 fprintf (dump_file, "Setting value number of ");
4850 print_generic_expr (dump_file, from);
4851 fprintf (dump_file, " to ");
4852 print_generic_expr (dump_file, to);
4853 }
4854
4855 if (currval != to
4856 && !operand_equal_p (currval, to, 0)
4857 /* Different undefined SSA names are not actually different. See
4858 PR82320 for a testcase where we'd otherwise not terminate iteration. */
4859 && !(curr_undefined
4860 && TREE_CODE (to) == SSA_NAME
4861 && ssa_undefined_value_p (to, false))
4862 /* ??? For addresses involving volatile objects or types operand_equal_p
4863 does not reliably detect ADDR_EXPRs as equal. We know we are only
4864 getting invariant gimple addresses here, so can use
4865 get_addr_base_and_unit_offset to do this comparison. */
4866 && !(TREE_CODE (currval) == ADDR_EXPR
4867 && TREE_CODE (to) == ADDR_EXPR
4868 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
4869 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
4870 && known_eq (coff, toff)))
4871 {
4872 if (to != from
4873 && currval != VN_TOP
4874 && !curr_undefined
4875 /* We do not want to allow lattice transitions from one value
4876 to another since that may lead to not terminating iteration
4877 (see PR95049). Since there's no convenient way to check
4878 for the allowed transition of VAL -> PHI (loop entry value,
4879 same on two PHIs, to same PHI result) we restrict the check
4880 to invariants. */
4881 && curr_invariant
4882 && is_gimple_min_invariant (to))
4883 {
4884 if (dump_file && (dump_flags & TDF_DETAILS))
4885 fprintf (dump_file, " forced VARYING");
4886 to = from;
4887 }
4888 if (dump_file && (dump_flags & TDF_DETAILS))
4889 fprintf (dump_file, " (changed)\n");
4890 from_info->valnum = to;
4891 return true;
4892 }
4893 if (dump_file && (dump_flags & TDF_DETAILS))
4894 fprintf (dump_file, "\n");
4895 return false;
4896 }
4897
4898 /* Set all definitions in STMT to value number themselves.
4899 Return true if a value number changed. */
4900
4901 static bool
4902 defs_to_varying (gimple *stmt)
4903 {
4904 bool changed = false;
4905 ssa_op_iter iter;
4906 def_operand_p defp;
4907
4908 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
4909 {
4910 tree def = DEF_FROM_PTR (defp);
4911 changed |= set_ssa_val_to (def, def);
4912 }
4913 return changed;
4914 }
4915
4916 /* Visit a copy between LHS and RHS, return true if the value number
4917 changed. */
4918
4919 static bool
4920 visit_copy (tree lhs, tree rhs)
4921 {
4922 /* Valueize. */
4923 rhs = SSA_VAL (rhs);
4924
4925 return set_ssa_val_to (lhs, rhs);
4926 }
4927
4928 /* Lookup a value for OP in type WIDE_TYPE that is the same as the
4929 value OP has in its own type. */
4930
4931 static tree
4932 valueized_wider_op (tree wide_type, tree op, bool allow_truncate)
4933 {
4934 if (TREE_CODE (op) == SSA_NAME)
4935 op = vn_valueize (op);
4936
4937 /* Either we have the op widened available. */
4938 tree ops[3] = {};
4939 ops[0] = op;
4940 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
4941 wide_type, ops, NULL);
4942 if (tem)
4943 return tem;
4944
4945 /* Or the op is truncated from some existing value. */
4946 if (allow_truncate && TREE_CODE (op) == SSA_NAME)
4947 {
4948 gimple *def = SSA_NAME_DEF_STMT (op);
4949 if (is_gimple_assign (def)
4950 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
4951 {
4952 tem = gimple_assign_rhs1 (def);
4953 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
4954 {
4955 if (TREE_CODE (tem) == SSA_NAME)
4956 tem = vn_valueize (tem);
4957 return tem;
4958 }
4959 }
4960 }
4961
4962 /* For constants simply extend it. */
4963 if (TREE_CODE (op) == INTEGER_CST)
4964 return wide_int_to_tree (wide_type, wi::to_widest (op));
4965
4966 return NULL_TREE;
4967 }
4968
4969 /* Visit a nary operator RHS, value number it, and return true if the
4970 value number of LHS has changed as a result. */
4971
4972 static bool
4973 visit_nary_op (tree lhs, gassign *stmt)
4974 {
4975 vn_nary_op_t vnresult;
4976 tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
4977 if (! result && vnresult)
4978 result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
4979 if (result)
4980 return set_ssa_val_to (lhs, result);
4981
4982 /* Do some special pattern matching for redundancies of operations
4983 in different types. */
4984 enum tree_code code = gimple_assign_rhs_code (stmt);
4985 tree type = TREE_TYPE (lhs);
4986 tree rhs1 = gimple_assign_rhs1 (stmt);
4987 switch (code)
4988 {
4989 CASE_CONVERT:
4990 /* Match arithmetic done in a different type where we can easily
4991 substitute the result from some earlier sign-changed or widened
4992 operation. */
4993 if (INTEGRAL_TYPE_P (type)
4994 && TREE_CODE (rhs1) == SSA_NAME
4995 /* We only handle sign-changes, zero-extension -> & mask or
4996 sign-extension if we know the inner operation doesn't
4997 overflow. */
4998 && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
4999 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
5000 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
5001 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
5002 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
5003 {
5004 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
5005 if (def
5006 && (gimple_assign_rhs_code (def) == PLUS_EXPR
5007 || gimple_assign_rhs_code (def) == MINUS_EXPR
5008 || gimple_assign_rhs_code (def) == MULT_EXPR))
5009 {
5010 tree ops[3] = {};
5011 /* When requiring a sign-extension we cannot model a
5012 previous truncation with a single op so don't bother. */
5013 bool allow_truncate = TYPE_UNSIGNED (TREE_TYPE (rhs1));
5014 /* Either we have the op widened available. */
5015 ops[0] = valueized_wider_op (type, gimple_assign_rhs1 (def),
5016 allow_truncate);
5017 if (ops[0])
5018 ops[1] = valueized_wider_op (type, gimple_assign_rhs2 (def),
5019 allow_truncate);
5020 if (ops[0] && ops[1])
5021 {
5022 ops[0] = vn_nary_op_lookup_pieces
5023 (2, gimple_assign_rhs_code (def), type, ops, NULL);
5024 /* We have wider operation available. */
5025 if (ops[0]
5026 /* If the leader is a wrapping operation we can
5027 insert it for code hoisting w/o introducing
5028 undefined overflow. If it is not it has to
5029 be available. See PR86554. */
5030 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
5031 || (rpo_avail && vn_context_bb
5032 && rpo_avail->eliminate_avail (vn_context_bb,
5033 ops[0]))))
5034 {
5035 unsigned lhs_prec = TYPE_PRECISION (type);
5036 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
5037 if (lhs_prec == rhs_prec
5038 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
5039 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
5040 {
5041 gimple_match_op match_op (gimple_match_cond::UNCOND,
5042 NOP_EXPR, type, ops[0]);
5043 result = vn_nary_build_or_lookup (&match_op);
5044 if (result)
5045 {
5046 bool changed = set_ssa_val_to (lhs, result);
5047 vn_nary_op_insert_stmt (stmt, result);
5048 return changed;
5049 }
5050 }
5051 else
5052 {
5053 tree mask = wide_int_to_tree
5054 (type, wi::mask (rhs_prec, false, lhs_prec));
5055 gimple_match_op match_op (gimple_match_cond::UNCOND,
5056 BIT_AND_EXPR,
5057 TREE_TYPE (lhs),
5058 ops[0], mask);
5059 result = vn_nary_build_or_lookup (&match_op);
5060 if (result)
5061 {
5062 bool changed = set_ssa_val_to (lhs, result);
5063 vn_nary_op_insert_stmt (stmt, result);
5064 return changed;
5065 }
5066 }
5067 }
5068 }
5069 }
5070 }
5071 break;
5072 case BIT_AND_EXPR:
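      /* For a load masked with a constant, retry the reference lookup
	 with the mask so bits outside the mask may be treated as
	 don't-care (see the MASK argument of vn_reference_lookup).  */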
5073 if (INTEGRAL_TYPE_P (type)
5074 && TREE_CODE (rhs1) == SSA_NAME
5075 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
5076 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)
5077 && default_vn_walk_kind != VN_NOWALK
5078 && CHAR_BIT == 8
5079 && BITS_PER_UNIT == 8
5080 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
5081 && !integer_all_onesp (gimple_assign_rhs2 (stmt))
5082 && !integer_zerop (gimple_assign_rhs2 (stmt)))
5083 {
5084 gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
5085 if (ass
5086 && !gimple_has_volatile_ops (ass)
5087 && vn_get_stmt_kind (ass) == VN_REFERENCE)
5088 {
5089 tree last_vuse = gimple_vuse (ass);
5090 tree op = gimple_assign_rhs1 (ass);
5091 tree result = vn_reference_lookup (op, gimple_vuse (ass),
5092 default_vn_walk_kind,
5093 NULL, true, &last_vuse,
5094 gimple_assign_rhs2 (stmt));
5095 if (result
5096 && useless_type_conversion_p (TREE_TYPE (result),
5097 TREE_TYPE (op)))
5098 return set_ssa_val_to (lhs, result);
5099 }
5100 }
5101 break;
5102 case TRUNC_DIV_EXPR:
5103 if (TYPE_UNSIGNED (type))
5104 break;
5105 /* Fallthru. */
5106 case RDIV_EXPR:
5107 case MULT_EXPR:
5108 /* Match up ([-]a){/,*}([-])b with v=a{/,*}b, replacing it with -v. */
5109 if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
5110 {
5111 tree rhs[2];
5112 rhs[0] = rhs1;
5113 rhs[1] = gimple_assign_rhs2 (stmt);
5114 for (unsigned i = 0; i <= 1; ++i)
5115 {
5116 unsigned j = i == 0 ? 1 : 0;
5117 tree ops[2];
5118 gimple_match_op match_op (gimple_match_cond::UNCOND,
5119 NEGATE_EXPR, type, rhs[i]);
5120 ops[i] = vn_nary_build_or_lookup_1 (&match_op, false, true);
5121 ops[j] = rhs[j];
5122 if (ops[i]
5123 && (ops[0] = vn_nary_op_lookup_pieces (2, code,
5124 type, ops, NULL)))
5125 {
5126 gimple_match_op match_op (gimple_match_cond::UNCOND,
5127 NEGATE_EXPR, type, ops[0]);
5128 result = vn_nary_build_or_lookup_1 (&match_op, true, false);
5129 if (result)
5130 {
5131 bool changed = set_ssa_val_to (lhs, result);
5132 vn_nary_op_insert_stmt (stmt, result);
5133 return changed;
5134 }
5135 }
5136 }
5137 }
5138 break;
5139 default:
5140 break;
5141 }
5142
5143 bool changed = set_ssa_val_to (lhs, lhs);
5144 vn_nary_op_insert_stmt (stmt, lhs);
5145 return changed;
5146 }
5147
5148 /* Visit a call STMT storing into LHS. Return true if the value number
5149 of the LHS has changed as a result. */
5150
5151 static bool
5152 visit_reference_op_call (tree lhs, gcall *stmt)
5153 {
5154 bool changed = false;
5155 struct vn_reference_s vr1;
5156 vn_reference_t vnresult = NULL;
5157 tree vdef = gimple_vdef (stmt);
5158 modref_summary *summary;
5159
5160 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
5161 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5162 lhs = NULL_TREE;
5163
5164 vn_reference_lookup_call (stmt, &vnresult, &vr1);
5165
5166 /* If the lookup did not succeed, for pure functions try to use
5167 modref info to find a candidate to CSE to. */
5168 const unsigned accesses_limit = 8;
5169 if (!vnresult
5170 && !vdef
5171 && lhs
5172 && gimple_vuse (stmt)
5173 && (((summary = get_modref_function_summary (stmt, NULL))
5174 && !summary->global_memory_read
5175 && summary->load_accesses < accesses_limit)
5176 || gimple_call_flags (stmt) & ECF_CONST))
5177 {
5178 /* First check whether we can do something useful and build a
5179 vector of all loads we have to check. */
5180 bool unknown_memory_access = false;
5181 auto_vec<ao_ref, accesses_limit> accesses;
5182 unsigned load_accesses = summary ? summary->load_accesses : 0;
5183 if (!unknown_memory_access)
5184 /* Add loads done as part of setting up the call arguments.
5185 That's also necessary for CONST functions which will
5186 not have a modref summary. */
5187 for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
5188 {
5189 tree arg = gimple_call_arg (stmt, i);
5190 if (TREE_CODE (arg) != SSA_NAME
5191 && !is_gimple_min_invariant (arg))
5192 {
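/* Leave room for the loads recorded in the modref summary, which are
   added below.  */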
5193 if (accesses.length () >= accesses_limit - load_accesses)
5194 {
5195 unknown_memory_access = true;
5196 break;
5197 }
5198 accesses.quick_grow (accesses.length () + 1);
5199 ao_ref_init (&accesses.last (), arg);
5200 }
5201 }
5202 if (summary && !unknown_memory_access)
5203 {
5204 /* Add loads as analyzed by IPA modref. */
5205 for (auto base_node : summary->loads->bases)
5206 if (unknown_memory_access)
5207 break;
5208 else for (auto ref_node : base_node->refs)
5209 if (unknown_memory_access)
5210 break;
5211 else for (auto access_node : ref_node->accesses)
5212 {
5213 accesses.quick_grow (accesses.length () + 1);
5214 ao_ref *r = &accesses.last ();
5215 if (!access_node.get_ao_ref (stmt, r))
5216 {
5217 /* Initialize a ref based on the argument and
5218 unknown offset if possible. */
5219 tree arg = access_node.get_call_arg (stmt);
5220 if (arg && TREE_CODE (arg) == SSA_NAME)
5221 arg = SSA_VAL (arg);
5222 if (arg
5223 && TREE_CODE (arg) == ADDR_EXPR
5224 && (arg = get_base_address (arg))
5225 && DECL_P (arg))
5226 {
5227 ao_ref_init (r, arg);
5228 r->ref = NULL_TREE;
5229 r->base = arg;
5230 }
5231 else
5232 {
5233 unknown_memory_access = true;
5234 break;
5235 }
5236 }
5237 r->base_alias_set = base_node->base;
5238 r->ref_alias_set = ref_node->ref;
5239 }
5240 }
5241
5242 /* Walk the VUSE->VDEF chain optimistically trying to find an entry
5243 for the call in the hashtable. */
5244 unsigned limit = (unknown_memory_access
5245 ? 0
5246 : (param_sccvn_max_alias_queries_per_access
5247 / (accesses.length () + 1)));
5248 tree saved_vuse = vr1.vuse;
5249 hashval_t saved_hashcode = vr1.hashcode;
5250 while (limit > 0 && !vnresult && !SSA_NAME_IS_DEFAULT_DEF (vr1.vuse))
5251 {
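/* Step to the next vuse, adjusting the hash by swapping the old vuse's
   SSA version for the new one's.  */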
5252 vr1.hashcode = vr1.hashcode - SSA_NAME_VERSION (vr1.vuse);
5253 gimple *def = SSA_NAME_DEF_STMT (vr1.vuse);
5254 /* ??? We could use fancy stuff like in walk_non_aliased_vuses, but
5255 do not bother for now. */
5256 if (is_a <gphi *> (def))
5257 break;
5258 vr1.vuse = vuse_ssa_val (gimple_vuse (def));
5259 vr1.hashcode = vr1.hashcode + SSA_NAME_VERSION (vr1.vuse);
5260 vn_reference_lookup_1 (&vr1, &vnresult);
5261 limit--;
5262 }
5263
5264 /* If we found a candidate to CSE to, verify it is valid. */
5265 if (vnresult && !accesses.is_empty ())
5266 {
5267 tree vuse = vuse_ssa_val (gimple_vuse (stmt));
5268 while (vnresult && vuse != vr1.vuse)
5269 {
5270 gimple *def = SSA_NAME_DEF_STMT (vuse);
5271 for (auto &ref : accesses)
5272 {
5273 /* ??? stmt_may_clobber_ref_p_1 does per stmt constant
5274 analysis overhead that we might be able to cache. */
5275 if (stmt_may_clobber_ref_p_1 (def, &ref, true))
5276 {
5277 vnresult = NULL;
5278 break;
5279 }
5280 }
5281 vuse = vuse_ssa_val (gimple_vuse (def));
5282 }
5283 }
5284 vr1.vuse = saved_vuse;
5285 vr1.hashcode = saved_hashcode;
5286 }
5287
5288 if (vnresult)
5289 {
5290 if (vdef)
5291 {
5292 if (vnresult->result_vdef)
5293 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
5294 else if (!lhs && gimple_call_lhs (stmt))
5295 /* If stmt has non-SSA_NAME lhs, value number the vdef to itself,
5296 as the call still acts as a lhs store. */
5297 changed |= set_ssa_val_to (vdef, vdef);
5298 else
5299 /* If the call was discovered to be pure or const reflect
5300 that as far as possible. */
5301 changed |= set_ssa_val_to (vdef,
5302 vuse_ssa_val (gimple_vuse (stmt)));
5303 }
5304
5305 if (!vnresult->result && lhs)
5306 vnresult->result = lhs;
5307
5308 if (vnresult->result && lhs)
5309 changed |= set_ssa_val_to (lhs, vnresult->result);
5310 }
5311 else
5312 {
5313 vn_reference_t vr2;
5314 vn_reference_s **slot;
5315 tree vdef_val = vdef;
5316 if (vdef)
5317 {
5318 /* If we value numbered the function of an indirect call to
5319 one not clobbering memory, value number its VDEF to its
5320 VUSE. */
5321 tree fn = gimple_call_fn (stmt);
5322 if (fn && TREE_CODE (fn) == SSA_NAME)
5323 {
5324 fn = SSA_VAL (fn);
5325 if (TREE_CODE (fn) == ADDR_EXPR
5326 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5327 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
5328 & (ECF_CONST | ECF_PURE))
5329 /* If stmt has non-SSA_NAME lhs, value number the
5330 vdef to itself, as the call still acts as a lhs
5331 store. */
5332 && (lhs || gimple_call_lhs (stmt) == NULL_TREE))
5333 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
5334 }
5335 changed |= set_ssa_val_to (vdef, vdef_val);
5336 }
5337 if (lhs)
5338 changed |= set_ssa_val_to (lhs, lhs);
5339 vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
5340 vr2->vuse = vr1.vuse;
5341 /* As we are not walking the virtual operand chain we know the
5342 shared_lookup_references are still original so we can re-use
5343 them here. */
5344 vr2->operands = vr1.operands.copy ();
5345 vr2->type = vr1.type;
5346 vr2->punned = vr1.punned;
5347 vr2->set = vr1.set;
5348 vr2->base_set = vr1.base_set;
5349 vr2->hashcode = vr1.hashcode;
5350 vr2->result = lhs;
5351 vr2->result_vdef = vdef_val;
5352 vr2->value_id = 0;
5353 slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
5354 INSERT);
5355 gcc_assert (!*slot);
5356 *slot = vr2;
5357 vr2->next = last_inserted_ref;
5358 last_inserted_ref = vr2;
5359 }
5360
5361 return changed;
5362 }
5363
5364 /* Visit a load from a reference operator RHS, part of STMT, value number it,
5365 and return true if the value number of the LHS has changed as a result. */
5366
5367 static bool
5368 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
5369 {
5370 bool changed = false;
5371 tree result;
5372 vn_reference_t res;
5373
5374 tree vuse = gimple_vuse (stmt);
5375 tree last_vuse = vuse;
5376 result = vn_reference_lookup (op, vuse, default_vn_walk_kind, &res, true, &last_vuse);
5377
5378 /* We handle type-punning through unions by value-numbering based
5379 on offset and size of the access. Be prepared to handle a
5380 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
5381 if (result
5382 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
5383 {
5384 /* Avoid the type punning in case the result mode has padding where
5385 the op we look up has not. */
5386 if (maybe_lt (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (result))),
5387 GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (op)))))
5388 result = NULL_TREE;
5389 else
5390 {
5391 /* We will be setting the value number of lhs to the value number
5392 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
5393 So first simplify and lookup this expression to see if it
5394 is already available. */
5395 gimple_match_op res_op (gimple_match_cond::UNCOND,
5396 VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
5397 result = vn_nary_build_or_lookup (&res_op);
5398 if (result
5399 && TREE_CODE (result) == SSA_NAME
5400 && VN_INFO (result)->needs_insertion)
5401 /* Track whether this is the canonical expression for different
5402 typed loads. We use that as a stopgap measure for code
5403 hoisting when dealing with floating point loads. */
5404 res->punned = true;
5405 }
5406
5407 /* When building the conversion fails avoid inserting the reference
5408 again. */
5409 if (!result)
5410 return set_ssa_val_to (lhs, lhs);
5411 }
5412
5413 if (result)
5414 changed = set_ssa_val_to (lhs, result);
5415 else
5416 {
5417 changed = set_ssa_val_to (lhs, lhs);
5418 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
5419 if (vuse && SSA_VAL (last_vuse) != SSA_VAL (vuse))
5420 {
5421 if (dump_file && (dump_flags & TDF_DETAILS))
5422 {
5423 fprintf (dump_file, "Using extra use virtual operand ");
5424 print_generic_expr (dump_file, last_vuse);
5425 fprintf (dump_file, "\n");
5426 }
5427 vn_reference_insert (op, lhs, vuse, NULL_TREE);
5428 }
5429 }
5430
5431 return changed;
5432 }
5433
5434
5435 /* Visit a store to a reference operator LHS, part of STMT, value number it,
5436 and return true if the value number of the LHS has changed as a result. */
5437
5438 static bool
5439 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
5440 {
5441 bool changed = false;
5442 vn_reference_t vnresult = NULL;
5443 tree assign;
5444 bool resultsame = false;
5445 tree vuse = gimple_vuse (stmt);
5446 tree vdef = gimple_vdef (stmt);
5447
5448 if (TREE_CODE (op) == SSA_NAME)
5449 op = SSA_VAL (op);
5450
5451 /* First we want to lookup using the *vuses* from the store and see
5452 whether the last store to this location with the same address
5453 had the same value.
5454
5455 The vuses represent the memory state before the store. If the
5456 memory state, address, and value of the store is the same as the
5457 last store to this location, then this store will produce the
5458 same memory state as that store.
5459
5460 In this case the vdef versions for this store are value numbered to those
5461 vuse versions, since they represent the same memory state after
5462 this store.
5463
5464 Otherwise, the vdefs for the store are used when inserting into
5465 the table, since the store generates a new memory state. */
5466
5467 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
5468 if (vnresult
5469 && vnresult->result)
5470 {
5471 tree result = vnresult->result;
5472 gcc_checking_assert (TREE_CODE (result) != SSA_NAME
5473 || result == SSA_VAL (result));
5474 resultsame = expressions_equal_p (result, op);
5475 if (resultsame)
5476 {
5477 /* If the TBAA state isn't compatible for downstream reads
5478 we cannot value-number the VDEFs the same. */
5479 ao_ref lhs_ref;
5480 ao_ref_init (&lhs_ref, lhs);
5481 alias_set_type set = ao_ref_alias_set (&lhs_ref);
5482 alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
5483 if ((vnresult->set != set
5484 && ! alias_set_subset_of (set, vnresult->set))
5485 || (vnresult->base_set != base_set
5486 && ! alias_set_subset_of (base_set, vnresult->base_set)))
5487 resultsame = false;
5488 }
5489 }
5490
5491 if (!resultsame)
5492 {
5493 if (dump_file && (dump_flags & TDF_DETAILS))
5494 {
5495 fprintf (dump_file, "No store match\n");
5496 fprintf (dump_file, "Value numbering store ");
5497 print_generic_expr (dump_file, lhs);
5498 fprintf (dump_file, " to ");
5499 print_generic_expr (dump_file, op);
5500 fprintf (dump_file, "\n");
5501 }
5502 /* Have to set value numbers before insert, since insert is
5503 going to valueize the references in-place. */
5504 if (vdef)
5505 changed |= set_ssa_val_to (vdef, vdef);
5506
5507 /* Do not insert structure copies into the tables. */
5508 if (is_gimple_min_invariant (op)
5509 || is_gimple_reg (op))
5510 vn_reference_insert (lhs, op, vdef, NULL);
5511
5512 /* Only perform the following when being called from PRE
5513 which embeds tail merging. */
5514 if (default_vn_walk_kind == VN_WALK)
5515 {
5516 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
5517 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
5518 if (!vnresult)
5519 vn_reference_insert (assign, lhs, vuse, vdef);
5520 }
5521 }
5522 else
5523 {
5524 /* We had a match, so value number the vdef to have the value
5525 number of the vuse it came from. */
5526
5527 if (dump_file && (dump_flags & TDF_DETAILS))
5528 fprintf (dump_file, "Store matched earlier value, "
5529 "value numbering store vdefs to matching vuses.\n");
5530
5531 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
5532 }
5533
5534 return changed;
5535 }
5536
5537 /* Visit and value number PHI, return true if the value number
5538 changed. When BACKEDGES_VARYING_P is true then assume all
5539 backedge values are varying. When INSERTED is not NULL then
5540 this is just a look-ahead query for a possible iteration; set INSERTED
5541 to true if we'd insert into the hashtable. */
5542
5543 static bool
5544 visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
5545 {
5546 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
5547 tree backedge_val = NULL_TREE;
5548 bool seen_non_backedge = false;
5549 tree sameval_base = NULL_TREE;
5550 poly_int64 soff, doff;
5551 unsigned n_executable = 0;
5552 edge_iterator ei;
5553 edge e;
5554
5555 /* TODO: We could check for this in initialization, and replace this
5556 with a gcc_assert. */
5557 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
5558 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
5559
5560 /* We track whether a PHI was CSEd to, to avoid excessive iterations
5561 that would be necessary only because the PHI changed arguments
5562 but not value. */
5563 if (!inserted)
5564 gimple_set_plf (phi, GF_PLF_1, false);
5565
5566 /* See if all non-TOP arguments have the same value. TOP is
5567 equivalent to everything, so we can ignore it. */
5568 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
5569 if (e->flags & EDGE_EXECUTABLE)
5570 {
5571 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5572
5573 if (def == PHI_RESULT (phi))
5574 continue;
5575 ++n_executable;
5576 if (TREE_CODE (def) == SSA_NAME)
5577 {
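/* Do not look through definitions flowing in over a backedge when
   backedge values are to be treated as varying.  */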
5578 if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
5579 def = SSA_VAL (def);
5580 if (e->flags & EDGE_DFS_BACK)
5581 backedge_val = def;
5582 }
5583 if (!(e->flags & EDGE_DFS_BACK))
5584 seen_non_backedge = true;
5585 if (def == VN_TOP)
5586 ;
5587 /* Ignore undefined defs for sameval but record one. */
5588 else if (TREE_CODE (def) == SSA_NAME
5589 && ! virtual_operand_p (def)
5590 && ssa_undefined_value_p (def, false))
5591 seen_undef = def;
5592 else if (sameval == VN_TOP)
5593 sameval = def;
5594 else if (!expressions_equal_p (def, sameval))
5595 {
5596 /* We know we're arriving only with invariant addresses here,
5597 try harder comparing them. We can do some caching here
5598 which we cannot do in expressions_equal_p. */
5599 if (TREE_CODE (def) == ADDR_EXPR
5600 && TREE_CODE (sameval) == ADDR_EXPR
5601 && sameval_base != (void *)-1)
5602 {
5603 if (!sameval_base)
5604 sameval_base = get_addr_base_and_unit_offset
5605 (TREE_OPERAND (sameval, 0), &soff);
5606 if (!sameval_base)
5607 sameval_base = (tree)(void *)-1;
5608 else if ((get_addr_base_and_unit_offset
5609 (TREE_OPERAND (def, 0), &doff) == sameval_base)
5610 && known_eq (soff, doff))
5611 continue;
5612 }
5613 sameval = NULL_TREE;
5614 break;
5615 }
5616 }
5617
5618 /* If the value we want to use is flowing over the backedge and we
5619 should take it as VARYING but it has a non-VARYING value, drop to
5620 VARYING.
5621 If we value-number a virtual operand never value-number to the
5622 value from the backedge as that confuses the alias-walking code.
5623 See gcc.dg/torture/pr87176.c. If the value is the same on a
5624 non-backedge everything is OK though. */
5625 bool visited_p;
5626 if ((backedge_val
5627 && !seen_non_backedge
5628 && TREE_CODE (backedge_val) == SSA_NAME
5629 && sameval == backedge_val
5630 && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
5631 || SSA_VAL (backedge_val) != backedge_val))
5632 /* Do not value-number a virtual operand to something not yet visited,
5633 though, given that allows us to escape a region in alias walking. */
5634 || (sameval
5635 && TREE_CODE (sameval) == SSA_NAME
5636 && !SSA_NAME_IS_DEFAULT_DEF (sameval)
5637 && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
5638 && (SSA_VAL (sameval, &visited_p), !visited_p)))
5639 /* Note this just drops to VARYING without inserting the PHI into
5640 the hashes. */
5641 result = PHI_RESULT (phi);
5642 /* If none of the edges was executable keep the value-number at VN_TOP;
5643 if only a single edge is executable use its value. */
5644 else if (n_executable <= 1)
5645 result = seen_undef ? seen_undef : sameval;
5646 /* If we saw only undefined values and VN_TOP use one of the
5647 undefined values. */
5648 else if (sameval == VN_TOP)
5649 result = seen_undef ? seen_undef : sameval;
5650 /* First see if it is equivalent to a phi node in this block. We prefer
5651 this as it allows IV elimination - see PRs 66502 and 67167. */
5652 else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
5653 {
5654 if (!inserted
5655 && TREE_CODE (result) == SSA_NAME
5656 && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
5657 {
5658 gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
5659 if (dump_file && (dump_flags & TDF_DETAILS))
5660 {
5661 fprintf (dump_file, "Marking CSEd to PHI node ");
5662 print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
5663 0, TDF_SLIM);
5664 fprintf (dump_file, "\n");
5665 }
5666 }
5667 }
5668 /* If all values are the same use that, unless we've seen undefined
5669 values as well and the value isn't constant.
5670 CCP/copyprop have the same restriction to not remove uninit warnings. */
5671 else if (sameval
5672 && (! seen_undef || is_gimple_min_invariant (sameval)))
5673 result = sameval;
5674 else
5675 {
5676 result = PHI_RESULT (phi);
5677 /* Only insert PHIs that are varying, for constant value numbers
5678 we mess up equivalences otherwise as we are only comparing
5679 the immediate controlling predicates. */
5680 vn_phi_insert (phi, result, backedges_varying_p);
5681 if (inserted)
5682 *inserted = true;
5683 }
5684
5685 return set_ssa_val_to (PHI_RESULT (phi), result);
5686 }
5687
5688 /* Try to simplify RHS using equivalences and constant folding. */
5689
5690 static tree
5691 try_to_simplify (gassign *stmt)
5692 {
5693 enum tree_code code = gimple_assign_rhs_code (stmt);
5694 tree tem;
5695
5696 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
5697 in this case, there is no point in doing extra work. */
5698 if (code == SSA_NAME)
5699 return NULL_TREE;
5700
5701 /* First try constant folding based on our current lattice. */
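/* Hook up lookups in the VN tables so the folder can re-use already
   value-numbered results.  */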
5702 mprts_hook = vn_lookup_simplify_result;
5703 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
5704 mprts_hook = NULL;
5705 if (tem
5706 && (TREE_CODE (tem) == SSA_NAME
5707 || is_gimple_min_invariant (tem)))
5708 return tem;
5709
5710 return NULL_TREE;
5711 }
5712
5713 /* Visit and value number STMT, return true if the value number
5714 changed. */
5715
5716 static bool
5717 visit_stmt (gimple *stmt, bool backedges_varying_p = false)
5718 {
5719 bool changed = false;
5720
5721 if (dump_file && (dump_flags & TDF_DETAILS))
5722 {
5723 fprintf (dump_file, "Value numbering stmt = ");
5724 print_gimple_stmt (dump_file, stmt, 0);
5725 }
5726
5727 if (gimple_code (stmt) == GIMPLE_PHI)
5728 changed = visit_phi (stmt, NULL, backedges_varying_p);
5729 else if (gimple_has_volatile_ops (stmt))
5730 changed = defs_to_varying (stmt);
5731 else if (gassign *ass = dyn_cast <gassign *> (stmt))
5732 {
5733 enum tree_code code = gimple_assign_rhs_code (ass);
5734 tree lhs = gimple_assign_lhs (ass);
5735 tree rhs1 = gimple_assign_rhs1 (ass);
5736 tree simplified;
5737
5738 /* Shortcut for copies. Simplifying copies is pointless,
5739 since we copy the expression and value they represent. */
5740 if (code == SSA_NAME
5741 && TREE_CODE (lhs) == SSA_NAME)
5742 {
5743 changed = visit_copy (lhs, rhs1);
5744 goto done;
5745 }
5746 simplified = try_to_simplify (ass);
5747 if (simplified)
5748 {
5749 if (dump_file && (dump_flags & TDF_DETAILS))
5750 {
5751 fprintf (dump_file, "RHS ");
5752 print_gimple_expr (dump_file, ass, 0);
5753 fprintf (dump_file, " simplified to ");
5754 print_generic_expr (dump_file, simplified);
5755 fprintf (dump_file, "\n");
5756 }
5757 }
5758 /* Setting value numbers to constants will occasionally
5759 screw up phi congruence because constants are not
5760 uniquely associated with a single ssa name that can be
5761 looked up. */
5762 if (simplified
5763 && is_gimple_min_invariant (simplified)
5764 && TREE_CODE (lhs) == SSA_NAME)
5765 {
5766 changed = set_ssa_val_to (lhs, simplified);
5767 goto done;
5768 }
5769 else if (simplified
5770 && TREE_CODE (simplified) == SSA_NAME
5771 && TREE_CODE (lhs) == SSA_NAME)
5772 {
5773 changed = visit_copy (lhs, simplified);
5774 goto done;
5775 }
5776
5777 if ((TREE_CODE (lhs) == SSA_NAME
5778 /* We can substitute SSA_NAMEs that are live over
5779 abnormal edges with their constant value. */
5780 && !(gimple_assign_copy_p (ass)
5781 && is_gimple_min_invariant (rhs1))
5782 && !(simplified
5783 && is_gimple_min_invariant (simplified))
5784 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
5785 /* Stores or copies from SSA_NAMEs that are live over
5786 abnormal edges are a problem. */
5787 || (code == SSA_NAME
5788 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
5789 changed = defs_to_varying (ass);
5790 else if (REFERENCE_CLASS_P (lhs)
5791 || DECL_P (lhs))
5792 changed = visit_reference_op_store (lhs, rhs1, ass);
5793 else if (TREE_CODE (lhs) == SSA_NAME)
5794 {
5795 if ((gimple_assign_copy_p (ass)
5796 && is_gimple_min_invariant (rhs1))
5797 || (simplified
5798 && is_gimple_min_invariant (simplified)))
5799 {
5800 if (simplified)
5801 changed = set_ssa_val_to (lhs, simplified);
5802 else
5803 changed = set_ssa_val_to (lhs, rhs1);
5804 }
5805 else
5806 {
5807 /* Visit the original statement. */
5808 switch (vn_get_stmt_kind (ass))
5809 {
5810 case VN_NARY:
5811 changed = visit_nary_op (lhs, ass);
5812 break;
5813 case VN_REFERENCE:
5814 changed = visit_reference_op_load (lhs, rhs1, ass);
5815 break;
5816 default:
5817 changed = defs_to_varying (ass);
5818 break;
5819 }
5820 }
5821 }
5822 else
5823 changed = defs_to_varying (ass);
5824 }
5825 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5826 {
5827 tree lhs = gimple_call_lhs (call_stmt);
5828 if (lhs && TREE_CODE (lhs) == SSA_NAME)
5829 {
5830 /* Try constant folding based on our current lattice. */
5831 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
5832 vn_valueize);
5833 if (simplified)
5834 {
5835 if (dump_file && (dump_flags & TDF_DETAILS))
5836 {
5837 fprintf (dump_file, "call ");
5838 print_gimple_expr (dump_file, call_stmt, 0);
5839 fprintf (dump_file, " simplified to ");
5840 print_generic_expr (dump_file, simplified);
5841 fprintf (dump_file, "\n");
5842 }
5843 }
5844 /* Setting value numbers to constants will occasionally
5845 screw up phi congruence because constants are not
5846 uniquely associated with a single ssa name that can be
5847 looked up. */
5848 if (simplified
5849 && is_gimple_min_invariant (simplified))
5850 {
5851 changed = set_ssa_val_to (lhs, simplified);
5852 if (gimple_vdef (call_stmt))
5853 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
5854 SSA_VAL (gimple_vuse (call_stmt)));
5855 goto done;
5856 }
5857 else if (simplified
5858 && TREE_CODE (simplified) == SSA_NAME)
5859 {
5860 changed = visit_copy (lhs, simplified);
5861 if (gimple_vdef (call_stmt))
5862 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
5863 SSA_VAL (gimple_vuse (call_stmt)));
5864 goto done;
5865 }
5866 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
5867 {
5868 changed = defs_to_varying (call_stmt);
5869 goto done;
5870 }
5871 }
5872
5873 /* Pick up flags from a devirtualization target. */
5874 tree fn = gimple_call_fn (stmt);
5875 int extra_fnflags = 0;
5876 if (fn && TREE_CODE (fn) == SSA_NAME)
5877 {
5878 fn = SSA_VAL (fn);
5879 if (TREE_CODE (fn) == ADDR_EXPR
5880 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
5881 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
5882 }
5883 if ((/* Calls to the same function with the same vuse
5884 and the same operands do not necessarily return the same
5885 value, unless they're pure or const. */
5886 ((gimple_call_flags (call_stmt) | extra_fnflags)
5887 & (ECF_PURE | ECF_CONST))
5888 /* If calls have a vdef, subsequent calls won't have
5889 the same incoming vuse. So, if 2 calls with vdef have the
5890 same vuse, we know they're not subsequent.
5891 We can value number 2 calls to the same function with the
5892 same vuse and the same operands which are not subsequent
5893 the same, because there is no code in the program that can
5894 compare the 2 values... */
5895 || (gimple_vdef (call_stmt)
5896 /* ... unless the call returns a pointer which does
5897 not alias with anything else. In which case the
5898 information that the values are distinct is encoded
5899 in the IL. */
5900 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
5901 /* Only perform the following when being called from PRE
5902 which embeds tail merging. */
5903 && default_vn_walk_kind == VN_WALK))
5904 /* Do not process .DEFERRED_INIT since that confuses uninit
5905 analysis. */
5906 && !gimple_call_internal_p (call_stmt, IFN_DEFERRED_INIT))
5907 changed = visit_reference_op_call (lhs, call_stmt);
5908 else
5909 changed = defs_to_varying (call_stmt);
5910 }
5911 else
5912 changed = defs_to_varying (stmt);
5913 done:
5914 return changed;
5915 }
5916
5917
5918 /* Allocate a value number table. */
5919
5920 static void
5921 allocate_vn_table (vn_tables_t table, unsigned size)
5922 {
5923 table->phis = new vn_phi_table_type (size);
5924 table->nary = new vn_nary_op_table_type (size);
5925 table->references = new vn_reference_table_type (size);
5926 }
5927
5928 /* Free a value number table. */
5929
5930 static void
5931 free_vn_table (vn_tables_t table)
5932 {
5933 /* Walk over elements and release vectors. */
5934 vn_reference_iterator_type hir;
5935 vn_reference_t vr;
5936 FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
5937 vr->operands.release ();
5938 delete table->phis;
5939 table->phis = NULL;
5940 delete table->nary;
5941 table->nary = NULL;
5942 delete table->references;
5943 table->references = NULL;
5944 }
5945
5946 /* Set *ID according to RESULT. */
5947
5948 static void
5949 set_value_id_for_result (tree result, unsigned int *id)
5950 {
5951 if (result && TREE_CODE (result) == SSA_NAME)
5952 *id = VN_INFO (result)->value_id;
5953 else if (result && is_gimple_min_invariant (result))
5954 *id = get_or_alloc_constant_value_id (result);
5955 else
5956 *id = get_next_value_id ();
5957 }
5958
5959 /* Set the value ids in the valid hash tables. */
5960
5961 static void
5962 set_hashtable_value_ids (void)
5963 {
5964 vn_nary_op_iterator_type hin;
5965 vn_phi_iterator_type hip;
5966 vn_reference_iterator_type hir;
5967 vn_nary_op_t vno;
5968 vn_reference_t vr;
5969 vn_phi_t vp;
5970
5971 /* Now set the value ids of the things we had put in the hash
5972 table. */
5973
5974 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
5975 if (! vno->predicated_values)
5976 set_value_id_for_result (vno->u.result, &vno->value_id);
5977
5978 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
5979 set_value_id_for_result (vp->result, &vp->value_id);
5980
5981 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
5982 hir)
5983 set_value_id_for_result (vr->result, &vr->value_id);
5984 }
5985
5986 /* Return the maximum value id we have ever seen. */
5987
5988 unsigned int
5989 get_max_value_id (void)
5990 {
5991 return next_value_id;
5992 }
5993
5994 /* Return the maximum constant value id we have ever seen. */
5995
5996 unsigned int
5997 get_max_constant_value_id (void)
5998 {
5999 return -next_constant_value_id;
6000 }
6001
6002 /* Return the next unique value id. */
6003
6004 unsigned int
6005 get_next_value_id (void)
6006 {
6007 gcc_checking_assert ((int)next_value_id > 0);
6008 return next_value_id++;
6009 }
6010
6011 /* Return the next unique value id for constants. */
6012
6013 unsigned int
6014 get_next_constant_value_id (void)
6015 {
6016 gcc_checking_assert (next_constant_value_id < 0);
6017 return next_constant_value_id--;
6018 }
6019
6020
6021 /* Compare two expressions E1 and E2 and return true if they are equal.
6022 If match_vn_top_optimistically is true then VN_TOP is equal to anything,
6023 otherwise VN_TOP only matches VN_TOP. */
6024
6025 bool
6026 expressions_equal_p (tree e1, tree e2, bool match_vn_top_optimistically)
6027 {
6028 /* The obvious case. */
6029 if (e1 == e2)
6030 return true;
6031
6032 /* If either one is VN_TOP consider them equal. */
6033 if (match_vn_top_optimistically
6034 && (e1 == VN_TOP || e2 == VN_TOP))
6035 return true;
6036
6037 /* If only one of them is null, they cannot be equal. While in general
6038 this should not happen, for operations like TARGET_MEM_REF some
6039 operands are optional and an identity value we could substitute
6040 has differing semantics. */
6041 if (!e1 || !e2)
6042 return false;
6043
6044 /* SSA_NAMEs compare pointer equal. */
6045 if (TREE_CODE (e1) == SSA_NAME || TREE_CODE (e2) == SSA_NAME)
6046 return false;
6047
6048 /* Now perform the actual comparison. */
6049 if (TREE_CODE (e1) == TREE_CODE (e2)
6050 && operand_equal_p (e1, e2, OEP_PURE_SAME))
6051 return true;
6052
6053 return false;
6054 }
6055
6056
6057 /* Return true if the nary operation NARY may trap. This is a copy
6058 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
6059
6060 bool
6061 vn_nary_may_trap (vn_nary_op_t nary)
6062 {
6063 tree type;
6064 tree rhs2 = NULL_TREE;
6065 bool honor_nans = false;
6066 bool honor_snans = false;
6067 bool fp_operation = false;
6068 bool honor_trapv = false;
6069 bool handled, ret;
6070 unsigned i;
6071
6072 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
6073 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
6074 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
6075 {
6076 type = nary->type;
6077 fp_operation = FLOAT_TYPE_P (type);
6078 if (fp_operation)
6079 {
6080 honor_nans = flag_trapping_math && !flag_finite_math_only;
6081 honor_snans = flag_signaling_nans != 0;
6082 }
6083 else if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_TRAPS (type))
6084 honor_trapv = true;
6085 }
6086 if (nary->length >= 2)
6087 rhs2 = nary->op[1];
6088 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
6089 honor_trapv, honor_nans, honor_snans,
6090 rhs2, &handled);
6091 if (handled && ret)
6092 return true;
6093
6094 for (i = 0; i < nary->length; ++i)
6095 if (tree_could_trap_p (nary->op[i]))
6096 return true;
6097
6098 return false;
6099 }
6100
6101 /* Return true if the reference operation REF may trap. */
6102
6103 bool
6104 vn_reference_may_trap (vn_reference_t ref)
6105 {
6106 switch (ref->operands[0].opcode)
6107 {
6108 case MODIFY_EXPR:
6109 case CALL_EXPR:
6110 /* We do not handle calls. */
6111 return true;
6112 case ADDR_EXPR:
6113 /* And toplevel address computations never trap. */
6114 return false;
6115 default:;
6116 }
6117
6118 vn_reference_op_t op;
6119 unsigned i;
6120 FOR_EACH_VEC_ELT (ref->operands, i, op)
6121 {
6122 switch (op->opcode)
6123 {
6124 case WITH_SIZE_EXPR:
6125 case TARGET_MEM_REF:
6126 /* Always variable. */
6127 return true;
6128 case COMPONENT_REF:
6129 if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
6130 return true;
6131 break;
6132 case ARRAY_RANGE_REF:
6133 if (TREE_CODE (op->op0) == SSA_NAME)
6134 return true;
6135 break;
6136 case ARRAY_REF:
6137 {
6138 if (TREE_CODE (op->op0) != INTEGER_CST)
6139 return true;
6140
6141 /* !in_array_bounds */
6142 tree domain_type = TYPE_DOMAIN (ref->operands[i+1].type);
6143 if (!domain_type)
6144 return true;
6145
6146 tree min = op->op1;
6147 tree max = TYPE_MAX_VALUE (domain_type);
6148 if (!min
6149 || !max
6150 || TREE_CODE (min) != INTEGER_CST
6151 || TREE_CODE (max) != INTEGER_CST)
6152 return true;
6153
6154 if (tree_int_cst_lt (op->op0, min)
6155 || tree_int_cst_lt (max, op->op0))
6156 return true;
6157
6158 break;
6159 }
6160 case MEM_REF:
6161 /* Nothing interesting in itself, the base is separate. */
6162 break;
6163 /* The following are the address bases. */
6164 case SSA_NAME:
6165 return true;
6166 case ADDR_EXPR:
6167 if (op->op0)
6168 return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
6169 return false;
6170 default:;
6171 }
6172 }
6173 return false;
6174 }
6175
6176 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
6177 bitmap inserted_exprs_)
6178 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
6179 el_todo (0), eliminations (0), insertions (0),
6180 inserted_exprs (inserted_exprs_)
6181 {
6182 need_eh_cleanup = BITMAP_ALLOC (NULL);
6183 need_ab_cleanup = BITMAP_ALLOC (NULL);
6184 }
6185
6186 eliminate_dom_walker::~eliminate_dom_walker ()
6187 {
6188 BITMAP_FREE (need_eh_cleanup);
6189 BITMAP_FREE (need_ab_cleanup);
6190 }
6191
6192 /* Return a leader for OP that is available at the current point of the
6193 eliminate domwalk. */
6194
6195 tree
6196 eliminate_dom_walker::eliminate_avail (basic_block, tree op)
6197 {
6198 tree valnum = VN_INFO (op)->valnum;
6199 if (TREE_CODE (valnum) == SSA_NAME)
6200 {
6201 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
6202 return valnum;
6203 if (avail.length () > SSA_NAME_VERSION (valnum))
6204 return avail[SSA_NAME_VERSION (valnum)];
6205 }
6206 else if (is_gimple_min_invariant (valnum))
6207 return valnum;
6208 return NULL_TREE;
6209 }
6210
6211 /* At the current point of the eliminate domwalk make OP available. */
6212
6213 void
6214 eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
6215 {
6216 tree valnum = VN_INFO (op)->valnum;
6217 if (TREE_CODE (valnum) == SSA_NAME)
6218 {
6219 if (avail.length () <= SSA_NAME_VERSION (valnum))
6220 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1, true);
6221 tree pushop = op;
6222 if (avail[SSA_NAME_VERSION (valnum)])
6223 pushop = avail[SSA_NAME_VERSION (valnum)];
6224 avail_stack.safe_push (pushop);
6225 avail[SSA_NAME_VERSION (valnum)] = op;
6226 }
6227 }
6228
6229 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
6230 the leader for the expression if insertion was successful. */
6231
6232 tree
6233 eliminate_dom_walker::eliminate_insert (basic_block bb,
6234 gimple_stmt_iterator *gsi, tree val)
6235 {
6236 /* We can insert a sequence with a single assignment only. */
6237 gimple_seq stmts = VN_INFO (val)->expr;
6238 if (!gimple_seq_singleton_p (stmts))
6239 return NULL_TREE;
6240 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
6241 if (!stmt
6242 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
6243 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
6244 && gimple_assign_rhs_code (stmt) != NEGATE_EXPR
6245 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
6246 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
6247 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
6248 return NULL_TREE;
6249
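/* The single assignment is a conversion, negation, BIT_FIELD_REF or
   masking with a constant; find an available leader for its operand.  */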
6250 tree op = gimple_assign_rhs1 (stmt);
6251 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
6252 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
6253 op = TREE_OPERAND (op, 0);
6254 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
6255 if (!leader)
6256 return NULL_TREE;
6257
6258 tree res;
6259 stmts = NULL;
6260 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
6261 res = gimple_build (&stmts, BIT_FIELD_REF,
6262 TREE_TYPE (val), leader,
6263 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
6264 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
6265 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
6266 res = gimple_build (&stmts, BIT_AND_EXPR,
6267 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
6268 else
6269 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
6270 TREE_TYPE (val), leader);
6271 if (TREE_CODE (res) != SSA_NAME
6272 || SSA_NAME_IS_DEFAULT_DEF (res)
6273 || gimple_bb (SSA_NAME_DEF_STMT (res)))
6274 {
6275 gimple_seq_discard (stmts);
6276
6277 /* During propagation we have to treat SSA info conservatively
6278 and thus we can end up simplifying the inserted expression
6279 at elimination time to sth not defined in stmts. */
6280 /* But then this is a redundancy we failed to detect. Which means
6281 res now has two values. That doesn't play well with how
6282 we track availability here, so give up. */
6283 if (dump_file && (dump_flags & TDF_DETAILS))
6284 {
6285 if (TREE_CODE (res) == SSA_NAME)
6286 res = eliminate_avail (bb, res);
6287 if (res)
6288 {
6289 fprintf (dump_file, "Failed to insert expression for value ");
6290 print_generic_expr (dump_file, val);
6291 fprintf (dump_file, " which is really fully redundant to ");
6292 print_generic_expr (dump_file, res);
6293 fprintf (dump_file, "\n");
6294 }
6295 }
6296
6297 return NULL_TREE;
6298 }
6299 else
6300 {
6301 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
6302 vn_ssa_aux_t vn_info = VN_INFO (res);
6303 vn_info->valnum = val;
6304 vn_info->visited = true;
6305 }
6306
6307 insertions++;
6308 if (dump_file && (dump_flags & TDF_DETAILS))
6309 {
6310 fprintf (dump_file, "Inserted ");
6311 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
6312 }
6313
6314 return res;
6315 }
6316
6317 void
6318 eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
6319 {
6320 tree sprime = NULL_TREE;
6321 gimple *stmt = gsi_stmt (*gsi);
6322 tree lhs = gimple_get_lhs (stmt);
6323 if (lhs && TREE_CODE (lhs) == SSA_NAME
6324 && !gimple_has_volatile_ops (stmt)
6325 /* See PR43491. Do not replace a global register variable when
6326 it is the RHS of an assignment. Do replace local register
6327 variables since gcc does not guarantee a local variable will
6328 be allocated in a register.
6329 ??? The fix isn't effective here. This should instead
6330 be ensured by not value-numbering them the same but treating
6331 them like volatiles? */
6332 && !(gimple_assign_single_p (stmt)
6333 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
6334 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
6335 && is_global_var (gimple_assign_rhs1 (stmt)))))
6336 {
6337 sprime = eliminate_avail (b, lhs);
6338 if (!sprime)
6339 {
6340 /* If there is no existing usable leader but SCCVN thinks
6341 it has an expression it wants to use as replacement,
6342 insert that. */
6343 tree val = VN_INFO (lhs)->valnum;
6344 vn_ssa_aux_t vn_info;
6345 if (val != VN_TOP
6346 && TREE_CODE (val) == SSA_NAME
6347 && (vn_info = VN_INFO (val), true)
6348 && vn_info->needs_insertion
6349 && vn_info->expr != NULL
6350 && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
6351 eliminate_push_avail (b, sprime);
6352 }
6353
6354 /* If this now constitutes a copy duplicate points-to
6355 and range info appropriately. This is especially
6356 important for inserted code. See tree-ssa-copy.cc
6357 for similar code. */
6358 if (sprime
6359 && TREE_CODE (sprime) == SSA_NAME)
6360 {
6361 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
6362 if (POINTER_TYPE_P (TREE_TYPE (lhs))
6363 && SSA_NAME_PTR_INFO (lhs)
6364 && ! SSA_NAME_PTR_INFO (sprime))
6365 {
6366 duplicate_ssa_name_ptr_info (sprime,
6367 SSA_NAME_PTR_INFO (lhs));
6368 if (b != sprime_b)
6369 reset_flow_sensitive_info (sprime);
6370 }
6371 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6372 && SSA_NAME_RANGE_INFO (lhs)
6373 && ! SSA_NAME_RANGE_INFO (sprime)
6374 && b == sprime_b)
6375 duplicate_ssa_name_range_info (sprime,
6376 SSA_NAME_RANGE_TYPE (lhs),
6377 SSA_NAME_RANGE_INFO (lhs));
6378 }
6379
6380 /* Inhibit the use of an inserted PHI on a loop header when
6381 the address of the memory reference is a simple induction
6382 variable. In other cases the vectorizer won't do anything
6383 anyway (either it's loop invariant or a complicated
6384 expression). */
6385 if (sprime
6386 && TREE_CODE (sprime) == SSA_NAME
6387 && do_pre
6388 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
6389 && loop_outer (b->loop_father)
6390 && has_zero_uses (sprime)
6391 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
6392 && gimple_assign_load_p (stmt))
6393 {
6394 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
6395 basic_block def_bb = gimple_bb (def_stmt);
6396 if (gimple_code (def_stmt) == GIMPLE_PHI
6397 && def_bb->loop_father->header == def_bb)
6398 {
6399 loop_p loop = def_bb->loop_father;
6400 ssa_op_iter iter;
6401 tree op;
6402 bool found = false;
6403 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
6404 {
6405 affine_iv iv;
6406 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
6407 if (def_bb
6408 && flow_bb_inside_loop_p (loop, def_bb)
6409 && simple_iv (loop, loop, op, &iv, true))
6410 {
6411 found = true;
6412 break;
6413 }
6414 }
6415 if (found)
6416 {
6417 if (dump_file && (dump_flags & TDF_DETAILS))
6418 {
6419 fprintf (dump_file, "Not replacing ");
6420 print_gimple_expr (dump_file, stmt, 0);
6421 fprintf (dump_file, " with ");
6422 print_generic_expr (dump_file, sprime);
6423 fprintf (dump_file, " which would add a loop"
6424 " carried dependence to loop %d\n",
6425 loop->num);
6426 }
6427 /* Don't keep sprime available. */
6428 sprime = NULL_TREE;
6429 }
6430 }
6431 }
6432
6433 if (sprime)
6434 {
6435 /* If we can propagate the value computed for LHS into
6436 all uses don't bother doing anything with this stmt. */
6437 if (may_propagate_copy (lhs, sprime))
6438 {
6439 /* Mark it for removal. */
6440 to_remove.safe_push (stmt);
6441
6442 /* ??? Don't count copy/constant propagations. */
6443 if (gimple_assign_single_p (stmt)
6444 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
6445 || gimple_assign_rhs1 (stmt) == sprime))
6446 return;
6447
6448 if (dump_file && (dump_flags & TDF_DETAILS))
6449 {
6450 fprintf (dump_file, "Replaced ");
6451 print_gimple_expr (dump_file, stmt, 0);
6452 fprintf (dump_file, " with ");
6453 print_generic_expr (dump_file, sprime);
6454 fprintf (dump_file, " in all uses of ");
6455 print_gimple_stmt (dump_file, stmt, 0);
6456 }
6457
6458 eliminations++;
6459 return;
6460 }
6461
6462 /* If this is an assignment from our leader (which
6463 happens in the case the value-number is a constant)
6464 then there is nothing to do. Likewise if we run into
6465 inserted code that needed a conversion because of
6466 our type-agnostic value-numbering of loads. */
6467 if ((gimple_assign_single_p (stmt)
6468 || (is_gimple_assign (stmt)
6469 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
6470 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)))
6471 && sprime == gimple_assign_rhs1 (stmt))
6472 return;
6473
6474 /* Else replace its RHS. */
6475 if (dump_file && (dump_flags & TDF_DETAILS))
6476 {
6477 fprintf (dump_file, "Replaced ");
6478 print_gimple_expr (dump_file, stmt, 0);
6479 fprintf (dump_file, " with ");
6480 print_generic_expr (dump_file, sprime);
6481 fprintf (dump_file, " in ");
6482 print_gimple_stmt (dump_file, stmt, 0);
6483 }
6484 eliminations++;
6485
6486 bool can_make_abnormal_goto = (is_gimple_call (stmt)
6487 && stmt_can_make_abnormal_goto (stmt));
6488 gimple *orig_stmt = stmt;
6489 if (!useless_type_conversion_p (TREE_TYPE (lhs),
6490 TREE_TYPE (sprime)))
6491 {
6492 /* We preserve conversions to but not from function or method
6493 types. This asymmetry makes it necessary to re-instantiate
6494 conversions here. */
6495 if (POINTER_TYPE_P (TREE_TYPE (lhs))
6496 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
6497 sprime = fold_convert (TREE_TYPE (lhs), sprime);
6498 else
6499 gcc_unreachable ();
6500 }
6501 tree vdef = gimple_vdef (stmt);
6502 tree vuse = gimple_vuse (stmt);
6503 propagate_tree_value_into_stmt (gsi, sprime);
6504 stmt = gsi_stmt (*gsi);
6505 update_stmt (stmt);
6506 /* In case the VDEF on the original stmt was released, value-number
6507 it to the VUSE. This is to make vuse_ssa_val able to skip
6508 released virtual operands. */
6509 if (vdef != gimple_vdef (stmt))
6510 {
6511 gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
6512 VN_INFO (vdef)->valnum = vuse;
6513 }
6514
6515 /* If we removed EH side-effects from the statement, clean
6516 its EH information. */
6517 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
6518 {
6519 bitmap_set_bit (need_eh_cleanup,
6520 gimple_bb (stmt)->index);
6521 if (dump_file && (dump_flags & TDF_DETAILS))
6522 fprintf (dump_file, " Removed EH side-effects.\n");
6523 }
6524
6525 /* Likewise for AB side-effects. */
6526 if (can_make_abnormal_goto
6527 && !stmt_can_make_abnormal_goto (stmt))
6528 {
6529 bitmap_set_bit (need_ab_cleanup,
6530 gimple_bb (stmt)->index);
6531 if (dump_file && (dump_flags & TDF_DETAILS))
6532 fprintf (dump_file, " Removed AB side-effects.\n");
6533 }
6534
6535 return;
6536 }
6537 }
6538
6539 /* If the statement is a scalar store, see if the expression
6540 has the same value number as its rhs. If so, the store is
6541 dead. */
6542 if (gimple_assign_single_p (stmt)
6543 && !gimple_has_volatile_ops (stmt)
6544 && !is_gimple_reg (gimple_assign_lhs (stmt))
6545 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
6546 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
6547 {
6548 tree rhs = gimple_assign_rhs1 (stmt);
6549 vn_reference_t vnresult;
6550 /* ??? gcc.dg/torture/pr91445.c shows that we lookup a boolean
6551 typed load of a byte known to be 0x11 as 1 so a store of
6552 a boolean 1 is detected as redundant. Because of this we
6553 have to make sure to lookup with a ref where its size
6554 matches the precision. */
6555 tree lookup_lhs = lhs;
6556 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6557 && (TREE_CODE (lhs) != COMPONENT_REF
6558 || !DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
6559 && !type_has_mode_precision_p (TREE_TYPE (lhs)))
6560 {
6561 if (TREE_CODE (lhs) == COMPONENT_REF
6562 || TREE_CODE (lhs) == MEM_REF)
6563 {
6564 tree ltype = build_nonstandard_integer_type
6565 (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (lhs))),
6566 TYPE_UNSIGNED (TREE_TYPE (lhs)));
6567 if (TREE_CODE (lhs) == COMPONENT_REF)
6568 {
6569 tree foff = component_ref_field_offset (lhs);
6570 tree f = TREE_OPERAND (lhs, 1);
6571 if (!poly_int_tree_p (foff))
6572 lookup_lhs = NULL_TREE;
6573 else
6574 lookup_lhs = build3 (BIT_FIELD_REF, ltype,
6575 TREE_OPERAND (lhs, 0),
6576 TYPE_SIZE (TREE_TYPE (lhs)),
6577 bit_from_pos
6578 (foff, DECL_FIELD_BIT_OFFSET (f)));
6579 }
6580 else
6581 lookup_lhs = build2 (MEM_REF, ltype,
6582 TREE_OPERAND (lhs, 0),
6583 TREE_OPERAND (lhs, 1));
6584 }
6585 else
6586 lookup_lhs = NULL_TREE;
6587 }
6588 tree val = NULL_TREE;
6589 if (lookup_lhs)
6590 val = vn_reference_lookup (lookup_lhs, gimple_vuse (stmt),
6591 VN_WALKREWRITE, &vnresult, false,
6592 NULL, NULL_TREE, true);
6593 if (TREE_CODE (rhs) == SSA_NAME)
6594 rhs = VN_INFO (rhs)->valnum;
6595 if (val
6596 && (operand_equal_p (val, rhs, 0)
6597 /* Due to the bitfield lookups above we can get bit
6598 interpretations of the same RHS as values here. Those
6599 are redundant as well. */
6600 || (TREE_CODE (val) == SSA_NAME
6601 && gimple_assign_single_p (SSA_NAME_DEF_STMT (val))
6602 && (val = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (val)))
6603 && TREE_CODE (val) == VIEW_CONVERT_EXPR
6604 && TREE_OPERAND (val, 0) == rhs)))
6605 {
6606 /* We can only remove the later store if the former aliases
6607 at least all accesses the later one does or if the store
6608 was to readonly memory storing the same value. */
6609 ao_ref lhs_ref;
6610 ao_ref_init (&lhs_ref, lhs);
6611 alias_set_type set = ao_ref_alias_set (&lhs_ref);
6612 alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
6613 if (! vnresult
6614 || ((vnresult->set == set
6615 || alias_set_subset_of (set, vnresult->set))
6616 && (vnresult->base_set == base_set
6617 || alias_set_subset_of (base_set, vnresult->base_set))))
6618 {
6619 if (dump_file && (dump_flags & TDF_DETAILS))
6620 {
6621 fprintf (dump_file, "Deleted redundant store ");
6622 print_gimple_stmt (dump_file, stmt, 0);
6623 }
6624
6625 /* Queue stmt for removal. */
6626 to_remove.safe_push (stmt);
6627 return;
6628 }
6629 }
6630 }
6631
6632 /* If this is a control statement for which value numbering left
6633 edges unexecuted, force the condition in a way consistent with
6634 that. */
6635 if (gcond *cond = dyn_cast <gcond *> (stmt))
6636 {
6637 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
6638 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
6639 {
6640 if (dump_file && (dump_flags & TDF_DETAILS))
6641 {
6642 fprintf (dump_file, "Removing unexecutable edge from ");
6643 print_gimple_stmt (dump_file, stmt, 0);
6644 }
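/* Force the condition so that the single executable edge is the one
   taken.  */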
6645 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
6646 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
6647 gimple_cond_make_true (cond);
6648 else
6649 gimple_cond_make_false (cond);
6650 update_stmt (cond);
6651 el_todo |= TODO_cleanup_cfg;
6652 return;
6653 }
6654 }
6655
6656 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
6657 bool was_noreturn = (is_gimple_call (stmt)
6658 && gimple_call_noreturn_p (stmt));
6659 tree vdef = gimple_vdef (stmt);
6660 tree vuse = gimple_vuse (stmt);
6661
6662 /* If we didn't replace the whole stmt (or propagate the result
6663 into all uses), replace all uses on this stmt with their
6664 leaders. */
6665 bool modified = false;
6666 use_operand_p use_p;
6667 ssa_op_iter iter;
6668 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
6669 {
6670 tree use = USE_FROM_PTR (use_p);
6671 /* ??? The call code above leaves stmt operands un-updated. */
6672 if (TREE_CODE (use) != SSA_NAME)
6673 continue;
6674 tree sprime;
6675 if (SSA_NAME_IS_DEFAULT_DEF (use))
6676 /* ??? For default defs BB shouldn't matter, but we have to
6677 solve the inconsistency between rpo eliminate and
6678 dom eliminate avail valueization first. */
6679 sprime = eliminate_avail (b, use);
6680 else
6681 /* Look for sth available at the definition block of the argument.
6682 This avoids inconsistencies between availability there which
6683 decides if the stmt can be removed and availability at the
6684 use site. The SSA property ensures that things available
6685 at the definition are also available at uses. */
6686 sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
6687 if (sprime && sprime != use
6688 && may_propagate_copy (use, sprime, true)
6689 /* We substitute into debug stmts to avoid excessive
6690 debug temporaries created by removed stmts, but we need
6691 to avoid doing so for inserted sprimes as we never want
6692 to create debug temporaries for them. */
6693 && (!inserted_exprs
6694 || TREE_CODE (sprime) != SSA_NAME
6695 || !is_gimple_debug (stmt)
6696 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
6697 {
6698 propagate_value (use_p, sprime);
6699 modified = true;
6700 }
6701 }
6702
6703 /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
6704 into which is a requirement for the IPA devirt machinery. */
6705 gimple *old_stmt = stmt;
6706 if (modified)
6707 {
6708 /* If a formerly non-invariant ADDR_EXPR is turned into an
6709 invariant one it was on a separate stmt. */
6710 if (gimple_assign_single_p (stmt)
6711 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
6712 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
6713 gimple_stmt_iterator prev = *gsi;
6714 gsi_prev (&prev);
6715 if (fold_stmt (gsi, follow_all_ssa_edges))
6716 {
6717 /* fold_stmt may have created new stmts in between
6718 the previous stmt and the folded stmt. Mark
6719 all defs created there as varying to not confuse
6720 the SCCVN machinery as we're using that even during
6721 elimination. */
6722 if (gsi_end_p (prev))
6723 prev = gsi_start_bb (b);
6724 else
6725 gsi_next (&prev);
6726 if (gsi_stmt (prev) != gsi_stmt (*gsi))
6727 do
6728 {
6729 tree def;
6730 ssa_op_iter dit;
6731 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
6732 dit, SSA_OP_ALL_DEFS)
6733 /* As existing DEFs may move between stmts
6734 only process new ones. */
6735 if (! has_VN_INFO (def))
6736 {
6737 vn_ssa_aux_t vn_info = VN_INFO (def);
6738 vn_info->valnum = def;
6739 vn_info->visited = true;
6740 }
6741 if (gsi_stmt (prev) == gsi_stmt (*gsi))
6742 break;
6743 gsi_next (&prev);
6744 }
6745 while (1);
6746 }
6747 stmt = gsi_stmt (*gsi);
6748 /* In case we folded the stmt away schedule the NOP for removal. */
6749 if (gimple_nop_p (stmt))
6750 to_remove.safe_push (stmt);
6751 }
6752
6753 /* Visit indirect calls and turn them into direct calls if
6754 possible using the devirtualization machinery. Do this before
6755 checking for required EH/abnormal/noreturn cleanup as devirtualization
6756 may expose more of those. */
6757 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
6758 {
6759 tree fn = gimple_call_fn (call_stmt);
6760 if (fn
6761 && flag_devirtualize
6762 && virtual_method_call_p (fn))
6763 {
6764 tree otr_type = obj_type_ref_class (fn);
6765 unsigned HOST_WIDE_INT otr_tok
6766 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
6767 tree instance;
6768 ipa_polymorphic_call_context context (current_function_decl,
6769 fn, stmt, &instance);
6770 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
6771 otr_type, stmt, NULL);
6772 bool final;
6773 vec <cgraph_node *> targets
6774 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
6775 otr_tok, context, &final);
6776 if (dump_file)
6777 dump_possible_polymorphic_call_targets (dump_file,
6778 obj_type_ref_class (fn),
6779 otr_tok, context);
6780 if (final && targets.length () <= 1 && dbg_cnt (devirt))
6781 {
6782 tree fn;
6783 if (targets.length () == 1)
6784 fn = targets[0]->decl;
6785 else
6786 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6787 if (dump_enabled_p ())
6788 {
6789 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
6790 "converting indirect call to "
6791 "function %s\n",
6792 lang_hooks.decl_printable_name (fn, 2));
6793 }
6794 gimple_call_set_fndecl (call_stmt, fn);
6795 /* If changing the call to __builtin_unreachable
6796 or similar noreturn function, adjust gimple_call_fntype
6797 too. */
6798 if (gimple_call_noreturn_p (call_stmt)
6799 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
6800 && TYPE_ARG_TYPES (TREE_TYPE (fn))
6801 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
6802 == void_type_node))
6803 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
6804 maybe_remove_unused_call_args (cfun, call_stmt);
6805 modified = true;
6806 }
6807 }
6808 }
6809
6810 if (modified)
6811 {
6812 /* When changing a call into a noreturn call, cfg cleanup
6813 is needed to fix up the noreturn call. */
6814 if (!was_noreturn
6815 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
6816 to_fixup.safe_push (stmt);
6817 /* When changing a condition or switch into one we know what
6818 edge will be executed, schedule a cfg cleanup. */
6819 if ((gimple_code (stmt) == GIMPLE_COND
6820 && (gimple_cond_true_p (as_a <gcond *> (stmt))
6821 || gimple_cond_false_p (as_a <gcond *> (stmt))))
6822 || (gimple_code (stmt) == GIMPLE_SWITCH
6823 && TREE_CODE (gimple_switch_index
6824 (as_a <gswitch *> (stmt))) == INTEGER_CST))
6825 el_todo |= TODO_cleanup_cfg;
6826 /* If we removed EH side-effects from the statement, clean
6827 its EH information. */
6828 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
6829 {
6830 bitmap_set_bit (need_eh_cleanup,
6831 gimple_bb (stmt)->index);
6832 if (dump_file && (dump_flags & TDF_DETAILS))
6833 fprintf (dump_file, " Removed EH side-effects.\n");
6834 }
6835 /* Likewise for AB side-effects. */
6836 if (can_make_abnormal_goto
6837 && !stmt_can_make_abnormal_goto (stmt))
6838 {
6839 bitmap_set_bit (need_ab_cleanup,
6840 gimple_bb (stmt)->index);
6841 if (dump_file && (dump_flags & TDF_DETAILS))
6842 fprintf (dump_file, " Removed AB side-effects.\n");
6843 }
6844 update_stmt (stmt);
6845 /* In case the VDEF on the original stmt was released, value-number
6846 it to the VUSE. This is to make vuse_ssa_val able to skip
6847 released virtual operands. */
6848 if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
6849 VN_INFO (vdef)->valnum = vuse;
6850 }
6851
6852 /* Make new values available - for fully redundant LHS we
6853 continue with the next stmt above and skip this. */
6854 def_operand_p defp;
6855 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
6856 eliminate_push_avail (b, DEF_FROM_PTR (defp));
6857 }
6858
6859 /* Perform elimination for the basic-block B during the domwalk. */
6860
6861 edge
6862 eliminate_dom_walker::before_dom_children (basic_block b)
6863 {
6864 /* Mark new bb. */
6865 avail_stack.safe_push (NULL_TREE);
6866
6867 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
6868 if (!(b->flags & BB_EXECUTABLE))
6869 return NULL;
6870
6871 vn_context_bb = b;
6872
6873 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
6874 {
6875 gphi *phi = gsi.phi ();
6876 tree res = PHI_RESULT (phi);
6877
6878 if (virtual_operand_p (res))
6879 {
6880 gsi_next (&gsi);
6881 continue;
6882 }
6883
6884 tree sprime = eliminate_avail (b, res);
6885 if (sprime
6886 && sprime != res)
6887 {
6888 if (dump_file && (dump_flags & TDF_DETAILS))
6889 {
6890 fprintf (dump_file, "Replaced redundant PHI node defining ");
6891 print_generic_expr (dump_file, res);
6892 fprintf (dump_file, " with ");
6893 print_generic_expr (dump_file, sprime);
6894 fprintf (dump_file, "\n");
6895 }
6896
6897 /* If we inserted this PHI node ourselves, it's not an elimination. */
6898 if (! inserted_exprs
6899 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
6900 eliminations++;
6901
6902 /* If we will propagate into all uses don't bother to do
6903 anything. */
6904 if (may_propagate_copy (res, sprime))
6905 {
6906 /* Mark the PHI for removal. */
6907 to_remove.safe_push (phi);
6908 gsi_next (&gsi);
6909 continue;
6910 }
6911
6912 remove_phi_node (&gsi, false);
6913
6914 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
6915 sprime = fold_convert (TREE_TYPE (res), sprime);
6916 gimple *stmt = gimple_build_assign (res, sprime);
6917 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
6918 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
6919 continue;
6920 }
6921
6922 eliminate_push_avail (b, res);
6923 gsi_next (&gsi);
6924 }
6925
6926 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
6927 !gsi_end_p (gsi);
6928 gsi_next (&gsi))
6929 eliminate_stmt (b, &gsi);
6930
6931 /* Replace destination PHI arguments. */
6932 edge_iterator ei;
6933 edge e;
6934 FOR_EACH_EDGE (e, ei, b->succs)
6935 if (e->flags & EDGE_EXECUTABLE)
6936 for (gphi_iterator gsi = gsi_start_phis (e->dest);
6937 !gsi_end_p (gsi);
6938 gsi_next (&gsi))
6939 {
6940 gphi *phi = gsi.phi ();
6941 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
6942 tree arg = USE_FROM_PTR (use_p);
6943 if (TREE_CODE (arg) != SSA_NAME
6944 || virtual_operand_p (arg))
6945 continue;
6946 tree sprime = eliminate_avail (b, arg);
6947 if (sprime && may_propagate_copy (arg, sprime))
6948 propagate_value (use_p, sprime);
6949 }
6950
6951 vn_context_bb = NULL;
6952
6953 return NULL;
6954 }
6955
6956 /* Make no longer available leaders no longer available. */
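/* The avail_stack holds, per block delimited by the NULL_TREE marker
   pushed in before_dom_children, either the newly made-available name or
   the leader it shadowed; restoring accordingly below undoes the avail[]
   changes for the dominated subtree we are leaving.  */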
6957
6958 void
6959 eliminate_dom_walker::after_dom_children (basic_block)
6960 {
6961 tree entry;
6962 while ((entry = avail_stack.pop ()) != NULL_TREE)
6963 {
6964 tree valnum = VN_INFO (entry)->valnum;
6965 tree old = avail[SSA_NAME_VERSION (valnum)];
6966 if (old == entry)
6967 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
6968 else
6969 avail[SSA_NAME_VERSION (valnum)] = entry;
6970 }
6971 }
6972
6973 /* Remove queued stmts and perform delayed cleanups. */
6974
6975 unsigned
6976 eliminate_dom_walker::eliminate_cleanup (bool region_p)
6977 {
6978 statistics_counter_event (cfun, "Eliminated", eliminations);
6979 statistics_counter_event (cfun, "Insertions", insertions);
6980
6981 /* We cannot remove stmts during BB walk, especially not release SSA
6982 names there as this confuses the VN machinery. The stmts ending
6983 up in to_remove are either stores or simple copies.
6984 Remove stmts in reverse order to make debug stmt creation possible. */
6985 while (!to_remove.is_empty ())
6986 {
6987 bool do_release_defs = true;
6988 gimple *stmt = to_remove.pop ();
6989
6990 /* When we are value-numbering a region we do not require exit PHIs to
6991 be present so we have to make sure to deal with uses outside of the
6992 region of stmts that we thought are eliminated.
6993 ??? Note we may be confused by uses in dead regions we didn't run
6994 elimination on. Rather than checking individual uses we accept
6995 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
6996 contains such example). */
6997 if (region_p)
6998 {
6999 if (gphi *phi = dyn_cast <gphi *> (stmt))
7000 {
7001 tree lhs = gimple_phi_result (phi);
7002 if (!has_zero_uses (lhs))
7003 {
7004 if (dump_file && (dump_flags & TDF_DETAILS))
7005 fprintf (dump_file, "Keeping eliminated stmt live "
7006 "as copy because of out-of-region uses\n");
7007 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
7008 gimple *copy = gimple_build_assign (lhs, sprime);
7009 gimple_stmt_iterator gsi
7010 = gsi_after_labels (gimple_bb (stmt));
7011 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
7012 do_release_defs = false;
7013 }
7014 }
7015 else if (tree lhs = gimple_get_lhs (stmt))
7016 if (TREE_CODE (lhs) == SSA_NAME
7017 && !has_zero_uses (lhs))
7018 {
7019 if (dump_file && (dump_flags & TDF_DETAILS))
7020 fprintf (dump_file, "Keeping eliminated stmt live "
7021 "as copy because of out-of-region uses\n");
7022 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
7023 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
7024 if (is_gimple_assign (stmt))
7025 {
7026 gimple_assign_set_rhs_from_tree (&gsi, sprime);
7027 stmt = gsi_stmt (gsi);
7028 update_stmt (stmt);
7029 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
7030 bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
7031 continue;
7032 }
7033 else
7034 {
7035 gimple *copy = gimple_build_assign (lhs, sprime);
7036 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
7037 do_release_defs = false;
7038 }
7039 }
7040 }
7041
7042 if (dump_file && (dump_flags & TDF_DETAILS))
7043 {
7044 fprintf (dump_file, "Removing dead stmt ");
7045 print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
7046 }
7047
7048 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
7049 if (gimple_code (stmt) == GIMPLE_PHI)
7050 remove_phi_node (&gsi, do_release_defs);
7051 else
7052 {
7053 basic_block bb = gimple_bb (stmt);
7054 unlink_stmt_vdef (stmt);
7055 if (gsi_remove (&gsi, true))
7056 bitmap_set_bit (need_eh_cleanup, bb->index);
7057 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
7058 bitmap_set_bit (need_ab_cleanup, bb->index);
7059 if (do_release_defs)
7060 release_defs (stmt);
7061 }
7062
7063 /* Removing a stmt may expose a forwarder block. */
7064 el_todo |= TODO_cleanup_cfg;
7065 }
7066
7067 /* Fixup stmts that became noreturn calls. This may require splitting
7068 blocks and thus isn't possible during the dominator walk. Do this
7069 in reverse order so we don't inadvertently remove a stmt we want to
7070 fix up by visiting a dominating now-noreturn call first. */
7071 while (!to_fixup.is_empty ())
7072 {
7073 gimple *stmt = to_fixup.pop ();
7074
7075 if (dump_file && (dump_flags & TDF_DETAILS))
7076 {
7077 fprintf (dump_file, "Fixing up noreturn call ");
7078 print_gimple_stmt (dump_file, stmt, 0);
7079 }
7080
7081 if (fixup_noreturn_call (stmt))
7082 el_todo |= TODO_cleanup_cfg;
7083 }
7084
7085 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
7086 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
7087
7088 if (do_eh_cleanup)
7089 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
7090
7091 if (do_ab_cleanup)
7092 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
7093
7094 if (do_eh_cleanup || do_ab_cleanup)
7095 el_todo |= TODO_cleanup_cfg;
7096
7097 return el_todo;
7098 }
7099
7100 /* Eliminate fully redundant computations. */
7101
7102 unsigned
7103 eliminate_with_rpo_vn (bitmap inserted_exprs)
7104 {
7105 eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
7106
7107 eliminate_dom_walker *saved_rpo_avail = rpo_avail;
7108 rpo_avail = &walker;
7109 walker.walk (cfun->cfg->x_entry_block_ptr);
7110 rpo_avail = saved_rpo_avail;
7111
7112 return walker.eliminate_cleanup ();
7113 }
7114
7115 unsigned
7116 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
7117 bool iterate, bool eliminate, vn_lookup_kind kind);
7118
7119 void
7120 run_rpo_vn (vn_lookup_kind kind)
7121 {
7122 do_rpo_vn (cfun, NULL, NULL, true, false, kind);
7123
7124 /* ??? Prune requirement of these. */
7125 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
7126
7127 /* Initialize the value ids and prune out remaining VN_TOPs
7128 from dead code. */
7129 tree name;
7130 unsigned i;
7131 FOR_EACH_SSA_NAME (i, name, cfun)
7132 {
7133 vn_ssa_aux_t info = VN_INFO (name);
7134 if (!info->visited
7135 || info->valnum == VN_TOP)
7136 info->valnum = name;
7137 if (info->valnum == name)
7138 info->value_id = get_next_value_id ();
7139 else if (is_gimple_min_invariant (info->valnum))
7140 info->value_id = get_or_alloc_constant_value_id (info->valnum);
7141 }
7142
7143 /* Propagate. */
7144 FOR_EACH_SSA_NAME (i, name, cfun)
7145 {
7146 vn_ssa_aux_t info = VN_INFO (name);
7147 if (TREE_CODE (info->valnum) == SSA_NAME
7148 && info->valnum != name
7149 && info->value_id != VN_INFO (info->valnum)->value_id)
7150 info->value_id = VN_INFO (info->valnum)->value_id;
7151 }
7152
7153 set_hashtable_value_ids ();
7154
7155 if (dump_file && (dump_flags & TDF_DETAILS))
7156 {
7157 fprintf (dump_file, "Value numbers:\n");
7158 FOR_EACH_SSA_NAME (i, name, cfun)
7159 {
7160 if (VN_INFO (name)->visited
7161 && SSA_VAL (name) != name)
7162 {
7163 print_generic_expr (dump_file, name);
7164 fprintf (dump_file, " = ");
7165 print_generic_expr (dump_file, SSA_VAL (name));
7166 fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
7167 }
7168 }
7169 }
7170 }
7171
7172 /* Free VN associated data structures. */
7173
7174 void
7175 free_rpo_vn (void)
7176 {
7177 free_vn_table (valid_info);
7178 XDELETE (valid_info);
7179 obstack_free (&vn_tables_obstack, NULL);
7180 obstack_free (&vn_tables_insert_obstack, NULL);
7181
7182 vn_ssa_aux_iterator_type it;
7183 vn_ssa_aux_t info;
7184 FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
7185 if (info->needs_insertion)
7186 release_ssa_name (info->name);
7187 obstack_free (&vn_ssa_aux_obstack, NULL);
7188 delete vn_ssa_aux_hash;
7189
7190 delete constant_to_value_id;
7191 constant_to_value_id = NULL;
7192 }
7193
7194 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
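/* For example, when match-and-simplify rewrites a statement to a_1 + b_2
   and the VN tables already hold a value for that expression, returning
   it here lets the simplifier reuse the existing name instead of
   inserting a new statement (the SSA names are purely illustrative).  */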
7195
7196 static tree
7197 vn_lookup_simplify_result (gimple_match_op *res_op)
7198 {
7199 if (!res_op->code.is_tree_code ())
7200 return NULL_TREE;
7201 tree *ops = res_op->ops;
7202 unsigned int length = res_op->num_ops;
7203 if (res_op->code == CONSTRUCTOR
7204 /* ??? We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
7205 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree. */
7206 && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
7207 {
7208 length = CONSTRUCTOR_NELTS (res_op->ops[0]);
7209 ops = XALLOCAVEC (tree, length);
7210 for (unsigned i = 0; i < length; ++i)
7211 ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
7212 }
7213 vn_nary_op_t vnresult = NULL;
7214 tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
7215 res_op->type, ops, &vnresult);
7216 /* If this is used from expression simplification make sure to
7217 return an available expression. */
7218 if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
7219 res = rpo_avail->eliminate_avail (vn_context_bb, res);
7220 return res;
7221 }
7222
7223 /* Return a leader for OP's value that is valid at BB. */
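/* Illustrative sketch with hypothetical names and block numbers: if OP's
   value has the availability chain { (BB7, x_3), (BB2, y_5) }, recorded
   most-recent-first, then in a block dominated by BB7 we return x_3, in
   one dominated only by BB2 we return y_5, and NULL_TREE when neither
   recorded location dominates BB.  */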
7224
7225 tree
7226 rpo_elim::eliminate_avail (basic_block bb, tree op)
7227 {
7228 bool visited;
7229 tree valnum = SSA_VAL (op, &visited);
7230 /* If we didn't visit OP then it must be defined outside of the
7231 region we process and must also dominate it. So it is available. */
7232 if (!visited)
7233 return op;
7234 if (TREE_CODE (valnum) == SSA_NAME)
7235 {
7236 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
7237 return valnum;
7238 vn_avail *av = VN_INFO (valnum)->avail;
7239 if (!av)
7240 return NULL_TREE;
7241 if (av->location == bb->index)
7242 /* On tramp3d 90% of the cases are here. */
7243 return ssa_name (av->leader);
7244 do
7245 {
7246 basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
7247 /* ??? During elimination we have to use availability at the
7248 definition site of a use we try to replace. This
7249 is required to not run into inconsistencies because
7250 of dominated_by_p_w_unex behavior and removing a definition
7251 while not replacing all uses.
7252 ??? We could try to consistently walk dominators
7253 ignoring non-executable regions. The nearest common
7254 dominator of bb and abb is where we can stop walking. We
7255 may also be able to "pre-compute" (bits of) the next immediate
7256 (non-)dominator during the RPO walk when marking edges as
7257 executable. */
7258 if (dominated_by_p_w_unex (bb, abb, true))
7259 {
7260 tree leader = ssa_name (av->leader);
7261 /* Prevent eliminations that break loop-closed SSA. */
7262 if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
7263 && ! SSA_NAME_IS_DEFAULT_DEF (leader)
7264 && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
7265 (leader))->loop_father,
7266 bb))
7267 return NULL_TREE;
7268 if (dump_file && (dump_flags & TDF_DETAILS))
7269 {
7270 print_generic_expr (dump_file, leader);
7271 fprintf (dump_file, " is available for ");
7272 print_generic_expr (dump_file, valnum);
7273 fprintf (dump_file, "\n");
7274 }
7275 /* On tramp3d 99% of the _remaining_ cases succeed at
7276 the first entry. */
7277 return leader;
7278 }
7279 /* ??? Can we somehow skip to the immediate dominator
7280 RPO index (bb_to_rpo)? Again, maybe not worth, on
7281 tramp3d the worst number of elements in the vector is 9. */
7282 av = av->next;
7283 }
7284 while (av);
7285 }
7286 else if (valnum != VN_TOP)
7287 /* valnum is is_gimple_min_invariant. */
7288 return valnum;
7289 return NULL_TREE;
7290 }
7291
7292 /* Make LEADER a leader for its value at BB. */
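/* New leaders are pushed at the head of the per-value chain, so
   eliminate_avail above sees the most recently recorded location first;
   next_undo threads all pushes so do_unwind below can pop them again
   when iterating.  */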
7293
7294 void
7295 rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
7296 {
7297 tree valnum = VN_INFO (leader)->valnum;
7298 if (valnum == VN_TOP
7299 || is_gimple_min_invariant (valnum))
7300 return;
7301 if (dump_file && (dump_flags & TDF_DETAILS))
7302 {
7303 fprintf (dump_file, "Making available beyond BB%d ", bb->index);
7304 print_generic_expr (dump_file, leader);
7305 fprintf (dump_file, " for value ");
7306 print_generic_expr (dump_file, valnum);
7307 fprintf (dump_file, "\n");
7308 }
7309 vn_ssa_aux_t value = VN_INFO (valnum);
7310 vn_avail *av;
7311 if (m_avail_freelist)
7312 {
7313 av = m_avail_freelist;
7314 m_avail_freelist = m_avail_freelist->next;
7315 }
7316 else
7317 av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
7318 av->location = bb->index;
7319 av->leader = SSA_NAME_VERSION (leader);
7320 av->next = value->avail;
7321 av->next_undo = last_pushed_avail;
7322 last_pushed_avail = value;
7323 value->avail = av;
7324 }
7325
7326 /* Valueization hook for RPO VN plus required state. */
7327
7328 tree
7329 rpo_vn_valueize (tree name)
7330 {
7331 if (TREE_CODE (name) == SSA_NAME)
7332 {
7333 vn_ssa_aux_t val = VN_INFO (name);
7334 if (val)
7335 {
7336 tree tem = val->valnum;
7337 if (tem != VN_TOP && tem != name)
7338 {
7339 if (TREE_CODE (tem) != SSA_NAME)
7340 return tem;
7341 /* For all values we only valueize to an available leader
7342 which means we can use SSA name info without restriction. */
7343 tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
7344 if (tem)
7345 return tem;
7346 }
7347 }
7348 }
7349 return name;
7350 }
7351
7352 /* Insert on PRED_E predicates derived from CODE OPS being true besides the
7353 inverted condition. */
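/* For example, on the true edge of  if (a_1 < b_2)  we additionally
   record  a_1 != b_2  and  a_1 <= b_2  as true and  a_1 > b_2  and
   a_1 == b_2  as false (the SSA names are purely illustrative).  */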
7354
7355 static void
7356 insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
7357 {
7358 switch (code)
7359 {
7360 case LT_EXPR:
7361 /* a < b -> a {!,<}= b */
7362 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
7363 ops, boolean_true_node, 0, pred_e);
7364 vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
7365 ops, boolean_true_node, 0, pred_e);
7366 /* a < b -> ! a {>,=} b */
7367 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
7368 ops, boolean_false_node, 0, pred_e);
7369 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
7370 ops, boolean_false_node, 0, pred_e);
7371 break;
7372 case GT_EXPR:
7373 /* a > b -> a {!,>}= b */
7374 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
7375 ops, boolean_true_node, 0, pred_e);
7376 vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
7377 ops, boolean_true_node, 0, pred_e);
7378 /* a > b -> ! a {<,=} b */
7379 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
7380 ops, boolean_false_node, 0, pred_e);
7381 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
7382 ops, boolean_false_node, 0, pred_e);
7383 break;
7384 case EQ_EXPR:
7385 /* a == b -> ! a {<,>} b */
7386 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
7387 ops, boolean_false_node, 0, pred_e);
7388 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
7389 ops, boolean_false_node, 0, pred_e);
7390 break;
7391 case LE_EXPR:
7392 case GE_EXPR:
7393 case NE_EXPR:
7394 /* Nothing besides inverted condition. */
7395 break;
7396 default:;
7397 }
7398 }
7399
7400 /* Main stmt worker for RPO VN, process BB. */
7401
7402 static unsigned
7403 process_bb (rpo_elim &avail, basic_block bb,
7404 bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
7405 bool do_region, bitmap exit_bbs, bool skip_phis)
7406 {
7407 unsigned todo = 0;
7408 edge_iterator ei;
7409 edge e;
7410
7411 vn_context_bb = bb;
7412
7413 /* If we are in loop-closed SSA preserve this state. This is
7414 relevant when called on regions from outside of FRE/PRE. */
7415 bool lc_phi_nodes = false;
7416 if (!skip_phis
7417 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
7418 FOR_EACH_EDGE (e, ei, bb->preds)
7419 if (e->src->loop_father != e->dest->loop_father
7420 && flow_loop_nested_p (e->dest->loop_father,
7421 e->src->loop_father))
7422 {
7423 lc_phi_nodes = true;
7424 break;
7425 }
7426
7427 /* When we visit a loop header substitute into loop info. */
7428 if (!iterate && eliminate && bb->loop_father->header == bb)
7429 {
7430 /* Keep fields in sync with substitute_in_loop_info. */
7431 if (bb->loop_father->nb_iterations)
7432 bb->loop_father->nb_iterations
7433 = simplify_replace_tree (bb->loop_father->nb_iterations,
7434 NULL_TREE, NULL_TREE, &vn_valueize_for_srt);
7435 }
7436
7437 /* Value-number all defs in the basic-block. */
7438 if (!skip_phis)
7439 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7440 gsi_next (&gsi))
7441 {
7442 gphi *phi = gsi.phi ();
7443 tree res = PHI_RESULT (phi);
7444 vn_ssa_aux_t res_info = VN_INFO (res);
7445 if (!bb_visited)
7446 {
7447 gcc_assert (!res_info->visited);
7448 res_info->valnum = VN_TOP;
7449 res_info->visited = true;
7450 }
7451
7452 /* When not iterating force backedge values to varying. */
7453 visit_stmt (phi, !iterate_phis);
7454 if (virtual_operand_p (res))
7455 continue;
7456
7457 /* Eliminate */
7458 /* The interesting case is gcc.dg/tree-ssa/pr22230.c for correctness
7459 how we handle backedges and availability.
7460 And gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization. */
7461 tree val = res_info->valnum;
7462 if (res != val && !iterate && eliminate)
7463 {
7464 if (tree leader = avail.eliminate_avail (bb, res))
7465 {
7466 if (leader != res
7467 /* Preserve loop-closed SSA form. */
7468 && (! lc_phi_nodes
7469 || is_gimple_min_invariant (leader)))
7470 {
7471 if (dump_file && (dump_flags & TDF_DETAILS))
7472 {
7473 fprintf (dump_file, "Replaced redundant PHI node "
7474 "defining ");
7475 print_generic_expr (dump_file, res);
7476 fprintf (dump_file, " with ");
7477 print_generic_expr (dump_file, leader);
7478 fprintf (dump_file, "\n");
7479 }
7480 avail.eliminations++;
7481
7482 if (may_propagate_copy (res, leader))
7483 {
7484 /* Schedule for removal. */
7485 avail.to_remove.safe_push (phi);
7486 continue;
7487 }
7488 /* ??? Else generate a copy stmt. */
7489 }
7490 }
7491 }
7492 /* Only make defs available that are not already. But make
7493 sure loop-closed SSA PHI node defs are picked up for
7494 downstream uses. */
7495 if (lc_phi_nodes
7496 || res == val
7497 || ! avail.eliminate_avail (bb, res))
7498 avail.eliminate_push_avail (bb, res);
7499 }
7500
7501 /* For empty BBs mark outgoing edges executable. For non-empty BBs
7502 we do this when processing the last stmt as we have to do this
7503 before elimination which otherwise forces GIMPLE_CONDs to
7504 if (1 != 0) style when seeing non-executable edges. */
7505 if (gsi_end_p (gsi_start_bb (bb)))
7506 {
7507 FOR_EACH_EDGE (e, ei, bb->succs)
7508 {
7509 if (!(e->flags & EDGE_EXECUTABLE))
7510 {
7511 if (dump_file && (dump_flags & TDF_DETAILS))
7512 fprintf (dump_file,
7513 "marking outgoing edge %d -> %d executable\n",
7514 e->src->index, e->dest->index);
7515 e->flags |= EDGE_EXECUTABLE;
7516 e->dest->flags |= BB_EXECUTABLE;
7517 }
7518 else if (!(e->dest->flags & BB_EXECUTABLE))
7519 {
7520 if (dump_file && (dump_flags & TDF_DETAILS))
7521 fprintf (dump_file,
7522 "marking destination block %d reachable\n",
7523 e->dest->index);
7524 e->dest->flags |= BB_EXECUTABLE;
7525 }
7526 }
7527 }
7528 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7529 !gsi_end_p (gsi); gsi_next (&gsi))
7530 {
7531 ssa_op_iter i;
7532 tree op;
7533 if (!bb_visited)
7534 {
7535 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
7536 {
7537 vn_ssa_aux_t op_info = VN_INFO (op);
7538 gcc_assert (!op_info->visited);
7539 op_info->valnum = VN_TOP;
7540 op_info->visited = true;
7541 }
7542
7543 /* We somehow have to deal with uses that are not defined
7544 in the processed region. Forcing unvisited uses to
7545 varying here doesn't play well with def-use following during
7546 expression simplification, so we deal with this by checking
7547 the visited flag in SSA_VAL. */
7548 }
7549
7550 visit_stmt (gsi_stmt (gsi));
7551
7552 gimple *last = gsi_stmt (gsi);
7553 e = NULL;
7554 switch (gimple_code (last))
7555 {
7556 case GIMPLE_SWITCH:
7557 e = find_taken_edge (bb, vn_valueize (gimple_switch_index
7558 (as_a <gswitch *> (last))));
7559 break;
7560 case GIMPLE_COND:
7561 {
7562 tree lhs = vn_valueize (gimple_cond_lhs (last));
7563 tree rhs = vn_valueize (gimple_cond_rhs (last));
7564 tree val = gimple_simplify (gimple_cond_code (last),
7565 boolean_type_node, lhs, rhs,
7566 NULL, vn_valueize);
7567 /* If the condition didn't simplify see if we have recorded
7568 an expression from edges taken so far. */
7569 if (! val || TREE_CODE (val) != INTEGER_CST)
7570 {
7571 vn_nary_op_t vnresult;
7572 tree ops[2];
7573 ops[0] = lhs;
7574 ops[1] = rhs;
7575 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
7576 boolean_type_node, ops,
7577 &vnresult);
7578 /* Did we get a predicated value? */
7579 if (! val && vnresult && vnresult->predicated_values)
7580 {
7581 val = vn_nary_op_get_predicated_value (vnresult, bb);
7582 if (val && dump_file && (dump_flags & TDF_DETAILS))
7583 {
7584 fprintf (dump_file, "Got predicated value ");
7585 print_generic_expr (dump_file, val, TDF_NONE);
7586 fprintf (dump_file, " for ");
7587 print_gimple_stmt (dump_file, last, TDF_SLIM);
7588 }
7589 }
7590 }
7591 if (val)
7592 e = find_taken_edge (bb, val);
7593 if (! e)
7594 {
7595 /* If we didn't manage to compute the taken edge then
7596 push predicated expressions for the condition itself
7597 and related conditions to the hashtables. This allows
7598 simplification of redundant conditions which is
7599 important as early cleanup. */
7600 edge true_e, false_e;
7601 extract_true_false_edges_from_block (bb, &true_e, &false_e);
7602 enum tree_code code = gimple_cond_code (last);
7603 enum tree_code icode
7604 = invert_tree_comparison (code, HONOR_NANS (lhs));
7605 tree ops[2];
7606 ops[0] = lhs;
7607 ops[1] = rhs;
7608 if (do_region
7609 && bitmap_bit_p (exit_bbs, true_e->dest->index))
7610 true_e = NULL;
7611 if (do_region
7612 && bitmap_bit_p (exit_bbs, false_e->dest->index))
7613 false_e = NULL;
7614 if (true_e)
7615 vn_nary_op_insert_pieces_predicated
7616 (2, code, boolean_type_node, ops,
7617 boolean_true_node, 0, true_e);
7618 if (false_e)
7619 vn_nary_op_insert_pieces_predicated
7620 (2, code, boolean_type_node, ops,
7621 boolean_false_node, 0, false_e);
7622 if (icode != ERROR_MARK)
7623 {
7624 if (true_e)
7625 vn_nary_op_insert_pieces_predicated
7626 (2, icode, boolean_type_node, ops,
7627 boolean_false_node, 0, true_e);
7628 if (false_e)
7629 vn_nary_op_insert_pieces_predicated
7630 (2, icode, boolean_type_node, ops,
7631 boolean_true_node, 0, false_e);
7632 }
7633 /* Relax for non-integers, inverted condition handled
7634 above. */
7635 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
7636 {
7637 if (true_e)
7638 insert_related_predicates_on_edge (code, ops, true_e);
7639 if (false_e)
7640 insert_related_predicates_on_edge (icode, ops, false_e);
7641 }
7642 }
7643 break;
7644 }
7645 case GIMPLE_GOTO:
7646 e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
7647 break;
7648 default:
7649 e = NULL;
7650 }
7651 if (e)
7652 {
7653 todo = TODO_cleanup_cfg;
7654 if (!(e->flags & EDGE_EXECUTABLE))
7655 {
7656 if (dump_file && (dump_flags & TDF_DETAILS))
7657 fprintf (dump_file,
7658 "marking known outgoing %sedge %d -> %d executable\n",
7659 e->flags & EDGE_DFS_BACK ? "back-" : "",
7660 e->src->index, e->dest->index);
7661 e->flags |= EDGE_EXECUTABLE;
7662 e->dest->flags |= BB_EXECUTABLE;
7663 }
7664 else if (!(e->dest->flags & BB_EXECUTABLE))
7665 {
7666 if (dump_file && (dump_flags & TDF_DETAILS))
7667 fprintf (dump_file,
7668 "marking destination block %d reachable\n",
7669 e->dest->index);
7670 e->dest->flags |= BB_EXECUTABLE;
7671 }
7672 }
7673 else if (gsi_one_before_end_p (gsi))
7674 {
7675 FOR_EACH_EDGE (e, ei, bb->succs)
7676 {
7677 if (!(e->flags & EDGE_EXECUTABLE))
7678 {
7679 if (dump_file && (dump_flags & TDF_DETAILS))
7680 fprintf (dump_file,
7681 "marking outgoing edge %d -> %d executable\n",
7682 e->src->index, e->dest->index);
7683 e->flags |= EDGE_EXECUTABLE;
7684 e->dest->flags |= BB_EXECUTABLE;
7685 }
7686 else if (!(e->dest->flags & BB_EXECUTABLE))
7687 {
7688 if (dump_file && (dump_flags & TDF_DETAILS))
7689 fprintf (dump_file,
7690 "marking destination block %d reachable\n",
7691 e->dest->index);
7692 e->dest->flags |= BB_EXECUTABLE;
7693 }
7694 }
7695 }
7696
7697 /* Eliminate. That also pushes to avail. */
7698 if (eliminate && ! iterate)
7699 avail.eliminate_stmt (bb, &gsi);
7700 else
7701 /* If not eliminating, make all not already available defs
7702 available. */
7703 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
7704 if (! avail.eliminate_avail (bb, op))
7705 avail.eliminate_push_avail (bb, op);
7706 }
7707
7708 /* Eliminate in destination PHI arguments. Always substitute in dest
7709 PHIs, even for non-executable edges. This handles region
7710 exits PHIs. */
7711 if (!iterate && eliminate)
7712 FOR_EACH_EDGE (e, ei, bb->succs)
7713 for (gphi_iterator gsi = gsi_start_phis (e->dest);
7714 !gsi_end_p (gsi); gsi_next (&gsi))
7715 {
7716 gphi *phi = gsi.phi ();
7717 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
7718 tree arg = USE_FROM_PTR (use_p);
7719 if (TREE_CODE (arg) != SSA_NAME
7720 || virtual_operand_p (arg))
7721 continue;
7722 tree sprime;
7723 if (SSA_NAME_IS_DEFAULT_DEF (arg))
7724 {
7725 sprime = SSA_VAL (arg);
7726 gcc_assert (TREE_CODE (sprime) != SSA_NAME
7727 || SSA_NAME_IS_DEFAULT_DEF (sprime));
7728 }
7729 else
7730 /* Look for sth available at the definition block of the argument.
7731 This avoids inconsistencies between availability there which
7732 decides if the stmt can be removed and availability at the
7733 use site. The SSA property ensures that things available
7734 at the definition are also available at uses. */
7735 sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
7736 arg);
7737 if (sprime
7738 && sprime != arg
7739 && may_propagate_copy (arg, sprime))
7740 propagate_value (use_p, sprime);
7741 }
7742
7743 vn_context_bb = NULL;
7744 return todo;
7745 }
7746
7747 /* Unwind state per basic-block. */
7748
7749 struct unwind_state
7750 {
7751 /* Times this block has been visited. */
7752 unsigned visited;
7753 /* Whether to handle this as iteration point or whether to treat
7754 incoming backedge PHI values as varying. */
7755 bool iterate;
7756 /* Maximum RPO index this block is reachable from. */
7757 int max_rpo;
7758 /* Unwind state. */
7759 void *ob_top;
7760 vn_reference_t ref_top;
7761 vn_phi_t phi_top;
7762 vn_nary_op_t nary_top;
7763 vn_avail *avail_top;
7764 };
7765
7766 /* Unwind the RPO VN state for iteration. */
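/* This pops everything recorded since the unwind_state TO was
   snapshotted: nary/phi/reference hashtable insertions, the obstack
   memory backing them and the availability pushes, so re-visiting the
   blocks starts from the state present at that RPO point.  */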
7767
7768 static void
7769 do_unwind (unwind_state *to, rpo_elim &avail)
7770 {
7771 gcc_assert (to->iterate);
7772 for (; last_inserted_nary != to->nary_top;
7773 last_inserted_nary = last_inserted_nary->next)
7774 {
7775 vn_nary_op_t *slot;
7776 slot = valid_info->nary->find_slot_with_hash
7777 (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
7778 /* Predication causes the need to restore previous state. */
7779 if ((*slot)->unwind_to)
7780 *slot = (*slot)->unwind_to;
7781 else
7782 valid_info->nary->clear_slot (slot);
7783 }
7784 for (; last_inserted_phi != to->phi_top;
7785 last_inserted_phi = last_inserted_phi->next)
7786 {
7787 vn_phi_t *slot;
7788 slot = valid_info->phis->find_slot_with_hash
7789 (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
7790 valid_info->phis->clear_slot (slot);
7791 }
7792 for (; last_inserted_ref != to->ref_top;
7793 last_inserted_ref = last_inserted_ref->next)
7794 {
7795 vn_reference_t *slot;
7796 slot = valid_info->references->find_slot_with_hash
7797 (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
7798 (*slot)->operands.release ();
7799 valid_info->references->clear_slot (slot);
7800 }
7801 obstack_free (&vn_tables_obstack, to->ob_top);
7802
7803 /* Prune [rpo_idx, ] from avail. */
7804 for (; last_pushed_avail && last_pushed_avail->avail != to->avail_top;)
7805 {
7806 vn_ssa_aux_t val = last_pushed_avail;
7807 vn_avail *av = val->avail;
7808 val->avail = av->next;
7809 last_pushed_avail = av->next_undo;
7810 av->next = avail.m_avail_freelist;
7811 avail.m_avail_freelist = av;
7812 }
7813 }
7814
7815 /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
7816 If ITERATE is true then treat backedges optimistically as not
7817 executed and iterate. If ELIMINATE is true then perform
7818 elimination, otherwise leave that to the caller. */
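/* A condensed usage sketch, mirroring pass_fre::execute further below:

     calculate_dominance_info (CDI_DOMINATORS);
     unsigned todo = do_rpo_vn (cfun, NULL, NULL, iterate_p, true,
				VN_WALKREWRITE);
     free_rpo_vn ();

   A NULL entry edge value-numbers the whole function; a region is
   selected by passing its entry edge and a bitmap of the blocks that
   form its exits.  When iterate_p is true, up-to-date loop info is
   required as asserted below.  */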
7819
7820 unsigned
7821 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
7822 bool iterate, bool eliminate, vn_lookup_kind kind)
7823 {
7824 unsigned todo = 0;
7825 default_vn_walk_kind = kind;
7826
7827 /* We currently do not support region-based iteration when
7828 elimination is requested. */
7829 gcc_assert (!entry || !iterate || !eliminate);
7830 /* When iterating we need loop info up-to-date. */
7831 gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
7832
7833 bool do_region = entry != NULL;
7834 if (!do_region)
7835 {
7836 entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
7837 exit_bbs = BITMAP_ALLOC (NULL);
7838 bitmap_set_bit (exit_bbs, EXIT_BLOCK);
7839 }
7840
7841 /* Clear EDGE_DFS_BACK on "all" entry edges, RPO order compute will
7842 re-mark those that are contained in the region. */
7843 edge_iterator ei;
7844 edge e;
7845 FOR_EACH_EDGE (e, ei, entry->dest->preds)
7846 e->flags &= ~EDGE_DFS_BACK;
7847
7848 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
7849 auto_vec<std::pair<int, int> > toplevel_scc_extents;
7850 int n = rev_post_order_and_mark_dfs_back_seme
7851 (fn, entry, exit_bbs, true, rpo, !iterate ? &toplevel_scc_extents : NULL);
7852
7853 if (!do_region)
7854 BITMAP_FREE (exit_bbs);
7855
7856 /* If there are any non-DFS_BACK edges into entry->dest skip
7857 processing PHI nodes for that block. This supports
7858 value-numbering loop bodies w/o the actual loop. */
7859 FOR_EACH_EDGE (e, ei, entry->dest->preds)
7860 if (e != entry
7861 && !(e->flags & EDGE_DFS_BACK))
7862 break;
7863 bool skip_entry_phis = e != NULL;
7864 if (skip_entry_phis && dump_file && (dump_flags & TDF_DETAILS))
7865 fprintf (dump_file, "Region does not contain all edges into "
7866 "the entry block, skipping its PHIs.\n");
7867
7868 int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
7869 for (int i = 0; i < n; ++i)
7870 bb_to_rpo[rpo[i]] = i;
7871
7872 unwind_state *rpo_state = XNEWVEC (unwind_state, n);
7873
7874 rpo_elim avail (entry->dest);
7875 rpo_avail = &avail;
7876
7877 /* Verify we have no extra entries into the region. */
7878 if (flag_checking && do_region)
7879 {
7880 auto_bb_flag bb_in_region (fn);
7881 for (int i = 0; i < n; ++i)
7882 {
7883 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7884 bb->flags |= bb_in_region;
7885 }
7886 /* We can't merge the first two loops because we cannot rely
7887 on EDGE_DFS_BACK for edges not within the region. But if
7888 we decide to always have the bb_in_region flag we can
7889 do the checking during the RPO walk itself (but then it's
7890 also easy to handle MEME conservatively). */
7891 for (int i = 0; i < n; ++i)
7892 {
7893 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7894 edge e;
7895 edge_iterator ei;
7896 FOR_EACH_EDGE (e, ei, bb->preds)
7897 gcc_assert (e == entry
7898 || (skip_entry_phis && bb == entry->dest)
7899 || (e->src->flags & bb_in_region));
7900 }
7901 for (int i = 0; i < n; ++i)
7902 {
7903 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7904 bb->flags &= ~bb_in_region;
7905 }
7906 }
7907
7908 /* Create the VN state. For the initial size of the various hashtables
7909 use a heuristic based on region size and number of SSA names. */
7910 unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
7911 / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
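/* E.g. (hypothetical numbers) a 50-block region in a function with 200
   non-fixed blocks and 4000 SSA names yields 50 * 4000 / 200 = 1000 as
   the initial table-size hint.  */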
7912 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
7913 next_value_id = 1;
7914 next_constant_value_id = -1;
7915
7916 vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
7917 gcc_obstack_init (&vn_ssa_aux_obstack);
7918
7919 gcc_obstack_init (&vn_tables_obstack);
7920 gcc_obstack_init (&vn_tables_insert_obstack);
7921 valid_info = XCNEW (struct vn_tables_s);
7922 allocate_vn_table (valid_info, region_size);
7923 last_inserted_ref = NULL;
7924 last_inserted_phi = NULL;
7925 last_inserted_nary = NULL;
7926 last_pushed_avail = NULL;
7927
7928 vn_valueize = rpo_vn_valueize;
7929
7930 /* Initialize the unwind state and edge/BB executable state. */
7931 unsigned curr_scc = 0;
7932 for (int i = 0; i < n; ++i)
7933 {
7934 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7935 rpo_state[i].visited = 0;
7936 rpo_state[i].max_rpo = i;
7937 if (!iterate && curr_scc < toplevel_scc_extents.length ())
7938 {
7939 if (i >= toplevel_scc_extents[curr_scc].first
7940 && i <= toplevel_scc_extents[curr_scc].second)
7941 rpo_state[i].max_rpo = toplevel_scc_extents[curr_scc].second;
7942 if (i == toplevel_scc_extents[curr_scc].second)
7943 curr_scc++;
7944 }
7945 bb->flags &= ~BB_EXECUTABLE;
7946 bool has_backedges = false;
7947 edge e;
7948 edge_iterator ei;
7949 FOR_EACH_EDGE (e, ei, bb->preds)
7950 {
7951 if (e->flags & EDGE_DFS_BACK)
7952 has_backedges = true;
7953 e->flags &= ~EDGE_EXECUTABLE;
7954 if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
7955 continue;
7956 }
7957 rpo_state[i].iterate = iterate && has_backedges;
7958 }
7959 entry->flags |= EDGE_EXECUTABLE;
7960 entry->dest->flags |= BB_EXECUTABLE;
7961
7962 /* As heuristic to improve compile-time we handle only the N innermost
7963 loops and the outermost one optimistically. */
7964 if (iterate)
7965 {
7966 unsigned max_depth = param_rpo_vn_max_loop_depth;
7967 for (auto loop : loops_list (cfun, LI_ONLY_INNERMOST))
7968 if (loop_depth (loop) > max_depth)
7969 for (unsigned i = 2;
7970 i < loop_depth (loop) - max_depth; ++i)
7971 {
7972 basic_block header = superloop_at_depth (loop, i)->header;
7973 bool non_latch_backedge = false;
7974 edge e;
7975 edge_iterator ei;
7976 FOR_EACH_EDGE (e, ei, header->preds)
7977 if (e->flags & EDGE_DFS_BACK)
7978 {
7979 /* There can be a non-latch backedge into the header
7980 which is part of an outer irreducible region. We
7981 cannot avoid iterating this block then. */
7982 if (!dominated_by_p (CDI_DOMINATORS,
7983 e->src, e->dest))
7984 {
7985 if (dump_file && (dump_flags & TDF_DETAILS))
7986 fprintf (dump_file, "non-latch backedge %d -> %d "
7987 "forces iteration of loop %d\n",
7988 e->src->index, e->dest->index, loop->num);
7989 non_latch_backedge = true;
7990 }
7991 else
7992 e->flags |= EDGE_EXECUTABLE;
7993 }
7994 rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
7995 }
7996 }
7997
7998 uint64_t nblk = 0;
7999 int idx = 0;
8000 if (iterate)
8001 /* Go and process all blocks, iterating as necessary. */
8002 do
8003 {
8004 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
8005
8006 /* If the block has incoming backedges remember unwind state. This
8007 is required even for non-executable blocks since in irreducible
8008 regions we might reach them via the backedge and re-start iterating
8009 from there.
8010 Note we can individually mark blocks with incoming backedges to
8011 not iterate where we then handle PHIs conservatively. We do that
8012 heuristically to reduce compile-time for degenerate cases. */
8013 if (rpo_state[idx].iterate)
8014 {
8015 rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
8016 rpo_state[idx].ref_top = last_inserted_ref;
8017 rpo_state[idx].phi_top = last_inserted_phi;
8018 rpo_state[idx].nary_top = last_inserted_nary;
8019 rpo_state[idx].avail_top
8020 = last_pushed_avail ? last_pushed_avail->avail : NULL;
8021 }
8022
8023 if (!(bb->flags & BB_EXECUTABLE))
8024 {
8025 if (dump_file && (dump_flags & TDF_DETAILS))
8026 fprintf (dump_file, "Block %d: BB%d found not executable\n",
8027 idx, bb->index);
8028 idx++;
8029 continue;
8030 }
8031
8032 if (dump_file && (dump_flags & TDF_DETAILS))
8033 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
8034 nblk++;
8035 todo |= process_bb (avail, bb,
8036 rpo_state[idx].visited != 0,
8037 rpo_state[idx].iterate,
8038 iterate, eliminate, do_region, exit_bbs, false);
8039 rpo_state[idx].visited++;
8040
8041 /* Verify if changed values flow over executable outgoing backedges
8042 and those change destination PHI values (that's the thing we
8043 can easily verify). Reduce over all such edges to the farthest
8044 away PHI. */
8045 int iterate_to = -1;
8046 edge_iterator ei;
8047 edge e;
8048 FOR_EACH_EDGE (e, ei, bb->succs)
8049 if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
8050 == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
8051 && rpo_state[bb_to_rpo[e->dest->index]].iterate)
8052 {
8053 int destidx = bb_to_rpo[e->dest->index];
8054 if (!rpo_state[destidx].visited)
8055 {
8056 if (dump_file && (dump_flags & TDF_DETAILS))
8057 fprintf (dump_file, "Unvisited destination %d\n",
8058 e->dest->index);
8059 if (iterate_to == -1 || destidx < iterate_to)
8060 iterate_to = destidx;
8061 continue;
8062 }
8063 if (dump_file && (dump_flags & TDF_DETAILS))
8064 fprintf (dump_file, "Looking for changed values of backedge"
8065 " %d->%d destination PHIs\n",
8066 e->src->index, e->dest->index);
8067 vn_context_bb = e->dest;
8068 gphi_iterator gsi;
8069 for (gsi = gsi_start_phis (e->dest);
8070 !gsi_end_p (gsi); gsi_next (&gsi))
8071 {
8072 bool inserted = false;
8073 /* While we'd ideally just iterate on value changes
8074 we CSE PHIs and do that even across basic-block
8075 boundaries. So even hashtable state changes can
8076 be important (which is roughly equivalent to
8077 PHI argument value changes). To not excessively
8078 iterate because of that we track whether a PHI
8079 was CSEd to with GF_PLF_1. */
8080 bool phival_changed;
8081 if ((phival_changed = visit_phi (gsi.phi (),
8082 &inserted, false))
8083 || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
8084 {
8085 if (!phival_changed
8086 && dump_file && (dump_flags & TDF_DETAILS))
8087 fprintf (dump_file, "PHI was CSEd and hashtable "
8088 "state (changed)\n");
8089 if (iterate_to == -1 || destidx < iterate_to)
8090 iterate_to = destidx;
8091 break;
8092 }
8093 }
8094 vn_context_bb = NULL;
8095 }
8096 if (iterate_to != -1)
8097 {
8098 do_unwind (&rpo_state[iterate_to], avail);
8099 idx = iterate_to;
8100 if (dump_file && (dump_flags & TDF_DETAILS))
8101 fprintf (dump_file, "Iterating to %d BB%d\n",
8102 iterate_to, rpo[iterate_to]);
8103 continue;
8104 }
8105
8106 idx++;
8107 }
8108 while (idx < n);
8109
8110 else /* !iterate */
8111 {
8112 /* Process all blocks greedily with a worklist that enforces RPO
8113 processing of reachable blocks. */
8114 auto_bitmap worklist;
8115 bitmap_set_bit (worklist, 0);
8116 while (!bitmap_empty_p (worklist))
8117 {
8118 int idx = bitmap_first_set_bit (worklist);
8119 bitmap_clear_bit (worklist, idx);
8120 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
8121 gcc_assert ((bb->flags & BB_EXECUTABLE)
8122 && !rpo_state[idx].visited);
8123
8124 if (dump_file && (dump_flags & TDF_DETAILS))
8125 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
8126
8127 /* When we run into predecessor edges where we cannot trust its
8128 executable state mark them executable so PHI processing will
8129 be conservative.
8130 ??? Do we need to force arguments flowing over that edge
8131 to be varying or will they even always be? */
8132 edge_iterator ei;
8133 edge e;
8134 FOR_EACH_EDGE (e, ei, bb->preds)
8135 if (!(e->flags & EDGE_EXECUTABLE)
8136 && (bb == entry->dest
8137 || (!rpo_state[bb_to_rpo[e->src->index]].visited
8138 && (rpo_state[bb_to_rpo[e->src->index]].max_rpo
8139 >= (int)idx))))
8140 {
8141 if (dump_file && (dump_flags & TDF_DETAILS))
8142 fprintf (dump_file, "Cannot trust state of predecessor "
8143 "edge %d -> %d, marking executable\n",
8144 e->src->index, e->dest->index);
8145 e->flags |= EDGE_EXECUTABLE;
8146 }
8147
8148 nblk++;
8149 todo |= process_bb (avail, bb, false, false, false, eliminate,
8150 do_region, exit_bbs,
8151 skip_entry_phis && bb == entry->dest);
8152 rpo_state[idx].visited++;
8153
8154 FOR_EACH_EDGE (e, ei, bb->succs)
8155 if ((e->flags & EDGE_EXECUTABLE)
8156 && e->dest->index != EXIT_BLOCK
8157 && (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
8158 && !rpo_state[bb_to_rpo[e->dest->index]].visited)
8159 bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
8160 }
8161 }
8162
8163 /* If statistics or dump file active. */
8164 int nex = 0;
8165 unsigned max_visited = 1;
8166 for (int i = 0; i < n; ++i)
8167 {
8168 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
8169 if (bb->flags & BB_EXECUTABLE)
8170 nex++;
8171 statistics_histogram_event (cfun, "RPO block visited times",
8172 rpo_state[i].visited);
8173 if (rpo_state[i].visited > max_visited)
8174 max_visited = rpo_state[i].visited;
8175 }
8176 unsigned nvalues = 0, navail = 0;
8177 for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
8178 i != vn_ssa_aux_hash->end (); ++i)
8179 {
8180 nvalues++;
8181 vn_avail *av = (*i)->avail;
8182 while (av)
8183 {
8184 navail++;
8185 av = av->next;
8186 }
8187 }
8188 statistics_counter_event (cfun, "RPO blocks", n);
8189 statistics_counter_event (cfun, "RPO blocks visited", nblk);
8190 statistics_counter_event (cfun, "RPO blocks executable", nex);
8191 statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
8192 statistics_histogram_event (cfun, "RPO num values", nvalues);
8193 statistics_histogram_event (cfun, "RPO num avail", navail);
8194 statistics_histogram_event (cfun, "RPO num lattice",
8195 vn_ssa_aux_hash->elements ());
8196 if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
8197 {
8198 fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
8199 " blocks in total discovering %d executable blocks iterating "
8200 "%d.%d times, a block was visited max. %u times\n",
8201 n, nblk, nex,
8202 (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
8203 max_visited);
8204 fprintf (dump_file, "RPO tracked %d values available at %d locations "
8205 "and %" PRIu64 " lattice elements\n",
8206 nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
8207 }
8208
8209 if (eliminate)
8210 {
8211 /* When !iterate we already performed elimination during the RPO
8212 walk. */
8213 if (iterate)
8214 {
8215 /* Elimination for region-based VN needs to be done within the
8216 RPO walk. */
8217 gcc_assert (! do_region);
8218 /* Note we can't use avail.walk here because that gets confused
8219 by the existing availability and it will be less efficient
8220 as well. */
8221 todo |= eliminate_with_rpo_vn (NULL);
8222 }
8223 else
8224 todo |= avail.eliminate_cleanup (do_region);
8225 }
8226
8227 vn_valueize = NULL;
8228 rpo_avail = NULL;
8229
8230 XDELETEVEC (bb_to_rpo);
8231 XDELETEVEC (rpo);
8232 XDELETEVEC (rpo_state);
8233
8234 return todo;
8235 }
8236
8237 /* Region-based entry for RPO VN. Performs value-numbering and elimination
8238 on the SEME region specified by ENTRY and EXIT_BBS. If ENTRY is not
8239 the only edge into the region at ENTRY->dest PHI nodes in ENTRY->dest
8240 are not considered. */
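/* A hypothetical caller value-numbering a single-entry region could do:

     auto_bitmap exit_bbs;
     bitmap_set_bit (exit_bbs, exit_bb->index);
     unsigned todo = do_rpo_vn (cfun, entry_edge, exit_bbs);

   where entry_edge is the sole edge into the region and exit_bb the
   block just past it; the names are illustrative only.  */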
8241
8242 unsigned
8243 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
8244 {
8245 unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true, VN_WALKREWRITE);
8246 free_rpo_vn ();
8247 return todo;
8248 }
8249
8250
8251 namespace {
8252
8253 const pass_data pass_data_fre =
8254 {
8255 GIMPLE_PASS, /* type */
8256 "fre", /* name */
8257 OPTGROUP_NONE, /* optinfo_flags */
8258 TV_TREE_FRE, /* tv_id */
8259 ( PROP_cfg | PROP_ssa ), /* properties_required */
8260 0, /* properties_provided */
8261 0, /* properties_destroyed */
8262 0, /* todo_flags_start */
8263 0, /* todo_flags_finish */
8264 };
8265
8266 class pass_fre : public gimple_opt_pass
8267 {
8268 public:
8269 pass_fre (gcc::context *ctxt)
8270 : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
8271 {}
8272
8273 /* opt_pass methods: */
8274 opt_pass * clone () { return new pass_fre (m_ctxt); }
8275 void set_pass_param (unsigned int n, bool param)
8276 {
8277 gcc_assert (n == 0);
8278 may_iterate = param;
8279 }
8280 virtual bool gate (function *)
8281 {
8282 return flag_tree_fre != 0 && (may_iterate || optimize > 1);
8283 }
8284 virtual unsigned int execute (function *);
8285
8286 private:
8287 bool may_iterate;
8288 }; // class pass_fre
8289
8290 unsigned int
8291 pass_fre::execute (function *fun)
8292 {
8293 unsigned todo = 0;
8294
8295 /* At -O[1g] use the cheap non-iterating mode. */
8296 bool iterate_p = may_iterate && (optimize > 1);
8297 calculate_dominance_info (CDI_DOMINATORS);
8298 if (iterate_p)
8299 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
8300
8301 todo = do_rpo_vn (fun, NULL, NULL, iterate_p, true, VN_WALKREWRITE);
8302 free_rpo_vn ();
8303
8304 if (iterate_p)
8305 loop_optimizer_finalize ();
8306
8307 if (scev_initialized_p ())
8308 scev_reset_htab ();
8309
8310 /* For late FRE after IVOPTs and unrolling, see if we can
8311 remove some TREE_ADDRESSABLE and rewrite stuff into SSA. */
8312 if (!may_iterate)
8313 todo |= TODO_update_address_taken;
8314
8315 return todo;
8316 }
8317
8318 } // anon namespace
8319
8320 gimple_opt_pass *
8321 make_pass_fre (gcc::context *ctxt)
8322 {
8323 return new pass_fre (ctxt);
8324 }
8325
8326 #undef BB_EXECUTABLE
8327