1 /* Interprocedural analyses.
2 Copyright (C) 2005-2022 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "ipa-utils.h"
51 #include "dbgcnt.h"
52 #include "domwalk.h"
53 #include "builtins.h"
54 #include "tree-cfgcleanup.h"
55 #include "options.h"
56 #include "symtab-clones.h"
57 #include "attr-fnspec.h"
58 #include "gimple-range.h"
59
60 /* Function summary where the parameter infos are actually stored. */
61 ipa_node_params_t *ipa_node_params_sum = NULL;
62
63 function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;
64
65 /* Edge summary for IPA-CP edge information. */
66 ipa_edge_args_sum_t *ipa_edge_args_sum;
67
68 /* Traits for a hash table for reusing already existing ipa_bits. */
69
70 struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
71 {
72 typedef ipa_bits *value_type;
73 typedef ipa_bits *compare_type;
74 static hashval_t
75 hash (const ipa_bits *p)
76 {
77 hashval_t t = (hashval_t) p->value.to_shwi ();
78 return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
79 }
80 static bool
81 equal (const ipa_bits *a, const ipa_bits *b)
82 {
83 return a->value == b->value && a->mask == b->mask;
84 }
85 static const bool empty_zero_p = true;
86 static void
87 mark_empty (ipa_bits *&p)
88 {
89 p = NULL;
90 }
91 static bool
92 is_empty (const ipa_bits *p)
93 {
94 return p == NULL;
95 }
96 static bool
97 is_deleted (const ipa_bits *p)
98 {
99 return p == reinterpret_cast<const ipa_bits *> (1);
100 }
101 static void
102 mark_deleted (ipa_bits *&p)
103 {
104 p = reinterpret_cast<ipa_bits *> (1);
105 }
106 };
107
108 /* Hash table for avoiding repeated allocations of equal ipa_bits. */
109 static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
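/* For illustration only: a sketch of how such a cache is typically consulted
   so that structurally equal ipa_bits are shared. The real lookup helper
   lives elsewhere in this file; the exact code below is an assumption, not a
   quote of it:

     ipa_bits tmp;
     tmp.value = value;
     tmp.mask = mask;
     ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
     if (*slot)
       return *slot;		   // reuse an equal, already allocated entry
     ipa_bits *res = ggc_alloc<ipa_bits> ();
     res->value = value;
     res->mask = mask;
     *slot = res;
     return res;							  */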
110
111 /* Traits for a hash table for reusing value_ranges used for IPA. Note that
112 the equiv bitmap is not hashed and is expected to be NULL. */
113
114 struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
115 {
116 typedef value_range *value_type;
117 typedef value_range *compare_type;
118 static hashval_t
119 hash (const value_range *p)
120 {
121 inchash::hash hstate (p->kind ());
122 inchash::add_expr (p->min (), hstate);
123 inchash::add_expr (p->max (), hstate);
124 return hstate.end ();
125 }
126 static bool
127 equal (const value_range *a, const value_range *b)
128 {
129 return (a->equal_p (*b)
130 && types_compatible_p (a->type (), b->type ()));
131 }
132 static const bool empty_zero_p = true;
133 static void
134 mark_empty (value_range *&p)
135 {
136 p = NULL;
137 }
138 static bool
139 is_empty (const value_range *p)
140 {
141 return p == NULL;
142 }
143 static bool
144 is_deleted (const value_range *p)
145 {
146 return p == reinterpret_cast<const value_range *> (1);
147 }
148 static void
149 mark_deleted (value_range *&p)
150 {
151 p = reinterpret_cast<value_range *> (1);
152 }
153 };
154
155 /* Hash table for avoiding repeated allocations of equal value_ranges. */
156 static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
157
158 /* Holders of ipa cgraph hooks: */
159 static struct cgraph_node_hook_list *function_insertion_hook_holder;
160
161 /* Description of a reference to an IPA constant. */
162 struct ipa_cst_ref_desc
163 {
164 /* Edge that corresponds to the statement which took the reference. */
165 struct cgraph_edge *cs;
166 /* Linked list of duplicates created when call graph edges are cloned. */
167 struct ipa_cst_ref_desc *next_duplicate;
168 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
169 is out of control. */
170 int refcount;
171 };
172
173 /* Allocation pool for reference descriptions. */
174
175 static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
176 ("IPA-PROP ref descriptions");
177
178 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
179 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
180
181 static bool
182 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
183 {
184 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
185
186 if (!fs_opts)
187 return false;
188 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
189 }
190
191 /* Return the index of the formal parameter whose tree is PTREE in the function
192 whose parameters are described by DESCRIPTORS. */
193
194 static int
195 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
196 tree ptree)
197 {
198 int i, count;
199
200 count = vec_safe_length (descriptors);
201 for (i = 0; i < count; i++)
202 if ((*descriptors)[i].decl_or_type == ptree)
203 return i;
204
205 return -1;
206 }
207
208 /* Return the index of the formal parameter whose tree is PTREE in the function
209 which corresponds to INFO. */
210
211 int
212 ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
213 {
214 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
215 }
216
217 /* Populate the decl_or_type fields in parameter DESCRIPTORS that correspond to
218 NODE. */
219
220 static void
221 ipa_populate_param_decls (struct cgraph_node *node,
222 vec<ipa_param_descriptor, va_gc> &descriptors)
223 {
224 tree fndecl;
225 tree fnargs;
226 tree parm;
227 int param_num;
228
229 fndecl = node->decl;
230 gcc_assert (gimple_has_body_p (fndecl));
231 fnargs = DECL_ARGUMENTS (fndecl);
232 param_num = 0;
233 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
234 {
235 descriptors[param_num].decl_or_type = parm;
236 unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
237 descriptors[param_num].move_cost = cost;
238 /* Watch overflow, move_cost is a bitfield. */
239 gcc_checking_assert (cost == descriptors[param_num].move_cost);
240 param_num++;
241 }
242 }
243
244 /* Return how many formal parameters FNDECL has. */
245
246 int
247 count_formal_params (tree fndecl)
248 {
249 tree parm;
250 int count = 0;
251 gcc_assert (gimple_has_body_p (fndecl));
252
253 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
254 count++;
255
256 return count;
257 }
258
259 /* Dump, to FILE, information about the I-th formal parameter of the function
260 corresponding to INFO. */
262
263 void
264 ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
265 {
266 fprintf (file, "param #%i", i);
267 if ((*info->descriptors)[i].decl_or_type)
268 {
269 fprintf (file, " ");
270 print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
271 }
272 }
273
274 /* If necessary, allocate vector of parameter descriptors in info of NODE.
275 Return true if they were allocated, false if not. */
276
277 static bool
278 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
279 {
280 ipa_node_params *info = ipa_node_params_sum->get_create (node);
281
282 if (!info->descriptors && param_count)
283 {
284 vec_safe_grow_cleared (info->descriptors, param_count, true);
285 return true;
286 }
287 else
288 return false;
289 }
290
291 /* Initialize the ipa_node_params structure associated with NODE by counting
292 the function parameters, creating the descriptors and populating their
293 param_decls. */
294
295 void
296 ipa_initialize_node_params (struct cgraph_node *node)
297 {
298 ipa_node_params *info = ipa_node_params_sum->get_create (node);
299
300 if (!info->descriptors
301 && ipa_alloc_node_params (node, count_formal_params (node->decl)))
302 ipa_populate_param_decls (node, *info->descriptors);
303 }
304
305 /* Print the jump functions associated with call graph edge CS to file F. */
306
307 static void
308 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
309 {
310 ipa_edge_args *args = ipa_edge_args_sum->get (cs);
311 int count = ipa_get_cs_argument_count (args);
312
313 for (int i = 0; i < count; i++)
314 {
315 struct ipa_jump_func *jump_func;
316 enum jump_func_type type;
317
318 jump_func = ipa_get_ith_jump_func (args, i);
319 type = jump_func->type;
320
321 fprintf (f, " param %d: ", i);
322 if (type == IPA_JF_UNKNOWN)
323 fprintf (f, "UNKNOWN\n");
324 else if (type == IPA_JF_CONST)
325 {
326 tree val = jump_func->value.constant.value;
327 fprintf (f, "CONST: ");
328 print_generic_expr (f, val);
329 if (TREE_CODE (val) == ADDR_EXPR
330 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
331 {
332 fprintf (f, " -> ");
333 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
334 }
335 fprintf (f, "\n");
336 }
337 else if (type == IPA_JF_PASS_THROUGH)
338 {
339 fprintf (f, "PASS THROUGH: ");
340 fprintf (f, "%d, op %s",
341 jump_func->value.pass_through.formal_id,
342 get_tree_code_name(jump_func->value.pass_through.operation));
343 if (jump_func->value.pass_through.operation != NOP_EXPR)
344 {
345 fprintf (f, " ");
346 print_generic_expr (f, jump_func->value.pass_through.operand);
347 }
348 if (jump_func->value.pass_through.agg_preserved)
349 fprintf (f, ", agg_preserved");
350 if (jump_func->value.pass_through.refdesc_decremented)
351 fprintf (f, ", refdesc_decremented");
352 fprintf (f, "\n");
353 }
354 else if (type == IPA_JF_ANCESTOR)
355 {
356 fprintf (f, "ANCESTOR: ");
357 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
358 jump_func->value.ancestor.formal_id,
359 jump_func->value.ancestor.offset);
360 if (jump_func->value.ancestor.agg_preserved)
361 fprintf (f, ", agg_preserved");
362 if (jump_func->value.ancestor.keep_null)
363 fprintf (f, ", keep_null");
364 fprintf (f, "\n");
365 }
366
367 if (jump_func->agg.items)
368 {
369 struct ipa_agg_jf_item *item;
370 int j;
371
372 fprintf (f, " Aggregate passed by %s:\n",
373 jump_func->agg.by_ref ? "reference" : "value");
374 FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
375 {
376 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
377 item->offset);
378 fprintf (f, "type: ");
379 print_generic_expr (f, item->type);
380 fprintf (f, ", ");
381 if (item->jftype == IPA_JF_PASS_THROUGH)
382 fprintf (f, "PASS THROUGH: %d,",
383 item->value.pass_through.formal_id);
384 else if (item->jftype == IPA_JF_LOAD_AGG)
385 {
386 fprintf (f, "LOAD AGG: %d",
387 item->value.pass_through.formal_id);
388 fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
389 item->value.load_agg.offset,
390 item->value.load_agg.by_ref ? "reference"
391 : "value");
392 }
393
394 if (item->jftype == IPA_JF_PASS_THROUGH
395 || item->jftype == IPA_JF_LOAD_AGG)
396 {
397 fprintf (f, " op %s",
398 get_tree_code_name (item->value.pass_through.operation));
399 if (item->value.pass_through.operation != NOP_EXPR)
400 {
401 fprintf (f, " ");
402 print_generic_expr (f, item->value.pass_through.operand);
403 }
404 }
405 else if (item->jftype == IPA_JF_CONST)
406 {
407 fprintf (f, "CONST: ");
408 print_generic_expr (f, item->value.constant);
409 }
410 else if (item->jftype == IPA_JF_UNKNOWN)
411 fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
412 tree_to_uhwi (TYPE_SIZE (item->type)));
413 fprintf (f, "\n");
414 }
415 }
416
417 class ipa_polymorphic_call_context *ctx
418 = ipa_get_ith_polymorhic_call_context (args, i);
419 if (ctx && !ctx->useless_p ())
420 {
421 fprintf (f, " Context: ");
422 ctx->dump (dump_file);
423 }
424
425 if (jump_func->bits)
426 {
427 fprintf (f, " value: ");
428 print_hex (jump_func->bits->value, f);
429 fprintf (f, ", mask: ");
430 print_hex (jump_func->bits->mask, f);
431 fprintf (f, "\n");
432 }
433 else
434 fprintf (f, " Unknown bits\n");
435
436 if (jump_func->m_vr)
437 {
438 fprintf (f, " VR ");
439 fprintf (f, "%s[",
440 (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
441 print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
442 fprintf (f, ", ");
443 print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
444 fprintf (f, "]\n");
445 }
446 else
447 fprintf (f, " Unknown VR\n");
448 }
449 }
450
451
452 /* Print the jump functions of all arguments on all call graph edges going from
453 NODE to file F. */
454
455 void
456 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
457 {
458 struct cgraph_edge *cs;
459
460 fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
461 for (cs = node->callees; cs; cs = cs->next_callee)
462 {
463
464 fprintf (f, " callsite %s -> %s : \n",
465 node->dump_name (),
466 cs->callee->dump_name ());
467 if (!ipa_edge_args_info_available_for_edge_p (cs))
468 fprintf (f, " no arg info\n");
469 else
470 ipa_print_node_jump_functions_for_edge (f, cs);
471 }
472
473 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
474 {
475 class cgraph_indirect_call_info *ii;
476
477 ii = cs->indirect_info;
478 if (ii->agg_contents)
479 fprintf (f, " indirect %s callsite, calling param %i, "
480 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
481 ii->member_ptr ? "member ptr" : "aggregate",
482 ii->param_index, ii->offset,
483 ii->by_ref ? "by reference" : "by_value");
484 else
485 fprintf (f, " indirect %s callsite, calling param %i, "
486 "offset " HOST_WIDE_INT_PRINT_DEC,
487 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
488 ii->offset);
489
490 if (cs->call_stmt)
491 {
492 fprintf (f, ", for stmt ");
493 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
494 }
495 else
496 fprintf (f, "\n");
497 if (ii->polymorphic)
498 ii->context.dump (f);
499 if (!ipa_edge_args_info_available_for_edge_p (cs))
500 fprintf (f, " no arg info\n");
501 else
502 ipa_print_node_jump_functions_for_edge (f, cs);
503 }
504 }
505
506 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
507
508 void
509 ipa_print_all_jump_functions (FILE *f)
510 {
511 struct cgraph_node *node;
512
513 fprintf (f, "\nJump functions:\n");
514 FOR_EACH_FUNCTION (node)
515 {
516 ipa_print_node_jump_functions (f, node);
517 }
518 }
519
520 /* Set JFUNC to be an unknown jump function that conveys no information. */
521
522 static void
523 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
524 {
525 jfunc->type = IPA_JF_UNKNOWN;
526 }
527
528 /* Set DST to be a copy of another constant jump function SRC (to be used by
529 the jump function combination code). The two functions will share their rdesc. */
530
531 static void
532 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
533 struct ipa_jump_func *src)
534
535 {
536 gcc_checking_assert (src->type == IPA_JF_CONST);
537 dst->type = IPA_JF_CONST;
538 dst->value.constant = src->value.constant;
539 }
540
541 /* Set JFUNC to be a constant jump function. */
542
543 static void
544 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
545 struct cgraph_edge *cs)
546 {
547 jfunc->type = IPA_JF_CONST;
548 jfunc->value.constant.value = unshare_expr_without_location (constant);
549
550 if (TREE_CODE (constant) == ADDR_EXPR
551 && (TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL
552 || (TREE_CODE (TREE_OPERAND (constant, 0)) == VAR_DECL
553 && TREE_STATIC (TREE_OPERAND (constant, 0)))))
554 {
555 struct ipa_cst_ref_desc *rdesc;
556
557 rdesc = ipa_refdesc_pool.allocate ();
558 rdesc->cs = cs;
559 rdesc->next_duplicate = NULL;
560 rdesc->refcount = 1;
561 jfunc->value.constant.rdesc = rdesc;
562 }
563 else
564 jfunc->value.constant.rdesc = NULL;
565 }
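/* For illustration (an assumed example, not taken from a particular
   testcase): for a call such as

     register_handler (&do_work);

   where do_work is a FUNCTION_DECL, the argument gets an IPA_JF_CONST jump
   function whose value is &do_work, and a reference description is
   allocated so that the IPA reference from the caller to do_work can be
   removed later if propagating the constant makes the original use
   disappear.  */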
566
567 /* Set JFUNC to be a simple pass-through jump function. */
568 static void
569 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
570 bool agg_preserved)
571 {
572 jfunc->type = IPA_JF_PASS_THROUGH;
573 jfunc->value.pass_through.operand = NULL_TREE;
574 jfunc->value.pass_through.formal_id = formal_id;
575 jfunc->value.pass_through.operation = NOP_EXPR;
576 jfunc->value.pass_through.agg_preserved = agg_preserved;
577 jfunc->value.pass_through.refdesc_decremented = false;
578 }
579
580 /* Set JFUNC to be a unary pass-through jump function. */
581
582 static void
583 ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
584 enum tree_code operation)
585 {
586 jfunc->type = IPA_JF_PASS_THROUGH;
587 jfunc->value.pass_through.operand = NULL_TREE;
588 jfunc->value.pass_through.formal_id = formal_id;
589 jfunc->value.pass_through.operation = operation;
590 jfunc->value.pass_through.agg_preserved = false;
591 jfunc->value.pass_through.refdesc_decremented = false;
592 }
593 /* Set JFUNC to be an arithmetic pass-through jump function. */
594
595 static void
596 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
597 tree operand, enum tree_code operation)
598 {
599 jfunc->type = IPA_JF_PASS_THROUGH;
600 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
601 jfunc->value.pass_through.formal_id = formal_id;
602 jfunc->value.pass_through.operation = operation;
603 jfunc->value.pass_through.agg_preserved = false;
604 jfunc->value.pass_through.refdesc_decremented = false;
605 }
606
607 /* Set JFUNC to be an ancestor jump function. */
608
609 static void
610 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
611 int formal_id, bool agg_preserved, bool keep_null)
612 {
613 jfunc->type = IPA_JF_ANCESTOR;
614 jfunc->value.ancestor.formal_id = formal_id;
615 jfunc->value.ancestor.offset = offset;
616 jfunc->value.ancestor.agg_preserved = agg_preserved;
617 jfunc->value.ancestor.keep_null = keep_null;
618 }
619
620 /* Get IPA BB information about the given BB. FBI is the context of analysis
621 of this function body. */
622
623 static struct ipa_bb_info *
624 ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
625 {
626 gcc_checking_assert (fbi);
627 return &fbi->bb_infos[bb->index];
628 }
629
630 /* Structure to be passed in between detect_type_change and
631 check_stmt_for_type_change. */
632
633 struct prop_type_change_info
634 {
635 /* Offset into the object where there is the virtual method pointer we are
636 looking for. */
637 HOST_WIDE_INT offset;
638 /* The declaration or SSA_NAME pointer of the base that we are checking for
639 type change. */
640 tree object;
641 /* Set to true if dynamic type change has been detected. */
642 bool type_maybe_changed;
643 };
644
645 /* Return true if STMT can modify a virtual method table pointer.
646
647 This function makes special assumptions about both constructors and
648 destructors which are all the functions that are allowed to alter the VMT
649 pointers. It assumes that destructors begin with assignment into all VMT
650 pointers and that constructors essentially look in the following way:
651
652 1) The very first thing they do is call constructors of ancestor
653 sub-objects that have them.
654
655 2) Then the VMT pointers of this object and all of its ancestors are set to
656 new values corresponding to the type associated with the constructor.
657
658 3) Only afterwards is other code run, such as constructors of member
659 sub-objects and the code written by the user. Only this code may include
660 calling virtual functions, directly or indirectly.
661
662 There is no way to call a constructor of an ancestor sub-object in any
663 other way.
664
665 This means that we do not have to care whether constructors get the correct
666 type information because they will always change it (in fact, if we define
667 the type to be given by the VMT pointer, it is undefined).
668
669 The most important fact to derive from the above is that if, for some
670 statement in the section 3, we try to detect whether the dynamic type has
671 changed, we can safely ignore all calls as we examine the function body
672 backwards until we reach statements in section 2 because these calls cannot
673 be ancestor constructors or destructors (if the input is not bogus) and so
674 do not change the dynamic type (this holds true only for automatically
675 allocated objects but at the moment we devirtualize only these). We then
676 must detect that statements in section 2 change the dynamic type and can try
677 to derive the new type. That is enough and we can stop, we will never see
678 the calls into constructors of sub-objects in this code. Therefore we can
679 safely ignore all call statements that we traverse.
680 */
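/* For illustration only (a sketch of the assumed shape described above, not
   a dump of any particular testcase), a constructor conceptually expands to:

     Derived::Derived (struct Derived *this)
     {
       Base::Base (this);		  // 1) ancestor constructors
       this->_vptr = &vtable_for_Derived; // 2) VMT pointers are set
       // 3) member constructors and user code, possibly making virtual calls
     }
*/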
681
682 static bool
683 stmt_may_be_vtbl_ptr_store (gimple *stmt)
684 {
685 if (is_gimple_call (stmt))
686 return false;
687 if (gimple_clobber_p (stmt))
688 return false;
689 else if (is_gimple_assign (stmt))
690 {
691 tree lhs = gimple_assign_lhs (stmt);
692
693 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
694 {
695 if (flag_strict_aliasing
696 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
697 return false;
698
699 if (TREE_CODE (lhs) == COMPONENT_REF
700 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
701 return false;
702 /* In the future we might want to use get_ref_base_and_extent to find
703 if there is a field corresponding to the offset and if so, proceed
704 almost like if it was a component ref. */
705 }
706 }
707 return true;
708 }
709
710 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
711 to check whether a particular statement may modify the virtual table
712 pointer. It stores its result into DATA, which points to a
713 prop_type_change_info structure. */
714
715 static bool
716 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
717 {
718 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
719 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
720
721 if (stmt_may_be_vtbl_ptr_store (stmt))
722 {
723 tci->type_maybe_changed = true;
724 return true;
725 }
726 else
727 return false;
728 }
729
730 /* See if ARG is a PARM_DECL describing an instance passed by pointer
731 or reference in FUNCTION. Return true if the dynamic type may change
732 between the beginning of the function and the point where CALL is invoked.
733
734 Generally, functions are not allowed to change the type of such instances,
735 but they may call destructors. We assume that methods cannot destroy the
736 THIS pointer. Also, as a special case, constructors and destructors may
737 change the type of the THIS pointer. */
738
739 static bool
740 param_type_may_change_p (tree function, tree arg, gimple *call)
741 {
742 /* Pure functions cannot change the dynamic type;
743 that would require writing to memory. */
744 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
745 return false;
746 /* We need to check if we are within an inlined constructor
747 or destructor (ideally we would have a way to check that the
748 inlined cdtor is actually working on ARG, but we don't have
749 an easy way to tell, so punt on all non-pure cdtors).
750 We may also record the types of cdtors and, once we know the type
751 of the instance, match them.
752
753 Also code unification optimizations may merge calls from
754 different blocks making return values unreliable. So
755 do nothing during late optimization. */
756 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
757 return true;
758 if (TREE_CODE (arg) == SSA_NAME
759 && SSA_NAME_IS_DEFAULT_DEF (arg)
760 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
761 {
762 /* Normal (non-THIS) argument. */
763 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
764 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
765 /* THIS pointer of a method - here we want to watch constructors
766 and destructors as those definitely may change the dynamic
767 type. */
768 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
769 && !DECL_CXX_CONSTRUCTOR_P (function)
770 && !DECL_CXX_DESTRUCTOR_P (function)
771 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
772 {
773 /* Walk the inline stack and watch out for ctors/dtors. */
774 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
775 block = BLOCK_SUPERCONTEXT (block))
776 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
777 return true;
778 return false;
779 }
780 }
781 return true;
782 }
783
784 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
785 callsite CALL) by looking for assignments to its virtual table pointer. If
786 it has, return true. ARG is the object itself (not a pointer
787 to it, unless dereferenced). BASE is the base of the memory access as
788 returned by get_ref_base_and_extent, as is the offset.
789
790 This is a helper function for detect_type_change and detect_type_change_ssa
791 that does the heavy work, which is usually unnecessary. */
792
793 static bool
794 detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
795 tree base, tree comp_type, gcall *call,
796 HOST_WIDE_INT offset)
797 {
798 struct prop_type_change_info tci;
799 ao_ref ao;
800
801 gcc_checking_assert (DECL_P (arg)
802 || TREE_CODE (arg) == MEM_REF
803 || handled_component_p (arg));
804
805 comp_type = TYPE_MAIN_VARIANT (comp_type);
806
807 /* Const calls cannot call virtual methods through VMT and so type changes do
808 not matter. */
809 if (!flag_devirtualize || !gimple_vuse (call)
810 /* Be sure expected_type is polymorphic. */
811 || !comp_type
812 || TREE_CODE (comp_type) != RECORD_TYPE
813 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
814 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
815 return true;
816
817 if (fbi->aa_walk_budget == 0)
818 return false;
819
820 ao_ref_init (&ao, arg);
821 ao.base = base;
822 ao.offset = offset;
823 ao.size = POINTER_SIZE;
824 ao.max_size = ao.size;
825
826 tci.offset = offset;
827 tci.object = get_base_address (arg);
828 tci.type_maybe_changed = false;
829
830 int walked
831 = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
832 &tci, NULL, NULL, fbi->aa_walk_budget);
833 if (walked >= 0)
834 fbi->aa_walk_budget -= walked;
835 else
836 fbi->aa_walk_budget = 0;
837
838 if (walked >= 0 && !tci.type_maybe_changed)
839 return false;
840
841 return true;
842 }
843
844 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
845 If it has, return true. ARG is the object itself (not a pointer
846 to it, unless dereferenced). BASE is the base of the memory access as
847 returned by get_ref_base_and_extent, as is the offset. */
848
849 static bool
850 detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
851 tree comp_type, gcall *call,
852 HOST_WIDE_INT offset)
853 {
854 if (!flag_devirtualize)
855 return false;
856
857 if (TREE_CODE (base) == MEM_REF
858 && !param_type_may_change_p (current_function_decl,
859 TREE_OPERAND (base, 0),
860 call))
861 return false;
862 return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
863 call, offset);
864 }
865
866 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
867 SSA name (its dereference will become the base and the offset is assumed to
868 be zero). */
869
870 static bool
871 detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
872 gcall *call)
873 {
874 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
875 if (!flag_devirtualize
876 || !POINTER_TYPE_P (TREE_TYPE (arg)))
877 return false;
878
879 if (!param_type_may_change_p (current_function_decl, arg, call))
880 return false;
881
882 arg = build2 (MEM_REF, ptr_type_node, arg,
883 build_int_cst (ptr_type_node, 0));
884
885 return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
886 call, 0);
887 }
888
889 /* Callback of walk_aliased_vdefs. Records that it has been invoked by setting
890 the boolean variable pointed to by DATA to true. */
891
892 static bool
893 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
894 void *data)
895 {
896 bool *b = (bool *) data;
897 *b = true;
898 return true;
899 }
900
901 /* Find the nearest valid aa status for parameter specified by INDEX that
902 dominates BB. */
903
904 static struct ipa_param_aa_status *
905 find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
906 int index)
907 {
908 while (true)
909 {
910 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
911 if (!bb)
912 return NULL;
913 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
914 if (!bi->param_aa_statuses.is_empty ()
915 && bi->param_aa_statuses[index].valid)
916 return &bi->param_aa_statuses[index];
917 }
918 }
919
920 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
921 structures and/or initialize the result with a dominating description as
922 necessary. */
923
924 static struct ipa_param_aa_status *
925 parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
926 int index)
927 {
928 gcc_checking_assert (fbi);
929 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
930 if (bi->param_aa_statuses.is_empty ())
931 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count, true);
932 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
933 if (!paa->valid)
934 {
935 gcc_checking_assert (!paa->parm_modified
936 && !paa->ref_modified
937 && !paa->pt_modified);
938 struct ipa_param_aa_status *dom_paa;
939 dom_paa = find_dominating_aa_status (fbi, bb, index);
940 if (dom_paa)
941 *paa = *dom_paa;
942 else
943 paa->valid = true;
944 }
945
946 return paa;
947 }
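/* For illustration (an assumed CFG): if basic block 4 dominates basic block 7
   and an alias walk done for a statement in block 4 has already recorded that
   parameter 1 is not modified on any path leading to block 4, the first query
   from block 7 copies that status, so the walk does not need to be repeated
   for the dominated block.  */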
948
949 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
950 a value known not to be modified in this function before reaching the
951 statement STMT. FBI holds information about the function that we have
952 gathered so far but which does not survive the summary building stage. */
953
954 static bool
955 parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
956 gimple *stmt, tree parm_load)
957 {
958 struct ipa_param_aa_status *paa;
959 bool modified = false;
960 ao_ref refd;
961
962 tree base = get_base_address (parm_load);
963 gcc_assert (TREE_CODE (base) == PARM_DECL);
964 if (TREE_READONLY (base))
965 return true;
966
967 gcc_checking_assert (fbi);
968 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
969 if (paa->parm_modified || fbi->aa_walk_budget == 0)
970 return false;
971
972 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
973 ao_ref_init (&refd, parm_load);
974 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
975 &modified, NULL, NULL,
976 fbi->aa_walk_budget);
977 if (walked < 0)
978 {
979 modified = true;
980 fbi->aa_walk_budget = 0;
981 }
982 else
983 fbi->aa_walk_budget -= walked;
984 if (paa && modified)
985 paa->parm_modified = true;
986 return !modified;
987 }
988
989 /* If STMT is an assignment that loads a value from a parameter declaration,
990 return the index of the parameter in ipa_node_params which has not been
991 modified. Otherwise return -1. */
992
993 static int
994 load_from_unmodified_param (struct ipa_func_body_info *fbi,
995 vec<ipa_param_descriptor, va_gc> *descriptors,
996 gimple *stmt)
997 {
998 int index;
999 tree op1;
1000
1001 if (!gimple_assign_single_p (stmt))
1002 return -1;
1003
1004 op1 = gimple_assign_rhs1 (stmt);
1005 if (TREE_CODE (op1) != PARM_DECL)
1006 return -1;
1007
1008 index = ipa_get_param_decl_index_1 (descriptors, op1);
1009 if (index < 0
1010 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
1011 return -1;
1012
1013 return index;
1014 }
1015
1016 /* Return true if memory reference REF (which must be a load through parameter
1017 with INDEX) loads data that are known to be unmodified in this function
1018 before reaching statement STMT. */
1019
1020 static bool
1021 parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
1022 int index, gimple *stmt, tree ref)
1023 {
1024 struct ipa_param_aa_status *paa;
1025 bool modified = false;
1026 ao_ref refd;
1027
1028 gcc_checking_assert (fbi);
1029 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1030 if (paa->ref_modified || fbi->aa_walk_budget == 0)
1031 return false;
1032
1033 gcc_checking_assert (gimple_vuse (stmt));
1034 ao_ref_init (&refd, ref);
1035 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1036 &modified, NULL, NULL,
1037 fbi->aa_walk_budget);
1038 if (walked < 0)
1039 {
1040 modified = true;
1041 fbi->aa_walk_budget = 0;
1042 }
1043 else
1044 fbi->aa_walk_budget -= walked;
1045 if (modified)
1046 paa->ref_modified = true;
1047 return !modified;
1048 }
1049
1050 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1051 is known to be unmodified in this function before reaching call statement
1052 CALL into which it is passed. FBI describes the function body. */
1053
1054 static bool
1055 parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
1056 gimple *call, tree parm)
1057 {
1058 bool modified = false;
1059 ao_ref refd;
1060
1061 /* It's unnecessary to calculate anything about memory contents for a const
1062 function because it is not going to use it. But do not cache the result
1063 either. Also, no such calculations for non-pointers. */
1064 if (!gimple_vuse (call)
1065 || !POINTER_TYPE_P (TREE_TYPE (parm)))
1066 return false;
1067
1068 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1069 gimple_bb (call),
1070 index);
1071 if (paa->pt_modified || fbi->aa_walk_budget == 0)
1072 return false;
1073
1074 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1075 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1076 &modified, NULL, NULL,
1077 fbi->aa_walk_budget);
1078 if (walked < 0)
1079 {
1080 fbi->aa_walk_budget = 0;
1081 modified = true;
1082 }
1083 else
1084 fbi->aa_walk_budget -= walked;
1085 if (modified)
1086 paa->pt_modified = true;
1087 return !modified;
1088 }
1089
1090 /* Return true if we can prove that OP is a memory reference loading
1091 data from an aggregate passed as a parameter.
1092
1093 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it returns
1094 false if it cannot prove that the value has not been modified before the
1095 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1096 if it cannot prove the value has not been modified, in that case it will
1097 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1098
1099 INFO and PARMS_AINFO describe parameters of the current function (but the
1100 latter can be NULL), STMT is the load statement. If function returns true,
1101 *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index, offset
1102 within the aggregate and whether it is a load from a value passed by
1103 reference respectively. */
1104
1105 bool
1106 ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
1107 vec<ipa_param_descriptor, va_gc> *descriptors,
1108 gimple *stmt, tree op, int *index_p,
1109 HOST_WIDE_INT *offset_p, poly_int64 *size_p,
1110 bool *by_ref_p, bool *guaranteed_unmodified)
1111 {
1112 int index;
1113 HOST_WIDE_INT size;
1114 bool reverse;
1115 tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
1116
1117 if (!base)
1118 return false;
1119
1120 /* We cannot propagate across volatile loads. */
1121 if (TREE_THIS_VOLATILE (op))
1122 return false;
1123
1124 if (DECL_P (base))
1125 {
1126 int index = ipa_get_param_decl_index_1 (descriptors, base);
1127 if (index >= 0
1128 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1129 {
1130 *index_p = index;
1131 *by_ref_p = false;
1132 if (size_p)
1133 *size_p = size;
1134 if (guaranteed_unmodified)
1135 *guaranteed_unmodified = true;
1136 return true;
1137 }
1138 return false;
1139 }
1140
1141 if (TREE_CODE (base) != MEM_REF
1142 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1143 || !integer_zerop (TREE_OPERAND (base, 1)))
1144 return false;
1145
1146 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1147 {
1148 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1149 index = ipa_get_param_decl_index_1 (descriptors, parm);
1150 }
1151 else
1152 {
1153 /* This branch catches situations where a pointer parameter is not a
1154 gimple register, for example:
1155
1156 void hip7(S*) (struct S * p)
1157 {
1158 void (*<T2e4>) (struct S *) D.1867;
1159 struct S * p.1;
1160
1161 <bb 2>:
1162 p.1_1 = p;
1163 D.1867_2 = p.1_1->f;
1164 D.1867_2 ();
1165 gdp = &p;
1166 */
1167
1168 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1169 index = load_from_unmodified_param (fbi, descriptors, def);
1170 }
1171
1172 if (index >= 0)
1173 {
1174 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1175 if (!data_preserved && !guaranteed_unmodified)
1176 return false;
1177
1178 *index_p = index;
1179 *by_ref_p = true;
1180 if (size_p)
1181 *size_p = size;
1182 if (guaranteed_unmodified)
1183 *guaranteed_unmodified = data_preserved;
1184 return true;
1185 }
1186 return false;
1187 }
1188
1189 /* If STMT is an assignment that loads a value from a parameter declaration,
1190 or from an aggregate passed as the parameter either by value or reference,
1191 return the index of the parameter in ipa_node_params. Otherwise return -1.
1192
1193 FBI holds gathered information about the function. INFO describes
1194 parameters of the function, STMT is the assignment statement. If it is a
1195 memory load from an aggregate, *OFFSET_P is filled with offset within the
1196 aggregate, and *BY_REF_P specifies whether the aggregate is passed by
1197 reference. */
1198
1199 static int
1200 load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
1201 class ipa_node_params *info,
1202 gimple *stmt,
1203 HOST_WIDE_INT *offset_p,
1204 bool *by_ref_p)
1205 {
1206 int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1207 poly_int64 size;
1208
1209 /* Load value from a parameter declaration. */
1210 if (index >= 0)
1211 {
1212 *offset_p = -1;
1213 return index;
1214 }
1215
1216 if (!gimple_assign_load_p (stmt))
1217 return -1;
1218
1219 tree rhs = gimple_assign_rhs1 (stmt);
1220
1221 /* Skip memory reference containing VIEW_CONVERT_EXPR. */
1222 for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
1223 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
1224 return -1;
1225
1226 /* Skip memory reference containing bit-field. */
1227 if (TREE_CODE (rhs) == BIT_FIELD_REF
1228 || contains_bitfld_component_ref_p (rhs))
1229 return -1;
1230
1231 if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
1232 offset_p, &size, by_ref_p))
1233 return -1;
1234
1235 gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
1236 size));
1237 if (!*by_ref_p)
1238 {
1239 tree param_type = ipa_get_type (info, index);
1240
1241 if (!param_type || !AGGREGATE_TYPE_P (param_type))
1242 return -1;
1243 }
1244 else if (TREE_THIS_VOLATILE (rhs))
1245 return -1;
1246
1247 return index;
1248 }
1249
1250 /* Walk pointer adjustments from OP (such as POINTER_PLUS and ADDR_EXPR)
1251 to find the original pointer. Initialize RET to the pointer which results
1252 from the walk.
1253 If the offset is known, return true and initialize OFFSET_RET. */
1254
1255 bool
1256 unadjusted_ptr_and_unit_offset (tree op, tree *ret, poly_int64 *offset_ret)
1257 {
1258 poly_int64 offset = 0;
1259 bool offset_known = true;
1260 int i;
1261
1262 for (i = 0; i < param_ipa_jump_function_lookups; i++)
1263 {
1264 if (TREE_CODE (op) == ADDR_EXPR)
1265 {
1266 poly_int64 extra_offset = 0;
1267 tree base = get_addr_base_and_unit_offset (TREE_OPERAND (op, 0),
1268 &offset);
1269 if (!base)
1270 {
1271 base = get_base_address (TREE_OPERAND (op, 0));
1272 if (TREE_CODE (base) != MEM_REF)
1273 break;
1274 offset_known = false;
1275 }
1276 else
1277 {
1278 if (TREE_CODE (base) != MEM_REF)
1279 break;
1280 offset += extra_offset;
1281 }
1282 op = TREE_OPERAND (base, 0);
1283 if (mem_ref_offset (base).to_shwi (&extra_offset))
1284 offset += extra_offset;
1285 else
1286 offset_known = false;
1287 }
1288 else if (TREE_CODE (op) == SSA_NAME
1289 && !SSA_NAME_IS_DEFAULT_DEF (op))
1290 {
1291 gimple *pstmt = SSA_NAME_DEF_STMT (op);
1292
1293 if (gimple_assign_single_p (pstmt))
1294 op = gimple_assign_rhs1 (pstmt);
1295 else if (is_gimple_assign (pstmt)
1296 && gimple_assign_rhs_code (pstmt) == POINTER_PLUS_EXPR)
1297 {
1298 poly_int64 extra_offset = 0;
1299 if (ptrdiff_tree_p (gimple_assign_rhs2 (pstmt),
1300 &extra_offset))
1301 offset += extra_offset;
1302 else
1303 offset_known = false;
1304 op = gimple_assign_rhs1 (pstmt);
1305 }
1306 else
1307 break;
1308 }
1309 else
1310 break;
1311 }
1312 *ret = op;
1313 *offset_ret = offset;
1314 return offset_known;
1315 }
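/* For illustration (an assumed GIMPLE snippet): when OP is q_4 in

     p.1_2 = p_1(D) + 16;
     q_4 = p.1_2;

   the walk follows the copy and the POINTER_PLUS_EXPR, stores p_1(D) into
   *RET and 16 into *OFFSET_RET, and returns true because the accumulated
   offset is a compile-time constant.  */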
1316
1317 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1318 of an assignment statement STMT, try to determine whether we are actually
1319 handling any of the following cases and construct an appropriate jump
1320 function into JFUNC if so:
1321
1322 1) The passed value is loaded from a formal parameter which is not a gimple
1323 register (most probably because it is addressable, the value has to be
1324 scalar) and we can guarantee the value has not changed. This case can
1325 therefore be described by a simple pass-through jump function. For example:
1326
1327 foo (int a)
1328 {
1329 int a.0;
1330
1331 a.0_2 = a;
1332 bar (a.0_2);
1333
1334 2) The passed value can be described by a simple arithmetic pass-through
1335 jump function. E.g.
1336
1337 foo (int a)
1338 {
1339 int D.2064;
1340
1341 D.2064_4 = a.1(D) + 4;
1342 bar (D.2064_4);
1343
1344 This case can also occur in combination of the previous one, e.g.:
1345
1346 foo (int a, int z)
1347 {
1348 int a.0;
1349 int D.2064;
1350
1351 a.0_3 = a;
1352 D.2064_4 = a.0_3 + 4;
1353 foo (D.2064_4);
1354
1355 3) The passed value is an address of an object within another one (which
1356 also passed by reference). Such situations are described by an ancestor
1357 jump function and describe situations such as:
1358
1359 B::foo() (struct B * const this)
1360 {
1361 struct A * D.1845;
1362
1363 D.1845_2 = &this_1(D)->D.1748;
1364 A::bar (D.1845_2);
1365
1366 INFO is the structure describing individual parameters that is used across
1367 different stages of IPA optimizations. PARMS_AINFO contains the information
1368 that is only needed for intraprocedural analysis. */
1369
1370 static void
1371 compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
1372 class ipa_node_params *info,
1373 struct ipa_jump_func *jfunc,
1374 gcall *call, gimple *stmt, tree name,
1375 tree param_type)
1376 {
1377 HOST_WIDE_INT offset, size;
1378 tree op1, tc_ssa, base, ssa;
1379 bool reverse;
1380 int index;
1381
1382 op1 = gimple_assign_rhs1 (stmt);
1383
1384 if (TREE_CODE (op1) == SSA_NAME)
1385 {
1386 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1387 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1388 else
1389 index = load_from_unmodified_param (fbi, info->descriptors,
1390 SSA_NAME_DEF_STMT (op1));
1391 tc_ssa = op1;
1392 }
1393 else
1394 {
1395 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1396 tc_ssa = gimple_assign_lhs (stmt);
1397 }
1398
1399 if (index >= 0)
1400 {
1401 switch (gimple_assign_rhs_class (stmt))
1402 {
1403 case GIMPLE_BINARY_RHS:
1404 {
1405 tree op2 = gimple_assign_rhs2 (stmt);
1406 if (!is_gimple_ip_invariant (op2)
1407 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1408 != tcc_comparison)
1409 && !useless_type_conversion_p (TREE_TYPE (name),
1410 TREE_TYPE (op1))))
1411 return;
1412
1413 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1414 gimple_assign_rhs_code (stmt));
1415 break;
1416 }
1417 case GIMPLE_SINGLE_RHS:
1418 {
1419 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1420 tc_ssa);
1421 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1422 break;
1423 }
1424 case GIMPLE_UNARY_RHS:
1425 if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
1426 ipa_set_jf_unary_pass_through (jfunc, index,
1427 gimple_assign_rhs_code (stmt));
1428 default:;
1429 }
1430 return;
1431 }
1432
1433 if (TREE_CODE (op1) != ADDR_EXPR)
1434 return;
1435 op1 = TREE_OPERAND (op1, 0);
1436 base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
1437 offset_int mem_offset;
1438 if (!base
1439 || TREE_CODE (base) != MEM_REF
1440 || !mem_ref_offset (base).is_constant (&mem_offset))
1441 return;
1442 offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1443 ssa = TREE_OPERAND (base, 0);
1444 if (TREE_CODE (ssa) != SSA_NAME
1445 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1446 || offset < 0)
1447 return;
1448
1449 /* Dynamic types are changed in constructors and destructors. */
1450 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1451 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1452 ipa_set_ancestor_jf (jfunc, offset, index,
1453 parm_ref_data_pass_through_p (fbi, index, call, ssa),
1454 false);
1455 }
1456
1457 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1458 it looks like:
1459
1460 iftmp.1_3 = &obj_2(D)->D.1762;
1461
1462 The base of the MEM_REF must be a default definition SSA NAME of a
1463 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1464 whole MEM_REF expression is returned and the offset calculated from any
1465 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1466 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1467
1468 static tree
1469 get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
1470 {
1471 HOST_WIDE_INT size;
1472 tree expr, parm, obj;
1473 bool reverse;
1474
1475 if (!gimple_assign_single_p (assign))
1476 return NULL_TREE;
1477 expr = gimple_assign_rhs1 (assign);
1478
1479 if (TREE_CODE (expr) != ADDR_EXPR)
1480 return NULL_TREE;
1481 expr = TREE_OPERAND (expr, 0);
1482 obj = expr;
1483 expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
1484
1485 offset_int mem_offset;
1486 if (!expr
1487 || TREE_CODE (expr) != MEM_REF
1488 || !mem_ref_offset (expr).is_constant (&mem_offset))
1489 return NULL_TREE;
1490 parm = TREE_OPERAND (expr, 0);
1491 if (TREE_CODE (parm) != SSA_NAME
1492 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1493 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1494 return NULL_TREE;
1495
1496 *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1497 *obj_p = obj;
1498 return expr;
1499 }
1500
1501
1502 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1503 statement PHI, try to find out whether NAME is in fact a
1504 multiple-inheritance typecast from a descendant into an ancestor of a formal
1505 parameter and thus can be described by an ancestor jump function and if so,
1506 write the appropriate function into JFUNC.
1507
1508 Essentially we want to match the following pattern:
1509
1510 if (obj_2(D) != 0B)
1511 goto <bb 3>;
1512 else
1513 goto <bb 4>;
1514
1515 <bb 3>:
1516 iftmp.1_3 = &obj_2(D)->D.1762;
1517
1518 <bb 4>:
1519 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1520 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1521 return D.1879_6; */
1522
1523 static void
1524 compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
1525 class ipa_node_params *info,
1526 struct ipa_jump_func *jfunc,
1527 gcall *call, gphi *phi)
1528 {
1529 HOST_WIDE_INT offset;
1530 gimple *assign, *cond;
1531 basic_block phi_bb, assign_bb, cond_bb;
1532 tree tmp, parm, expr, obj;
1533 int index, i;
1534
1535 if (gimple_phi_num_args (phi) != 2)
1536 return;
1537
1538 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1539 tmp = PHI_ARG_DEF (phi, 0);
1540 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1541 tmp = PHI_ARG_DEF (phi, 1);
1542 else
1543 return;
1544 if (TREE_CODE (tmp) != SSA_NAME
1545 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1546 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1547 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1548 return;
1549
1550 assign = SSA_NAME_DEF_STMT (tmp);
1551 assign_bb = gimple_bb (assign);
1552 if (!single_pred_p (assign_bb))
1553 return;
1554 expr = get_ancestor_addr_info (assign, &obj, &offset);
1555 if (!expr)
1556 return;
1557 parm = TREE_OPERAND (expr, 0);
1558 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1559 if (index < 0)
1560 return;
1561
1562 cond_bb = single_pred (assign_bb);
1563 cond = last_stmt (cond_bb);
1564 if (!cond
1565 || gimple_code (cond) != GIMPLE_COND
1566 || gimple_cond_code (cond) != NE_EXPR
1567 || gimple_cond_lhs (cond) != parm
1568 || !integer_zerop (gimple_cond_rhs (cond)))
1569 return;
1570
1571 phi_bb = gimple_bb (phi);
1572 for (i = 0; i < 2; i++)
1573 {
1574 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1575 if (pred != assign_bb && pred != cond_bb)
1576 return;
1577 }
1578
1579 ipa_set_ancestor_jf (jfunc, offset, index,
1580 parm_ref_data_pass_through_p (fbi, index, call, parm),
1581 true);
1582 }
1583
1584 /* Inspect the given TYPE and return true iff it has the same structure (the
1585 same number of fields of the same types) as a C++ member pointer. If
1586 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1587 corresponding fields there. */
1588
1589 static bool
1590 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1591 {
1592 tree fld;
1593
1594 if (TREE_CODE (type) != RECORD_TYPE)
1595 return false;
1596
1597 fld = TYPE_FIELDS (type);
1598 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1599 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1600 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1601 return false;
1602
1603 if (method_ptr)
1604 *method_ptr = fld;
1605
1606 fld = DECL_CHAIN (fld);
1607 if (!fld || INTEGRAL_TYPE_P (fld)
1608 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1609 return false;
1610 if (delta)
1611 *delta = fld;
1612
1613 if (DECL_CHAIN (fld))
1614 return false;
1615
1616 return true;
1617 }
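/* For illustration: the layout this predicate matches is the one the C++
   front end conceptually uses for pointers to member functions, roughly

     struct
     {
       void (T::*__pfn) ();   // pointer to the method (or vtable offset)
       ptrdiff_t __delta;     // adjustment to add to the object pointer
     };

   (the field names here are only illustrative).  */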
1618
1619 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1620 return the rhs of its defining statement, and this statement is stored in
1621 *RHS_STMT. Otherwise return RHS as it is. */
1622
1623 static inline tree
1624 get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
1625 {
1626 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1627 {
1628 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1629
1630 if (gimple_assign_single_p (def_stmt))
1631 rhs = gimple_assign_rhs1 (def_stmt);
1632 else
1633 break;
1634 *rhs_stmt = def_stmt;
1635 }
1636 return rhs;
1637 }
1638
1639 /* Simple linked list, describing contents of an aggregate before call. */
1640
1641 struct ipa_known_agg_contents_list
1642 {
1643 /* Offset and size of the described part of the aggregate. */
1644 HOST_WIDE_INT offset, size;
1645
1646 /* Type of the described part of the aggregate. */
1647 tree type;
1648
1649 /* Known constant value or jump function data describing contents. */
1650 struct ipa_load_agg_data value;
1651
1652 /* Pointer to the next structure in the list. */
1653 struct ipa_known_agg_contents_list *next;
1654 };
1655
1656 /* Add an aggregate content item into a linked list of
1657 ipa_known_agg_contents_list structures, in which all elements
1658 are sorted by offset in ascending order. */
1659
1660 static inline void
1661 add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
1662 struct ipa_known_agg_contents_list *item)
1663 {
1664 struct ipa_known_agg_contents_list *list = *plist;
1665
1666 for (; list; list = list->next)
1667 {
1668 if (list->offset >= item->offset)
1669 break;
1670
1671 plist = &list->next;
1672 }
1673
1674 item->next = list;
1675 *plist = item;
1676 }
1677
1678 /* Check whether a given aggregate content is clobbered by a certain element in
1679 a linked list of ipa_known_agg_contents_list. */
1680
1681 static inline bool
1682 clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
1683 struct ipa_known_agg_contents_list *item)
1684 {
1685 for (; list; list = list->next)
1686 {
1687 if (list->offset >= item->offset)
1688 return list->offset < item->offset + item->size;
1689
1690 if (list->offset + list->size > item->offset)
1691 return true;
1692 }
1693
1694 return false;
1695 }
1696
1697 /* Build aggregate jump function from LIST, assuming there are exactly
1698 VALUE_COUNT entries there and that the offset of the passed argument
1699 is ARG_OFFSET, and store it into JFUNC. */
1700
1701 static void
1702 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1703 int value_count, HOST_WIDE_INT arg_offset,
1704 struct ipa_jump_func *jfunc)
1705 {
1706 vec_safe_reserve (jfunc->agg.items, value_count, true);
1707 for (; list; list = list->next)
1708 {
1709 struct ipa_agg_jf_item item;
1710 tree operand = list->value.pass_through.operand;
1711
1712 if (list->value.pass_through.formal_id >= 0)
1713 {
1714 /* Content value is derived from some formal parameter. */
1715 if (list->value.offset >= 0)
1716 item.jftype = IPA_JF_LOAD_AGG;
1717 else
1718 item.jftype = IPA_JF_PASS_THROUGH;
1719
1720 item.value.load_agg = list->value;
1721 if (operand)
1722 item.value.pass_through.operand
1723 = unshare_expr_without_location (operand);
1724 }
1725 else if (operand)
1726 {
1727 /* Content value is known constant. */
1728 item.jftype = IPA_JF_CONST;
1729 item.value.constant = unshare_expr_without_location (operand);
1730 }
1731 else
1732 continue;
1733
1734 item.type = list->type;
1735 gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);
1736
1737 item.offset = list->offset - arg_offset;
1738 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1739
1740 jfunc->agg.items->quick_push (item);
1741 }
1742 }
1743
1744 /* Given an assignment statement STMT, try to collect information into
1745 AGG_VALUE that will be used to construct a jump function for the RHS of the
1746 assignment, from which the content value of an aggregate part comes.
1747
1748 Besides constant and simple pass-through jump functions, also try to
1749 identify whether it matches the following pattern that can be described by
1750 a load-value-from-aggregate jump function, which is a derivative of simple
1751 pass-through jump function.
1752
1753 foo (int *p)
1754 {
1755 ...
1756
1757 *(q_5 + 4) = *(p_3(D) + 28) op 1;
1758 bar (q_5);
1759 }
1760
1761 Here IPA_LOAD_AGG_DATA data structure is informative enough to describe
1762 constant, simple pass-through and load-value-from-aggregate. If the value
1763 is constant, it will be kept in field OPERAND, and field FORMAL_ID is
1764 set to -1. For simple pass-through and load-value-from-aggregate, field
1765 FORMAL_ID specifies the related formal parameter index, and field
1766 OFFSET can be used to distinguish them: -1 means simple pass-through,
1767 otherwise means load-value-from-aggregate. */
1768
1769 static void
1770 analyze_agg_content_value (struct ipa_func_body_info *fbi,
1771 struct ipa_load_agg_data *agg_value,
1772 gimple *stmt)
1773 {
1774 tree lhs = gimple_assign_lhs (stmt);
1775 tree rhs1 = gimple_assign_rhs1 (stmt);
1776 enum tree_code code;
1777 int index = -1;
1778
1779 /* Initialize jump function data for the aggregate part. */
1780 memset (agg_value, 0, sizeof (*agg_value));
1781 agg_value->pass_through.operation = NOP_EXPR;
1782 agg_value->pass_through.formal_id = -1;
1783 agg_value->offset = -1;
1784
1785 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs)) /* TODO: Support aggregate type. */
1786 || TREE_THIS_VOLATILE (lhs)
1787 || TREE_CODE (lhs) == BIT_FIELD_REF
1788 || contains_bitfld_component_ref_p (lhs))
1789 return;
1790
1791 /* Skip SSA copies. */
1792 while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
1793 {
1794 if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
1795 break;
1796
1797 stmt = SSA_NAME_DEF_STMT (rhs1);
1798 if (!is_gimple_assign (stmt))
1799 break;
1800
1801 rhs1 = gimple_assign_rhs1 (stmt);
1802 }
1803
1804 if (gphi *phi = dyn_cast<gphi *> (stmt))
1805 {
1806 /* Also handle a special case like the following (a is a formal parameter):
1807
1808 _12 = *a_11(D).dim[0].stride;
1809 ...
1810 # iftmp.22_9 = PHI <_12(2), 1(3)>
1811 ...
1812 parm.6.dim[0].stride = iftmp.22_9;
1813 ...
1814 __x_MOD_foo (&parm.6, b_31(D));
1815
1816 The aggregate jump function describing parm.6.dim[0].stride is encoded as
1817 a PASS-THROUGH jump function with ASSERT_EXPR operation with operand 1
1818 (the constant from the PHI node). */
1819
1820 if (gimple_phi_num_args (phi) != 2)
1821 return;
1822 tree arg0 = gimple_phi_arg_def (phi, 0);
1823 tree arg1 = gimple_phi_arg_def (phi, 1);
1824 tree operand;
1825
1826 if (is_gimple_ip_invariant (arg1))
1827 {
1828 operand = arg1;
1829 rhs1 = arg0;
1830 }
1831 else if (is_gimple_ip_invariant (arg0))
1832 {
1833 operand = arg0;
1834 rhs1 = arg1;
1835 }
1836 else
1837 return;
1838
1839 rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
1840 if (!is_gimple_assign (stmt))
1841 return;
1842
1843 code = ASSERT_EXPR;
1844 agg_value->pass_through.operand = operand;
1845 }
1846 else if (is_gimple_assign (stmt))
1847 {
1848 code = gimple_assign_rhs_code (stmt);
1849 switch (gimple_assign_rhs_class (stmt))
1850 {
1851 case GIMPLE_SINGLE_RHS:
1852 if (is_gimple_ip_invariant (rhs1))
1853 {
1854 agg_value->pass_through.operand = rhs1;
1855 return;
1856 }
1857 code = NOP_EXPR;
1858 break;
1859
1860 case GIMPLE_UNARY_RHS:
1861 /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
1862 (TRUTH_NOT_EXPR is an example), and GIMPLE_BINARY_RHS does not imply
1863 tcc_binary; this subtlety is somewhat misleading.
1864
1865 Since tcc_unary is widely used in IPA-CP code to check an operation
1866 with one operand, here we only allow tcc_unary operations to avoid
1867 possible problems. Then we can use (opclass == tcc_unary) or not to
1868 distinguish unary and binary. */
1869 if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
1870 return;
1871
1872 rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
1873 break;
1874
1875 case GIMPLE_BINARY_RHS:
1876 {
1877 gimple *rhs1_stmt = stmt;
1878 gimple *rhs2_stmt = stmt;
1879 tree rhs2 = gimple_assign_rhs2 (stmt);
1880
1881 rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
1882 rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);
1883
1884 if (is_gimple_ip_invariant (rhs2))
1885 {
1886 agg_value->pass_through.operand = rhs2;
1887 stmt = rhs1_stmt;
1888 }
1889 else if (is_gimple_ip_invariant (rhs1))
1890 {
1891 if (TREE_CODE_CLASS (code) == tcc_comparison)
1892 code = swap_tree_comparison (code);
1893 else if (!commutative_tree_code (code))
1894 return;
1895
1896 agg_value->pass_through.operand = rhs1;
1897 stmt = rhs2_stmt;
1898 rhs1 = rhs2;
1899 }
1900 else
1901 return;
1902
1903 if (TREE_CODE_CLASS (code) != tcc_comparison
1904 && !useless_type_conversion_p (TREE_TYPE (lhs),
1905 TREE_TYPE (rhs1)))
1906 return;
1907 }
1908 break;
1909
1910 default:
1911 return;
1912 }
1913 }
1914 else
1915 return;
1916
1917 if (TREE_CODE (rhs1) != SSA_NAME)
1918 index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
1919 &agg_value->offset,
1920 &agg_value->by_ref);
1921 else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
1922 index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));
1923
1924 if (index >= 0)
1925 {
1926 if (agg_value->offset >= 0)
1927 agg_value->type = TREE_TYPE (rhs1);
1928 agg_value->pass_through.formal_id = index;
1929 agg_value->pass_through.operation = code;
1930 }
1931 else
1932 agg_value->pass_through.operand = NULL_TREE;
1933 }
1934
1935 /* If STMT is a memory store to the object whose address is BASE, extract
1936 information (offset, size, and value) into CONTENT, and return true,
1937 otherwise conservatively assume the whole object is modified with
1938 unknown content, and return false. CHECK_REF means that the access to the
1939 object is expected to be in the form of a MEM_REF expression. */
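
/* For illustration only (the SSA names below are made up): with CHECK_REF
   true and BASE being ptr_5, a store of the form

     MEM[(int *)ptr_5] = 42;

   is recognized and described in CONTENT, whereas for an aggregate passed by
   value (CHECK_REF false) and BASE being the declaration of S, a store such
   as

     s.field = 42;

   is recognized when the base of the left-hand side reference is S itself.  */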
1940
1941 static bool
1942 extract_mem_content (struct ipa_func_body_info *fbi,
1943 gimple *stmt, tree base, bool check_ref,
1944 struct ipa_known_agg_contents_list *content)
1945 {
1946 HOST_WIDE_INT lhs_offset, lhs_size;
1947 bool reverse;
1948
1949 if (!is_gimple_assign (stmt))
1950 return false;
1951
1952 tree lhs = gimple_assign_lhs (stmt);
1953 tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
1954 &reverse);
1955 if (!lhs_base)
1956 return false;
1957
1958 if (check_ref)
1959 {
1960 if (TREE_CODE (lhs_base) != MEM_REF
1961 || TREE_OPERAND (lhs_base, 0) != base
1962 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1963 return false;
1964 }
1965 else if (lhs_base != base)
1966 return false;
1967
1968 content->offset = lhs_offset;
1969 content->size = lhs_size;
1970 content->type = TREE_TYPE (lhs);
1971 content->next = NULL;
1972
1973 analyze_agg_content_value (fbi, &content->value, stmt);
1974 return true;
1975 }
1976
1977 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1978 in ARG is filled in with constants or values that are derived from the caller's
1979 formal parameter in the way described by some kinds of jump functions. FBI
1980 is the context of the caller function for interprocedural analysis. ARG can
1981 either be an aggregate expression or a pointer to an aggregate. ARG_TYPE is
1982 the type of the aggregate, JFUNC is the jump function for the aggregate. */
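
/* A hypothetical caller illustrating what this function is meant to pick up
   (all names are invented for the example):

     struct S { int a; int b; };

     void
     caller (int x)
     {
       struct S s;
       s.a = 7;       (a known constant)
       s.b = x;       (a pass-through of formal parameter X)
       callee (&s);
     }

   Walking the virtual SSA web backwards from the call, the two stores are
   turned into aggregate jump function items at the corresponding offsets,
   provided nothing in between clobbers them.  */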
1983
1984 static void
1985 determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
1986 gcall *call, tree arg,
1987 tree arg_type,
1988 struct ipa_jump_func *jfunc)
1989 {
1990 struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
1991 bitmap visited = NULL;
1992 int item_count = 0, value_count = 0;
1993 HOST_WIDE_INT arg_offset, arg_size;
1994 tree arg_base;
1995 bool check_ref, by_ref;
1996 ao_ref r;
1997 int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);
1998
1999 if (max_agg_items == 0)
2000 return;
2001
2002 /* The function operates in three stages. First, we prepare check_ref, r,
2003 arg_base and arg_offset based on what is actually passed as an actual
2004 argument. */
2005
2006 if (POINTER_TYPE_P (arg_type))
2007 {
2008 by_ref = true;
2009 if (TREE_CODE (arg) == SSA_NAME)
2010 {
2011 tree type_size;
2012 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
2013 || !POINTER_TYPE_P (TREE_TYPE (arg)))
2014 return;
2015 check_ref = true;
2016 arg_base = arg;
2017 arg_offset = 0;
2018 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
2019 arg_size = tree_to_uhwi (type_size);
2020 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
2021 }
2022 else if (TREE_CODE (arg) == ADDR_EXPR)
2023 {
2024 bool reverse;
2025
2026 arg = TREE_OPERAND (arg, 0);
2027 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2028 &arg_size, &reverse);
2029 if (!arg_base)
2030 return;
2031 if (DECL_P (arg_base))
2032 {
2033 check_ref = false;
2034 ao_ref_init (&r, arg_base);
2035 }
2036 else
2037 return;
2038 }
2039 else
2040 return;
2041 }
2042 else
2043 {
2044 bool reverse;
2045
2046 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
2047
2048 by_ref = false;
2049 check_ref = false;
2050 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
2051 &arg_size, &reverse);
2052 if (!arg_base)
2053 return;
2054
2055 ao_ref_init (&r, arg);
2056 }
2057
2058 /* Second stage traverses virtual SSA web backwards starting from the call
2059 statement, looking only at individual dominating virtual operands (whose
2060 definitions dominate the call). As long as it is confident that the content
2061 of the aggregate is affected by the definition of a virtual operand, it
2062 builds a sorted linked list of ipa_known_agg_contents_list describing that. */
2063
2064 for (tree dom_vuse = gimple_vuse (call);
2065 dom_vuse && fbi->aa_walk_budget > 0;)
2066 {
2067 gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);
2068
2069 if (gimple_code (stmt) == GIMPLE_PHI)
2070 {
2071 dom_vuse = get_continuation_for_phi (stmt, &r, true,
2072 fbi->aa_walk_budget,
2073 &visited, false, NULL, NULL);
2074 continue;
2075 }
2076
2077 fbi->aa_walk_budget--;
2078 if (stmt_may_clobber_ref_p_1 (stmt, &r))
2079 {
2080 struct ipa_known_agg_contents_list *content
2081 = XALLOCA (struct ipa_known_agg_contents_list);
2082
2083 if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
2084 break;
2085
2086 /* Now we get a dominating virtual operand, and need to check
2087 whether its value is clobbered by any other dominating one. */
2088 if ((content->value.pass_through.formal_id >= 0
2089 || content->value.pass_through.operand)
2090 && !clobber_by_agg_contents_list_p (all_list, content))
2091 {
2092 struct ipa_known_agg_contents_list *copy
2093 = XALLOCA (struct ipa_known_agg_contents_list);
2094
2095 /* Add to the list consisting of only dominating virtual
2096 operands, whose definitions can finally reach the call. */
2097 add_to_agg_contents_list (&list, (*copy = *content, copy));
2098
2099 if (++value_count == max_agg_items)
2100 break;
2101 }
2102
2103 /* Add to the list consisting of all dominating virtual operands. */
2104 add_to_agg_contents_list (&all_list, content);
2105
2106 if (++item_count == 2 * max_agg_items)
2107 break;
2108 }
2109 dom_vuse = gimple_vuse (stmt);
2110 }
2111
2112 if (visited)
2113 BITMAP_FREE (visited);
2114
2115 /* Third stage just goes over the list and creates an appropriate vector of
2116 ipa_agg_jf_item structures out of it, of course only if there are
2117 any meaningful items to begin with. */
2118
2119 if (value_count)
2120 {
2121 jfunc->agg.by_ref = by_ref;
2122 build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
2123 }
2124 }
2125
2126
2127 /* Return the Ith param type of callee associated with call graph
2128 edge E. */
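
/* For instance, given a hypothetical callee prototype

     void callee (int, double, ...);

   index 0 yields the int type and index 1 the double type, while for index 2
   and beyond (arguments in the variadic part) both the TYPE_ARG_TYPES walk
   and the DECL_ARGUMENTS fallback run out, so NULL is returned.  */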
2129
2130 tree
2131 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
2132 {
2133 int n;
2134 tree type = (e->callee
2135 ? TREE_TYPE (e->callee->decl)
2136 : gimple_call_fntype (e->call_stmt));
2137 tree t = TYPE_ARG_TYPES (type);
2138
2139 for (n = 0; n < i; n++)
2140 {
2141 if (!t)
2142 break;
2143 t = TREE_CHAIN (t);
2144 }
2145 if (t)
2146 return TREE_VALUE (t);
2147 if (!e->callee)
2148 return NULL;
2149 t = DECL_ARGUMENTS (e->callee->decl);
2150 for (n = 0; n < i; n++)
2151 {
2152 if (!t)
2153 return NULL;
2154 t = TREE_CHAIN (t);
2155 }
2156 if (t)
2157 return TREE_TYPE (t);
2158 return NULL;
2159 }
2160
2161 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
2162 allocated structure or a previously existing one shared with other jump
2163 functions and/or transformation summaries. */
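
/* In the VALUE/MASK encoding used here, a zero bit in MASK means the
   corresponding bit is known and given by VALUE, while a set bit in MASK
   means it is unknown.  For example (mirroring what
   ipa_compute_jump_functions_for_edge does for aligned pointers below), a
   pointer known to be 16-byte aligned with no extra offset would have the
   low four bits of MASK clear, all higher bits of MASK set, and VALUE equal
   to zero.  */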
2164
2165 ipa_bits *
2166 ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
2167 {
2168 ipa_bits tmp;
2169 tmp.value = value;
2170 tmp.mask = mask;
2171
2172 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
2173 if (*slot)
2174 return *slot;
2175
2176 ipa_bits *res = ggc_alloc<ipa_bits> ();
2177 res->value = value;
2178 res->mask = mask;
2179 *slot = res;
2180
2181 return res;
2182 }
2183
2184 /* Assign to JF a pointer to ipa_bits structure with VALUE and MASK. Use hash
2185 table in order to avoid creating multiple same ipa_bits structures. */
2186
2187 static void
2188 ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
2189 const widest_int &mask)
2190 {
2191 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
2192 }
2193
2194 /* Return a pointer to a value_range just like *TMP, but either find it in
2195 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
2196
2197 static value_range *
2198 ipa_get_value_range (value_range *tmp)
2199 {
2200 value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
2201 if (*slot)
2202 return *slot;
2203
2204 value_range *vr = new (ggc_alloc<value_range> ()) value_range;
2205 *vr = *tmp;
2206 *slot = vr;
2207
2208 return vr;
2209 }
2210
2211 /* Return a pointer to a value range consisting of KIND, MIN, MAX and an empty
2212 equiv set. Use hash table in order to avoid creating multiple same copies of
2213 value_ranges. */
2214
2215 static value_range *
2216 ipa_get_value_range (enum value_range_kind kind, tree min, tree max)
2217 {
2218 value_range tmp (min, max, kind);
2219 return ipa_get_value_range (&tmp);
2220 }
2221
2222 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
2223 a NULL equiv bitmap. Use hash table in order to avoid creating multiple
2224 same value_range structures. */
2225
2226 static void
2227 ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
2228 tree min, tree max)
2229 {
2230 jf->m_vr = ipa_get_value_range (type, min, max);
2231 }
2232
2233 /* Assign to JF a pointer to a value_range just like TMP but either fetch a
2234 copy from ipa_vr_hash_table or allocate a new one in GC memory. */
2235
2236 static void
2237 ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
2238 {
2239 jf->m_vr = ipa_get_value_range (tmp);
2240 }
2241
2242 /* Compute jump function for all arguments of callsite CS and insert the
2243 information in the jump_functions array in the ipa_edge_args corresponding
2244 to this callsite. */
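
/* As a hedged illustration (the SSA names and the callee are made up), for a
   caller with formal parameters A and P containing

     _5 = a_2(D) + 3;
     _6 = *p_4(D);
     bar (a_2(D), _5, 7, _6);

   this would typically record a simple pass-through jump function for the
   first argument, an arithmetic pass-through (PLUS_EXPR with operand 3) for
   the second, a constant jump function for the third and, if the load can be
   matched to an unmodified parameter, a suitable description for the fourth.
   What is actually produced of course depends on what the body analysis can
   prove.  */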
2245
2246 static void
2247 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
2248 struct cgraph_edge *cs)
2249 {
2250 ipa_node_params *info = ipa_node_params_sum->get (cs->caller);
2251 ipa_edge_args *args = ipa_edge_args_sum->get_create (cs);
2252 gcall *call = cs->call_stmt;
2253 int n, arg_num = gimple_call_num_args (call);
2254 bool useful_context = false;
2255 value_range vr;
2256
2257 if (arg_num == 0 || args->jump_functions)
2258 return;
2259 vec_safe_grow_cleared (args->jump_functions, arg_num, true);
2260 if (flag_devirtualize)
2261 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num, true);
2262
2263 if (gimple_call_internal_p (call))
2264 return;
2265 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
2266 return;
2267
2268 for (n = 0; n < arg_num; n++)
2269 {
2270 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
2271 tree arg = gimple_call_arg (call, n);
2272 tree param_type = ipa_get_callee_param_type (cs, n);
2273 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
2274 {
2275 tree instance;
2276 class ipa_polymorphic_call_context context (cs->caller->decl,
2277 arg, cs->call_stmt,
2278 &instance);
2279 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
2280 &fbi->aa_walk_budget);
2281 *ipa_get_ith_polymorhic_call_context (args, n) = context;
2282 if (!context.useless_p ())
2283 useful_context = true;
2284 }
2285
2286 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2287 {
2288 bool addr_nonzero = false;
2289 bool strict_overflow = false;
2290
2291 if (TREE_CODE (arg) == SSA_NAME
2292 && param_type
2293 && get_range_query (cfun)->range_of_expr (vr, arg)
2294 && vr.nonzero_p ())
2295 addr_nonzero = true;
2296 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
2297 addr_nonzero = true;
2298
2299 if (addr_nonzero)
2300 {
2301 tree z = build_int_cst (TREE_TYPE (arg), 0);
2302 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
2303 }
2304 else
2305 gcc_assert (!jfunc->m_vr);
2306 }
2307 else
2308 {
2309 if (TREE_CODE (arg) == SSA_NAME
2310 && param_type
2311 && get_range_query (cfun)->range_of_expr (vr, arg)
2312 && !vr.undefined_p ())
2313 {
2314 value_range resvr;
2315 range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
2316 &vr, TREE_TYPE (arg));
2317 if (!resvr.undefined_p () && !resvr.varying_p ())
2318 ipa_set_jfunc_vr (jfunc, &resvr);
2319 else
2320 gcc_assert (!jfunc->m_vr);
2321 }
2322 else
2323 gcc_assert (!jfunc->m_vr);
2324 }
2325
2326 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
2327 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
2328 {
2329 if (TREE_CODE (arg) == SSA_NAME)
2330 ipa_set_jfunc_bits (jfunc, 0,
2331 widest_int::from (get_nonzero_bits (arg),
2332 TYPE_SIGN (TREE_TYPE (arg))));
2333 else
2334 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
2335 }
2336 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
2337 {
2338 unsigned HOST_WIDE_INT bitpos;
2339 unsigned align;
2340
2341 get_pointer_alignment_1 (arg, &align, &bitpos);
2342 widest_int mask = wi::bit_and_not
2343 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
2344 align / BITS_PER_UNIT - 1);
2345 widest_int value = bitpos / BITS_PER_UNIT;
2346 ipa_set_jfunc_bits (jfunc, value, mask);
2347 }
2348 else
2349 gcc_assert (!jfunc->bits);
2350
2351 if (is_gimple_ip_invariant (arg)
2352 || (VAR_P (arg)
2353 && is_global_var (arg)
2354 && TREE_READONLY (arg)))
2355 ipa_set_jf_constant (jfunc, arg, cs);
2356 else if (!is_gimple_reg_type (TREE_TYPE (arg))
2357 && TREE_CODE (arg) == PARM_DECL)
2358 {
2359 int index = ipa_get_param_decl_index (info, arg);
2360
2361 gcc_assert (index >= 0);
2362 /* Aggregate passed by value, check for pass-through, otherwise we
2363 will attempt to fill in aggregate contents later in this
2364 for cycle. */
2365 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
2366 {
2367 ipa_set_jf_simple_pass_through (jfunc, index, false);
2368 continue;
2369 }
2370 }
2371 else if (TREE_CODE (arg) == SSA_NAME)
2372 {
2373 if (SSA_NAME_IS_DEFAULT_DEF (arg))
2374 {
2375 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
2376 if (index >= 0)
2377 {
2378 bool agg_p;
2379 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
2380 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
2381 }
2382 }
2383 else
2384 {
2385 gimple *stmt = SSA_NAME_DEF_STMT (arg);
2386 if (is_gimple_assign (stmt))
2387 compute_complex_assign_jump_func (fbi, info, jfunc,
2388 call, stmt, arg, param_type);
2389 else if (gimple_code (stmt) == GIMPLE_PHI)
2390 compute_complex_ancestor_jump_func (fbi, info, jfunc,
2391 call,
2392 as_a <gphi *> (stmt));
2393 }
2394 }
2395
2396 /* If ARG is a pointer, we cannot use its type to determine the type of the
2397 aggregate passed (because type conversions are ignored in gimple). Usually we
2398 can safely get the type from the function declaration, but in case of K&R
2399 prototypes or variadic functions we can try our luck with the type of the
2400 pointer passed. TODO: Since we look for actual initialization of the memory
2401 object, we may be better off working out the type from the stores we find. */
2402 if (!param_type)
2403 param_type = TREE_TYPE (arg);
2404
2405 if ((jfunc->type != IPA_JF_PASS_THROUGH
2406 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2407 && (jfunc->type != IPA_JF_ANCESTOR
2408 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2409 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2410 || POINTER_TYPE_P (param_type)))
2411 determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
2412 }
2413 if (!useful_context)
2414 vec_free (args->polymorphic_call_contexts);
2415 }
2416
2417 /* Compute jump functions for all edges - both direct and indirect - outgoing
2418 from BB. */
2419
2420 static void
2421 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2422 {
2423 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2424 int i;
2425 struct cgraph_edge *cs;
2426
2427 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2428 {
2429 struct cgraph_node *callee = cs->callee;
2430
2431 if (callee)
2432 {
2433 callee = callee->ultimate_alias_target ();
2434 /* We do not need to bother analyzing calls to unknown functions
2435 unless they may become known during lto/whopr. */
2436 if (!callee->definition && !flag_lto
2437 && !gimple_call_fnspec (cs->call_stmt).known_p ())
2438 continue;
2439 }
2440 ipa_compute_jump_functions_for_edge (fbi, cs);
2441 }
2442 }
2443
2444 /* If STMT looks like a statement loading a value from a member pointer formal
2445 parameter, return that parameter and store the offset of the field to
2446 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2447 might be clobbered). If USE_DELTA, then we look for a use of the delta
2448 field rather than the pfn. */
2449
2450 static tree
2451 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2452 HOST_WIDE_INT *offset_p)
2453 {
2454 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2455
2456 if (!gimple_assign_single_p (stmt))
2457 return NULL_TREE;
2458
2459 rhs = gimple_assign_rhs1 (stmt);
2460 if (TREE_CODE (rhs) == COMPONENT_REF)
2461 {
2462 ref_field = TREE_OPERAND (rhs, 1);
2463 rhs = TREE_OPERAND (rhs, 0);
2464 }
2465 else
2466 ref_field = NULL_TREE;
2467 if (TREE_CODE (rhs) != MEM_REF)
2468 return NULL_TREE;
2469 rec = TREE_OPERAND (rhs, 0);
2470 if (TREE_CODE (rec) != ADDR_EXPR)
2471 return NULL_TREE;
2472 rec = TREE_OPERAND (rec, 0);
2473 if (TREE_CODE (rec) != PARM_DECL
2474 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2475 return NULL_TREE;
2476 ref_offset = TREE_OPERAND (rhs, 1);
2477
2478 if (use_delta)
2479 fld = delta_field;
2480 else
2481 fld = ptr_field;
2482 if (offset_p)
2483 *offset_p = int_bit_position (fld);
2484
2485 if (ref_field)
2486 {
2487 if (integer_nonzerop (ref_offset))
2488 return NULL_TREE;
2489 return ref_field == fld ? rec : NULL_TREE;
2490 }
2491 else
2492 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2493 : NULL_TREE;
2494 }
2495
2496 /* Returns true iff T is an SSA_NAME defined by a statement. */
2497
2498 static bool
2499 ipa_is_ssa_with_stmt_def (tree t)
2500 {
2501 if (TREE_CODE (t) == SSA_NAME
2502 && !SSA_NAME_IS_DEFAULT_DEF (t))
2503 return true;
2504 else
2505 return false;
2506 }
2507
2508 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2509 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2510 indirect call graph edge.
2511 If POLYMORPHIC is true, record it as a destination of a polymorphic call. */
2512
2513 static struct cgraph_edge *
2514 ipa_note_param_call (struct cgraph_node *node, int param_index,
2515 gcall *stmt, bool polymorphic)
2516 {
2517 struct cgraph_edge *cs;
2518
2519 cs = node->get_edge (stmt);
2520 cs->indirect_info->param_index = param_index;
2521 cs->indirect_info->agg_contents = 0;
2522 cs->indirect_info->member_ptr = 0;
2523 cs->indirect_info->guaranteed_unmodified = 0;
2524 ipa_node_params *info = ipa_node_params_sum->get (node);
2525 ipa_set_param_used_by_indirect_call (info, param_index, true);
2526 if (cs->indirect_info->polymorphic || polymorphic)
2527 ipa_set_param_used_by_polymorphic_call (info, param_index, true);
2528 return cs;
2529 }
2530
2531 /* Analyze the CALL and examine uses of formal parameters of the caller NODE
2532 (described by INFO). PARMS_AINFO is a pointer to a vector containing
2533 intermediate information about each formal parameter. Currently it checks
2534 whether the call calls a pointer that is a formal parameter and if so, the
2535 parameter is marked with the called flag and an indirect call graph edge
2536 describing the call is created. This is very simple for ordinary pointers
2537 represented in SSA but not-so-nice when it comes to member pointers. The
2538 ugly part of this function does nothing more than trying to match the
2539 pattern of such a call. An example of such a pattern is the gimple dump
2540 below, the call is on the last line:
2541
2542 <bb 2>:
2543 f$__delta_5 = f.__delta;
2544 f$__pfn_24 = f.__pfn;
2545
2546 or
2547 <bb 2>:
2548 f$__delta_5 = MEM[(struct *)&f];
2549 f$__pfn_24 = MEM[(struct *)&f + 4B];
2550
2551 and a few lines below:
2552
2553 <bb 5>
2554 D.2496_3 = (int) f$__pfn_24;
2555 D.2497_4 = D.2496_3 & 1;
2556 if (D.2497_4 != 0)
2557 goto <bb 3>;
2558 else
2559 goto <bb 4>;
2560
2561 <bb 6>:
2562 D.2500_7 = (unsigned int) f$__delta_5;
2563 D.2501_8 = &S + D.2500_7;
2564 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2565 D.2503_10 = *D.2502_9;
2566 D.2504_12 = f$__pfn_24 + -1;
2567 D.2505_13 = (unsigned int) D.2504_12;
2568 D.2506_14 = D.2503_10 + D.2505_13;
2569 D.2507_15 = *D.2506_14;
2570 iftmp.11_16 = (String:: *) D.2507_15;
2571
2572 <bb 7>:
2573 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2574 D.2500_19 = (unsigned int) f$__delta_5;
2575 D.2508_20 = &S + D.2500_19;
2576 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2577
2578 Such patterns are results of simple calls to a member pointer:
2579
2580 int doprinting (int (MyString::* f)(int) const)
2581 {
2582 MyString S ("somestring");
2583
2584 return (S.*f)(4);
2585 }
2586
2587 Moreover, the function also looks for called pointers loaded from aggregates
2588 passed by value or reference. */
2589
2590 static void
2591 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2592 tree target)
2593 {
2594 class ipa_node_params *info = fbi->info;
2595 HOST_WIDE_INT offset;
2596 bool by_ref;
2597
2598 if (SSA_NAME_IS_DEFAULT_DEF (target))
2599 {
2600 tree var = SSA_NAME_VAR (target);
2601 int index = ipa_get_param_decl_index (info, var);
2602 if (index >= 0)
2603 ipa_note_param_call (fbi->node, index, call, false);
2604 return;
2605 }
2606
2607 int index;
2608 gimple *def = SSA_NAME_DEF_STMT (target);
2609 bool guaranteed_unmodified;
2610 if (gimple_assign_single_p (def)
2611 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2612 gimple_assign_rhs1 (def), &index, &offset,
2613 NULL, &by_ref, &guaranteed_unmodified))
2614 {
2615 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2616 call, false);
2617 cs->indirect_info->offset = offset;
2618 cs->indirect_info->agg_contents = 1;
2619 cs->indirect_info->by_ref = by_ref;
2620 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2621 return;
2622 }
2623
2624 /* Now we need to try to match the complex pattern of calling a member
2625 pointer. */
2626 if (gimple_code (def) != GIMPLE_PHI
2627 || gimple_phi_num_args (def) != 2
2628 || !POINTER_TYPE_P (TREE_TYPE (target))
2629 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2630 return;
2631
2632 /* First, we need to check whether one of these is a load from a member
2633 pointer that is a parameter to this function. */
2634 tree n1 = PHI_ARG_DEF (def, 0);
2635 tree n2 = PHI_ARG_DEF (def, 1);
2636 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2637 return;
2638 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2639 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2640
2641 tree rec;
2642 basic_block bb, virt_bb;
2643 basic_block join = gimple_bb (def);
2644 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2645 {
2646 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2647 return;
2648
2649 bb = EDGE_PRED (join, 0)->src;
2650 virt_bb = gimple_bb (d2);
2651 }
2652 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2653 {
2654 bb = EDGE_PRED (join, 1)->src;
2655 virt_bb = gimple_bb (d1);
2656 }
2657 else
2658 return;
2659
2660 /* Second, we need to check that the basic blocks are laid out in the way
2661 corresponding to the pattern. */
2662
2663 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2664 || single_pred (virt_bb) != bb
2665 || single_succ (virt_bb) != join)
2666 return;
2667
2668 /* Third, let's see that the branching is done depending on the least
2669 significant bit of the pfn. */
2670
2671 gimple *branch = last_stmt (bb);
2672 if (!branch || gimple_code (branch) != GIMPLE_COND)
2673 return;
2674
2675 if ((gimple_cond_code (branch) != NE_EXPR
2676 && gimple_cond_code (branch) != EQ_EXPR)
2677 || !integer_zerop (gimple_cond_rhs (branch)))
2678 return;
2679
2680 tree cond = gimple_cond_lhs (branch);
2681 if (!ipa_is_ssa_with_stmt_def (cond))
2682 return;
2683
2684 def = SSA_NAME_DEF_STMT (cond);
2685 if (!is_gimple_assign (def)
2686 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2687 || !integer_onep (gimple_assign_rhs2 (def)))
2688 return;
2689
2690 cond = gimple_assign_rhs1 (def);
2691 if (!ipa_is_ssa_with_stmt_def (cond))
2692 return;
2693
2694 def = SSA_NAME_DEF_STMT (cond);
2695
2696 if (is_gimple_assign (def)
2697 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2698 {
2699 cond = gimple_assign_rhs1 (def);
2700 if (!ipa_is_ssa_with_stmt_def (cond))
2701 return;
2702 def = SSA_NAME_DEF_STMT (cond);
2703 }
2704
2705 tree rec2;
2706 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2707 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2708 == ptrmemfunc_vbit_in_delta),
2709 NULL);
2710 if (rec != rec2)
2711 return;
2712
2713 index = ipa_get_param_decl_index (info, rec);
2714 if (index >= 0
2715 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2716 {
2717 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2718 call, false);
2719 cs->indirect_info->offset = offset;
2720 cs->indirect_info->agg_contents = 1;
2721 cs->indirect_info->member_ptr = 1;
2722 cs->indirect_info->guaranteed_unmodified = 1;
2723 }
2724
2725 return;
2726 }
2727
2728 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2729 object referenced in the expression is a formal parameter of the caller
2730 FBI->node (described by FBI->info), create a call note for the
2731 statement. */
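
/* For example, a virtual call through formal parameter OBJ that in a gimple
   dump looks roughly like

     OBJ_TYPE_REF(_3;(struct A)obj_2(D)->0) (obj_2(D));

   results in an indirect call note with the parameter index of OBJ,
   otr_token 0 and otr_type A, provided no dynamic type change of *OBJ could
   have happened before the call.  (The dump syntax above is only
   approximate.)  */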
2732
2733 static void
2734 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2735 gcall *call, tree target)
2736 {
2737 tree obj = OBJ_TYPE_REF_OBJECT (target);
2738 int index;
2739 HOST_WIDE_INT anc_offset;
2740
2741 if (!flag_devirtualize)
2742 return;
2743
2744 if (TREE_CODE (obj) != SSA_NAME)
2745 return;
2746
2747 class ipa_node_params *info = fbi->info;
2748 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2749 {
2750 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2751 return;
2752
2753 anc_offset = 0;
2754 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2755 gcc_assert (index >= 0);
2756 if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
2757 call))
2758 return;
2759 }
2760 else
2761 {
2762 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2763 tree expr;
2764
2765 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2766 if (!expr)
2767 return;
2768 index = ipa_get_param_decl_index (info,
2769 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2770 gcc_assert (index >= 0);
2771 if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
2772 call, anc_offset))
2773 return;
2774 }
2775
2776 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2777 call, true);
2778 class cgraph_indirect_call_info *ii = cs->indirect_info;
2779 ii->offset = anc_offset;
2780 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2781 ii->otr_type = obj_type_ref_class (target);
2782 ii->polymorphic = 1;
2783 }
2784
2785 /* Analyze a call statement CALL to determine whether and how it utilizes formal parameters
2786 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
2787 containing intermediate information about each formal parameter. */
2788
2789 static void
2790 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2791 {
2792 tree target = gimple_call_fn (call);
2793
2794 if (!target
2795 || (TREE_CODE (target) != SSA_NAME
2796 && !virtual_method_call_p (target)))
2797 return;
2798
2799 struct cgraph_edge *cs = fbi->node->get_edge (call);
2800 /* If we previously turned the call into a direct call, there is
2801 no need to analyze. */
2802 if (cs && !cs->indirect_unknown_callee)
2803 return;
2804
2805 if (cs->indirect_info->polymorphic && flag_devirtualize)
2806 {
2807 tree instance;
2808 tree target = gimple_call_fn (call);
2809 ipa_polymorphic_call_context context (current_function_decl,
2810 target, call, &instance);
2811
2812 gcc_checking_assert (cs->indirect_info->otr_type
2813 == obj_type_ref_class (target));
2814 gcc_checking_assert (cs->indirect_info->otr_token
2815 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2816
2817 cs->indirect_info->vptr_changed
2818 = !context.get_dynamic_type (instance,
2819 OBJ_TYPE_REF_OBJECT (target),
2820 obj_type_ref_class (target), call,
2821 &fbi->aa_walk_budget);
2822 cs->indirect_info->context = context;
2823 }
2824
2825 if (TREE_CODE (target) == SSA_NAME)
2826 ipa_analyze_indirect_call_uses (fbi, call, target);
2827 else if (virtual_method_call_p (target))
2828 ipa_analyze_virtual_call_uses (fbi, call, target);
2829 }
2830
2831
2832 /* Analyze the call statement STMT with respect to formal parameters (described
2833 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2834 formal parameters are called. */
2835
2836 static void
2837 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2838 {
2839 if (is_gimple_call (stmt))
2840 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2841 }
2842
2843 /* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2844 If OP is a parameter declaration, mark it as used in the info structure
2845 passed in DATA. */
2846
2847 static bool
2848 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2849 {
2850 class ipa_node_params *info = (class ipa_node_params *) data;
2851
2852 op = get_base_address (op);
2853 if (op
2854 && TREE_CODE (op) == PARM_DECL)
2855 {
2856 int index = ipa_get_param_decl_index (info, op);
2857 gcc_assert (index >= 0);
2858 ipa_set_param_used (info, index, true);
2859 }
2860
2861 return false;
2862 }
2863
2864 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2865 the findings in various structures of the associated ipa_node_params
2866 structure, such as parameter flags, notes etc. FBI holds various data about
2867 the function being analyzed. */
2868
2869 static void
2870 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2871 {
2872 gimple_stmt_iterator gsi;
2873 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2874 {
2875 gimple *stmt = gsi_stmt (gsi);
2876
2877 if (is_gimple_debug (stmt))
2878 continue;
2879
2880 ipa_analyze_stmt_uses (fbi, stmt);
2881 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2882 visit_ref_for_mod_analysis,
2883 visit_ref_for_mod_analysis,
2884 visit_ref_for_mod_analysis);
2885 }
2886 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2887 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2888 visit_ref_for_mod_analysis,
2889 visit_ref_for_mod_analysis,
2890 visit_ref_for_mod_analysis);
2891 }
2892
2893 /* Return true if EXPR is a load from a dereference of SSA_NAME NAME. */
2894
2895 static bool
2896 load_from_dereferenced_name (tree expr, tree name)
2897 {
2898 tree base = get_base_address (expr);
2899 return (TREE_CODE (base) == MEM_REF
2900 && TREE_OPERAND (base, 0) == name);
2901 }
2902
2903 /* Calculate controlled uses of parameters of NODE. */
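
/* A hypothetical example (G is assumed to be a global variable):

     void
     foo (void (*fn) (int))
     {
       fn (1);
       fn (2);
       g = fn;
     }

   Here FN ends up with IPA_UNDESCRIBED_USE because its value escapes through
   the store to G; without that store it would have two controlled uses, one
   for each call.  */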
2904
2905 static void
2906 ipa_analyze_controlled_uses (struct cgraph_node *node)
2907 {
2908 ipa_node_params *info = ipa_node_params_sum->get (node);
2909
2910 for (int i = 0; i < ipa_get_param_count (info); i++)
2911 {
2912 tree parm = ipa_get_param (info, i);
2913 int call_uses = 0;
2914 bool load_dereferenced = false;
2915
2916 /* For SSA regs see if parameter is used. For non-SSA we compute
2917 the flag during modification analysis. */
2918 if (is_gimple_reg (parm))
2919 {
2920 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2921 parm);
2922 if (ddef && !has_zero_uses (ddef))
2923 {
2924 imm_use_iterator imm_iter;
2925 gimple *stmt;
2926
2927 ipa_set_param_used (info, i, true);
2928 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, ddef)
2929 {
2930 if (is_gimple_debug (stmt))
2931 continue;
2932
2933 int all_stmt_uses = 0;
2934 use_operand_p use_p;
2935 FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
2936 all_stmt_uses++;
2937
2938 if (is_gimple_call (stmt))
2939 {
2940 if (gimple_call_internal_p (stmt))
2941 {
2942 call_uses = IPA_UNDESCRIBED_USE;
2943 break;
2944 }
2945 int recognized_stmt_uses;
2946 if (gimple_call_fn (stmt) == ddef)
2947 recognized_stmt_uses = 1;
2948 else
2949 recognized_stmt_uses = 0;
2950 unsigned arg_count = gimple_call_num_args (stmt);
2951 for (unsigned i = 0; i < arg_count; i++)
2952 {
2953 tree arg = gimple_call_arg (stmt, i);
2954 if (arg == ddef)
2955 recognized_stmt_uses++;
2956 else if (load_from_dereferenced_name (arg, ddef))
2957 {
2958 load_dereferenced = true;
2959 recognized_stmt_uses++;
2960 }
2961 }
2962
2963 if (recognized_stmt_uses != all_stmt_uses)
2964 {
2965 call_uses = IPA_UNDESCRIBED_USE;
2966 break;
2967 }
2968 if (call_uses >= 0)
2969 call_uses += all_stmt_uses;
2970 }
2971 else if (gimple_assign_single_p (stmt))
2972 {
2973 tree rhs = gimple_assign_rhs1 (stmt);
2974 if (all_stmt_uses != 1
2975 || !load_from_dereferenced_name (rhs, ddef))
2976 {
2977 call_uses = IPA_UNDESCRIBED_USE;
2978 break;
2979 }
2980 load_dereferenced = true;
2981 }
2982 else
2983 {
2984 call_uses = IPA_UNDESCRIBED_USE;
2985 break;
2986 }
2987 }
2988 }
2989 else
2990 call_uses = 0;
2991 }
2992 else
2993 call_uses = IPA_UNDESCRIBED_USE;
2994 ipa_set_controlled_uses (info, i, call_uses);
2995 ipa_set_param_load_dereferenced (info, i, load_dereferenced);
2996 }
2997 }
2998
2999 /* Free stuff in BI. */
3000
3001 static void
3002 free_ipa_bb_info (struct ipa_bb_info *bi)
3003 {
3004 bi->cg_edges.release ();
3005 bi->param_aa_statuses.release ();
3006 }
3007
3008 /* Dominator walker driving the analysis. */
3009
3010 class analysis_dom_walker : public dom_walker
3011 {
3012 public:
3013 analysis_dom_walker (struct ipa_func_body_info *fbi)
3014 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
3015
3016 virtual edge before_dom_children (basic_block);
3017
3018 private:
3019 struct ipa_func_body_info *m_fbi;
3020 };
3021
3022 edge
3023 analysis_dom_walker::before_dom_children (basic_block bb)
3024 {
3025 ipa_analyze_params_uses_in_bb (m_fbi, bb);
3026 ipa_compute_jump_functions_for_bb (m_fbi, bb);
3027 return NULL;
3028 }
3029
3030 /* Release body info FBI. */
3031
3032 void
3033 ipa_release_body_info (struct ipa_func_body_info *fbi)
3034 {
3035 int i;
3036 struct ipa_bb_info *bi;
3037
3038 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
3039 free_ipa_bb_info (bi);
3040 fbi->bb_infos.release ();
3041 }
3042
3043 /* Initialize the array describing properties of formal parameters
3044 of NODE, analyze their uses and compute jump functions associated
3045 with actual arguments of calls from within NODE. */
3046
3047 void
3048 ipa_analyze_node (struct cgraph_node *node)
3049 {
3050 struct ipa_func_body_info fbi;
3051 class ipa_node_params *info;
3052
3053 ipa_check_create_node_params ();
3054 ipa_check_create_edge_args ();
3055 info = ipa_node_params_sum->get_create (node);
3056
3057 if (info->analysis_done)
3058 return;
3059 info->analysis_done = 1;
3060
3061 if (ipa_func_spec_opts_forbid_analysis_p (node))
3062 {
3063 for (int i = 0; i < ipa_get_param_count (info); i++)
3064 {
3065 ipa_set_param_used (info, i, true);
3066 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
3067 }
3068 return;
3069 }
3070
3071 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
3072 push_cfun (func);
3073 calculate_dominance_info (CDI_DOMINATORS);
3074 ipa_initialize_node_params (node);
3075 ipa_analyze_controlled_uses (node);
3076
3077 fbi.node = node;
3078 fbi.info = info;
3079 fbi.bb_infos = vNULL;
3080 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
3081 fbi.param_count = ipa_get_param_count (info);
3082 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
3083
3084 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
3085 {
3086 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3087 bi->cg_edges.safe_push (cs);
3088 }
3089
3090 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
3091 {
3092 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
3093 bi->cg_edges.safe_push (cs);
3094 }
3095
3096 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
3097
3098 ipa_release_body_info (&fbi);
3099 free_dominance_info (CDI_DOMINATORS);
3100 pop_cfun ();
3101 }
3102
3103 /* Update the jump functions associated with call graph edge E when the call
3104 graph edge CS is being inlined, assuming that E->caller is already (possibly
3105 indirectly) inlined into CS->callee and that E has not been inlined. */
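
/* As a sketch of the intended composition: suppose A calls B passing its
   first parameter unchanged (a simple pass-through with formal_id 0 on CS)
   and B calls C passing its own first parameter on (a pass-through with
   formal_id 0 on E).  After B is inlined into A, the jump function on the
   resulting A->C edge should again be a pass-through of A's first parameter,
   which is what combining E's jump functions with those of CS below
   achieves.  When the outer description is unknown or the referenced
   argument does not exist on CS (e.g. varargs), the result degrades to
   IPA_JF_UNKNOWN.  */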
3106
3107 static void
3108 update_jump_functions_after_inlining (struct cgraph_edge *cs,
3109 struct cgraph_edge *e)
3110 {
3111 ipa_edge_args *top = ipa_edge_args_sum->get (cs);
3112 ipa_edge_args *args = ipa_edge_args_sum->get (e);
3113 if (!args)
3114 return;
3115 int count = ipa_get_cs_argument_count (args);
3116 int i;
3117
3118 for (i = 0; i < count; i++)
3119 {
3120 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
3121 class ipa_polymorphic_call_context *dst_ctx
3122 = ipa_get_ith_polymorhic_call_context (args, i);
3123
3124 if (dst->agg.items)
3125 {
3126 struct ipa_agg_jf_item *item;
3127 int j;
3128
3129 FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
3130 {
3131 int dst_fid;
3132 struct ipa_jump_func *src;
3133
3134 if (item->jftype != IPA_JF_PASS_THROUGH
3135 && item->jftype != IPA_JF_LOAD_AGG)
3136 continue;
3137
3138 dst_fid = item->value.pass_through.formal_id;
3139 if (!top || dst_fid >= ipa_get_cs_argument_count (top))
3140 {
3141 item->jftype = IPA_JF_UNKNOWN;
3142 continue;
3143 }
3144
3145 item->value.pass_through.formal_id = -1;
3146 src = ipa_get_ith_jump_func (top, dst_fid);
3147 if (src->type == IPA_JF_CONST)
3148 {
3149 if (item->jftype == IPA_JF_PASS_THROUGH
3150 && item->value.pass_through.operation == NOP_EXPR)
3151 {
3152 item->jftype = IPA_JF_CONST;
3153 item->value.constant = src->value.constant.value;
3154 continue;
3155 }
3156 }
3157 else if (src->type == IPA_JF_PASS_THROUGH
3158 && src->value.pass_through.operation == NOP_EXPR)
3159 {
3160 if (item->jftype == IPA_JF_PASS_THROUGH
3161 || !item->value.load_agg.by_ref
3162 || src->value.pass_through.agg_preserved)
3163 item->value.pass_through.formal_id
3164 = src->value.pass_through.formal_id;
3165 }
3166 else if (src->type == IPA_JF_ANCESTOR)
3167 {
3168 if (item->jftype == IPA_JF_PASS_THROUGH)
3169 {
3170 if (!src->value.ancestor.offset)
3171 item->value.pass_through.formal_id
3172 = src->value.ancestor.formal_id;
3173 }
3174 else if (src->value.ancestor.agg_preserved)
3175 {
3176 gcc_checking_assert (item->value.load_agg.by_ref);
3177
3178 item->value.pass_through.formal_id
3179 = src->value.ancestor.formal_id;
3180 item->value.load_agg.offset
3181 += src->value.ancestor.offset;
3182 }
3183 }
3184
3185 if (item->value.pass_through.formal_id < 0)
3186 item->jftype = IPA_JF_UNKNOWN;
3187 }
3188 }
3189
3190 if (!top)
3191 {
3192 ipa_set_jf_unknown (dst);
3193 continue;
3194 }
3195
3196 if (dst->type == IPA_JF_ANCESTOR)
3197 {
3198 struct ipa_jump_func *src;
3199 int dst_fid = dst->value.ancestor.formal_id;
3200 class ipa_polymorphic_call_context *src_ctx
3201 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3202
3203 /* Variable number of arguments can cause havoc if we try to access
3204 one that does not exist in the inlined edge. So make sure we
3205 don't. */
3206 if (dst_fid >= ipa_get_cs_argument_count (top))
3207 {
3208 ipa_set_jf_unknown (dst);
3209 continue;
3210 }
3211
3212 src = ipa_get_ith_jump_func (top, dst_fid);
3213
3214 if (src_ctx && !src_ctx->useless_p ())
3215 {
3216 class ipa_polymorphic_call_context ctx = *src_ctx;
3217
3218 /* TODO: Make type preserved safe WRT contexts. */
3219 if (!ipa_get_jf_ancestor_type_preserved (dst))
3220 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3221 ctx.offset_by (dst->value.ancestor.offset);
3222 if (!ctx.useless_p ())
3223 {
3224 if (!dst_ctx)
3225 {
3226 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3227 count, true);
3228 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3229 }
3230
3231 dst_ctx->combine_with (ctx);
3232 }
3233 }
3234
3235 /* Parameter and argument in ancestor jump function must be pointer
3236 type, which means access to aggregate must be by-reference. */
3237 gcc_assert (!src->agg.items || src->agg.by_ref);
3238
3239 if (src->agg.items && dst->value.ancestor.agg_preserved)
3240 {
3241 struct ipa_agg_jf_item *item;
3242 int j;
3243
3244 /* Currently we do not produce clobber aggregate jump functions,
3245 replace with merging when we do. */
3246 gcc_assert (!dst->agg.items);
3247
3248 dst->agg.items = vec_safe_copy (src->agg.items);
3249 dst->agg.by_ref = src->agg.by_ref;
3250 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
3251 item->offset -= dst->value.ancestor.offset;
3252 }
3253
3254 if (src->type == IPA_JF_PASS_THROUGH
3255 && src->value.pass_through.operation == NOP_EXPR)
3256 {
3257 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3258 dst->value.ancestor.agg_preserved &=
3259 src->value.pass_through.agg_preserved;
3260 }
3261 else if (src->type == IPA_JF_ANCESTOR)
3262 {
3263 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3264 dst->value.ancestor.offset += src->value.ancestor.offset;
3265 dst->value.ancestor.agg_preserved &=
3266 src->value.ancestor.agg_preserved;
3267 dst->value.ancestor.keep_null |= src->value.ancestor.keep_null;
3268 }
3269 else
3270 ipa_set_jf_unknown (dst);
3271 }
3272 else if (dst->type == IPA_JF_PASS_THROUGH)
3273 {
3274 struct ipa_jump_func *src;
3275 /* We must check range due to calls with variable number of arguments
3276 and we cannot combine jump functions with operations. */
3277 if (dst->value.pass_through.operation == NOP_EXPR
3278 && (top && dst->value.pass_through.formal_id
3279 < ipa_get_cs_argument_count (top)))
3280 {
3281 int dst_fid = dst->value.pass_through.formal_id;
3282 src = ipa_get_ith_jump_func (top, dst_fid);
3283 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
3284 class ipa_polymorphic_call_context *src_ctx
3285 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3286
3287 if (src_ctx && !src_ctx->useless_p ())
3288 {
3289 class ipa_polymorphic_call_context ctx = *src_ctx;
3290
3291 /* TODO: Make type preserved safe WRT contexts. */
3292 if (!ipa_get_jf_pass_through_type_preserved (dst))
3293 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3294 if (!ctx.useless_p ())
3295 {
3296 if (!dst_ctx)
3297 {
3298 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3299 count, true);
3300 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3301 }
3302 dst_ctx->combine_with (ctx);
3303 }
3304 }
3305 switch (src->type)
3306 {
3307 case IPA_JF_UNKNOWN:
3308 ipa_set_jf_unknown (dst);
3309 break;
3310 case IPA_JF_CONST:
3311 {
3312 bool rd = ipa_get_jf_pass_through_refdesc_decremented (dst);
3313 ipa_set_jf_cst_copy (dst, src);
3314 if (rd)
3315 ipa_zap_jf_refdesc (dst);
3316 }
3317
3318 break;
3319
3320 case IPA_JF_PASS_THROUGH:
3321 {
3322 int formal_id = ipa_get_jf_pass_through_formal_id (src);
3323 enum tree_code operation;
3324 operation = ipa_get_jf_pass_through_operation (src);
3325
3326 if (operation == NOP_EXPR)
3327 {
3328 bool agg_p;
3329 agg_p = dst_agg_p
3330 && ipa_get_jf_pass_through_agg_preserved (src);
3331 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
3332 }
3333 else if (TREE_CODE_CLASS (operation) == tcc_unary)
3334 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
3335 else
3336 {
3337 tree operand = ipa_get_jf_pass_through_operand (src);
3338 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3339 operation);
3340 }
3341 break;
3342 }
3343 case IPA_JF_ANCESTOR:
3344 {
3345 bool agg_p;
3346 agg_p = dst_agg_p
3347 && ipa_get_jf_ancestor_agg_preserved (src);
3348 ipa_set_ancestor_jf (dst,
3349 ipa_get_jf_ancestor_offset (src),
3350 ipa_get_jf_ancestor_formal_id (src),
3351 agg_p,
3352 ipa_get_jf_ancestor_keep_null (src));
3353 break;
3354 }
3355 default:
3356 gcc_unreachable ();
3357 }
3358
3359 if (src->agg.items
3360 && (dst_agg_p || !src->agg.by_ref))
3361 {
3362 /* Currently we do not produce clobber aggregate jump
3363 functions, replace with merging when we do. */
3364 gcc_assert (!dst->agg.items);
3365
3366 dst->agg.by_ref = src->agg.by_ref;
3367 dst->agg.items = vec_safe_copy (src->agg.items);
3368 }
3369 }
3370 else
3371 ipa_set_jf_unknown (dst);
3372 }
3373 }
3374 }
3375
3376 /* If TARGET is an addr_expr of a function declaration, make it the
3377 (speculative) destination of an indirect edge IE and return the edge.
3378 Otherwise, return NULL. */
3379
3380 struct cgraph_edge *
3381 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
3382 bool speculative)
3383 {
3384 struct cgraph_node *callee;
3385 bool unreachable = false;
3386
3387 if (TREE_CODE (target) == ADDR_EXPR)
3388 target = TREE_OPERAND (target, 0);
3389 if (TREE_CODE (target) != FUNCTION_DECL)
3390 {
3391 target = canonicalize_constructor_val (target, NULL);
3392 if (!target || TREE_CODE (target) != FUNCTION_DECL)
3393 {
3394 /* Member pointer call that goes through a VMT lookup. */
3395 if (ie->indirect_info->member_ptr
3396 /* Or if target is not an invariant expression and we do not
3397 know if it will evaluate to a function at runtime.
3398 This can happen when folding through &VAR, where &VAR
3399 is IP invariant, but VAR itself is not.
3400
3401 TODO: Revisit this when GCC 5 is branched. It seems that
3402 member_ptr check is not needed and that we may try to fold
3403 the expression and see if VAR is readonly. */
3404 || !is_gimple_ip_invariant (target))
3405 {
3406 if (dump_enabled_p ())
3407 {
3408 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3409 "discovered direct call non-invariant %s\n",
3410 ie->caller->dump_name ());
3411 }
3412 return NULL;
3413 }
3414
3415
3416 if (dump_enabled_p ())
3417 {
3418 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3419 "discovered direct call to non-function in %s, "
3420 "making it __builtin_unreachable\n",
3421 ie->caller->dump_name ());
3422 }
3423
3424 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3425 callee = cgraph_node::get_create (target);
3426 unreachable = true;
3427 }
3428 else
3429 callee = cgraph_node::get (target);
3430 }
3431 else
3432 callee = cgraph_node::get (target);
3433
3434 /* Because may-edges are not explicitly represented and the vtable may be
3435 external, we may create the first reference to the object in the unit. */
3436 if (!callee || callee->inlined_to)
3437 {
3438
3439 /* We had better ensure we can refer to it.
3440 In the case of static functions we are out of luck, since we have already
3441 removed its body. In the case of public functions we may or may
3442 not introduce the reference. */
3443 if (!canonicalize_constructor_val (target, NULL)
3444 || !TREE_PUBLIC (target))
3445 {
3446 if (dump_file)
3447 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
3448 "(%s -> %s) but cannot refer to it. Giving up.\n",
3449 ie->caller->dump_name (),
3450 ie->callee->dump_name ());
3451 return NULL;
3452 }
3453 callee = cgraph_node::get_create (target);
3454 }
3455
3456 /* If the edge is already speculative, do not change it; just report what happened. */
3457 if (speculative && ie->speculative)
3458 {
3459 if (dump_file)
3460 {
3461 cgraph_edge *e2 = ie->speculative_call_for_target (callee);
3462 if (!e2)
3463 {
3464 if (dump_file)
3465 fprintf (dump_file, "ipa-prop: Discovered call to a "
3466 "speculative target (%s -> %s) but the call is "
3467 "already speculated to different target. "
3468 "Giving up.\n",
3469 ie->caller->dump_name (), callee->dump_name ());
3470 }
3471 else
3472 {
3473 if (dump_file)
3474 fprintf (dump_file,
3475 "ipa-prop: Discovered call to a speculative target "
3476 "(%s -> %s) this agree with previous speculation.\n",
3477 ie->caller->dump_name (), callee->dump_name ());
3478 }
3479 }
3480 return NULL;
3481 }
3482
3483 if (!dbg_cnt (devirt))
3484 return NULL;
3485
3486 ipa_check_create_node_params ();
3487
3488 /* We cannot make edges to inline clones. It is a bug that someone removed
3489 the cgraph node too early. */
3490 gcc_assert (!callee->inlined_to);
3491
3492 if (dump_file && !unreachable)
3493 {
3494 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
3495 "(%s -> %s), for stmt ",
3496 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
3497 speculative ? "speculative" : "known",
3498 ie->caller->dump_name (),
3499 callee->dump_name ());
3500 if (ie->call_stmt)
3501 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
3502 else
3503 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
3504 }
3505 if (dump_enabled_p ())
3506 {
3507 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3508 "converting indirect call in %s to direct call to %s\n",
3509 ie->caller->dump_name (), callee->dump_name ());
3510 }
3511 if (!speculative)
3512 {
3513 struct cgraph_edge *orig = ie;
3514 ie = cgraph_edge::make_direct (ie, callee);
3515 /* If we resolved a speculative edge, the cost is already up to date
3516 for the direct call (adjusted by inline_edge_duplication_hook). */
3517 if (ie == orig)
3518 {
3519 ipa_call_summary *es = ipa_call_summaries->get (ie);
3520 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
3521 - eni_size_weights.call_cost);
3522 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
3523 - eni_time_weights.call_cost);
3524 }
3525 }
3526 else
3527 {
3528 if (!callee->can_be_discarded_p ())
3529 {
3530 cgraph_node *alias;
3531 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
3532 if (alias)
3533 callee = alias;
3534 }
3535 /* make_speculative will update ie's cost to direct call cost. */
3536 ie = ie->make_speculative
3537 (callee, ie->count.apply_scale (8, 10));
3538 }
3539
3540 return ie;
3541 }
3542
3543 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3544 CONSTRUCTOR and return it. Return NULL if the search fails for some
3545 reason. */
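/* A worked example of the offset arithmetic below: for an ARRAY_TYPE of
   32-bit integers, element 3 lies at 3 * 4 bytes, i.e. bit offset 96, so a
   REQ_OFFSET of 96 selects that element; for a RECORD_TYPE, the bit position
   comes directly from int_bit_position of the FIELD_DECL.  */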
3546
3547 static tree
3548 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3549 {
3550 tree type = TREE_TYPE (constructor);
3551 if (TREE_CODE (type) != ARRAY_TYPE
3552 && TREE_CODE (type) != RECORD_TYPE)
3553 return NULL;
3554
3555 unsigned ix;
3556 tree index, val;
3557 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3558 {
3559 HOST_WIDE_INT elt_offset;
3560 if (TREE_CODE (type) == ARRAY_TYPE)
3561 {
3562 offset_int off;
3563 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3564 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3565
3566 if (index)
3567 {
3568 if (TREE_CODE (index) == RANGE_EXPR)
3569 off = wi::to_offset (TREE_OPERAND (index, 0));
3570 else
3571 off = wi::to_offset (index);
3572 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3573 {
3574 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3575 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3576 off = wi::sext (off - wi::to_offset (low_bound),
3577 TYPE_PRECISION (TREE_TYPE (index)));
3578 }
3579 off *= wi::to_offset (unit_size);
3580 /* ??? Handle more than just the first index of a
3581 RANGE_EXPR. */
3582 }
3583 else
3584 off = wi::to_offset (unit_size) * ix;
3585
3586 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3587 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3588 continue;
3589 elt_offset = off.to_shwi ();
3590 }
3591 else if (TREE_CODE (type) == RECORD_TYPE)
3592 {
3593 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3594 if (DECL_BIT_FIELD (index))
3595 continue;
3596 elt_offset = int_bit_position (index);
3597 }
3598 else
3599 gcc_unreachable ();
3600
3601 if (elt_offset > req_offset)
3602 return NULL;
3603
3604 if (TREE_CODE (val) == CONSTRUCTOR)
3605 return find_constructor_constant_at_offset (val,
3606 req_offset - elt_offset);
3607
3608 if (elt_offset == req_offset
3609 && is_gimple_reg_type (TREE_TYPE (val))
3610 && is_gimple_ip_invariant (val))
3611 return val;
3612 }
3613 return NULL;
3614 }
3615
3616 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3617 invariant from a static constructor and if so, return it. Otherwise return
3618 NULL. */
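/* For illustration (hypothetical declarations): given

     static const struct ops { void (*cb) (void); } my_ops = { my_callback };

   and a by-reference argument known to be &my_ops, looking up the bit offset
   of the cb field in DECL_INITIAL of my_ops yields the address of
   my_callback.  */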
3619
3620 static tree
3621 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3622 {
3623 if (by_ref)
3624 {
3625 if (TREE_CODE (scalar) != ADDR_EXPR)
3626 return NULL;
3627 scalar = TREE_OPERAND (scalar, 0);
3628 }
3629
3630 if (!VAR_P (scalar)
3631 || !is_global_var (scalar)
3632 || !TREE_READONLY (scalar)
3633 || !DECL_INITIAL (scalar)
3634 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3635 return NULL;
3636
3637 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3638 }
3639
3640 /* Retrieve a value from AGG, a set of known offset/value pairs for an aggregate,
3641 or from the static initializer of SCALAR (which can be NULL) for the given OFFSET, or
3642 return NULL if there is none. BY_REF specifies whether the value has to be
3643 passed by reference or by value. If FROM_GLOBAL_CONSTANT is non-NULL, then
3644 the boolean it points to is set to true if the value comes from an
3645 initializer of a constant. */
3646
3647 tree
3648 ipa_find_agg_cst_for_param (const ipa_agg_value_set *agg, tree scalar,
3649 HOST_WIDE_INT offset, bool by_ref,
3650 bool *from_global_constant)
3651 {
3652 struct ipa_agg_value *item;
3653 int i;
3654
3655 if (scalar)
3656 {
3657 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3658 if (res)
3659 {
3660 if (from_global_constant)
3661 *from_global_constant = true;
3662 return res;
3663 }
3664 }
3665
3666 if (!agg
3667 || by_ref != agg->by_ref)
3668 return NULL;
3669
3670 FOR_EACH_VEC_ELT (agg->items, i, item)
3671 if (item->offset == offset)
3672 {
3673 /* Currently we do not have clobber values, return NULL for them once
3674 we do. */
3675 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3676 if (from_global_constant)
3677 *from_global_constant = false;
3678 return item->value;
3679 }
3680 return NULL;
3681 }
3682
3683 /* Remove a reference to SYMBOL from the list of references of a node given by
3684 reference description RDESC. Return true if the reference has been
3685 successfully found and removed. */
3686
3687 static bool
3688 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3689 {
3690 struct ipa_ref *to_del;
3691 struct cgraph_edge *origin;
3692
3693 origin = rdesc->cs;
3694 if (!origin)
3695 return false;
3696 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3697 origin->lto_stmt_uid, IPA_REF_ADDR);
3698 if (!to_del)
3699 return false;
3700
3701 to_del->remove_reference ();
3702 if (dump_file)
3703 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3704 origin->caller->dump_name (), symbol->dump_name ());
3705 return true;
3706 }
3707
3708 /* If JFUNC has a reference description with refcount different from
3709 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3710 NULL. JFUNC must be a constant jump function. */
3711
3712 static struct ipa_cst_ref_desc *
3713 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3714 {
3715 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3716 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3717 return rdesc;
3718 else
3719 return NULL;
3720 }
3721
3722 /* If the value of constant jump function JFUNC is an address of a function
3723 declaration, return the associated call graph node. Otherwise return
3724 NULL. */
3725
3726 static symtab_node *
3727 symtab_node_for_jfunc (struct ipa_jump_func *jfunc)
3728 {
3729 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3730 tree cst = ipa_get_jf_constant (jfunc);
3731 if (TREE_CODE (cst) != ADDR_EXPR
3732 || (TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL
3733 && TREE_CODE (TREE_OPERAND (cst, 0)) != VAR_DECL))
3734 return NULL;
3735
3736 return symtab_node::get (TREE_OPERAND (cst, 0));
3737 }
3738
3739
3740 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3741 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3742 the edge specified in the rdesc. Return false if either the symbol or the
3743 reference could not be found, otherwise return true. */
3744
3745 static bool
3746 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3747 {
3748 struct ipa_cst_ref_desc *rdesc;
3749 if (jfunc->type == IPA_JF_CONST
3750 && (rdesc = jfunc_rdesc_usable (jfunc))
3751 && --rdesc->refcount == 0)
3752 {
3753 symtab_node *symbol = symtab_node_for_jfunc (jfunc);
3754 if (!symbol)
3755 return false;
3756
3757 return remove_described_reference (symbol, rdesc);
3758 }
3759 return true;
3760 }
3761
3762 /* Try to find a destination for indirect edge IE that corresponds to a simple
3763 call or a call of a member function pointer and where the destination is a
3764 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3765 the type of the parameter to which the result of JFUNC is passed. If it can
3766 be determined, return the newly direct edge, otherwise return NULL.
3767 NEW_ROOT and NEW_ROOT_INFO are the node and its info that JFUNC lattices are
3768 relative to. */
3769
3770 static struct cgraph_edge *
3771 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3772 struct ipa_jump_func *jfunc, tree target_type,
3773 struct cgraph_node *new_root,
3774 class ipa_node_params *new_root_info)
3775 {
3776 struct cgraph_edge *cs;
3777 tree target;
3778 bool agg_contents = ie->indirect_info->agg_contents;
3779 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3780 if (agg_contents)
3781 {
3782 bool from_global_constant;
3783 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3784 new_root,
3785 &jfunc->agg);
3786 target = ipa_find_agg_cst_for_param (&agg, scalar,
3787 ie->indirect_info->offset,
3788 ie->indirect_info->by_ref,
3789 &from_global_constant);
3790 agg.release ();
3791 if (target
3792 && !from_global_constant
3793 && !ie->indirect_info->guaranteed_unmodified)
3794 return NULL;
3795 }
3796 else
3797 target = scalar;
3798 if (!target)
3799 return NULL;
3800 cs = ipa_make_edge_direct_to_target (ie, target);
3801
3802 if (cs && !agg_contents)
3803 {
3804 bool ok;
3805 gcc_checking_assert (cs->callee
3806 && (cs != ie
3807 || jfunc->type != IPA_JF_CONST
3808 || !symtab_node_for_jfunc (jfunc)
3809 || cs->callee == symtab_node_for_jfunc (jfunc)));
3810 ok = try_decrement_rdesc_refcount (jfunc);
3811 gcc_checking_assert (ok);
3812 }
3813
3814 return cs;
3815 }
3816
3817 /* Return the target to be used in cases of impossible devirtualization. IE
3818 and target (the latter can be NULL) are dumped when dumping is enabled. */
3819
3820 tree
3821 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3822 {
3823 if (dump_file)
3824 {
3825 if (target)
3826 fprintf (dump_file,
3827 "Type inconsistent devirtualization: %s->%s\n",
3828 ie->caller->dump_name (),
3829 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3830 else
3831 fprintf (dump_file,
3832 "No devirtualization target in %s\n",
3833 ie->caller->dump_name ());
3834 }
3835 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3836 cgraph_node::get_create (new_target);
3837 return new_target;
3838 }
3839
3840 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3841 call based on a formal parameter which is described by jump function JFUNC
3842 and if it can be determined, make it direct and return the direct edge.
3843 Otherwise, return NULL. CTX describes the polymorphic context that the
3844 parameter the call is based on brings along with it. NEW_ROOT and
3845 NEW_ROOT_INFO are the node and its info that JFUNC lattices are relative
3846 to. */
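/* For illustration: for a C++ call such as ptr->foo () where the propagated
   polymorphic context proves the dynamic type of *ptr, the call can be
   redirected to that type's implementation of foo, either unconditionally or
   only speculatively when the vptr may have changed.  */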
3847
3848 static struct cgraph_edge *
3849 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3850 struct ipa_jump_func *jfunc,
3851 class ipa_polymorphic_call_context ctx,
3852 struct cgraph_node *new_root,
3853 class ipa_node_params *new_root_info)
3854 {
3855 tree target = NULL;
3856 bool speculative = false;
3857
3858 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3859 return NULL;
3860
3861 gcc_assert (!ie->indirect_info->by_ref);
3862
3863 /* Try to do lookup via known virtual table pointer value. */
3864 if (!ie->indirect_info->vptr_changed
3865 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3866 {
3867 tree vtable;
3868 unsigned HOST_WIDE_INT offset;
3869 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3870 : NULL;
3871 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3872 new_root,
3873 &jfunc->agg);
3874 tree t = ipa_find_agg_cst_for_param (&agg, scalar,
3875 ie->indirect_info->offset,
3876 true);
3877 agg.release ();
3878 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3879 {
3880 bool can_refer;
3881 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3882 vtable, offset, &can_refer);
3883 if (can_refer)
3884 {
3885 if (!t
3886 || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
3887 || !possible_polymorphic_call_target_p
3888 (ie, cgraph_node::get (t)))
3889 {
3890 /* Do not speculate builtin_unreachable, it is stupid! */
3891 if (!ie->indirect_info->vptr_changed)
3892 target = ipa_impossible_devirt_target (ie, target);
3893 else
3894 target = NULL;
3895 }
3896 else
3897 {
3898 target = t;
3899 speculative = ie->indirect_info->vptr_changed;
3900 }
3901 }
3902 }
3903 }
3904
3905 ipa_polymorphic_call_context ie_context (ie);
3906 vec <cgraph_node *>targets;
3907 bool final;
3908
3909 ctx.offset_by (ie->indirect_info->offset);
3910 if (ie->indirect_info->vptr_changed)
3911 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3912 ie->indirect_info->otr_type);
3913 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3914 targets = possible_polymorphic_call_targets
3915 (ie->indirect_info->otr_type,
3916 ie->indirect_info->otr_token,
3917 ctx, &final);
3918 if (final && targets.length () <= 1)
3919 {
3920 speculative = false;
3921 if (targets.length () == 1)
3922 target = targets[0]->decl;
3923 else
3924 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3925 }
3926 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3927 && !ie->speculative && ie->maybe_hot_p ())
3928 {
3929 cgraph_node *n;
3930 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3931 ie->indirect_info->otr_token,
3932 ie->indirect_info->context);
3933 if (n)
3934 {
3935 target = n->decl;
3936 speculative = true;
3937 }
3938 }
3939
3940 if (target)
3941 {
3942 if (!possible_polymorphic_call_target_p
3943 (ie, cgraph_node::get_create (target)))
3944 {
3945 if (speculative)
3946 return NULL;
3947 target = ipa_impossible_devirt_target (ie, target);
3948 }
3949 return ipa_make_edge_direct_to_target (ie, target, speculative);
3950 }
3951 else
3952 return NULL;
3953 }
3954
3955 /* Update the param called notes associated with NODE when CS is being inlined,
3956 assuming NODE is (potentially indirectly) inlined into CS->callee.
3957 Moreover, if the callee is discovered to be constant, create a new cgraph
3958 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3959 unless NEW_EDGES is NULL. Return true iff new edges were created. */
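/* A sketch of the situation this handles (hypothetical code): if

     static void dispatch (void (*fp) (void)) { fp (); }

   has been inlined into a caller that passes &foo, the jump function at the
   inlined call site lets the indirect edge for fp () be redirected to a
   direct edge to foo.  */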
3960
3961 static bool
3962 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3963 struct cgraph_node *node,
3964 vec<cgraph_edge *> *new_edges)
3965 {
3966 class ipa_edge_args *top;
3967 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3968 struct cgraph_node *new_root;
3969 class ipa_node_params *new_root_info, *inlined_node_info;
3970 bool res = false;
3971
3972 ipa_check_create_edge_args ();
3973 top = ipa_edge_args_sum->get (cs);
3974 new_root = cs->caller->inlined_to
3975 ? cs->caller->inlined_to : cs->caller;
3976 new_root_info = ipa_node_params_sum->get (new_root);
3977 inlined_node_info = ipa_node_params_sum->get (cs->callee->function_symbol ());
3978
3979 for (ie = node->indirect_calls; ie; ie = next_ie)
3980 {
3981 class cgraph_indirect_call_info *ici = ie->indirect_info;
3982 struct ipa_jump_func *jfunc;
3983 int param_index;
3984
3985 next_ie = ie->next_callee;
3986
3987 if (ici->param_index == -1)
3988 continue;
3989
3990 /* We must check range due to calls with variable number of arguments: */
3991 if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
3992 {
3993 ici->param_index = -1;
3994 continue;
3995 }
3996
3997 param_index = ici->param_index;
3998 jfunc = ipa_get_ith_jump_func (top, param_index);
3999
4000 auto_vec<cgraph_node *, 4> spec_targets;
4001 if (ie->speculative)
4002 for (cgraph_edge *direct = ie->first_speculative_call_target ();
4003 direct;
4004 direct = direct->next_speculative_call_target ())
4005 spec_targets.safe_push (direct->callee);
4006
4007 if (!opt_for_fn (node->decl, flag_indirect_inlining))
4008 new_direct_edge = NULL;
4009 else if (ici->polymorphic)
4010 {
4011 ipa_polymorphic_call_context ctx;
4012 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
4013 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
4014 new_root,
4015 new_root_info);
4016 }
4017 else
4018 {
4019 tree target_type = ipa_get_type (inlined_node_info, param_index);
4020 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
4021 target_type,
4022 new_root,
4023 new_root_info);
4024 }
4025
4026 /* If speculation was removed, then we need to do nothing. */
4027 if (new_direct_edge && new_direct_edge != ie
4028 && spec_targets.contains (new_direct_edge->callee))
4029 {
4030 new_direct_edge->indirect_inlining_edge = 1;
4031 res = true;
4032 if (!new_direct_edge->speculative)
4033 continue;
4034 }
4035 else if (new_direct_edge)
4036 {
4037 new_direct_edge->indirect_inlining_edge = 1;
4038 if (new_edges)
4039 {
4040 new_edges->safe_push (new_direct_edge);
4041 res = true;
4042 }
4043 /* If a speculative edge was introduced, we still need to update
4044 the call info of the indirect edge. */
4045 if (!new_direct_edge->speculative)
4046 continue;
4047 }
4048 if (jfunc->type == IPA_JF_PASS_THROUGH
4049 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
4050 {
4051 if (ici->agg_contents
4052 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
4053 && !ici->polymorphic)
4054 ici->param_index = -1;
4055 else
4056 {
4057 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
4058 if (ici->polymorphic
4059 && !ipa_get_jf_pass_through_type_preserved (jfunc))
4060 ici->vptr_changed = true;
4061 ipa_set_param_used_by_indirect_call (new_root_info,
4062 ici->param_index, true);
4063 if (ici->polymorphic)
4064 ipa_set_param_used_by_polymorphic_call (new_root_info,
4065 ici->param_index, true);
4066 }
4067 }
4068 else if (jfunc->type == IPA_JF_ANCESTOR)
4069 {
4070 if (ici->agg_contents
4071 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
4072 && !ici->polymorphic)
4073 ici->param_index = -1;
4074 else
4075 {
4076 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
4077 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
4078 if (ici->polymorphic
4079 && !ipa_get_jf_ancestor_type_preserved (jfunc))
4080 ici->vptr_changed = true;
4081 ipa_set_param_used_by_indirect_call (new_root_info,
4082 ici->param_index, true);
4083 if (ici->polymorphic)
4084 ipa_set_param_used_by_polymorphic_call (new_root_info,
4085 ici->param_index, true);
4086 }
4087 }
4088 else
4089 /* Either we can find a destination for this edge now or never. */
4090 ici->param_index = -1;
4091 }
4092
4093 return res;
4094 }
4095
4096 /* Recursively traverse subtree of NODE (including node) made of inlined
4097 cgraph_edges when CS has been inlined and invoke
4098 update_indirect_edges_after_inlining on all nodes and
4099 update_jump_functions_after_inlining on all non-inlined edges that lead out
4100 of this subtree. Newly discovered indirect edges will be added to
4101 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
4102 created. */
4103
4104 static bool
4105 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
4106 struct cgraph_node *node,
4107 vec<cgraph_edge *> *new_edges)
4108 {
4109 struct cgraph_edge *e;
4110 bool res;
4111
4112 res = update_indirect_edges_after_inlining (cs, node, new_edges);
4113
4114 for (e = node->callees; e; e = e->next_callee)
4115 if (!e->inline_failed)
4116 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
4117 else
4118 update_jump_functions_after_inlining (cs, e);
4119 for (e = node->indirect_calls; e; e = e->next_callee)
4120 update_jump_functions_after_inlining (cs, e);
4121
4122 return res;
4123 }
4124
4125 /* Combine two controlled uses counts as done during inlining. */
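/* For example, if the relevant parameter of the new root has 3 described
   uses and the corresponding parameter of the inlined callee has 2, the
   combined count is 3 + 2 - 1 == 4, because the use constituted by the
   now-inlined call argument itself goes away.  */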
4126
4127 static int
4128 combine_controlled_uses_counters (int c, int d)
4129 {
4130 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
4131 return IPA_UNDESCRIBED_USE;
4132 else
4133 return c + d - 1;
4134 }
4135
4136 /* Propagate the number of controlled uses from CS->callee to the new root of the
4137 tree of inlined nodes. */
4138
4139 static void
4140 propagate_controlled_uses (struct cgraph_edge *cs)
4141 {
4142 ipa_edge_args *args = ipa_edge_args_sum->get (cs);
4143 if (!args)
4144 return;
4145 struct cgraph_node *new_root = cs->caller->inlined_to
4146 ? cs->caller->inlined_to : cs->caller;
4147 ipa_node_params *new_root_info = ipa_node_params_sum->get (new_root);
4148 ipa_node_params *old_root_info = ipa_node_params_sum->get (cs->callee);
4149 int count, i;
4150
4151 if (!old_root_info)
4152 return;
4153
4154 count = MIN (ipa_get_cs_argument_count (args),
4155 ipa_get_param_count (old_root_info));
4156 for (i = 0; i < count; i++)
4157 {
4158 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4159 struct ipa_cst_ref_desc *rdesc;
4160
4161 if (jf->type == IPA_JF_PASS_THROUGH
4162 && !ipa_get_jf_pass_through_refdesc_decremented (jf))
4163 {
4164 int src_idx, c, d;
4165 src_idx = ipa_get_jf_pass_through_formal_id (jf);
4166 c = ipa_get_controlled_uses (new_root_info, src_idx);
4167 d = ipa_get_controlled_uses (old_root_info, i);
4168
4169 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
4170 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
4171 c = combine_controlled_uses_counters (c, d);
4172 ipa_set_controlled_uses (new_root_info, src_idx, c);
4173 bool lderef = true;
4174 if (c != IPA_UNDESCRIBED_USE)
4175 {
4176 lderef = (ipa_get_param_load_dereferenced (new_root_info, src_idx)
4177 || ipa_get_param_load_dereferenced (old_root_info, i));
4178 ipa_set_param_load_dereferenced (new_root_info, src_idx, lderef);
4179 }
4180
4181 if (c == 0 && !lderef && new_root_info->ipcp_orig_node)
4182 {
4183 struct cgraph_node *n;
4184 struct ipa_ref *ref;
4185 tree t = new_root_info->known_csts[src_idx];
4186
4187 if (t && TREE_CODE (t) == ADDR_EXPR
4188 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
4189 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
4190 && (ref = new_root->find_reference (n, NULL, 0,
4191 IPA_REF_ADDR)))
4192 {
4193 if (dump_file)
4194 fprintf (dump_file, "ipa-prop: Removing cloning-created "
4195 "reference from %s to %s.\n",
4196 new_root->dump_name (),
4197 n->dump_name ());
4198 ref->remove_reference ();
4199 }
4200 }
4201 }
4202 else if (jf->type == IPA_JF_CONST
4203 && (rdesc = jfunc_rdesc_usable (jf)))
4204 {
4205 int d = ipa_get_controlled_uses (old_root_info, i);
4206 int c = rdesc->refcount;
4207 tree cst = ipa_get_jf_constant (jf);
4208 rdesc->refcount = combine_controlled_uses_counters (c, d);
4209 if (rdesc->refcount != IPA_UNDESCRIBED_USE
4210 && ipa_get_param_load_dereferenced (old_root_info, i)
4211 && TREE_CODE (cst) == ADDR_EXPR
4212 && TREE_CODE (TREE_OPERAND (cst, 0)) == VAR_DECL)
4213 {
4214 symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
4215 new_root->create_reference (n, IPA_REF_LOAD, NULL);
4216 if (dump_file)
4217 fprintf (dump_file, "ipa-prop: Address IPA constant will reach "
4218 "a load so adding LOAD reference from %s to %s.\n",
4219 new_root->dump_name (), n->dump_name ());
4220 }
4221 if (rdesc->refcount == 0)
4222 {
4223 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
4224 && ((TREE_CODE (TREE_OPERAND (cst, 0))
4225 == FUNCTION_DECL)
4226 || (TREE_CODE (TREE_OPERAND (cst, 0))
4227 == VAR_DECL)));
4228
4229 symtab_node *n = symtab_node::get (TREE_OPERAND (cst, 0));
4230 if (n)
4231 {
4232 remove_described_reference (n, rdesc);
4233 cgraph_node *clone = cs->caller;
4234 while (clone->inlined_to
4235 && clone->ipcp_clone
4236 && clone != rdesc->cs->caller)
4237 {
4238 struct ipa_ref *ref;
4239 ref = clone->find_reference (n, NULL, 0, IPA_REF_ADDR);
4240 if (ref)
4241 {
4242 if (dump_file)
4243 fprintf (dump_file, "ipa-prop: Removing "
4244 "cloning-created reference "
4245 "from %s to %s.\n",
4246 clone->dump_name (),
4247 n->dump_name ());
4248 ref->remove_reference ();
4249 }
4250 clone = clone->callers->caller;
4251 }
4252 }
4253 }
4254 }
4255 }
4256
4257 for (i = ipa_get_param_count (old_root_info);
4258 i < ipa_get_cs_argument_count (args);
4259 i++)
4260 {
4261 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4262
4263 if (jf->type == IPA_JF_CONST)
4264 {
4265 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
4266 if (rdesc)
4267 rdesc->refcount = IPA_UNDESCRIBED_USE;
4268 }
4269 else if (jf->type == IPA_JF_PASS_THROUGH)
4270 ipa_set_controlled_uses (new_root_info,
4271 jf->value.pass_through.formal_id,
4272 IPA_UNDESCRIBED_USE);
4273 }
4274 }
4275
4276 /* Update jump functions and call note functions on inlining the call site CS.
4277 CS is expected to lead to a node already cloned by
4278 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
4279 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff new edges were
4280 created. */
4281
4282 bool
4283 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
4284 vec<cgraph_edge *> *new_edges)
4285 {
4286 bool changed;
4287 /* Do nothing if the preparation phase has not been carried out yet
4288 (i.e. during early inlining). */
4289 if (!ipa_node_params_sum)
4290 return false;
4291 gcc_assert (ipa_edge_args_sum);
4292
4293 propagate_controlled_uses (cs);
4294 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
4295 ipa_node_params_sum->remove (cs->callee);
4296
4297 ipa_edge_args *args = ipa_edge_args_sum->get (cs);
4298 if (args)
4299 {
4300 bool ok = true;
4301 if (args->jump_functions)
4302 {
4303 struct ipa_jump_func *jf;
4304 int i;
4305 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4306 if (jf->type == IPA_JF_CONST
4307 && ipa_get_jf_constant_rdesc (jf))
4308 {
4309 ok = false;
4310 break;
4311 }
4312 }
4313 if (ok)
4314 ipa_edge_args_sum->remove (cs);
4315 }
4316 if (ipcp_transformation_sum)
4317 ipcp_transformation_sum->remove (cs->callee);
4318
4319 return changed;
4320 }
4321
4322 /* Ensure that the array of edge argument infos is big enough to accommodate a
4323 structure for all edges and reallocate it if not. Also, allocate the
4324 associated hash tables if they do not already exist. */
4325
4326 void
4327 ipa_check_create_edge_args (void)
4328 {
4329 if (!ipa_edge_args_sum)
4330 ipa_edge_args_sum
4331 = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
4332 ipa_edge_args_sum_t (symtab, true));
4333 if (!ipa_bits_hash_table)
4334 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4335 if (!ipa_vr_hash_table)
4336 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4337 }
4338
4339 /* Free all ipa_edge structures. */
4340
4341 void
4342 ipa_free_all_edge_args (void)
4343 {
4344 if (!ipa_edge_args_sum)
4345 return;
4346
4347 ggc_delete (ipa_edge_args_sum);
4348 ipa_edge_args_sum = NULL;
4349 }
4350
4351 /* Free all ipa_node_params structures. */
4352
4353 void
4354 ipa_free_all_node_params (void)
4355 {
4356 if (ipa_node_params_sum)
4357 ggc_delete (ipa_node_params_sum);
4358 ipa_node_params_sum = NULL;
4359 }
4360
4361 /* Initialize IPA CP transformation summary and also allocate any necessary hash
4362 tables if they do not already exist. */
4363
4364 void
4365 ipcp_transformation_initialize (void)
4366 {
4367 if (!ipa_bits_hash_table)
4368 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4369 if (!ipa_vr_hash_table)
4370 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4371 if (ipcp_transformation_sum == NULL)
4372 {
4373 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
4374 ipcp_transformation_sum->disable_insertion_hook ();
4375 }
4376 }
4377
4378 /* Release the IPA CP transformation summary. */
4379
4380 void
4381 ipcp_free_transformation_sum (void)
4382 {
4383 if (!ipcp_transformation_sum)
4384 return;
4385
4386 ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
4387 ggc_free (ipcp_transformation_sum);
4388 ipcp_transformation_sum = NULL;
4389 }
4390
4391 /* Set the aggregate replacements of NODE to be AGGVALS. */
4392
4393 void
4394 ipa_set_node_agg_value_chain (struct cgraph_node *node,
4395 struct ipa_agg_replacement_value *aggvals)
4396 {
4397 ipcp_transformation_initialize ();
4398 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
4399 s->agg_values = aggvals;
4400 }
4401
4402 /* Hook that is called by cgraph.cc when an edge is removed. Adjust reference
4403 count data structures accordingly. */
4404
4405 void
4406 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
4407 {
4408 if (args->jump_functions)
4409 {
4410 struct ipa_jump_func *jf;
4411 int i;
4412 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4413 {
4414 struct ipa_cst_ref_desc *rdesc;
4415 try_decrement_rdesc_refcount (jf);
4416 if (jf->type == IPA_JF_CONST
4417 && (rdesc = ipa_get_jf_constant_rdesc (jf))
4418 && rdesc->cs == cs)
4419 rdesc->cs = NULL;
4420 }
4421 }
4422 }
4423
4424 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
4425 reference count data structures accordingly. */
4426
4427 void
4428 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
4429 ipa_edge_args *old_args, ipa_edge_args *new_args)
4430 {
4431 unsigned int i;
4432
4433 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
4434 if (old_args->polymorphic_call_contexts)
4435 new_args->polymorphic_call_contexts
4436 = vec_safe_copy (old_args->polymorphic_call_contexts);
4437
4438 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4439 {
4440 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
4441 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
4442
4443 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
4444
4445 if (src_jf->type == IPA_JF_CONST)
4446 {
4447 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
4448
4449 if (!src_rdesc)
4450 dst_jf->value.constant.rdesc = NULL;
4451 else if (src->caller == dst->caller)
4452 {
4453 /* Creation of a speculative edge. If the source edge is the one
4454 grabbing a reference, we must create a new (duplicate)
4455 reference description. Otherwise they refer to the same
4456 description corresponding to a reference taken in a function
4457 src->caller is inlined to. In that case we just need to
4458 increment the refcount. */
4459 if (src_rdesc->cs == src)
4460 {
4461 symtab_node *n = symtab_node_for_jfunc (src_jf);
4462 gcc_checking_assert (n);
4463 ipa_ref *ref
4464 = src->caller->find_reference (n, src->call_stmt,
4465 src->lto_stmt_uid,
4466 IPA_REF_ADDR);
4467 gcc_checking_assert (ref);
4468 dst->caller->clone_reference (ref, ref->stmt);
4469
4470 ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4471 dst_rdesc->cs = dst;
4472 dst_rdesc->refcount = src_rdesc->refcount;
4473 dst_rdesc->next_duplicate = NULL;
4474 dst_jf->value.constant.rdesc = dst_rdesc;
4475 }
4476 else
4477 {
4478 src_rdesc->refcount++;
4479 dst_jf->value.constant.rdesc = src_rdesc;
4480 }
4481 }
4482 else if (src_rdesc->cs == src)
4483 {
4484 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4485 dst_rdesc->cs = dst;
4486 dst_rdesc->refcount = src_rdesc->refcount;
4487 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
4488 src_rdesc->next_duplicate = dst_rdesc;
4489 dst_jf->value.constant.rdesc = dst_rdesc;
4490 }
4491 else
4492 {
4493 struct ipa_cst_ref_desc *dst_rdesc;
4494 /* This can happen during inlining, when a JFUNC can refer to a
4495 reference taken in a function up in the tree of inline clones.
4496 We need to find the duplicate that refers to our tree of
4497 inline clones. */
4498
4499 gcc_assert (dst->caller->inlined_to);
4500 for (dst_rdesc = src_rdesc->next_duplicate;
4501 dst_rdesc;
4502 dst_rdesc = dst_rdesc->next_duplicate)
4503 {
4504 struct cgraph_node *top;
4505 top = dst_rdesc->cs->caller->inlined_to
4506 ? dst_rdesc->cs->caller->inlined_to
4507 : dst_rdesc->cs->caller;
4508 if (dst->caller->inlined_to == top)
4509 break;
4510 }
4511 gcc_assert (dst_rdesc);
4512 dst_jf->value.constant.rdesc = dst_rdesc;
4513 }
4514 }
4515 else if (dst_jf->type == IPA_JF_PASS_THROUGH
4516 && src->caller == dst->caller)
4517 {
4518 struct cgraph_node *inline_root = dst->caller->inlined_to
4519 ? dst->caller->inlined_to : dst->caller;
4520 ipa_node_params *root_info = ipa_node_params_sum->get (inline_root);
4521 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
4522
4523 int c = ipa_get_controlled_uses (root_info, idx);
4524 if (c != IPA_UNDESCRIBED_USE)
4525 {
4526 c++;
4527 ipa_set_controlled_uses (root_info, idx, c);
4528 }
4529 }
4530 }
4531 }
4532
4533 /* Analyze a function newly added to the callgraph. */
4534
4535 static void
4536 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
4537 {
4538 if (node->has_gimple_body_p ())
4539 ipa_analyze_node (node);
4540 }
4541
4542 /* Hook that is called by the summary infrastructure when a node is duplicated. */
4543
4544 void
4545 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
4546 ipa_node_params *old_info,
4547 ipa_node_params *new_info)
4548 {
4549 ipa_agg_replacement_value *old_av, *new_av;
4550
4551 new_info->descriptors = vec_safe_copy (old_info->descriptors);
4552 new_info->lattices = NULL;
4553 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
4554 new_info->known_csts = old_info->known_csts.copy ();
4555 new_info->known_contexts = old_info->known_contexts.copy ();
4556
4557 new_info->analysis_done = old_info->analysis_done;
4558 new_info->node_enqueued = old_info->node_enqueued;
4559 new_info->versionable = old_info->versionable;
4560
4561 old_av = ipa_get_agg_replacements_for_node (src);
4562 if (old_av)
4563 {
4564 new_av = NULL;
4565 while (old_av)
4566 {
4567 struct ipa_agg_replacement_value *v;
4568
4569 v = ggc_alloc<ipa_agg_replacement_value> ();
4570 memcpy (v, old_av, sizeof (*v));
4571 v->next = new_av;
4572 new_av = v;
4573 old_av = old_av->next;
4574 }
4575 ipa_set_node_agg_value_chain (dst, new_av);
4576 }
4577 }
4578
4579 /* Duplication of ipcp transformation summaries. */
4580
4581 void
4582 ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
4583 ipcp_transformation *src_trans,
4584 ipcp_transformation *dst_trans)
4585 {
4586 /* Avoid redundant work of duplicating vectors we will never use. */
4587 if (dst->inlined_to)
4588 return;
4589 dst_trans->bits = vec_safe_copy (src_trans->bits);
4590 dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
4591 ipa_agg_replacement_value *agg = src_trans->agg_values,
4592 **aggptr = &dst_trans->agg_values;
4593 while (agg)
4594 {
4595 *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
4596 **aggptr = *agg;
4597 agg = agg->next;
4598 aggptr = &(*aggptr)->next;
4599 }
4600 }
4601
4602 /* Register our cgraph hooks if they are not already there. */
4603
4604 void
4605 ipa_register_cgraph_hooks (void)
4606 {
4607 ipa_check_create_node_params ();
4608 ipa_check_create_edge_args ();
4609
4610 function_insertion_hook_holder =
4611 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
4612 }
4613
4614 /* Unregister our cgraph hooks if they have been registered. */
4615
4616 static void
4617 ipa_unregister_cgraph_hooks (void)
4618 {
4619 if (function_insertion_hook_holder)
4620 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
4621 function_insertion_hook_holder = NULL;
4622 }
4623
4624 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4625 longer needed after ipa-cp. */
4626
4627 void
4628 ipa_free_all_structures_after_ipa_cp (void)
4629 {
4630 if (!optimize && !in_lto_p)
4631 {
4632 ipa_free_all_edge_args ();
4633 ipa_free_all_node_params ();
4634 ipcp_sources_pool.release ();
4635 ipcp_cst_values_pool.release ();
4636 ipcp_poly_ctx_values_pool.release ();
4637 ipcp_agg_lattice_pool.release ();
4638 ipa_unregister_cgraph_hooks ();
4639 ipa_refdesc_pool.release ();
4640 }
4641 }
4642
4643 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4644 longer needed after indirect inlining. */
4645
4646 void
4647 ipa_free_all_structures_after_iinln (void)
4648 {
4649 ipa_free_all_edge_args ();
4650 ipa_free_all_node_params ();
4651 ipa_unregister_cgraph_hooks ();
4652 ipcp_sources_pool.release ();
4653 ipcp_cst_values_pool.release ();
4654 ipcp_poly_ctx_values_pool.release ();
4655 ipcp_agg_lattice_pool.release ();
4656 ipa_refdesc_pool.release ();
4657 }
4658
4659 /* Print the ipa_node_params parameter descriptors of function NODE to F. */
4661
4662 void
4663 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4664 {
4665 int i, count;
4666 class ipa_node_params *info;
4667
4668 if (!node->definition)
4669 return;
4670 info = ipa_node_params_sum->get (node);
4671 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4672 if (!info)
4673 {
4674 fprintf (f, " no params return\n");
4675 return;
4676 }
4677 count = ipa_get_param_count (info);
4678 for (i = 0; i < count; i++)
4679 {
4680 int c;
4681
4682 fprintf (f, " ");
4683 ipa_dump_param (f, info, i);
4684 if (ipa_is_param_used (info, i))
4685 fprintf (f, " used");
4686 if (ipa_is_param_used_by_ipa_predicates (info, i))
4687 fprintf (f, " used_by_ipa_predicates");
4688 if (ipa_is_param_used_by_indirect_call (info, i))
4689 fprintf (f, " used_by_indirect_call");
4690 if (ipa_is_param_used_by_polymorphic_call (info, i))
4691 fprintf (f, " used_by_polymorphic_call");
4692 c = ipa_get_controlled_uses (info, i);
4693 if (c == IPA_UNDESCRIBED_USE)
4694 fprintf (f, " undescribed_use");
4695 else
4696 fprintf (f, " controlled_uses=%i %s", c,
4697 ipa_get_param_load_dereferenced (info, i)
4698 ? "(load_dereferenced)" : "");
4699 fprintf (f, "\n");
4700 }
4701 }
4702
4703 /* Print ipa_node_params data structures of all functions in the
4704 callgraph to F. */
4705
4706 void
4707 ipa_print_all_params (FILE * f)
4708 {
4709 struct cgraph_node *node;
4710
4711 fprintf (f, "\nFunction parameters:\n");
4712 FOR_EACH_FUNCTION (node)
4713 ipa_print_node_params (f, node);
4714 }
4715
4716 /* Dump the AV linked list. */
4717
4718 void
4719 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4720 {
4721 bool comma = false;
4722 fprintf (f, " Aggregate replacements:");
4723 for (; av; av = av->next)
4724 {
4725 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4726 av->index, av->offset);
4727 print_generic_expr (f, av->value);
4728 comma = true;
4729 }
4730 fprintf (f, "\n");
4731 }
4732
4733 /* Stream out jump function JUMP_FUNC to OB. */
4734
4735 static void
4736 ipa_write_jump_function (struct output_block *ob,
4737 struct ipa_jump_func *jump_func)
4738 {
4739 struct ipa_agg_jf_item *item;
4740 struct bitpack_d bp;
4741 int i, count;
4742 int flag = 0;
4743
4744 /* ADDR_EXPRs are very common IP invariants; save some streamer data
4745 as well as WPA memory by handling them specially. */
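/* The reader (ipa_read_jump_function) recovers the jump function type as
   val / 2 and this ADDR_EXPR flag as val & 1 from the single uhwi written
   below.  */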
4746 if (jump_func->type == IPA_JF_CONST
4747 && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4748 flag = 1;
4749
4750 streamer_write_uhwi (ob, jump_func->type * 2 + flag);
4751 switch (jump_func->type)
4752 {
4753 case IPA_JF_UNKNOWN:
4754 break;
4755 case IPA_JF_CONST:
4756 gcc_assert (
4757 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4758 stream_write_tree (ob,
4759 flag
4760 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4761 : jump_func->value.constant.value, true);
4762 break;
4763 case IPA_JF_PASS_THROUGH:
4764 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4765 if (jump_func->value.pass_through.operation == NOP_EXPR)
4766 {
4767 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4768 bp = bitpack_create (ob->main_stream);
4769 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4770 gcc_assert (!jump_func->value.pass_through.refdesc_decremented);
4771 streamer_write_bitpack (&bp);
4772 }
4773 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4774 == tcc_unary)
4775 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4776 else
4777 {
4778 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4779 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4780 }
4781 break;
4782 case IPA_JF_ANCESTOR:
4783 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4784 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4785 bp = bitpack_create (ob->main_stream);
4786 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4787 bp_pack_value (&bp, jump_func->value.ancestor.keep_null, 1);
4788 streamer_write_bitpack (&bp);
4789 break;
4790 default:
4791 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4792 }
4793
4794 count = vec_safe_length (jump_func->agg.items);
4795 streamer_write_uhwi (ob, count);
4796 if (count)
4797 {
4798 bp = bitpack_create (ob->main_stream);
4799 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4800 streamer_write_bitpack (&bp);
4801 }
4802
4803 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4804 {
4805 stream_write_tree (ob, item->type, true);
4806 streamer_write_uhwi (ob, item->offset);
4807 streamer_write_uhwi (ob, item->jftype);
4808 switch (item->jftype)
4809 {
4810 case IPA_JF_UNKNOWN:
4811 break;
4812 case IPA_JF_CONST:
4813 stream_write_tree (ob, item->value.constant, true);
4814 break;
4815 case IPA_JF_PASS_THROUGH:
4816 case IPA_JF_LOAD_AGG:
4817 streamer_write_uhwi (ob, item->value.pass_through.operation);
4818 streamer_write_uhwi (ob, item->value.pass_through.formal_id);
4819 if (TREE_CODE_CLASS (item->value.pass_through.operation)
4820 != tcc_unary)
4821 stream_write_tree (ob, item->value.pass_through.operand, true);
4822 if (item->jftype == IPA_JF_LOAD_AGG)
4823 {
4824 stream_write_tree (ob, item->value.load_agg.type, true);
4825 streamer_write_uhwi (ob, item->value.load_agg.offset);
4826 bp = bitpack_create (ob->main_stream);
4827 bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
4828 streamer_write_bitpack (&bp);
4829 }
4830 break;
4831 default:
4832 fatal_error (UNKNOWN_LOCATION,
4833 "invalid jump function in LTO stream");
4834 }
4835 }
4836
4837 bp = bitpack_create (ob->main_stream);
4838 bp_pack_value (&bp, !!jump_func->bits, 1);
4839 streamer_write_bitpack (&bp);
4840 if (jump_func->bits)
4841 {
4842 streamer_write_widest_int (ob, jump_func->bits->value);
4843 streamer_write_widest_int (ob, jump_func->bits->mask);
4844 }
4845 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4846 streamer_write_bitpack (&bp);
4847 if (jump_func->m_vr)
4848 {
4849 streamer_write_enum (ob->main_stream, value_rang_type,
4850 VR_LAST, jump_func->m_vr->kind ());
4851 stream_write_tree (ob, jump_func->m_vr->min (), true);
4852 stream_write_tree (ob, jump_func->m_vr->max (), true);
4853 }
4854 }
4855
4856 /* Read in jump function JUMP_FUNC from IB. */
4857
4858 static void
4859 ipa_read_jump_function (class lto_input_block *ib,
4860 struct ipa_jump_func *jump_func,
4861 struct cgraph_edge *cs,
4862 class data_in *data_in,
4863 bool prevails)
4864 {
4865 enum jump_func_type jftype;
4866 enum tree_code operation;
4867 int i, count;
4868 int val = streamer_read_uhwi (ib);
4869 bool flag = val & 1;
4870
4871 jftype = (enum jump_func_type) (val / 2);
4872 switch (jftype)
4873 {
4874 case IPA_JF_UNKNOWN:
4875 ipa_set_jf_unknown (jump_func);
4876 break;
4877 case IPA_JF_CONST:
4878 {
4879 tree t = stream_read_tree (ib, data_in);
4880 if (flag && prevails)
4881 t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
4882 ipa_set_jf_constant (jump_func, t, cs);
4883 }
4884 break;
4885 case IPA_JF_PASS_THROUGH:
4886 operation = (enum tree_code) streamer_read_uhwi (ib);
4887 if (operation == NOP_EXPR)
4888 {
4889 int formal_id = streamer_read_uhwi (ib);
4890 struct bitpack_d bp = streamer_read_bitpack (ib);
4891 bool agg_preserved = bp_unpack_value (&bp, 1);
4892 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4893 }
4894 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4895 {
4896 int formal_id = streamer_read_uhwi (ib);
4897 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4898 }
4899 else
4900 {
4901 tree operand = stream_read_tree (ib, data_in);
4902 int formal_id = streamer_read_uhwi (ib);
4903 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4904 operation);
4905 }
4906 break;
4907 case IPA_JF_ANCESTOR:
4908 {
4909 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4910 int formal_id = streamer_read_uhwi (ib);
4911 struct bitpack_d bp = streamer_read_bitpack (ib);
4912 bool agg_preserved = bp_unpack_value (&bp, 1);
4913 bool keep_null = bp_unpack_value (&bp, 1);
4914 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved,
4915 keep_null);
4916 break;
4917 }
4918 default:
4919 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4920 }
4921
4922 count = streamer_read_uhwi (ib);
4923 if (prevails)
4924 {
4925 jump_func->agg.items = NULL;
4926 vec_safe_reserve (jump_func->agg.items, count, true);
4927 }
4928 if (count)
4929 {
4930 struct bitpack_d bp = streamer_read_bitpack (ib);
4931 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4932 }
4933 for (i = 0; i < count; i++)
4934 {
4935 struct ipa_agg_jf_item item;
4936 item.type = stream_read_tree (ib, data_in);
4937 item.offset = streamer_read_uhwi (ib);
4938 item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4939
4940 switch (item.jftype)
4941 {
4942 case IPA_JF_UNKNOWN:
4943 break;
4944 case IPA_JF_CONST:
4945 item.value.constant = stream_read_tree (ib, data_in);
4946 break;
4947 case IPA_JF_PASS_THROUGH:
4948 case IPA_JF_LOAD_AGG:
4949 operation = (enum tree_code) streamer_read_uhwi (ib);
4950 item.value.pass_through.operation = operation;
4951 item.value.pass_through.formal_id = streamer_read_uhwi (ib);
4952 if (TREE_CODE_CLASS (operation) == tcc_unary)
4953 item.value.pass_through.operand = NULL_TREE;
4954 else
4955 item.value.pass_through.operand = stream_read_tree (ib, data_in);
4956 if (item.jftype == IPA_JF_LOAD_AGG)
4957 {
4958 struct bitpack_d bp;
4959 item.value.load_agg.type = stream_read_tree (ib, data_in);
4960 item.value.load_agg.offset = streamer_read_uhwi (ib);
4961 bp = streamer_read_bitpack (ib);
4962 item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
4963 }
4964 break;
4965 default:
4966 fatal_error (UNKNOWN_LOCATION,
4967 "invalid jump function in LTO stream");
4968 }
4969 if (prevails)
4970 jump_func->agg.items->quick_push (item);
4971 }
4972
4973 struct bitpack_d bp = streamer_read_bitpack (ib);
4974 bool bits_known = bp_unpack_value (&bp, 1);
4975 if (bits_known)
4976 {
4977 widest_int value = streamer_read_widest_int (ib);
4978 widest_int mask = streamer_read_widest_int (ib);
4979 if (prevails)
4980 ipa_set_jfunc_bits (jump_func, value, mask);
4981 }
4982 else
4983 jump_func->bits = NULL;
4984
4985 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4986 bool vr_known = bp_unpack_value (&vr_bp, 1);
4987 if (vr_known)
4988 {
4989 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
4990 VR_LAST);
4991 tree min = stream_read_tree (ib, data_in);
4992 tree max = stream_read_tree (ib, data_in);
4993 if (prevails)
4994 ipa_set_jfunc_vr (jump_func, type, min, max);
4995 }
4996 else
4997 jump_func->m_vr = NULL;
4998 }
4999
5000 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
5001 relevant to indirect inlining to OB. */
5002
5003 static void
5004 ipa_write_indirect_edge_info (struct output_block *ob,
5005 struct cgraph_edge *cs)
5006 {
5007 class cgraph_indirect_call_info *ii = cs->indirect_info;
5008 struct bitpack_d bp;
5009
5010 streamer_write_hwi (ob, ii->param_index);
5011 bp = bitpack_create (ob->main_stream);
5012 bp_pack_value (&bp, ii->polymorphic, 1);
5013 bp_pack_value (&bp, ii->agg_contents, 1);
5014 bp_pack_value (&bp, ii->member_ptr, 1);
5015 bp_pack_value (&bp, ii->by_ref, 1);
5016 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
5017 bp_pack_value (&bp, ii->vptr_changed, 1);
5018 streamer_write_bitpack (&bp);
5019 if (ii->agg_contents || ii->polymorphic)
5020 streamer_write_hwi (ob, ii->offset);
5021 else
5022 gcc_assert (ii->offset == 0);
5023
5024 if (ii->polymorphic)
5025 {
5026 streamer_write_hwi (ob, ii->otr_token);
5027 stream_write_tree (ob, ii->otr_type, true);
5028 ii->context.stream_out (ob);
5029 }
5030 }
5031
5032 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
5033 relevant to indirect inlining from IB. */
5034
5035 static void
5036 ipa_read_indirect_edge_info (class lto_input_block *ib,
5037 class data_in *data_in,
5038 struct cgraph_edge *cs,
5039 class ipa_node_params *info)
5040 {
5041 class cgraph_indirect_call_info *ii = cs->indirect_info;
5042 struct bitpack_d bp;
5043
5044 ii->param_index = (int) streamer_read_hwi (ib);
5045 bp = streamer_read_bitpack (ib);
5046 ii->polymorphic = bp_unpack_value (&bp, 1);
5047 ii->agg_contents = bp_unpack_value (&bp, 1);
5048 ii->member_ptr = bp_unpack_value (&bp, 1);
5049 ii->by_ref = bp_unpack_value (&bp, 1);
5050 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
5051 ii->vptr_changed = bp_unpack_value (&bp, 1);
5052 if (ii->agg_contents || ii->polymorphic)
5053 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
5054 else
5055 ii->offset = 0;
5056 if (ii->polymorphic)
5057 {
5058 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
5059 ii->otr_type = stream_read_tree (ib, data_in);
5060 ii->context.stream_in (ib, data_in);
5061 }
5062 if (info && ii->param_index >= 0)
5063 {
5064 if (ii->polymorphic)
5065 ipa_set_param_used_by_polymorphic_call (info,
5066 ii->param_index , true);
5067 ipa_set_param_used_by_indirect_call (info,
5068 ii->param_index, true);
5069 }
5070 }
5071
5072 /* Stream out NODE info to OB. */
5073
5074 static void
5075 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
5076 {
5077 int node_ref;
5078 lto_symtab_encoder_t encoder;
5079 ipa_node_params *info = ipa_node_params_sum->get (node);
5080 int j;
5081 struct cgraph_edge *e;
5082 struct bitpack_d bp;
5083
5084 encoder = ob->decl_state->symtab_node_encoder;
5085 node_ref = lto_symtab_encoder_encode (encoder, node);
5086 streamer_write_uhwi (ob, node_ref);
5087
5088 streamer_write_uhwi (ob, ipa_get_param_count (info));
5089 for (j = 0; j < ipa_get_param_count (info); j++)
5090 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
5091 bp = bitpack_create (ob->main_stream);
5092 gcc_assert (info->analysis_done
5093 || ipa_get_param_count (info) == 0);
5094 gcc_assert (!info->node_enqueued);
5095 gcc_assert (!info->ipcp_orig_node);
5096 for (j = 0; j < ipa_get_param_count (info); j++)
5097 {
5098 /* TODO: We could just not stream the bit in the undescribed case. */
5099 bool d = (ipa_get_controlled_uses (info, j) != IPA_UNDESCRIBED_USE)
5100 ? ipa_get_param_load_dereferenced (info, j) : true;
5101 bp_pack_value (&bp, d, 1);
5102 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
5103 }
5104 streamer_write_bitpack (&bp);
5105 for (j = 0; j < ipa_get_param_count (info); j++)
5106 {
5107 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
5108 stream_write_tree (ob, ipa_get_type (info, j), true);
5109 }
5110 for (e = node->callees; e; e = e->next_callee)
5111 {
5112 ipa_edge_args *args = ipa_edge_args_sum->get (e);
5113
5114 if (!args)
5115 {
5116 streamer_write_uhwi (ob, 0);
5117 continue;
5118 }
5119
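/* As with jump function types above, the low bit of this count encodes an
   extra flag: whether polymorphic call contexts follow each jump function.
   ipa_read_edge_info decodes it with count & 1 and count / 2.  */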
5120 streamer_write_uhwi (ob,
5121 ipa_get_cs_argument_count (args) * 2
5122 + (args->polymorphic_call_contexts != NULL));
5123 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5124 {
5125 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5126 if (args->polymorphic_call_contexts != NULL)
5127 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5128 }
5129 }
5130 for (e = node->indirect_calls; e; e = e->next_callee)
5131 {
5132 ipa_edge_args *args = ipa_edge_args_sum->get (e);
5133 if (!args)
5134 streamer_write_uhwi (ob, 0);
5135 else
5136 {
5137 streamer_write_uhwi (ob,
5138 ipa_get_cs_argument_count (args) * 2
5139 + (args->polymorphic_call_contexts != NULL));
5140 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
5141 {
5142 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
5143 if (args->polymorphic_call_contexts != NULL)
5144 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
5145 }
5146 }
5147 ipa_write_indirect_edge_info (ob, e);
5148 }
5149 }
5150
5151 /* Stream in edge E from IB. */
5152
5153 static void
5154 ipa_read_edge_info (class lto_input_block *ib,
5155 class data_in *data_in,
5156 struct cgraph_edge *e, bool prevails)
5157 {
5158 int count = streamer_read_uhwi (ib);
5159 bool contexts_computed = count & 1;
5160
5161 count /= 2;
5162 if (!count)
5163 return;
5164 if (prevails
5165 && (e->possibly_call_in_translation_unit_p ()
5166 /* Also stream in jump functions to builtins in the hope that they
5167 will get fnspecs. */
5168 || fndecl_built_in_p (e->callee->decl, BUILT_IN_NORMAL)))
5169 {
5170 ipa_edge_args *args = ipa_edge_args_sum->get_create (e);
5171 vec_safe_grow_cleared (args->jump_functions, count, true);
5172 if (contexts_computed)
5173 vec_safe_grow_cleared (args->polymorphic_call_contexts, count, true);
5174 for (int k = 0; k < count; k++)
5175 {
5176 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
5177 data_in, prevails);
5178 if (contexts_computed)
5179 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
5180 (ib, data_in);
5181 }
5182 }
5183 else
5184 {
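      /* The jump functions will not be used here (the node does not prevail
	 or the callee cannot be called from this unit); read them into
	 dummies only to advance in the input block.  */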
5185 for (int k = 0; k < count; k++)
5186 {
5187 struct ipa_jump_func dummy;
5188 ipa_read_jump_function (ib, &dummy, e,
5189 data_in, prevails);
5190 if (contexts_computed)
5191 {
5192 class ipa_polymorphic_call_context ctx;
5193 ctx.stream_in (ib, data_in);
5194 }
5195 }
5196 }
5197 }
5198
5199 /* Stream in NODE info from IB. */
5200
5201 static void
5202 ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
5203 class data_in *data_in)
5204 {
5205 int k;
5206 struct cgraph_edge *e;
5207 struct bitpack_d bp;
5208 bool prevails = node->prevailing_p ();
5209 ipa_node_params *info
5210 = prevails ? ipa_node_params_sum->get_create (node) : NULL;
5211
5212 int param_count = streamer_read_uhwi (ib);
5213 if (prevails)
5214 {
5215 ipa_alloc_node_params (node, param_count);
5216 for (k = 0; k < param_count; k++)
5217 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
5218 if (ipa_get_param_count (info) != 0)
5219 info->analysis_done = true;
5220 info->node_enqueued = false;
5221 }
5222 else
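    /* The node does not prevail; consume the streamed move costs only to
       stay in sync with the input block.  */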
5223 for (k = 0; k < param_count; k++)
5224 streamer_read_uhwi (ib);
5225
5226 bp = streamer_read_bitpack (ib);
5227 for (k = 0; k < param_count; k++)
5228 {
5229 bool load_dereferenced = bp_unpack_value (&bp, 1);
5230 bool used = bp_unpack_value (&bp, 1);
5231
5232 if (prevails)
5233 {
5234 ipa_set_param_load_dereferenced (info, k, load_dereferenced);
5235 ipa_set_param_used (info, k, used);
5236 }
5237 }
5238 for (k = 0; k < param_count; k++)
5239 {
5240 int nuses = streamer_read_hwi (ib);
5241 tree type = stream_read_tree (ib, data_in);
5242
5243 if (prevails)
5244 {
5245 ipa_set_controlled_uses (info, k, nuses);
5246 (*info->descriptors)[k].decl_or_type = type;
5247 }
5248 }
5249 for (e = node->callees; e; e = e->next_callee)
5250 ipa_read_edge_info (ib, data_in, e, prevails);
5251 for (e = node->indirect_calls; e; e = e->next_callee)
5252 {
5253 ipa_read_edge_info (ib, data_in, e, prevails);
5254 ipa_read_indirect_edge_info (ib, data_in, e, info);
5255 }
5256 }
5257
5258 /* Write jump functions of all analyzed functions in the current LTO partition. */
5259
5260 void
5261 ipa_prop_write_jump_functions (void)
5262 {
5263 struct output_block *ob;
5264 unsigned int count = 0;
5265 lto_symtab_encoder_iterator lsei;
5266 lto_symtab_encoder_t encoder;
5267
5268 if (!ipa_node_params_sum || !ipa_edge_args_sum)
5269 return;
5270
5271 ob = create_output_block (LTO_section_jump_functions);
5272 encoder = ob->decl_state->symtab_node_encoder;
5273 ob->symbol = NULL;
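  /* First count the functions with computed parameter information so that
     the count can precede the per-node records.  */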
5274 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5275 lsei_next_function_in_partition (&lsei))
5276 {
5277 cgraph_node *node = lsei_cgraph_node (lsei);
5278 if (node->has_gimple_body_p ()
5279 && ipa_node_params_sum->get (node) != NULL)
5280 count++;
5281 }
5282
5283 streamer_write_uhwi (ob, count);
5284
5285 /* Process all of the functions. */
5286 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5287 lsei_next_function_in_partition (&lsei))
5288 {
5289 cgraph_node *node = lsei_cgraph_node (lsei);
5290 if (node->has_gimple_body_p ()
5291 && ipa_node_params_sum->get (node) != NULL)
5292 ipa_write_node_info (ob, node);
5293 }
5294 streamer_write_char_stream (ob->main_stream, 0);
5295 produce_asm (ob, NULL);
5296 destroy_output_block (ob);
5297 }
5298
5299 /* Read section in file FILE_DATA of length LEN with data DATA. */
5300
5301 static void
5302 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5303 size_t len)
5304 {
5305 const struct lto_function_header *header =
5306 (const struct lto_function_header *) data;
5307 const int cfg_offset = sizeof (struct lto_function_header);
5308 const int main_offset = cfg_offset + header->cfg_size;
5309 const int string_offset = main_offset + header->main_size;
5310 class data_in *data_in;
5311 unsigned int i;
5312 unsigned int count;
5313
5314 lto_input_block ib_main ((const char *) data + main_offset,
5315 header->main_size, file_data->mode_table);
5316
5317 data_in =
5318 lto_data_in_create (file_data, (const char *) data + string_offset,
5319 header->string_size, vNULL);
5320 count = streamer_read_uhwi (&ib_main);
5321
5322 for (i = 0; i < count; i++)
5323 {
5324 unsigned int index;
5325 struct cgraph_node *node;
5326 lto_symtab_encoder_t encoder;
5327
5328 index = streamer_read_uhwi (&ib_main);
5329 encoder = file_data->symtab_node_encoder;
5330 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5331 index));
5332 gcc_assert (node->definition);
5333 ipa_read_node_info (&ib_main, node, data_in);
5334 }
5335 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5336 len);
5337 lto_data_in_delete (data_in);
5338 }
5339
5340 /* Read IPA-CP jump functions from all files with LTO summaries. */
5341
5342 void
5343 ipa_prop_read_jump_functions (void)
5344 {
5345 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5346 struct lto_file_decl_data *file_data;
5347 unsigned int j = 0;
5348
5349 ipa_check_create_node_params ();
5350 ipa_check_create_edge_args ();
5351 ipa_register_cgraph_hooks ();
5352
5353 while ((file_data = file_data_vec[j++]))
5354 {
5355 size_t len;
5356 const char *data
5357 = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
5358 &len);
5359 if (data)
5360 ipa_prop_read_section (file_data, data, len);
5361 }
5362 }
5363
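/* Stream out the IPA-CP transformation summary of NODE to OB: the chain of
   aggregate replacement values and, if present, the known value ranges and
   known bits of its parameters.  */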
5364 void
5365 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5366 {
5367 int node_ref;
5368 unsigned int count = 0;
5369 lto_symtab_encoder_t encoder;
5370 struct ipa_agg_replacement_value *aggvals, *av;
5371
5372 aggvals = ipa_get_agg_replacements_for_node (node);
5373 encoder = ob->decl_state->symtab_node_encoder;
5374 node_ref = lto_symtab_encoder_encode (encoder, node);
5375 streamer_write_uhwi (ob, node_ref);
5376
5377 for (av = aggvals; av; av = av->next)
5378 count++;
5379 streamer_write_uhwi (ob, count);
5380
5381 for (av = aggvals; av; av = av->next)
5382 {
5383 struct bitpack_d bp;
5384
5385 streamer_write_uhwi (ob, av->offset);
5386 streamer_write_uhwi (ob, av->index);
5387 stream_write_tree (ob, av->value, true);
5388
5389 bp = bitpack_create (ob->main_stream);
5390 bp_pack_value (&bp, av->by_ref, 1);
5391 streamer_write_bitpack (&bp);
5392 }
5393
5394 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5395 if (ts && vec_safe_length (ts->m_vr) > 0)
5396 {
5397 count = ts->m_vr->length ();
5398 streamer_write_uhwi (ob, count);
5399 for (unsigned i = 0; i < count; ++i)
5400 {
5401 struct bitpack_d bp;
5402 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5403 bp = bitpack_create (ob->main_stream);
5404 bp_pack_value (&bp, parm_vr->known, 1);
5405 streamer_write_bitpack (&bp);
5406 if (parm_vr->known)
5407 {
5408 streamer_write_enum (ob->main_stream, value_range_kind,
5409 VR_LAST, parm_vr->type);
5410 streamer_write_wide_int (ob, parm_vr->min);
5411 streamer_write_wide_int (ob, parm_vr->max);
5412 }
5413 }
5414 }
5415 else
5416 streamer_write_uhwi (ob, 0);
5417
5418 if (ts && vec_safe_length (ts->bits) > 0)
5419 {
5420 count = ts->bits->length ();
5421 streamer_write_uhwi (ob, count);
5422
5423 for (unsigned i = 0; i < count; ++i)
5424 {
5425 const ipa_bits *bits_jfunc = (*ts->bits)[i];
5426 struct bitpack_d bp = bitpack_create (ob->main_stream);
5427 bp_pack_value (&bp, !!bits_jfunc, 1);
5428 streamer_write_bitpack (&bp);
5429 if (bits_jfunc)
5430 {
5431 streamer_write_widest_int (ob, bits_jfunc->value);
5432 streamer_write_widest_int (ob, bits_jfunc->mask);
5433 }
5434 }
5435 }
5436 else
5437 streamer_write_uhwi (ob, 0);
5438 }
5439
5440 /* Stream in the IPA-CP transformation summary (aggregate value replacement chain, value ranges and known bits) for NODE from IB. */
5441
5442 static void
5443 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5444 data_in *data_in)
5445 {
5446 struct ipa_agg_replacement_value *aggvals = NULL;
5447 unsigned int count, i;
5448
5449 count = streamer_read_uhwi (ib);
5450 for (i = 0; i < count; i++)
5451 {
5452 struct ipa_agg_replacement_value *av;
5453 struct bitpack_d bp;
5454
5455 av = ggc_alloc<ipa_agg_replacement_value> ();
5456 av->offset = streamer_read_uhwi (ib);
5457 av->index = streamer_read_uhwi (ib);
5458 av->value = stream_read_tree (ib, data_in);
5459 bp = streamer_read_bitpack (ib);
5460 av->by_ref = bp_unpack_value (&bp, 1);
5461 av->next = aggvals;
5462 aggvals = av;
5463 }
5464 ipa_set_node_agg_value_chain (node, aggvals);
5465
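  /* Stream in the known value ranges of parameters, if any were recorded.  */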
5466 count = streamer_read_uhwi (ib);
5467 if (count > 0)
5468 {
5469 ipcp_transformation_initialize ();
5470 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5471 vec_safe_grow_cleared (ts->m_vr, count, true);
5472 for (i = 0; i < count; i++)
5473 {
5474 ipa_vr *parm_vr;
5475 parm_vr = &(*ts->m_vr)[i];
5476 struct bitpack_d bp;
5477 bp = streamer_read_bitpack (ib);
5478 parm_vr->known = bp_unpack_value (&bp, 1);
5479 if (parm_vr->known)
5480 {
5481 parm_vr->type = streamer_read_enum (ib, value_range_kind,
5482 VR_LAST);
5483 parm_vr->min = streamer_read_wide_int (ib);
5484 parm_vr->max = streamer_read_wide_int (ib);
5485 }
5486 }
5487 }
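  /* Stream in the known bits (value/mask pairs) of parameters, if any.  */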
5488 count = streamer_read_uhwi (ib);
5489 if (count > 0)
5490 {
5491 ipcp_transformation_initialize ();
5492 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5493 vec_safe_grow_cleared (ts->bits, count, true);
5494
5495 for (i = 0; i < count; i++)
5496 {
5497 struct bitpack_d bp = streamer_read_bitpack (ib);
5498 bool known = bp_unpack_value (&bp, 1);
5499 if (known)
5500 {
5501 const widest_int value = streamer_read_widest_int (ib);
5502 const widest_int mask = streamer_read_widest_int (ib);
5503 ipa_bits *bits
5504 = ipa_get_ipa_bits_for_value (value, mask);
5505 (*ts->bits)[i] = bits;
5506 }
5507 }
5508 }
5509 }
5510
5511 /* Write IPA-CP transformation summaries of all functions in the current LTO partition. */
5512
5513 void
5514 ipcp_write_transformation_summaries (void)
5515 {
5516 struct cgraph_node *node;
5517 struct output_block *ob;
5518 unsigned int count = 0;
5519 lto_symtab_encoder_iterator lsei;
5520 lto_symtab_encoder_t encoder;
5521
5522 ob = create_output_block (LTO_section_ipcp_transform);
5523 encoder = ob->decl_state->symtab_node_encoder;
5524 ob->symbol = NULL;
5525 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5526 lsei_next_function_in_partition (&lsei))
5527 {
5528 node = lsei_cgraph_node (lsei);
5529 if (node->has_gimple_body_p ())
5530 count++;
5531 }
5532
5533 streamer_write_uhwi (ob, count);
5534
5535 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5536 lsei_next_function_in_partition (&lsei))
5537 {
5538 node = lsei_cgraph_node (lsei);
5539 if (node->has_gimple_body_p ())
5540 write_ipcp_transformation_info (ob, node);
5541 }
5542 streamer_write_char_stream (ob->main_stream, 0);
5543 produce_asm (ob, NULL);
5544 destroy_output_block (ob);
5545 }
5546
5547 /* Read replacements section in file FILE_DATA of length LEN with data
5548 DATA. */
5549
5550 static void
5551 read_replacements_section (struct lto_file_decl_data *file_data,
5552 const char *data,
5553 size_t len)
5554 {
5555 const struct lto_function_header *header =
5556 (const struct lto_function_header *) data;
5557 const int cfg_offset = sizeof (struct lto_function_header);
5558 const int main_offset = cfg_offset + header->cfg_size;
5559 const int string_offset = main_offset + header->main_size;
5560 class data_in *data_in;
5561 unsigned int i;
5562 unsigned int count;
5563
5564 lto_input_block ib_main ((const char *) data + main_offset,
5565 header->main_size, file_data->mode_table);
5566
5567 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5568 header->string_size, vNULL);
5569 count = streamer_read_uhwi (&ib_main);
5570
5571 for (i = 0; i < count; i++)
5572 {
5573 unsigned int index;
5574 struct cgraph_node *node;
5575 lto_symtab_encoder_t encoder;
5576
5577 index = streamer_read_uhwi (&ib_main);
5578 encoder = file_data->symtab_node_encoder;
5579 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5580 index));
5581 gcc_assert (node->definition);
5582 read_ipcp_transformation_info (&ib_main, node, data_in);
5583 }
5584 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5585 len);
5586 lto_data_in_delete (data_in);
5587 }
5588
5589 /* Read IPA-CP transformation summaries (aggregate replacements, value ranges and known bits). */
5590
5591 void
5592 ipcp_read_transformation_summaries (void)
5593 {
5594 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5595 struct lto_file_decl_data *file_data;
5596 unsigned int j = 0;
5597
5598 while ((file_data = file_data_vec[j++]))
5599 {
5600 size_t len;
5601 const char *data
5602 = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
5603 &len);
5604 if (data)
5605 read_replacements_section (file_data, data, len);
5606 }
5607 }
5608
5609 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5610 NODE.  Moreover, if any parameter was IPA-SRAed into a scalar, substitute
5611 the default definition of that new parameter with the appropriate
5612 constant.
5613
5614 Return two bools.  The first is true if at least one item in AGGVAL still
5615 exists and the function body walk should go ahead.  The second is true if any
5616 values were already substituted for scalarized parameters and update_cfg
5617 should be run after replace_uses_by. */
5618
5619 static std::pair<bool, bool>
5620 adjust_agg_replacement_values (cgraph_node *node,
5621 ipa_agg_replacement_value *aggval,
5622 const vec<ipa_param_descriptor, va_gc>
5623 &descriptors)
5624 {
5625 struct ipa_agg_replacement_value *v;
5626 clone_info *cinfo = clone_info::get (node);
5627 if (!cinfo || !cinfo->param_adjustments)
5628 return std::pair<bool, bool> (true, false);
5629
5630 bool anything_left = false;
5631 bool done_replacement = false;
5632 for (v = aggval; v; v = v->next)
5633 {
5634 gcc_checking_assert (v->index >= 0);
5635
5636 unsigned unit_offset = v->offset / BITS_PER_UNIT;
5637 tree cst_type = TREE_TYPE (v->value);
5638 int split_idx;
5639 int new_idx
5640 = cinfo->param_adjustments->get_updated_index_or_split (v->index,
5641 unit_offset,
5642 cst_type,
5643 &split_idx);
5644 v->index = new_idx;
5645 if (new_idx >= 0)
5646 anything_left = true;
5647 else if (split_idx >= 0)
5648 {
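	  /* This piece of the aggregate was scalarized by IPA-SRA into a new
	     parameter; replace uses of that parameter's default definition
	     with the constant directly.  */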
5649 tree parm = ipa_get_param (descriptors, split_idx);
5650 tree ddef = ssa_default_def (cfun, parm);
5651 if (ddef)
5652 {
5653 replace_uses_by (ddef, v->value);
5654 done_replacement = true;
5655 }
5656 }
5657 }
5658 return std::pair<bool, bool> (anything_left, done_replacement);
5659 }
5660
5661 /* Dominator walker driving the ipcp modification phase. */
5662
5663 class ipcp_modif_dom_walker : public dom_walker
5664 {
5665 public:
5666 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5667 vec<ipa_param_descriptor, va_gc> *descs,
5668 struct ipa_agg_replacement_value *av,
5669 bool *sc)
5670 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5671 m_aggval (av), m_something_changed (sc) {}
5672
5673 virtual edge before_dom_children (basic_block);
5674 bool cleanup_eh ()
5675 { return gimple_purge_all_dead_eh_edges (m_need_eh_cleanup); }
5676
5677 private:
5678 struct ipa_func_body_info *m_fbi;
5679 vec<ipa_param_descriptor, va_gc> *m_descriptors;
5680 struct ipa_agg_replacement_value *m_aggval;
5681 bool *m_something_changed;
5682 auto_bitmap m_need_eh_cleanup;
5683 };
5684
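/* Scan the statements of BB and replace loads from aggregates passed in
   parameters with the constant values recorded in m_aggval, where possible.  */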
5685 edge
5686 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5687 {
5688 gimple_stmt_iterator gsi;
5689 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5690 {
5691 struct ipa_agg_replacement_value *v;
5692 gimple *stmt = gsi_stmt (gsi);
5693 tree rhs, val, t;
5694 HOST_WIDE_INT offset;
5695 poly_int64 size;
5696 int index;
5697 bool by_ref, vce;
5698
5699 if (!gimple_assign_load_p (stmt))
5700 continue;
5701 rhs = gimple_assign_rhs1 (stmt);
5702 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5703 continue;
5704
5705 vce = false;
5706 t = rhs;
5707 while (handled_component_p (t))
5708 {
5709 /* A VIEW_CONVERT_EXPR can do things like convert an array of integers
5710 into one bigger integer, and similar transformations we do not handle below. */
5711 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5712 {
5713 vce = true;
5714 break;
5715 }
5716 t = TREE_OPERAND (t, 0);
5717 }
5718 if (vce)
5719 continue;
5720
5721 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5722 &offset, &size, &by_ref))
5723 continue;
5724 for (v = m_aggval; v; v = v->next)
5725 if (v->index == index
5726 && v->offset == offset)
5727 break;
5728 if (!v
5729 || v->by_ref != by_ref
5730 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
5731 size))
5732 continue;
5733
5734 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5735 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5736 {
5737 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5738 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5739 else if (TYPE_SIZE (TREE_TYPE (rhs))
5740 == TYPE_SIZE (TREE_TYPE (v->value)))
5741 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5742 else
5743 {
5744 if (dump_file)
5745 {
5746 fprintf (dump_file, " const ");
5747 print_generic_expr (dump_file, v->value);
5748 fprintf (dump_file, " can't be converted to type of ");
5749 print_generic_expr (dump_file, rhs);
5750 fprintf (dump_file, "\n");
5751 }
5752 continue;
5753 }
5754 }
5755 else
5756 val = v->value;
5757
5758 if (dump_file && (dump_flags & TDF_DETAILS))
5759 {
5760 fprintf (dump_file, "Modifying stmt:\n ");
5761 print_gimple_stmt (dump_file, stmt, 0);
5762 }
5763 gimple_assign_set_rhs_from_tree (&gsi, val);
5764 update_stmt (stmt);
5765
5766 if (dump_file && (dump_flags & TDF_DETAILS))
5767 {
5768 fprintf (dump_file, "into:\n ");
5769 print_gimple_stmt (dump_file, stmt, 0);
5770 fprintf (dump_file, "\n");
5771 }
5772
5773 *m_something_changed = true;
5774 if (maybe_clean_eh_stmt (stmt))
5775 bitmap_set_bit (m_need_eh_cleanup, bb->index);
5776 }
5777 return NULL;
5778 }
5779
5780 /* Return true if we have recorded VALUE and MASK about PARM.
5781 Set VALUE and MASK accordingly. */
5782
5783 bool
5784 ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
5785 {
5786 cgraph_node *cnode = cgraph_node::get (current_function_decl);
5787 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5788 if (!ts || vec_safe_length (ts->bits) == 0)
5789 return false;
5790
5791 int i = 0;
5792 for (tree p = DECL_ARGUMENTS (current_function_decl);
5793 p != parm; p = DECL_CHAIN (p))
5794 {
5795 i++;
5796 /* Ignore static chain. */
5797 if (!p)
5798 return false;
5799 }
5800
5801 clone_info *cinfo = clone_info::get (cnode);
5802 if (cinfo && cinfo->param_adjustments)
5803 {
5804 i = cinfo->param_adjustments->get_original_index (i);
5805 if (i < 0)
5806 return false;
5807 }
5808
5809 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5810 if (!bits[i])
5811 return false;
5812 *mask = bits[i]->mask;
5813 *value = wide_int_to_tree (TREE_TYPE (parm), bits[i]->value);
5814 return true;
5815 }
5816
5817
5818 /* Update bits info of formal parameters as described in
5819 ipcp_transformation. */
5820
5821 static void
5822 ipcp_update_bits (struct cgraph_node *node)
5823 {
5824 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5825
5826 if (!ts || vec_safe_length (ts->bits) == 0)
5827 return;
5828 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5829 unsigned count = bits.length ();
5830 if (!count)
5831 return;
5832
5833 auto_vec<int, 16> new_indices;
5834 bool need_remapping = false;
5835 clone_info *cinfo = clone_info::get (node);
5836 if (cinfo && cinfo->param_adjustments)
5837 {
5838 cinfo->param_adjustments->get_updated_indices (&new_indices);
5839 need_remapping = true;
5840 }
5841 auto_vec <tree, 16> parm_decls;
5842 push_function_arg_decls (&parm_decls, node->decl);
5843
5844 for (unsigned i = 0; i < count; ++i)
5845 {
5846 tree parm;
5847 if (need_remapping)
5848 {
5849 if (i >= new_indices.length ())
5850 continue;
5851 int idx = new_indices[i];
5852 if (idx < 0)
5853 continue;
5854 parm = parm_decls[idx];
5855 }
5856 else
5857 parm = parm_decls[i];
5858 gcc_checking_assert (parm);
5859
5861 if (!bits[i]
5862 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
5863 || POINTER_TYPE_P (TREE_TYPE (parm)))
5864 || !is_gimple_reg (parm))
5865 continue;
5866
5867 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5868 if (!ddef)
5869 continue;
5870
5871 if (dump_file)
5872 {
5873 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5874 print_hex (bits[i]->mask, dump_file);
5875 fprintf (dump_file, "\n");
5876 }
5877
5878 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5879 {
5880 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5881 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5882
5883 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
5884 | wide_int::from (bits[i]->value, prec, sgn);
5885 set_nonzero_bits (ddef, nonzero_bits);
5886 }
5887 else
5888 {
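	  /* For pointer parameters, the lowest set bit of the mask gives the
	     known alignment and the value's bits below it give the
	     misalignment.  */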
5889 unsigned tem = bits[i]->mask.to_uhwi ();
5890 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
5891 unsigned align = tem & -tem;
5892 unsigned misalign = bitpos & (align - 1);
5893
5894 if (align > 1)
5895 {
5896 if (dump_file)
5897 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5898
5899 unsigned old_align, old_misalign;
5900 struct ptr_info_def *pi = get_ptr_info (ddef);
5901 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5902
5903 if (old_known
5904 && old_align > align)
5905 {
5906 if (dump_file)
5907 {
5908 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5909 if ((old_misalign & (align - 1)) != misalign)
5910 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5911 old_misalign, misalign);
5912 }
5913 continue;
5914 }
5915
5916 if (old_known
5917 && ((misalign & (old_align - 1)) != old_misalign)
5918 && dump_file)
5919 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5920 old_misalign, misalign);
5921
5922 set_ptr_info_alignment (pi, align, misalign);
5923 }
5924 }
5925 }
5926 }
5927
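/* Return true if the recorded range, interpreted in EXPR_TYPE, is known to
   exclude zero: either it is the anti-range ~[0, 0] or, for an unsigned type,
   the range [1, max].  */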
5928 bool
5929 ipa_vr::nonzero_p (tree expr_type) const
5930 {
5931 if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
5932 return true;
5933
5934 unsigned prec = TYPE_PRECISION (expr_type);
5935 return (type == VR_RANGE
5936 && TYPE_UNSIGNED (expr_type)
5937 && wi::eq_p (min, wi::one (prec))
5938 && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
5939 }
5940
5941 /* Update value range of formal parameters as described in
5942 ipcp_transformation. */
5943
5944 static void
5945 ipcp_update_vr (struct cgraph_node *node)
5946 {
5947 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5948 if (!ts || vec_safe_length (ts->m_vr) == 0)
5949 return;
5950 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5951 unsigned count = vr.length ();
5952 if (!count)
5953 return;
5954
5955 auto_vec<int, 16> new_indices;
5956 bool need_remapping = false;
5957 clone_info *cinfo = clone_info::get (node);
5958 if (cinfo && cinfo->param_adjustments)
5959 {
5960 cinfo->param_adjustments->get_updated_indices (&new_indices);
5961 need_remapping = true;
5962 }
5963 auto_vec <tree, 16> parm_decls;
5964 push_function_arg_decls (&parm_decls, node->decl);
5965
5966 for (unsigned i = 0; i < count; ++i)
5967 {
5968 tree parm;
5969 int remapped_idx;
5970 if (need_remapping)
5971 {
5972 if (i >= new_indices.length ())
5973 continue;
5974 remapped_idx = new_indices[i];
5975 if (remapped_idx < 0)
5976 continue;
5977 }
5978 else
5979 remapped_idx = i;
5980
5981 parm = parm_decls[remapped_idx];
5982
5983 gcc_checking_assert (parm);
5984 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5985
5986 if (!ddef || !is_gimple_reg (parm))
5987 continue;
5988
5989 if (vr[i].known
5990 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5991 {
5992 tree type = TREE_TYPE (ddef);
5993 unsigned prec = TYPE_PRECISION (type);
5994 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5995 {
5996 if (dump_file)
5997 {
5998 fprintf (dump_file, "Setting value range of param %u "
5999 "(now %i) ", i, remapped_idx);
6000 fprintf (dump_file, "%s[",
6001 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
6002 print_decs (vr[i].min, dump_file);
6003 fprintf (dump_file, ", ");
6004 print_decs (vr[i].max, dump_file);
6005 fprintf (dump_file, "]\n");
6006 }
6007 set_range_info (ddef, vr[i].type,
6008 wide_int_storage::from (vr[i].min, prec,
6009 TYPE_SIGN (type)),
6010 wide_int_storage::from (vr[i].max, prec,
6011 TYPE_SIGN (type)));
6012 }
6013 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
6014 && vr[i].nonzero_p (TREE_TYPE (ddef)))
6015 {
6016 if (dump_file)
6017 fprintf (dump_file, "Setting nonnull for %u\n", i);
6018 set_ptr_nonnull (ddef);
6019 }
6020 }
6021 }
6022 }
6023
6024 /* IPCP transformation phase doing propagation of aggregate values. */
6025
6026 unsigned int
6027 ipcp_transform_function (struct cgraph_node *node)
6028 {
6029 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
6030 struct ipa_func_body_info fbi;
6031 struct ipa_agg_replacement_value *aggval;
6032 int param_count;
6033
6034 gcc_checking_assert (cfun);
6035 gcc_checking_assert (current_function_decl);
6036
6037 if (dump_file)
6038 fprintf (dump_file, "Modification phase of node %s\n",
6039 node->dump_name ());
6040
6041 ipcp_update_bits (node);
6042 ipcp_update_vr (node);
6043 aggval = ipa_get_agg_replacements_for_node (node);
6044 if (!aggval)
6045 return 0;
6046 param_count = count_formal_params (node->decl);
6047 if (param_count == 0)
6048 return 0;
6049 vec_safe_grow_cleared (descriptors, param_count, true);
6050 ipa_populate_param_decls (node, *descriptors);
6051 std::pair<bool, bool> rr
6052 = adjust_agg_replacement_values (node, aggval, *descriptors);
6053 bool cfg_changed = rr.second;
6054 if (!rr.first)
6055 {
6056 vec_free (descriptors);
6057 if (dump_file)
6058 fprintf (dump_file, " All affected aggregate parameters were either "
6059 "removed or converted into scalars, phase done.\n");
6060 if (cfg_changed)
6061 delete_unreachable_blocks_update_callgraph (node, false);
6062 return 0;
6063 }
6064 if (dump_file)
6065 ipa_dump_agg_replacement_values (dump_file, aggval);
6066
6067 fbi.node = node;
6068 fbi.info = NULL;
6069 fbi.bb_infos = vNULL;
6070 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun), true);
6071 fbi.param_count = param_count;
6072 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
6073
6074 bool modified_mem_access = false;
6075 calculate_dominance_info (CDI_DOMINATORS);
6076 ipcp_modif_dom_walker walker (&fbi, descriptors, aggval, &modified_mem_access);
6077 walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
6078 free_dominance_info (CDI_DOMINATORS);
6079 cfg_changed |= walker.cleanup_eh ();
6080
6081 int i;
6082 struct ipa_bb_info *bi;
6083 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
6084 free_ipa_bb_info (bi);
6085 fbi.bb_infos.release ();
6086
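  /* The transformation data has been applied; drop the references so that it
     is not applied again and the memory can be reclaimed.  */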
6087 ipcp_transformation *s = ipcp_transformation_sum->get (node);
6088 s->agg_values = NULL;
6089 s->bits = NULL;
6090 s->m_vr = NULL;
6091
6092 vec_free (descriptors);
6093 if (cfg_changed)
6094 delete_unreachable_blocks_update_callgraph (node, false);
6095
6096 return modified_mem_access ? TODO_update_ssa_only_virtuals : 0;
6097 }
6098
6099 /* Return true if the two pass_through components of two jump functions are
6100 known to be equivalent.  AGG_JF denotes whether they are part of aggregate
6101 jump functions.  The function can only be used before the IPA phase of IPA-CP
6102 or inlining because it cannot cope with the refdesc changes these passes
6103 carry out. */
6104
6105 static bool
6106 ipa_agg_pass_through_jf_equivalent_p (ipa_pass_through_data *ipt1,
6107 ipa_pass_through_data *ipt2,
6108 bool agg_jf)
6110 {
6111 gcc_assert (agg_jf ||
6112 (!ipt1->refdesc_decremented && !ipt2->refdesc_decremented));
6113 if (ipt1->operation != ipt2->operation
6114 || ipt1->formal_id != ipt2->formal_id
6115 || (!agg_jf && (ipt1->agg_preserved != ipt2->agg_preserved)))
6116 return false;
6117 if (((ipt1->operand != NULL_TREE) != (ipt2->operand != NULL_TREE))
6118 || (ipt1->operand
6119 && !values_equal_for_ipcp_p (ipt1->operand, ipt2->operand)))
6120 return false;
6121 return true;
6122 }
6123
6124 /* Return true if the two aggregate jump functions are known to be equivalent.
6125 The function can only be used before the IPA phase of IPA-CP or inlining
6126 because it cannot cope with the refdesc changes these passes carry out. */
6127
6128 static bool
6129 ipa_agg_jump_functions_equivalent_p (ipa_agg_jf_item *ajf1,
6130 ipa_agg_jf_item *ajf2)
6131 {
6132 if (ajf1->offset != ajf2->offset
6133 || ajf1->jftype != ajf2->jftype
6134 || !types_compatible_p (ajf1->type, ajf2->type))
6135 return false;
6136
6137 switch (ajf1->jftype)
6138 {
6139 case IPA_JF_CONST:
6140 if (!values_equal_for_ipcp_p (ajf1->value.constant,
6141 ajf2->value.constant))
6142 return false;
6143 break;
6144 case IPA_JF_PASS_THROUGH:
6145 {
6146 ipa_pass_through_data *ipt1 = &ajf1->value.pass_through;
6147 ipa_pass_through_data *ipt2 = &ajf2->value.pass_through;
6148 if (!ipa_agg_pass_through_jf_equivalent_p (ipt1, ipt2, true))
6149 return false;
6150 }
6151 break;
6152 case IPA_JF_LOAD_AGG:
6153 {
6154 ipa_load_agg_data *ila1 = &ajf1->value.load_agg;
6155 ipa_load_agg_data *ila2 = &ajf2->value.load_agg;
6156 if (!ipa_agg_pass_through_jf_equivalent_p (&ila1->pass_through,
6157 &ila2->pass_through, true))
6158 return false;
6159 if (ila1->offset != ila2->offset
6160 || ila1->by_ref != ila2->by_ref
6161 || !types_compatible_p (ila1->type, ila2->type))
6162 return false;
6163 }
6164 break;
6165 default:
6166 gcc_unreachable ();
6167 }
6168 return true;
6169 }
6170
6171 /* Return true if the two jump functions are known to be equivalent.  The
6172 function can only be used before the IPA phase of IPA-CP or inlining because
6173 it cannot cope with the refdesc changes these passes carry out. */
6174
6175 bool
6176 ipa_jump_functions_equivalent_p (ipa_jump_func *jf1, ipa_jump_func *jf2)
6177 {
6178 if (jf1->type != jf2->type)
6179 return false;
6180
6181 switch (jf1->type)
6182 {
6183 case IPA_JF_UNKNOWN:
6184 break;
6185 case IPA_JF_CONST:
6186 {
6187 tree cst1 = ipa_get_jf_constant (jf1);
6188 tree cst2 = ipa_get_jf_constant (jf2);
6189 if (!values_equal_for_ipcp_p (cst1, cst2))
6190 return false;
6191
6192 ipa_cst_ref_desc *rd1 = jfunc_rdesc_usable (jf1);
6193 ipa_cst_ref_desc *rd2 = jfunc_rdesc_usable (jf2);
6194 if (rd1 && rd2)
6195 {
6196 gcc_assert (rd1->refcount == 1
6197 && rd2->refcount == 1);
6198 gcc_assert (!rd1->next_duplicate && !rd2->next_duplicate);
6199 }
6200 else if (rd1)
6201 return false;
6202 else if (rd2)
6203 return false;
6204 }
6205 break;
6206 case IPA_JF_PASS_THROUGH:
6207 {
6208 ipa_pass_through_data *ipt1 = &jf1->value.pass_through;
6209 ipa_pass_through_data *ipt2 = &jf2->value.pass_through;
6210 if (!ipa_agg_pass_through_jf_equivalent_p (ipt1, ipt2, false))
6211 return false;
6212 }
6213 break;
6214 case IPA_JF_ANCESTOR:
6215 {
6216 ipa_ancestor_jf_data *ia1 = &jf1->value.ancestor;
6217 ipa_ancestor_jf_data *ia2 = &jf2->value.ancestor;
6218
6219 if (ia1->formal_id != ia2->formal_id
6220 || ia1->agg_preserved != ia2->agg_preserved
6221 || ia1->keep_null != ia2->keep_null
6222 || ia1->offset != ia2->offset)
6223 return false;
6224 }
6225 break;
6226 default:
6227 gcc_unreachable ();
6228 }
6229
6230 if (((jf1->bits != nullptr) != (jf2->bits != nullptr))
6231 || (jf1->bits && ((jf1->bits->value != jf2->bits->value)
6232 || (jf1->bits->mask != jf2->bits->mask))))
6233 return false;
6234
6235 if (((jf1->m_vr != nullptr) != (jf2->m_vr != nullptr))
6236 || (jf1->m_vr && *jf1->m_vr != *jf2->m_vr))
6237 return false;
6238
6239 unsigned alen = vec_safe_length (jf1->agg.items);
6240 if (vec_safe_length (jf2->agg.items) != alen)
6241 return false;
6242
6243 if (!alen)
6244 return true;
6245
6246 if (jf1->agg.by_ref != jf2->agg.by_ref)
6247 return false;
6248
6249 for (unsigned i = 0 ; i < alen; i++)
6250 if (!ipa_agg_jump_functions_equivalent_p (&(*jf1->agg.items)[i],
6251 &(*jf2->agg.items)[i]))
6252 return false;
6253
6254 return true;
6255 }
6256
6257 /* Return true if OTHER describes the same aggregate value. */
6258 bool
6259 ipa_agg_value::equal_to (const ipa_agg_value &other)
6260 {
6261 return offset == other.offset
6262 && operand_equal_p (value, other.value, 0);
6263 }
6264
6265 /* Destructor also removing individual aggregate values. */
6266
6267 ipa_auto_call_arg_values::~ipa_auto_call_arg_values ()
6268 {
6269 ipa_release_agg_values (m_known_aggs, false);
6270 }
6271
6272
6273
6274 #include "gt-ipa-prop.h"
6275