1 /* Interprocedural analyses.
2 Copyright (C) 2005-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "alloc-pool.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "tree-streamer.h"
31 #include "cgraph.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "gimple-fold.h"
35 #include "tree-eh.h"
36 #include "calls.h"
37 #include "stor-layout.h"
38 #include "print-tree.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "gimple-walk.h"
43 #include "symbol-summary.h"
44 #include "ipa-prop.h"
45 #include "tree-cfg.h"
46 #include "tree-dfa.h"
47 #include "tree-inline.h"
48 #include "ipa-fnsummary.h"
49 #include "gimple-pretty-print.h"
50 #include "ipa-utils.h"
51 #include "dbgcnt.h"
52 #include "domwalk.h"
53 #include "builtins.h"
54 #include "tree-cfgcleanup.h"
55
56 /* Function summary where the parameter infos are actually stored. */
57 ipa_node_params_t *ipa_node_params_sum = NULL;
58
59 function_summary <ipcp_transformation *> *ipcp_transformation_sum = NULL;
60
61 /* Edge summary for IPA-CP edge information. */
62 ipa_edge_args_sum_t *ipa_edge_args_sum;
63
64 /* Traits for a hash table for reusing already existing ipa_bits. */
65
66 struct ipa_bit_ggc_hash_traits : public ggc_cache_remove <ipa_bits *>
67 {
68 typedef ipa_bits *value_type;
69 typedef ipa_bits *compare_type;
70 static hashval_t
71 hash (const ipa_bits *p)
72 {
73 hashval_t t = (hashval_t) p->value.to_shwi ();
74 return iterative_hash_host_wide_int (p->mask.to_shwi (), t);
75 }
76 static bool
77 equal (const ipa_bits *a, const ipa_bits *b)
78 {
79 return a->value == b->value && a->mask == b->mask;
80 }
81 static const bool empty_zero_p = true;
82 static void
83 mark_empty (ipa_bits *&p)
84 {
85 p = NULL;
86 }
87 static bool
88 is_empty (const ipa_bits *p)
89 {
90 return p == NULL;
91 }
92 static bool
93 is_deleted (const ipa_bits *p)
94 {
95 return p == reinterpret_cast<const ipa_bits *> (1);
96 }
97 static void
98 mark_deleted (ipa_bits *&p)
99 {
100 p = reinterpret_cast<ipa_bits *> (1);
101 }
102 };
103
104 /* Hash table to avoid repeated allocations of equal ipa_bits. */
105 static GTY ((cache)) hash_table<ipa_bit_ggc_hash_traits> *ipa_bits_hash_table;
106
107 /* Traits for a hash table for reusing value_ranges used for IPA. Note that
108 the equiv bitmap is not hashed and is expected to be NULL. */
109
110 struct ipa_vr_ggc_hash_traits : public ggc_cache_remove <value_range *>
111 {
112 typedef value_range *value_type;
113 typedef value_range *compare_type;
114 static hashval_t
115 hash (const value_range *p)
116 {
117 inchash::hash hstate (p->kind ());
118 inchash::add_expr (p->min (), hstate);
119 inchash::add_expr (p->max (), hstate);
120 return hstate.end ();
121 }
122 static bool
123 equal (const value_range *a, const value_range *b)
124 {
125 return (a->equal_p (*b)
126 && types_compatible_p (a->type (), b->type ()));
127 }
128 static const bool empty_zero_p = true;
129 static void
130 mark_empty (value_range *&p)
131 {
132 p = NULL;
133 }
134 static bool
135 is_empty (const value_range *p)
136 {
137 return p == NULL;
138 }
139 static bool
140 is_deleted (const value_range *p)
141 {
142 return p == reinterpret_cast<const value_range *> (1);
143 }
144 static void
145 mark_deleted (value_range *&p)
146 {
147 p = reinterpret_cast<value_range *> (1);
148 }
149 };
150
151 /* Hash table to avoid repeated allocations of equal value_ranges. */
152 static GTY ((cache)) hash_table<ipa_vr_ggc_hash_traits> *ipa_vr_hash_table;
153
154 /* Holders of ipa cgraph hooks: */
155 static struct cgraph_node_hook_list *function_insertion_hook_holder;
156
157 /* Description of a reference to an IPA constant. */
158 struct ipa_cst_ref_desc
159 {
160 /* Edge that corresponds to the statement which took the reference. */
161 struct cgraph_edge *cs;
162 /* Linked list of duplicates created when call graph edges are cloned. */
163 struct ipa_cst_ref_desc *next_duplicate;
164 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
165 is out of control. */
166 int refcount;
167 };
168
169 /* Allocation pool for reference descriptions. */
170
171 static object_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
172 ("IPA-PROP ref descriptions");
173
174 /* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
175 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
176
177 static bool
178 ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
179 {
180 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
181
182 if (!fs_opts)
183 return false;
184 return !opt_for_fn (node->decl, optimize) || !opt_for_fn (node->decl, flag_ipa_cp);
185 }
186
187 /* Return the index of the formal parameter whose tree is PTREE in the function
188 described by DESCRIPTORS, or -1 if there is no such parameter. */
189
190 static int
191 ipa_get_param_decl_index_1 (vec<ipa_param_descriptor, va_gc> *descriptors,
192 tree ptree)
193 {
194 int i, count;
195
196 count = vec_safe_length (descriptors);
197 for (i = 0; i < count; i++)
198 if ((*descriptors)[i].decl_or_type == ptree)
199 return i;
200
201 return -1;
202 }
203
204 /* Return index of the formal whose tree is PTREE in function which corresponds
205 to INFO. */
206
207 int
208 ipa_get_param_decl_index (class ipa_node_params *info, tree ptree)
209 {
210 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
211 }
212
213 /* Populate the param_decl field in parameter DESCRIPTORS that correspond to
214 NODE. */
215
216 static void
217 ipa_populate_param_decls (struct cgraph_node *node,
218 vec<ipa_param_descriptor, va_gc> &descriptors)
219 {
220 tree fndecl;
221 tree fnargs;
222 tree parm;
223 int param_num;
224
225 fndecl = node->decl;
226 gcc_assert (gimple_has_body_p (fndecl));
227 fnargs = DECL_ARGUMENTS (fndecl);
228 param_num = 0;
229 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
230 {
231 descriptors[param_num].decl_or_type = parm;
232 unsigned int cost = estimate_move_cost (TREE_TYPE (parm), true);
233 descriptors[param_num].move_cost = cost;
234 /* Watch overflow, move_cost is a bitfield. */
235 gcc_checking_assert (cost == descriptors[param_num].move_cost);
236 param_num++;
237 }
238 }
239
240 /* Return how many formal parameters FNDECL has. */
241
242 int
243 count_formal_params (tree fndecl)
244 {
245 tree parm;
246 int count = 0;
247 gcc_assert (gimple_has_body_p (fndecl));
248
249 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
250 count++;
251
252 return count;
253 }
254
255 /* Dump the Ith formal parameter of the function corresponding to INFO to FILE,
256 printing its declaration if it is known. Note there is no setter function as
257 the descriptor array is built just once using ipa_initialize_node_params. */
258
259 void
260 ipa_dump_param (FILE *file, class ipa_node_params *info, int i)
261 {
262 fprintf (file, "param #%i", i);
263 if ((*info->descriptors)[i].decl_or_type)
264 {
265 fprintf (file, " ");
266 print_generic_expr (file, (*info->descriptors)[i].decl_or_type);
267 }
268 }
269
270 /* If necessary, allocate vector of parameter descriptors in info of NODE.
271 Return true if they were allocated, false if not. */
272
273 static bool
274 ipa_alloc_node_params (struct cgraph_node *node, int param_count)
275 {
276 class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);
277
278 if (!info->descriptors && param_count)
279 {
280 vec_safe_grow_cleared (info->descriptors, param_count);
281 return true;
282 }
283 else
284 return false;
285 }
286
287 /* Initialize the ipa_node_params structure associated with NODE by counting
288 the function parameters, creating the descriptors and populating their
289 param_decls. */
290
291 void
292 ipa_initialize_node_params (struct cgraph_node *node)
293 {
294 class ipa_node_params *info = IPA_NODE_REF_GET_CREATE (node);
295
296 if (!info->descriptors
297 && ipa_alloc_node_params (node, count_formal_params (node->decl)))
298 ipa_populate_param_decls (node, *info->descriptors);
299 }
300
301 /* Print the jump functions associated with call graph edge CS to file F. */
302
303 static void
304 ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
305 {
306 int i, count;
307
308 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
309 for (i = 0; i < count; i++)
310 {
311 struct ipa_jump_func *jump_func;
312 enum jump_func_type type;
313
314 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
315 type = jump_func->type;
316
317 fprintf (f, " param %d: ", i);
318 if (type == IPA_JF_UNKNOWN)
319 fprintf (f, "UNKNOWN\n");
320 else if (type == IPA_JF_CONST)
321 {
322 tree val = jump_func->value.constant.value;
323 fprintf (f, "CONST: ");
324 print_generic_expr (f, val);
325 if (TREE_CODE (val) == ADDR_EXPR
326 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
327 {
328 fprintf (f, " -> ");
329 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)));
330 }
331 fprintf (f, "\n");
332 }
333 else if (type == IPA_JF_PASS_THROUGH)
334 {
335 fprintf (f, "PASS THROUGH: ");
336 fprintf (f, "%d, op %s",
337 jump_func->value.pass_through.formal_id,
338 get_tree_code_name(jump_func->value.pass_through.operation));
339 if (jump_func->value.pass_through.operation != NOP_EXPR)
340 {
341 fprintf (f, " ");
342 print_generic_expr (f, jump_func->value.pass_through.operand);
343 }
344 if (jump_func->value.pass_through.agg_preserved)
345 fprintf (f, ", agg_preserved");
346 fprintf (f, "\n");
347 }
348 else if (type == IPA_JF_ANCESTOR)
349 {
350 fprintf (f, "ANCESTOR: ");
351 fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
352 jump_func->value.ancestor.formal_id,
353 jump_func->value.ancestor.offset);
354 if (jump_func->value.ancestor.agg_preserved)
355 fprintf (f, ", agg_preserved");
356 if (jump_func->value.ancestor.keep_null)
357 fprintf (f, ", keep_null");
358 fprintf (f, "\n");
359 }
360
361 if (jump_func->agg.items)
362 {
363 struct ipa_agg_jf_item *item;
364 int j;
365
366 fprintf (f, " Aggregate passed by %s:\n",
367 jump_func->agg.by_ref ? "reference" : "value");
368 FOR_EACH_VEC_ELT (*jump_func->agg.items, j, item)
369 {
370 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
371 item->offset);
372 fprintf (f, "type: ");
373 print_generic_expr (f, item->type);
374 fprintf (f, ", ");
375 if (item->jftype == IPA_JF_PASS_THROUGH)
376 fprintf (f, "PASS THROUGH: %d,",
377 item->value.pass_through.formal_id);
378 else if (item->jftype == IPA_JF_LOAD_AGG)
379 {
380 fprintf (f, "LOAD AGG: %d",
381 item->value.pass_through.formal_id);
382 fprintf (f, " [offset: " HOST_WIDE_INT_PRINT_DEC ", by %s],",
383 item->value.load_agg.offset,
384 item->value.load_agg.by_ref ? "reference"
385 : "value");
386 }
387
388 if (item->jftype == IPA_JF_PASS_THROUGH
389 || item->jftype == IPA_JF_LOAD_AGG)
390 {
391 fprintf (f, " op %s",
392 get_tree_code_name (item->value.pass_through.operation));
393 if (item->value.pass_through.operation != NOP_EXPR)
394 {
395 fprintf (f, " ");
396 print_generic_expr (f, item->value.pass_through.operand);
397 }
398 }
399 else if (item->jftype == IPA_JF_CONST)
400 {
401 fprintf (f, "CONST: ");
402 print_generic_expr (f, item->value.constant);
403 }
404 else if (item->jftype == IPA_JF_UNKNOWN)
405 fprintf (f, "UNKNOWN: " HOST_WIDE_INT_PRINT_DEC " bits",
406 tree_to_uhwi (TYPE_SIZE (item->type)));
407 fprintf (f, "\n");
408 }
409 }
410
411 class ipa_polymorphic_call_context *ctx
412 = ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
413 if (ctx && !ctx->useless_p ())
414 {
415 fprintf (f, " Context: ");
416 ctx->dump (dump_file);
417 }
418
419 if (jump_func->bits)
420 {
421 fprintf (f, " value: ");
422 print_hex (jump_func->bits->value, f);
423 fprintf (f, ", mask: ");
424 print_hex (jump_func->bits->mask, f);
425 fprintf (f, "\n");
426 }
427 else
428 fprintf (f, " Unknown bits\n");
429
430 if (jump_func->m_vr)
431 {
432 fprintf (f, " VR ");
433 fprintf (f, "%s[",
434 (jump_func->m_vr->kind () == VR_ANTI_RANGE) ? "~" : "");
435 print_decs (wi::to_wide (jump_func->m_vr->min ()), f);
436 fprintf (f, ", ");
437 print_decs (wi::to_wide (jump_func->m_vr->max ()), f);
438 fprintf (f, "]\n");
439 }
440 else
441 fprintf (f, " Unknown VR\n");
442 }
443 }
444
445
446 /* Print to file F the jump functions of all arguments on all call graph edges
447 going from NODE. */
448
449 void
450 ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
451 {
452 struct cgraph_edge *cs;
453
454 fprintf (f, " Jump functions of caller %s:\n", node->dump_name ());
455 for (cs = node->callees; cs; cs = cs->next_callee)
456 {
457
458 fprintf (f, " callsite %s -> %s : \n",
459 node->dump_name (),
460 cs->callee->dump_name ());
461 if (!ipa_edge_args_info_available_for_edge_p (cs))
462 fprintf (f, " no arg info\n");
463 else
464 ipa_print_node_jump_functions_for_edge (f, cs);
465 }
466
467 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
468 {
469 class cgraph_indirect_call_info *ii;
470
471 ii = cs->indirect_info;
472 if (ii->agg_contents)
473 fprintf (f, " indirect %s callsite, calling param %i, "
474 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
475 ii->member_ptr ? "member ptr" : "aggregate",
476 ii->param_index, ii->offset,
477 ii->by_ref ? "by reference" : "by_value");
478 else
479 fprintf (f, " indirect %s callsite, calling param %i, "
480 "offset " HOST_WIDE_INT_PRINT_DEC,
481 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
482 ii->offset);
483
484 if (cs->call_stmt)
485 {
486 fprintf (f, ", for stmt ");
487 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
488 }
489 else
490 fprintf (f, "\n");
491 if (ii->polymorphic)
492 ii->context.dump (f);
493 if (!ipa_edge_args_info_available_for_edge_p (cs))
494 fprintf (f, " no arg info\n");
495 else
496 ipa_print_node_jump_functions_for_edge (f, cs);
497 }
498 }
499
500 /* Print ipa_jump_func data structures of all nodes in the call graph to F. */
501
502 void
503 ipa_print_all_jump_functions (FILE *f)
504 {
505 struct cgraph_node *node;
506
507 fprintf (f, "\nJump functions:\n");
508 FOR_EACH_FUNCTION (node)
509 {
510 ipa_print_node_jump_functions (f, node);
511 }
512 }
513
514 /* Set JFUNC to be a jump function that knows nothing about the actual argument. */
515
516 static void
517 ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
518 {
519 jfunc->type = IPA_JF_UNKNOWN;
520 }
521
522 /* Set DST to be a copy of another constant jump function SRC (to be used by
523 the jump function combination code). The two functions will share their rdesc. */
524
525 static void
526 ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
527 struct ipa_jump_func *src)
528
529 {
530 gcc_checking_assert (src->type == IPA_JF_CONST);
531 dst->type = IPA_JF_CONST;
532 dst->value.constant = src->value.constant;
533 }
534
535 /* Set JFUNC to be a constant jump function. */
536
537 static void
538 ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
539 struct cgraph_edge *cs)
540 {
541 jfunc->type = IPA_JF_CONST;
542 jfunc->value.constant.value = unshare_expr_without_location (constant);
543
544 if (TREE_CODE (constant) == ADDR_EXPR
545 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
546 {
547 struct ipa_cst_ref_desc *rdesc;
548
549 rdesc = ipa_refdesc_pool.allocate ();
550 rdesc->cs = cs;
551 rdesc->next_duplicate = NULL;
552 rdesc->refcount = 1;
553 jfunc->value.constant.rdesc = rdesc;
554 }
555 else
556 jfunc->value.constant.rdesc = NULL;
557 }
558
559 /* Set JFUNC to be a simple pass-through jump function. */
560 static void
561 ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
562 bool agg_preserved)
563 {
564 jfunc->type = IPA_JF_PASS_THROUGH;
565 jfunc->value.pass_through.operand = NULL_TREE;
566 jfunc->value.pass_through.formal_id = formal_id;
567 jfunc->value.pass_through.operation = NOP_EXPR;
568 jfunc->value.pass_through.agg_preserved = agg_preserved;
569 }
570
571 /* Set JFUNC to be a unary pass-through jump function. */
572
573 static void
574 ipa_set_jf_unary_pass_through (struct ipa_jump_func *jfunc, int formal_id,
575 enum tree_code operation)
576 {
577 jfunc->type = IPA_JF_PASS_THROUGH;
578 jfunc->value.pass_through.operand = NULL_TREE;
579 jfunc->value.pass_through.formal_id = formal_id;
580 jfunc->value.pass_through.operation = operation;
581 jfunc->value.pass_through.agg_preserved = false;
582 }
583 /* Set JFUNC to be an arithmetic pass-through jump function. */
584
585 static void
586 ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
587 tree operand, enum tree_code operation)
588 {
589 jfunc->type = IPA_JF_PASS_THROUGH;
590 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
591 jfunc->value.pass_through.formal_id = formal_id;
592 jfunc->value.pass_through.operation = operation;
593 jfunc->value.pass_through.agg_preserved = false;
594 }
595
596 /* Set JFUNC to be an ancestor jump function. */
597
598 static void
599 ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
600 int formal_id, bool agg_preserved, bool keep_null)
601 {
602 jfunc->type = IPA_JF_ANCESTOR;
603 jfunc->value.ancestor.formal_id = formal_id;
604 jfunc->value.ancestor.offset = offset;
605 jfunc->value.ancestor.agg_preserved = agg_preserved;
606 jfunc->value.ancestor.keep_null = keep_null;
607 }
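/* As an illustration (a made-up example, not taken from any testcase), in

     foo (int a, struct S *p)
     {
       bar (a, a + 2, &p->f);
     }

   the call to bar would typically get a simple pass-through jump function
   for its first argument (formal_id 0, NOP_EXPR), an arithmetic
   pass-through for the second (formal_id 0, PLUS_EXPR with operand 2) and
   an ancestor jump function for the third (formal_id 1 with the bit offset
   of f), all created by the setters above.  */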
608
609 /* Get IPA BB information about the given BB. FBI is the context of analysis
610 of this function body. */
611
612 static struct ipa_bb_info *
613 ipa_get_bb_info (struct ipa_func_body_info *fbi, basic_block bb)
614 {
615 gcc_checking_assert (fbi);
616 return &fbi->bb_infos[bb->index];
617 }
618
619 /* Structure to be passed in between detect_type_change and
620 check_stmt_for_type_change. */
621
622 struct prop_type_change_info
623 {
624 /* Offset into the object where there is the virtual method pointer we are
625 looking for. */
626 HOST_WIDE_INT offset;
627 /* The declaration or SSA_NAME pointer of the base that we are checking for
628 type change. */
629 tree object;
630 /* Set to true if dynamic type change has been detected. */
631 bool type_maybe_changed;
632 };
633
634 /* Return true if STMT can modify a virtual method table pointer.
635
636 This function makes special assumptions about both constructors and
637 destructors which are all the functions that are allowed to alter the VMT
638 pointers. It assumes that destructors begin with assignment into all VMT
639 pointers and that constructors essentially look as follows:
640
641 1) The very first thing they do is that they call constructors of ancestor
642 sub-objects that have them.
643
644 2) Then the VMT pointers of this object and all its ancestors are set to new
645 values corresponding to the type of the constructor.
646
647 3) Only afterwards, other stuff such as constructor of member sub-objects
648 and the code written by the user is run. Only this may include calling
649 virtual functions, directly or indirectly.
650
651 There is no way to call a constructor of an ancestor sub-object in any
652 other way.
653
654 This means that we do not have to care whether constructors get the correct
655 type information because they will always change it (in fact, if we define
656 the type to be given by the VMT pointer, it is undefined).
657
658 The most important fact to derive from the above is that if, for some
659 statement in section 3, we try to detect whether the dynamic type has
660 changed, we can safely ignore all calls as we examine the function body
661 backwards until we reach statements in section 2 because these calls cannot
662 be ancestor constructors or destructors (if the input is not bogus) and so
663 do not change the dynamic type (this holds true only for automatically
664 allocated objects but at the moment we devirtualize only these). We then
665 must detect that statements in section 2 change the dynamic type and can try
666 to derive the new type. That is enough and we can stop, we will never see
667 the calls into constructors of sub-objects in this code. Therefore we can
668 safely ignore all call statements that we traverse.
669 */
670
671 static bool
672 stmt_may_be_vtbl_ptr_store (gimple *stmt)
673 {
674 if (is_gimple_call (stmt))
675 return false;
676 if (gimple_clobber_p (stmt))
677 return false;
678 else if (is_gimple_assign (stmt))
679 {
680 tree lhs = gimple_assign_lhs (stmt);
681
682 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
683 {
684 if (flag_strict_aliasing
685 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
686 return false;
687
688 if (TREE_CODE (lhs) == COMPONENT_REF
689 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
690 return false;
691 /* In the future we might want to use get_ref_base_and_extent to find
692 if there is a field corresponding to the offset and if so, proceed
693 almost like if it was a component ref. */
694 }
695 }
696 return true;
697 }
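/* For illustration only, a VMT pointer store emitted for a constructor
   usually looks something like the following GIMPLE (made-up names):

     this_2(D)->_vptr.A = &MEM[(void *)&_ZTV1A + 16B];

   i.e. an assignment whose lhs is a non-aggregate, pointer-typed
   COMPONENT_REF of a FIELD_DECL with DECL_VIRTUAL_P set, so it is not
   filtered out by any of the checks above and the function returns true
   for it.  */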
698
699 /* Callback of walk_aliased_vdefs and a helper function for detect_type_change
700 to check whether a particular statement may modify the virtual table
701 pointer. It stores its result into DATA, which points to a
702 prop_type_change_info structure. */
703
704 static bool
705 check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
706 {
707 gimple *stmt = SSA_NAME_DEF_STMT (vdef);
708 struct prop_type_change_info *tci = (struct prop_type_change_info *) data;
709
710 if (stmt_may_be_vtbl_ptr_store (stmt))
711 {
712 tci->type_maybe_changed = true;
713 return true;
714 }
715 else
716 return false;
717 }
718
719 /* See if ARG is a PARM_DECL describing an instance passed by pointer
720 or reference in FUNCTION. Return true if the dynamic type may change
721 between the beginning of the function and the point where CALL is invoked.
722
723 Generally functions are not allowed to change the type of such instances,
724 but they can call destructors. We assume that methods cannot destroy the
725 THIS pointer. Also, as a special case, constructors and destructors may
726 change the type of the THIS pointer. */
727
728 static bool
729 param_type_may_change_p (tree function, tree arg, gimple *call)
730 {
731 /* Pure functions cannot change the dynamic type;
732 that requires writing to memory. */
733 if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
734 return false;
735 /* We need to check if we are within an inlined constructor
736 or destructor (ideally we would have a way to check that the
737 inlined cdtor is actually working on ARG, but we don't have
738 an easy way to tie this together, so punt on all non-pure cdtors).
739 We may also record the types of cdtors and, once we know the type
740 of the instance, match them.
741
742 Also code unification optimizations may merge calls from
743 different blocks making return values unreliable. So
744 do nothing during late optimization. */
745 if (DECL_STRUCT_FUNCTION (function)->after_inlining)
746 return true;
747 if (TREE_CODE (arg) == SSA_NAME
748 && SSA_NAME_IS_DEFAULT_DEF (arg)
749 && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
750 {
751 /* Normal (non-THIS) argument. */
752 if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
753 || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
754 /* THIS pointer of a method - here we want to watch constructors
755 and destructors as those definitely may change the dynamic
756 type. */
757 || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
758 && !DECL_CXX_CONSTRUCTOR_P (function)
759 && !DECL_CXX_DESTRUCTOR_P (function)
760 && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
761 {
762 /* Walk the inline stack and watch out for ctors/dtors. */
763 for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
764 block = BLOCK_SUPERCONTEXT (block))
765 if (inlined_polymorphic_ctor_dtor_block_p (block, false))
766 return true;
767 return false;
768 }
769 }
770 return true;
771 }
772
773 /* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
774 callsite CALL) by looking for assignments to its virtual table pointer. If
775 it has, return true. ARG is the object itself (not a pointer
776 to it, unless dereferenced). BASE is the base of the memory access as
777 returned by get_ref_base_and_extent, as is the offset.
778
779 This is a helper function for detect_type_change and detect_type_change_ssa
780 that does the heavy work, which is usually unnecessary. */
781
782 static bool
783 detect_type_change_from_memory_writes (ipa_func_body_info *fbi, tree arg,
784 tree base, tree comp_type, gcall *call,
785 HOST_WIDE_INT offset)
786 {
787 struct prop_type_change_info tci;
788 ao_ref ao;
789
790 gcc_checking_assert (DECL_P (arg)
791 || TREE_CODE (arg) == MEM_REF
792 || handled_component_p (arg));
793
794 comp_type = TYPE_MAIN_VARIANT (comp_type);
795
796 /* Const calls cannot call virtual methods through VMT and so type changes do
797 not matter. */
798 if (!flag_devirtualize || !gimple_vuse (call)
799 /* Be sure expected_type is polymorphic. */
800 || !comp_type
801 || TREE_CODE (comp_type) != RECORD_TYPE
802 || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
803 || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
804 return true;
805
806 ao_ref_init (&ao, arg);
807 ao.base = base;
808 ao.offset = offset;
809 ao.size = POINTER_SIZE;
810 ao.max_size = ao.size;
811
812 tci.offset = offset;
813 tci.object = get_base_address (arg);
814 tci.type_maybe_changed = false;
815
816 int walked
817 = walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
818 &tci, NULL, NULL, fbi->aa_walk_budget + 1);
819
820 if (walked >= 0 && !tci.type_maybe_changed)
821 return false;
822
823 return true;
824 }
825
826 /* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
827 If so, return true. ARG is the object itself (not a pointer
828 to it, unless dereferenced). BASE is the base of the memory access as
829 returned by get_ref_base_and_extent, as is the offset. */
830
831 static bool
832 detect_type_change (ipa_func_body_info *fbi, tree arg, tree base,
833 tree comp_type, gcall *call,
834 HOST_WIDE_INT offset)
835 {
836 if (!flag_devirtualize)
837 return false;
838
839 if (TREE_CODE (base) == MEM_REF
840 && !param_type_may_change_p (current_function_decl,
841 TREE_OPERAND (base, 0),
842 call))
843 return false;
844 return detect_type_change_from_memory_writes (fbi, arg, base, comp_type,
845 call, offset);
846 }
847
848 /* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
849 SSA name (its dereference will become the base and the offset is assumed to
850 be zero). */
851
852 static bool
853 detect_type_change_ssa (ipa_func_body_info *fbi, tree arg, tree comp_type,
854 gcall *call)
855 {
856 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
857 if (!flag_devirtualize
858 || !POINTER_TYPE_P (TREE_TYPE (arg)))
859 return false;
860
861 if (!param_type_may_change_p (current_function_decl, arg, call))
862 return false;
863
864 arg = build2 (MEM_REF, ptr_type_node, arg,
865 build_int_cst (ptr_type_node, 0));
866
867 return detect_type_change_from_memory_writes (fbi, arg, arg, comp_type,
868 call, 0);
869 }
870
871 /* Callback of walk_aliased_vdefs. Flags that it has been invoked by setting
872 the boolean variable pointed to by DATA to true. */
873
874 static bool
875 mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
876 void *data)
877 {
878 bool *b = (bool *) data;
879 *b = true;
880 return true;
881 }
882
883 /* Find the nearest valid aa status for parameter specified by INDEX that
884 dominates BB. */
885
886 static struct ipa_param_aa_status *
887 find_dominating_aa_status (struct ipa_func_body_info *fbi, basic_block bb,
888 int index)
889 {
890 while (true)
891 {
892 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
893 if (!bb)
894 return NULL;
895 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
896 if (!bi->param_aa_statuses.is_empty ()
897 && bi->param_aa_statuses[index].valid)
898 return &bi->param_aa_statuses[index];
899 }
900 }
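/* For example, if BB3 is dominated by BB2 which is in turn dominated by
   BB1, and only BB1 has recorded a valid aa status for parameter INDEX, a
   query for BB3 walks up the dominator tree through BB2 and returns the
   status stored for BB1 (or NULL once the walk runs out of dominators).
   The block names here are purely illustrative.  */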
901
902 /* Get AA status structure for the given BB and parameter with INDEX. Allocate
903 structures and/or initialize the result with a dominating description as
904 necessary. */
905
906 static struct ipa_param_aa_status *
907 parm_bb_aa_status_for_bb (struct ipa_func_body_info *fbi, basic_block bb,
908 int index)
909 {
910 gcc_checking_assert (fbi);
911 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
912 if (bi->param_aa_statuses.is_empty ())
913 bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
914 struct ipa_param_aa_status *paa = &bi->param_aa_statuses[index];
915 if (!paa->valid)
916 {
917 gcc_checking_assert (!paa->parm_modified
918 && !paa->ref_modified
919 && !paa->pt_modified);
920 struct ipa_param_aa_status *dom_paa;
921 dom_paa = find_dominating_aa_status (fbi, bb, index);
922 if (dom_paa)
923 *paa = *dom_paa;
924 else
925 paa->valid = true;
926 }
927
928 return paa;
929 }
930
931 /* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
932 a value known not to be modified in this function before reaching the
933 statement STMT. FBI holds information about the function gathered so far
934 that does not survive the summary building stage. */
935
936 static bool
937 parm_preserved_before_stmt_p (struct ipa_func_body_info *fbi, int index,
938 gimple *stmt, tree parm_load)
939 {
940 struct ipa_param_aa_status *paa;
941 bool modified = false;
942 ao_ref refd;
943
944 tree base = get_base_address (parm_load);
945 gcc_assert (TREE_CODE (base) == PARM_DECL);
946 if (TREE_READONLY (base))
947 return true;
948
949 gcc_checking_assert (fbi);
950 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
951 if (paa->parm_modified)
952 return false;
953
954 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
955 ao_ref_init (&refd, parm_load);
956 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
957 &modified, NULL, NULL,
958 fbi->aa_walk_budget + 1);
959 if (walked < 0)
960 {
961 modified = true;
962 if (fbi)
963 fbi->aa_walk_budget = 0;
964 }
965 else if (fbi)
966 fbi->aa_walk_budget -= walked;
967 if (paa && modified)
968 paa->parm_modified = true;
969 return !modified;
970 }
971
972 /* If STMT is an assignment that loads a value from a parameter declaration,
973 return the index of the parameter in ipa_node_params, provided it has not
974 been modified. Otherwise return -1. */
975
976 static int
977 load_from_unmodified_param (struct ipa_func_body_info *fbi,
978 vec<ipa_param_descriptor, va_gc> *descriptors,
979 gimple *stmt)
980 {
981 int index;
982 tree op1;
983
984 if (!gimple_assign_single_p (stmt))
985 return -1;
986
987 op1 = gimple_assign_rhs1 (stmt);
988 if (TREE_CODE (op1) != PARM_DECL)
989 return -1;
990
991 index = ipa_get_param_decl_index_1 (descriptors, op1);
992 if (index < 0
993 || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
994 return -1;
995
996 return index;
997 }
998
999 /* Return true if memory reference REF (which must be a load through parameter
1000 with INDEX) loads data that are known to be unmodified in this function
1001 before reaching statement STMT. */
1002
1003 static bool
1004 parm_ref_data_preserved_p (struct ipa_func_body_info *fbi,
1005 int index, gimple *stmt, tree ref)
1006 {
1007 struct ipa_param_aa_status *paa;
1008 bool modified = false;
1009 ao_ref refd;
1010
1011 gcc_checking_assert (fbi);
1012 paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
1013 if (paa->ref_modified)
1014 return false;
1015
1016 gcc_checking_assert (gimple_vuse (stmt));
1017 ao_ref_init (&refd, ref);
1018 int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
1019 &modified, NULL, NULL,
1020 fbi->aa_walk_budget + 1);
1021 if (walked < 0)
1022 {
1023 modified = true;
1024 fbi->aa_walk_budget = 0;
1025 }
1026 else
1027 fbi->aa_walk_budget -= walked;
1028 if (modified)
1029 paa->ref_modified = true;
1030 return !modified;
1031 }
1032
1033 /* Return true if the data pointed to by PARM (which is a parameter with INDEX)
1034 is known to be unmodified in this function before reaching call statement
1035 CALL into which it is passed. FBI describes the function body. */
1036
1037 static bool
1038 parm_ref_data_pass_through_p (struct ipa_func_body_info *fbi, int index,
1039 gimple *call, tree parm)
1040 {
1041 bool modified = false;
1042 ao_ref refd;
1043
1044 /* It's unnecessary to calculate anything about memory contents for a const
1045 function because it is not going to use it. But do not cache the result
1046 either. Also, no such calculations for non-pointers. */
1047 if (!gimple_vuse (call)
1048 || !POINTER_TYPE_P (TREE_TYPE (parm)))
1049 return false;
1050
1051 struct ipa_param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
1052 gimple_bb (call),
1053 index);
1054 if (paa->pt_modified)
1055 return false;
1056
1057 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
1058 int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
1059 &modified, NULL, NULL,
1060 fbi->aa_walk_budget + 1);
1061 if (walked < 0)
1062 {
1063 fbi->aa_walk_budget = 0;
1064 modified = true;
1065 }
1066 else
1067 fbi->aa_walk_budget -= walked;
1068 if (modified)
1069 paa->pt_modified = true;
1070 return !modified;
1071 }
1072
1073 /* Return true if we can prove that OP is a memory reference loading
1074 data from an aggregate passed as a parameter.
1075
1076 The function works in two modes. If GUARANTEED_UNMODIFIED is NULL, it returns
1077 false if it cannot prove that the value has not been modified before the
1078 load in STMT. If GUARANTEED_UNMODIFIED is not NULL, it will return true even
1079 if it cannot prove the value has not been modified, in that case it will
1080 store false to *GUARANTEED_UNMODIFIED, otherwise it will store true there.
1081
1082 INFO and PARMS_AINFO describe parameters of the current function (but the
1083 latter can be NULL), STMT is the load statement. If function returns true,
1084 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1085 within the aggregate and whether it is a load from a value passed by
1086 reference respectively. */
1087
1088 bool
1089 ipa_load_from_parm_agg (struct ipa_func_body_info *fbi,
1090 vec<ipa_param_descriptor, va_gc> *descriptors,
1091 gimple *stmt, tree op, int *index_p,
1092 HOST_WIDE_INT *offset_p, poly_int64 *size_p,
1093 bool *by_ref_p, bool *guaranteed_unmodified)
1094 {
1095 int index;
1096 HOST_WIDE_INT size;
1097 bool reverse;
1098 tree base = get_ref_base_and_extent_hwi (op, offset_p, &size, &reverse);
1099
1100 if (!base)
1101 return false;
1102
1103 /* We cannot propagate across volatile loads. */
1104 if (TREE_THIS_VOLATILE (op))
1105 return false;
1106
1107 if (DECL_P (base))
1108 {
1109 int index = ipa_get_param_decl_index_1 (descriptors, base);
1110 if (index >= 0
1111 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
1112 {
1113 *index_p = index;
1114 *by_ref_p = false;
1115 if (size_p)
1116 *size_p = size;
1117 if (guaranteed_unmodified)
1118 *guaranteed_unmodified = true;
1119 return true;
1120 }
1121 return false;
1122 }
1123
1124 if (TREE_CODE (base) != MEM_REF
1125 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1126 || !integer_zerop (TREE_OPERAND (base, 1)))
1127 return false;
1128
1129 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1130 {
1131 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
1132 index = ipa_get_param_decl_index_1 (descriptors, parm);
1133 }
1134 else
1135 {
1136 /* This branch catches situations where a pointer parameter is not a
1137 gimple register, for example:
1138
1139 void hip7(S*) (struct S * p)
1140 {
1141 void (*<T2e4>) (struct S *) D.1867;
1142 struct S * p.1;
1143
1144 <bb 2>:
1145 p.1_1 = p;
1146 D.1867_2 = p.1_1->f;
1147 D.1867_2 ();
1148 gdp = &p;
1149 */
1150
1151 gimple *def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
1152 index = load_from_unmodified_param (fbi, descriptors, def);
1153 }
1154
1155 if (index >= 0)
1156 {
1157 bool data_preserved = parm_ref_data_preserved_p (fbi, index, stmt, op);
1158 if (!data_preserved && !guaranteed_unmodified)
1159 return false;
1160
1161 *index_p = index;
1162 *by_ref_p = true;
1163 if (size_p)
1164 *size_p = size;
1165 if (guaranteed_unmodified)
1166 *guaranteed_unmodified = data_preserved;
1167 return true;
1168 }
1169 return false;
1170 }
1171
1172 /* If STMT is an assignment that loads a value from a parameter declaration,
1173 or from an aggregate passed as the parameter either by value or reference,
1174 return the index of the parameter in ipa_node_params. Otherwise return -1.
1175
1176 FBI holds gathered information about the function. INFO describes
1177 parameters of the function, STMT is the assignment statement. If it is a
1178 memory load from an aggregate, *OFFSET_P is filled with offset within the
1179 aggregate, and *BY_REF_P specifies whether the aggregate is passed by
1180 reference. */
1181
1182 static int
1183 load_from_unmodified_param_or_agg (struct ipa_func_body_info *fbi,
1184 class ipa_node_params *info,
1185 gimple *stmt,
1186 HOST_WIDE_INT *offset_p,
1187 bool *by_ref_p)
1188 {
1189 int index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1190 poly_int64 size;
1191
1192 /* Load value from a parameter declaration. */
1193 if (index >= 0)
1194 {
1195 *offset_p = -1;
1196 return index;
1197 }
1198
1199 if (!gimple_assign_load_p (stmt))
1200 return -1;
1201
1202 tree rhs = gimple_assign_rhs1 (stmt);
1203
1204 /* Skip memory reference containing VIEW_CONVERT_EXPR. */
1205 for (tree t = rhs; handled_component_p (t); t = TREE_OPERAND (t, 0))
1206 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
1207 return -1;
1208
1209 /* Skip memory reference containing bit-field. */
1210 if (TREE_CODE (rhs) == BIT_FIELD_REF
1211 || contains_bitfld_component_ref_p (rhs))
1212 return -1;
1213
1214 if (!ipa_load_from_parm_agg (fbi, info->descriptors, stmt, rhs, &index,
1215 offset_p, &size, by_ref_p))
1216 return -1;
1217
1218 gcc_assert (!maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (rhs))),
1219 size));
1220 if (!*by_ref_p)
1221 {
1222 tree param_type = ipa_get_type (info, index);
1223
1224 if (!param_type || !AGGREGATE_TYPE_P (param_type))
1225 return -1;
1226 }
1227 else if (TREE_THIS_VOLATILE (rhs))
1228 return -1;
1229
1230 return index;
1231 }
1232
1233 /* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
1234 of an assignment statement STMT, try to determine whether we are actually
1235 handling any of the following cases and construct an appropriate jump
1236 function into JFUNC if so:
1237
1238 1) The passed value is loaded from a formal parameter which is not a gimple
1239 register (most probably because it is addressable, the value has to be
1240 scalar) and we can guarantee the value has not changed. This case can
1241 therefore be described by a simple pass-through jump function. For example:
1242
1243 foo (int a)
1244 {
1245 int a.0;
1246
1247 a.0_2 = a;
1248 bar (a.0_2);
1249
1250 2) The passed value can be described by a simple arithmetic pass-through
1251 jump function. E.g.
1252
1253 foo (int a)
1254 {
1255 int D.2064;
1256
1257 D.2064_4 = a.1(D) + 4;
1258 bar (D.2064_4);
1259
1260 This case can also occur in combination of the previous one, e.g.:
1261
1262 foo (int a, int z)
1263 {
1264 int a.0;
1265 int D.2064;
1266
1267 a.0_3 = a;
1268 D.2064_4 = a.0_3 + 4;
1269 foo (D.2064_4);
1270
1271 3) The passed value is an address of an object within another one (which
1272 also passed by reference). Such situations are described by an ancestor
1273 jump function and describe situations such as:
1274
1275 B::foo() (struct B * const this)
1276 {
1277 struct A * D.1845;
1278
1279 D.1845_2 = &this_1(D)->D.1748;
1280 A::bar (D.1845_2);
1281
1282 INFO is the structure describing individual parameters that is accessed at
1283 different stages of IPA optimizations. PARMS_AINFO contains the information
1284 that is only needed for intraprocedural analysis. */
1285
1286 static void
1287 compute_complex_assign_jump_func (struct ipa_func_body_info *fbi,
1288 class ipa_node_params *info,
1289 struct ipa_jump_func *jfunc,
1290 gcall *call, gimple *stmt, tree name,
1291 tree param_type)
1292 {
1293 HOST_WIDE_INT offset, size;
1294 tree op1, tc_ssa, base, ssa;
1295 bool reverse;
1296 int index;
1297
1298 op1 = gimple_assign_rhs1 (stmt);
1299
1300 if (TREE_CODE (op1) == SSA_NAME)
1301 {
1302 if (SSA_NAME_IS_DEFAULT_DEF (op1))
1303 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
1304 else
1305 index = load_from_unmodified_param (fbi, info->descriptors,
1306 SSA_NAME_DEF_STMT (op1));
1307 tc_ssa = op1;
1308 }
1309 else
1310 {
1311 index = load_from_unmodified_param (fbi, info->descriptors, stmt);
1312 tc_ssa = gimple_assign_lhs (stmt);
1313 }
1314
1315 if (index >= 0)
1316 {
1317 switch (gimple_assign_rhs_class (stmt))
1318 {
1319 case GIMPLE_BINARY_RHS:
1320 {
1321 tree op2 = gimple_assign_rhs2 (stmt);
1322 if (!is_gimple_ip_invariant (op2)
1323 || ((TREE_CODE_CLASS (gimple_assign_rhs_code (stmt))
1324 != tcc_comparison)
1325 && !useless_type_conversion_p (TREE_TYPE (name),
1326 TREE_TYPE (op1))))
1327 return;
1328
1329 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1330 gimple_assign_rhs_code (stmt));
1331 break;
1332 }
1333 case GIMPLE_SINGLE_RHS:
1334 {
1335 bool agg_p = parm_ref_data_pass_through_p (fbi, index, call,
1336 tc_ssa);
1337 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1338 break;
1339 }
1340 case GIMPLE_UNARY_RHS:
1341 if (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt)))
1342 ipa_set_jf_unary_pass_through (jfunc, index,
1343 gimple_assign_rhs_code (stmt));
1344 default:;
1345 }
1346 return;
1347 }
1348
1349 if (TREE_CODE (op1) != ADDR_EXPR)
1350 return;
1351 op1 = TREE_OPERAND (op1, 0);
1352 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
1353 return;
1354 base = get_ref_base_and_extent_hwi (op1, &offset, &size, &reverse);
1355 offset_int mem_offset;
1356 if (!base
1357 || TREE_CODE (base) != MEM_REF
1358 || !mem_ref_offset (base).is_constant (&mem_offset))
1359 return;
1360 offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1361 ssa = TREE_OPERAND (base, 0);
1362 if (TREE_CODE (ssa) != SSA_NAME
1363 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
1364 || offset < 0)
1365 return;
1366
1367 /* Dynamic types are changed in constructors and destructors. */
1368 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
1369 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
1370 ipa_set_ancestor_jf (jfunc, offset, index,
1371 parm_ref_data_pass_through_p (fbi, index, call, ssa),
1372 false);
1373 }
1374
1375 /* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1376 it looks like:
1377
1378 iftmp.1_3 = &obj_2(D)->D.1762;
1379
1380 The base of the MEM_REF must be a default definition SSA NAME of a
1381 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1382 whole MEM_REF expression is returned and the offset calculated from any
1383 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1384 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1385
1386 static tree
1387 get_ancestor_addr_info (gimple *assign, tree *obj_p, HOST_WIDE_INT *offset)
1388 {
1389 HOST_WIDE_INT size;
1390 tree expr, parm, obj;
1391 bool reverse;
1392
1393 if (!gimple_assign_single_p (assign))
1394 return NULL_TREE;
1395 expr = gimple_assign_rhs1 (assign);
1396
1397 if (TREE_CODE (expr) != ADDR_EXPR)
1398 return NULL_TREE;
1399 expr = TREE_OPERAND (expr, 0);
1400 obj = expr;
1401 expr = get_ref_base_and_extent_hwi (expr, offset, &size, &reverse);
1402
1403 offset_int mem_offset;
1404 if (!expr
1405 || TREE_CODE (expr) != MEM_REF
1406 || !mem_ref_offset (expr).is_constant (&mem_offset))
1407 return NULL_TREE;
1408 parm = TREE_OPERAND (expr, 0);
1409 if (TREE_CODE (parm) != SSA_NAME
1410 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1411 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1412 return NULL_TREE;
1413
1414 *offset += mem_offset.to_short_addr () * BITS_PER_UNIT;
1415 *obj_p = obj;
1416 return expr;
1417 }
1418
1419
1420 /* Given that an actual argument is an SSA_NAME that is a result of a phi
1421 statement PHI, try to find out whether NAME is in fact a
1422 multiple-inheritance typecast from a descendant into an ancestor of a formal
1423 parameter and thus can be described by an ancestor jump function and if so,
1424 write the appropriate function into JFUNC.
1425
1426 Essentially we want to match the following pattern:
1427
1428 if (obj_2(D) != 0B)
1429 goto <bb 3>;
1430 else
1431 goto <bb 4>;
1432
1433 <bb 3>:
1434 iftmp.1_3 = &obj_2(D)->D.1762;
1435
1436 <bb 4>:
1437 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1438 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1439 return D.1879_6; */
1440
1441 static void
1442 compute_complex_ancestor_jump_func (struct ipa_func_body_info *fbi,
1443 class ipa_node_params *info,
1444 struct ipa_jump_func *jfunc,
1445 gcall *call, gphi *phi)
1446 {
1447 HOST_WIDE_INT offset;
1448 gimple *assign, *cond;
1449 basic_block phi_bb, assign_bb, cond_bb;
1450 tree tmp, parm, expr, obj;
1451 int index, i;
1452
1453 if (gimple_phi_num_args (phi) != 2)
1454 return;
1455
1456 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1457 tmp = PHI_ARG_DEF (phi, 0);
1458 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1459 tmp = PHI_ARG_DEF (phi, 1);
1460 else
1461 return;
1462 if (TREE_CODE (tmp) != SSA_NAME
1463 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1464 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1465 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1466 return;
1467
1468 assign = SSA_NAME_DEF_STMT (tmp);
1469 assign_bb = gimple_bb (assign);
1470 if (!single_pred_p (assign_bb))
1471 return;
1472 expr = get_ancestor_addr_info (assign, &obj, &offset);
1473 if (!expr)
1474 return;
1475 parm = TREE_OPERAND (expr, 0);
1476 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
1477 if (index < 0)
1478 return;
1479
1480 cond_bb = single_pred (assign_bb);
1481 cond = last_stmt (cond_bb);
1482 if (!cond
1483 || gimple_code (cond) != GIMPLE_COND
1484 || gimple_cond_code (cond) != NE_EXPR
1485 || gimple_cond_lhs (cond) != parm
1486 || !integer_zerop (gimple_cond_rhs (cond)))
1487 return;
1488
1489 phi_bb = gimple_bb (phi);
1490 for (i = 0; i < 2; i++)
1491 {
1492 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1493 if (pred != assign_bb && pred != cond_bb)
1494 return;
1495 }
1496
1497 ipa_set_ancestor_jf (jfunc, offset, index,
1498 parm_ref_data_pass_through_p (fbi, index, call, parm),
1499 true);
1500 }
1501
1502 /* Inspect the given TYPE and return true iff it has the same structure (the
1503 same number of fields of the same types) as a C++ member pointer. If
1504 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1505 corresponding fields there. */
1506
1507 static bool
1508 type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1509 {
1510 tree fld;
1511
1512 if (TREE_CODE (type) != RECORD_TYPE)
1513 return false;
1514
1515 fld = TYPE_FIELDS (type);
1516 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
1517 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1518 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1519 return false;
1520
1521 if (method_ptr)
1522 *method_ptr = fld;
1523
1524 fld = DECL_CHAIN (fld);
1525 if (!fld || INTEGRAL_TYPE_P (fld)
1526 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
1527 return false;
1528 if (delta)
1529 *delta = fld;
1530
1531 if (DECL_CHAIN (fld))
1532 return false;
1533
1534 return true;
1535 }
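/* For illustration, a C++ pointer to member function such as

     void (A::*pmf) (int);

   is lowered by the C++ front end into a RECORD_TYPE with exactly two
   fields, roughly { void (*__pfn) (); ptrdiff_t __delta; }, which is the
   shape recognized above (the particular field names are an ABI detail
   and are not checked here).  */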
1536
1537 /* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
1538 return the rhs of its defining statement, and this statement is stored in
1539 *RHS_STMT. Otherwise return RHS as it is. */
1540
1541 static inline tree
1542 get_ssa_def_if_simple_copy (tree rhs, gimple **rhs_stmt)
1543 {
1544 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1545 {
1546 gimple *def_stmt = SSA_NAME_DEF_STMT (rhs);
1547
1548 if (gimple_assign_single_p (def_stmt))
1549 rhs = gimple_assign_rhs1 (def_stmt);
1550 else
1551 break;
1552 *rhs_stmt = def_stmt;
1553 }
1554 return rhs;
1555 }
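/* For instance (illustrative SSA names), given

     b_2 = a_1;
     c_3 = b_2;

   where a_1 is a default definition of a parameter, calling the function
   above on c_3 returns a_1 and stores the statement "b_2 = a_1;" into
   *RHS_STMT.  */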
1556
1557 /* Simple linked list, describing contents of an aggregate before call. */
1558
1559 struct ipa_known_agg_contents_list
1560 {
1561 /* Offset and size of the described part of the aggregate. */
1562 HOST_WIDE_INT offset, size;
1563
1564 /* Type of the described part of the aggregate. */
1565 tree type;
1566
1567 /* Known constant value or jump function data describing contents. */
1568 struct ipa_load_agg_data value;
1569
1570 /* Pointer to the next structure in the list. */
1571 struct ipa_known_agg_contents_list *next;
1572 };
1573
1574 /* Add an aggregate content item into a linked list of
1575 ipa_known_agg_contents_list structure, in which all elements
1576 are sorted ascendingly by offset. */
1577
1578 static inline void
1579 add_to_agg_contents_list (struct ipa_known_agg_contents_list **plist,
1580 struct ipa_known_agg_contents_list *item)
1581 {
1582 struct ipa_known_agg_contents_list *list = *plist;
1583
1584 for (; list; list = list->next)
1585 {
1586 if (list->offset >= item->offset)
1587 break;
1588
1589 plist = &list->next;
1590 }
1591
1592 item->next = list;
1593 *plist = item;
1594 }
1595
1596 /* Check whether a given aggregate content is clobbered by a certain element in
1597 a linked list of ipa_known_agg_contents_list. */
1598
1599 static inline bool
1600 clobber_by_agg_contents_list_p (struct ipa_known_agg_contents_list *list,
1601 struct ipa_known_agg_contents_list *item)
1602 {
1603 for (; list; list = list->next)
1604 {
1605 if (list->offset >= item->offset)
1606 return list->offset < item->offset + item->size;
1607
1608 if (list->offset + list->size > item->offset)
1609 return true;
1610 }
1611
1612 return false;
1613 }
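/* For instance, with hypothetical bit offsets, if the list already
   contains entries covering [0, 32) and [64, 96), an item at offset 32
   with size 32 is not considered clobbered, whereas an item at offset 48
   with size 32 overlaps the second entry and is reported as clobbered.  */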
1614
1615 /* Build an aggregate jump function from LIST, assuming there are exactly
1616 VALUE_COUNT entries there and that the offset of the passed argument
1617 is ARG_OFFSET, and store it into JFUNC. */
1618
1619 static void
1620 build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
1621 int value_count, HOST_WIDE_INT arg_offset,
1622 struct ipa_jump_func *jfunc)
1623 {
1624 vec_alloc (jfunc->agg.items, value_count);
1625 for (; list; list = list->next)
1626 {
1627 struct ipa_agg_jf_item item;
1628 tree operand = list->value.pass_through.operand;
1629
1630 if (list->value.pass_through.formal_id >= 0)
1631 {
1632 /* Content value is derived from some formal parameter. */
1633 if (list->value.offset >= 0)
1634 item.jftype = IPA_JF_LOAD_AGG;
1635 else
1636 item.jftype = IPA_JF_PASS_THROUGH;
1637
1638 item.value.load_agg = list->value;
1639 if (operand)
1640 item.value.pass_through.operand
1641 = unshare_expr_without_location (operand);
1642 }
1643 else if (operand)
1644 {
1645 /* Content value is known constant. */
1646 item.jftype = IPA_JF_CONST;
1647 item.value.constant = unshare_expr_without_location (operand);
1648 }
1649 else
1650 continue;
1651
1652 item.type = list->type;
1653 gcc_assert (tree_to_shwi (TYPE_SIZE (list->type)) == list->size);
1654
1655 item.offset = list->offset - arg_offset;
1656 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
1657
1658 jfunc->agg.items->quick_push (item);
1659 }
1660 }
1661
1662 /* Given an assignment statement STMT, try to collect information into
1663 AGG_VALUE that will be used to construct a jump function for the RHS of the
1664 assignment, from which the content value of an aggregate part comes.
1665
1666 Besides constant and simple pass-through jump functions, also try to
1667 identify whether it matches the following pattern that can be described by
1668 a load-value-from-aggregate jump function, which is a derivative of simple
1669 pass-through jump function.
1670
1671 foo (int *p)
1672 {
1673 ...
1674
1675 *(q_5 + 4) = *(p_3(D) + 28) op 1;
1676 bar (q_5);
1677 }
1678
1679 Here the IPA_LOAD_AGG_DATA data structure is informative enough to describe
1680 constant, simple pass-through and load-value-from-aggregate. If the value
1681 is constant, it will be kept in field OPERAND, and field FORMAL_ID is
1682 set to -1. For simple pass-through and load-value-from-aggregate, field
1683 FORMAL_ID specifies the related formal parameter index, and field
1684 OFFSET can be used to distinguish them, -1 means simple pass-through,
1685 otherwise means load-value-from-aggregate. */
1686
1687 static void
1688 analyze_agg_content_value (struct ipa_func_body_info *fbi,
1689 struct ipa_load_agg_data *agg_value,
1690 gimple *stmt)
1691 {
1692 tree lhs = gimple_assign_lhs (stmt);
1693 tree rhs1 = gimple_assign_rhs1 (stmt);
1694 enum tree_code code;
1695 int index = -1;
1696
1697 /* Initialize jump function data for the aggregate part. */
1698 memset (agg_value, 0, sizeof (*agg_value));
1699 agg_value->pass_through.operation = NOP_EXPR;
1700 agg_value->pass_through.formal_id = -1;
1701 agg_value->offset = -1;
1702
1703 if (AGGREGATE_TYPE_P (TREE_TYPE (lhs)) /* TODO: Support aggregate type. */
1704 || TREE_THIS_VOLATILE (lhs)
1705 || TREE_CODE (lhs) == BIT_FIELD_REF
1706 || contains_bitfld_component_ref_p (lhs))
1707 return;
1708
1709 /* Skip SSA copies. */
1710 while (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
1711 {
1712 if (TREE_CODE (rhs1) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (rhs1))
1713 break;
1714
1715 stmt = SSA_NAME_DEF_STMT (rhs1);
1716 if (!is_gimple_assign (stmt))
1717 return;
1718
1719 rhs1 = gimple_assign_rhs1 (stmt);
1720 }
1721
1722 code = gimple_assign_rhs_code (stmt);
1723 switch (gimple_assign_rhs_class (stmt))
1724 {
1725 case GIMPLE_SINGLE_RHS:
1726 if (is_gimple_ip_invariant (rhs1))
1727 {
1728 agg_value->pass_through.operand = rhs1;
1729 return;
1730 }
1731 code = NOP_EXPR;
1732 break;
1733
1734 case GIMPLE_UNARY_RHS:
1735       /* NOTE: A GIMPLE_UNARY_RHS operation might not be tcc_unary
1736 	 (TRUTH_NOT_EXPR is an example), and GIMPLE_BINARY_RHS does not imply
1737 	 tcc_binary, so this subtlety is somewhat misleading.
1738 
1739 	 Since tcc_unary is widely used in IPA-CP code to check an operation
1740 	 with one operand, here we only allow tcc_unary operations to avoid
1741 	 possible problems.  Then we can use (opclass == tcc_unary) or not to
1742 	 distinguish unary and binary. */
1743 if (TREE_CODE_CLASS (code) != tcc_unary || CONVERT_EXPR_CODE_P (code))
1744 return;
1745
1746 rhs1 = get_ssa_def_if_simple_copy (rhs1, &stmt);
1747 break;
1748
1749 case GIMPLE_BINARY_RHS:
1750 {
1751 gimple *rhs1_stmt = stmt;
1752 gimple *rhs2_stmt = stmt;
1753 tree rhs2 = gimple_assign_rhs2 (stmt);
1754
1755 rhs1 = get_ssa_def_if_simple_copy (rhs1, &rhs1_stmt);
1756 rhs2 = get_ssa_def_if_simple_copy (rhs2, &rhs2_stmt);
1757
1758 if (is_gimple_ip_invariant (rhs2))
1759 {
1760 agg_value->pass_through.operand = rhs2;
1761 stmt = rhs1_stmt;
1762 }
1763 else if (is_gimple_ip_invariant (rhs1))
1764 {
1765 if (TREE_CODE_CLASS (code) == tcc_comparison)
1766 code = swap_tree_comparison (code);
1767 else if (!commutative_tree_code (code))
1768 return;
1769
1770 agg_value->pass_through.operand = rhs1;
1771 stmt = rhs2_stmt;
1772 rhs1 = rhs2;
1773 }
1774 else
1775 return;
1776
1777 if (TREE_CODE_CLASS (code) != tcc_comparison
1778 && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs1)))
1779 return;
1780 }
1781 break;
1782
1783 default:
1784 return;
1785 }
1786
1787 if (TREE_CODE (rhs1) != SSA_NAME)
1788 index = load_from_unmodified_param_or_agg (fbi, fbi->info, stmt,
1789 &agg_value->offset,
1790 &agg_value->by_ref);
1791 else if (SSA_NAME_IS_DEFAULT_DEF (rhs1))
1792 index = ipa_get_param_decl_index (fbi->info, SSA_NAME_VAR (rhs1));
1793
1794 if (index >= 0)
1795 {
1796 if (agg_value->offset >= 0)
1797 agg_value->type = TREE_TYPE (rhs1);
1798 agg_value->pass_through.formal_id = index;
1799 agg_value->pass_through.operation = code;
1800 }
1801 else
1802 agg_value->pass_through.operand = NULL_TREE;
1803 }
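/* Hypothetical examples of what the function above records: for the store
   "*(q_5 + 8) = p_3(D) + 3", where p is a formal parameter, it fills in an
   arithmetic pass-through with FORMAL_ID referring to p, operation PLUS_EXPR,
   OPERAND 3 and OFFSET -1; for "*(q_5 + 8) = 42" it fills in a constant with
   OPERAND 42 and FORMAL_ID -1.  */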
1804
1805 /* If STMT is a memory store to the object whose address is BASE, extract
1806    information (offset, size, and value) into CONTENT and return true;
1807    otherwise conservatively assume the whole object is modified with
1808    unknown content and return false.  CHECK_REF means that the access to the
1809    object is expected to be in the form of a MEM_REF expression. */
1810
1811 static bool
1812 extract_mem_content (struct ipa_func_body_info *fbi,
1813 gimple *stmt, tree base, bool check_ref,
1814 struct ipa_known_agg_contents_list *content)
1815 {
1816 HOST_WIDE_INT lhs_offset, lhs_size;
1817 bool reverse;
1818
1819 if (!is_gimple_assign (stmt))
1820 return false;
1821
1822 tree lhs = gimple_assign_lhs (stmt);
1823 tree lhs_base = get_ref_base_and_extent_hwi (lhs, &lhs_offset, &lhs_size,
1824 &reverse);
1825 if (!lhs_base)
1826 return false;
1827
1828 if (check_ref)
1829 {
1830 if (TREE_CODE (lhs_base) != MEM_REF
1831 || TREE_OPERAND (lhs_base, 0) != base
1832 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1833 return false;
1834 }
1835 else if (lhs_base != base)
1836 return false;
1837
1838 content->offset = lhs_offset;
1839 content->size = lhs_size;
1840 content->type = TREE_TYPE (lhs);
1841 content->next = NULL;
1842
1843 analyze_agg_content_value (fbi, &content->value, stmt);
1844 return true;
1845 }
1846
1847 /* Traverse statements from CALL backwards, scanning whether an aggregate given
1848    in ARG is filled in with constants or with values derived from the caller's
1849    formal parameters in a way that can be described by some kind of jump
1850    function.  FBI is the context of the caller function for interprocedural
1851    analysis.  ARG can either be an aggregate expression or a pointer to an
1852    aggregate.  ARG_TYPE is the type of the aggregate, JFUNC its jump function. */
1853
1854 static void
1855 determine_known_aggregate_parts (struct ipa_func_body_info *fbi,
1856 gcall *call, tree arg,
1857 tree arg_type,
1858 struct ipa_jump_func *jfunc)
1859 {
1860 struct ipa_known_agg_contents_list *list = NULL, *all_list = NULL;
1861 bitmap visited = NULL;
1862 int item_count = 0, value_count = 0;
1863 HOST_WIDE_INT arg_offset, arg_size;
1864 tree arg_base;
1865 bool check_ref, by_ref;
1866 ao_ref r;
1867 int max_agg_items = opt_for_fn (fbi->node->decl, param_ipa_max_agg_items);
1868
1869 if (max_agg_items == 0)
1870 return;
1871
1872 /* The function operates in three stages. First, we prepare check_ref, r,
1873 arg_base and arg_offset based on what is actually passed as an actual
1874 argument. */
1875
1876 if (POINTER_TYPE_P (arg_type))
1877 {
1878 by_ref = true;
1879 if (TREE_CODE (arg) == SSA_NAME)
1880 {
1881 tree type_size;
1882 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type)))
1883 || !POINTER_TYPE_P (TREE_TYPE (arg)))
1884 return;
1885 check_ref = true;
1886 arg_base = arg;
1887 arg_offset = 0;
1888 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
1889 arg_size = tree_to_uhwi (type_size);
1890 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1891 }
1892 else if (TREE_CODE (arg) == ADDR_EXPR)
1893 {
1894 bool reverse;
1895
1896 arg = TREE_OPERAND (arg, 0);
1897 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1898 &arg_size, &reverse);
1899 if (!arg_base)
1900 return;
1901 if (DECL_P (arg_base))
1902 {
1903 check_ref = false;
1904 ao_ref_init (&r, arg_base);
1905 }
1906 else
1907 return;
1908 }
1909 else
1910 return;
1911 }
1912 else
1913 {
1914 bool reverse;
1915
1916 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1917
1918 by_ref = false;
1919 check_ref = false;
1920 arg_base = get_ref_base_and_extent_hwi (arg, &arg_offset,
1921 &arg_size, &reverse);
1922 if (!arg_base)
1923 return;
1924
1925 ao_ref_init (&r, arg);
1926 }
1927
1928   /* The second stage traverses the virtual SSA web backwards starting from
1929      the call statement, looking only at individual dominating virtual
1930      operands (whose definitions dominate the call).  As long as it can tell
1931      that the content of the aggregate is affected by such a definition, it
1932      builds a sorted linked list of ipa_known_agg_contents_list describing it. */
1933
1934 for (tree dom_vuse = gimple_vuse (call); dom_vuse;)
1935 {
1936 gimple *stmt = SSA_NAME_DEF_STMT (dom_vuse);
1937
1938 if (gimple_code (stmt) == GIMPLE_PHI)
1939 {
1940 dom_vuse = get_continuation_for_phi (stmt, &r, true,
1941 fbi->aa_walk_budget,
1942 &visited, false, NULL, NULL);
1943 continue;
1944 }
1945
1946 if (stmt_may_clobber_ref_p_1 (stmt, &r))
1947 {
1948 struct ipa_known_agg_contents_list *content
1949 = XALLOCA (struct ipa_known_agg_contents_list);
1950
1951 if (!extract_mem_content (fbi, stmt, arg_base, check_ref, content))
1952 break;
1953
1954 	  /* Now we have a dominating virtual operand, and need to check
1955 	     whether its value is clobbered by any other dominating one. */
1956 if ((content->value.pass_through.formal_id >= 0
1957 || content->value.pass_through.operand)
1958 && !clobber_by_agg_contents_list_p (all_list, content))
1959 {
1960 struct ipa_known_agg_contents_list *copy
1961 = XALLOCA (struct ipa_known_agg_contents_list);
1962
1963 /* Add to the list consisting of only dominating virtual
1964 operands, whose definitions can finally reach the call. */
1965 add_to_agg_contents_list (&list, (*copy = *content, copy));
1966
1967 if (++value_count == max_agg_items)
1968 break;
1969 }
1970
1971 /* Add to the list consisting of all dominating virtual operands. */
1972 add_to_agg_contents_list (&all_list, content);
1973
1974 if (++item_count == 2 * max_agg_items)
1975 break;
1976 }
1977 dom_vuse = gimple_vuse (stmt);
1978 }
1979
1980 if (visited)
1981 BITMAP_FREE (visited);
1982
1983 /* Third stage just goes over the list and creates an appropriate vector of
1984 ipa_agg_jf_item structures out of it, of course only if there are
1985 any meaningful items to begin with. */
1986
1987 if (value_count)
1988 {
1989 jfunc->agg.by_ref = by_ref;
1990 build_agg_jump_func_from_list (list, value_count, arg_offset, jfunc);
1991 }
1992 }
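/* A sketch of the situation handled above, with hypothetical source where p
   is a formal parameter of the caller:

     struct S s;
     s.a = 1;
     s.b = p;
     bar (&s);

   The backward walk over virtual operands finds the two stores, and the
   resulting aggregate jump function for the argument of bar describes the
   part at the offset of field a as the constant 1 and the part at the offset
   of field b as a simple pass-through of p.  */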
1993
1994
1995 /* Return the type of the Ith formal parameter of the callee associated
1996    with call graph edge E. */
1997
1998 tree
1999 ipa_get_callee_param_type (struct cgraph_edge *e, int i)
2000 {
2001 int n;
2002 tree type = (e->callee
2003 ? TREE_TYPE (e->callee->decl)
2004 : gimple_call_fntype (e->call_stmt));
2005 tree t = TYPE_ARG_TYPES (type);
2006
2007 for (n = 0; n < i; n++)
2008 {
2009 if (!t)
2010 break;
2011 t = TREE_CHAIN (t);
2012 }
2013 if (t)
2014 return TREE_VALUE (t);
2015 if (!e->callee)
2016 return NULL;
2017 t = DECL_ARGUMENTS (e->callee->decl);
2018 for (n = 0; n < i; n++)
2019 {
2020 if (!t)
2021 return NULL;
2022 t = TREE_CHAIN (t);
2023 }
2024 if (t)
2025 return TREE_TYPE (t);
2026 return NULL;
2027 }
2028
2029 /* Return ipa_bits with VALUE and MASK values, which can be either a newly
2030 allocated structure or a previously existing one shared with other jump
2031 functions and/or transformation summaries. */
2032
2033 ipa_bits *
2034 ipa_get_ipa_bits_for_value (const widest_int &value, const widest_int &mask)
2035 {
2036 ipa_bits tmp;
2037 tmp.value = value;
2038 tmp.mask = mask;
2039
2040 ipa_bits **slot = ipa_bits_hash_table->find_slot (&tmp, INSERT);
2041 if (*slot)
2042 return *slot;
2043
2044 ipa_bits *res = ggc_alloc<ipa_bits> ();
2045 res->value = value;
2046 res->mask = mask;
2047 *slot = res;
2048
2049 return res;
2050 }
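/* A note on the VALUE/MASK encoding used here (the same convention as the
   bit-CCP lattice): bits cleared in MASK are known and their contents are
   given by the corresponding bits of VALUE; bits set in MASK are unknown.
   For example, a hypothetical pointer known only to be 8-byte aligned would
   be described by a VALUE of 0 and a MASK with the three least significant
   bits cleared and all other bits set.  */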
2051
2052 /* Assign to JF a pointer to an ipa_bits structure with VALUE and MASK.  Use
2053    the hash table to avoid creating multiple identical ipa_bits structures. */
2054
2055 static void
2056 ipa_set_jfunc_bits (ipa_jump_func *jf, const widest_int &value,
2057 const widest_int &mask)
2058 {
2059 jf->bits = ipa_get_ipa_bits_for_value (value, mask);
2060 }
2061
2062 /* Return a pointer to a value_range just like *TMP, but either find it in
2063 ipa_vr_hash_table or allocate it in GC memory. TMP->equiv must be NULL. */
2064
2065 static value_range *
2066 ipa_get_value_range (value_range *tmp)
2067 {
2068 value_range **slot = ipa_vr_hash_table->find_slot (tmp, INSERT);
2069 if (*slot)
2070 return *slot;
2071
2072 value_range *vr = ggc_alloc<value_range> ();
2073 *vr = *tmp;
2074 *slot = vr;
2075
2076 return vr;
2077 }
2078
2079 /* Return a pointer to a value range consisting of KIND, MIN, MAX and an empty
2080    equiv set.  Use the hash table to avoid creating multiple identical copies
2081    of value_ranges. */
2082
2083 static value_range *
2084 ipa_get_value_range (enum value_range_kind kind, tree min, tree max)
2085 {
2086 value_range tmp (min, max, kind);
2087 return ipa_get_value_range (&tmp);
2088 }
2089
2090 /* Assign to JF a pointer to a value_range structure with TYPE, MIN and MAX and
2091 a NULL equiv bitmap. Use hash table in order to avoid creating multiple
2092 same value_range structures. */
2093
2094 static void
2095 ipa_set_jfunc_vr (ipa_jump_func *jf, enum value_range_kind type,
2096 tree min, tree max)
2097 {
2098 jf->m_vr = ipa_get_value_range (type, min, max);
2099 }
2100
2101 /* Assign to JF a pointer to a value_range just like TMP but either fetch a
2102    copy from ipa_vr_hash_table or allocate a new one in GC memory. */
2103
2104 static void
2105 ipa_set_jfunc_vr (ipa_jump_func *jf, value_range *tmp)
2106 {
2107 jf->m_vr = ipa_get_value_range (tmp);
2108 }
2109
2110 /* Compute jump function for all arguments of callsite CS and insert the
2111 information in the jump_functions array in the ipa_edge_args corresponding
2112 to this callsite. */
2113
2114 static void
2115 ipa_compute_jump_functions_for_edge (struct ipa_func_body_info *fbi,
2116 struct cgraph_edge *cs)
2117 {
2118 class ipa_node_params *info = IPA_NODE_REF (cs->caller);
2119 class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (cs);
2120 gcall *call = cs->call_stmt;
2121 int n, arg_num = gimple_call_num_args (call);
2122 bool useful_context = false;
2123
2124 if (arg_num == 0 || args->jump_functions)
2125 return;
2126 vec_safe_grow_cleared (args->jump_functions, arg_num);
2127 if (flag_devirtualize)
2128 vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);
2129
2130 if (gimple_call_internal_p (call))
2131 return;
2132 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
2133 return;
2134
2135 for (n = 0; n < arg_num; n++)
2136 {
2137 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
2138 tree arg = gimple_call_arg (call, n);
2139 tree param_type = ipa_get_callee_param_type (cs, n);
2140 if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
2141 {
2142 tree instance;
2143 class ipa_polymorphic_call_context context (cs->caller->decl,
2144 arg, cs->call_stmt,
2145 &instance);
2146 context.get_dynamic_type (instance, arg, NULL, cs->call_stmt,
2147 &fbi->aa_walk_budget);
2148 *ipa_get_ith_polymorhic_call_context (args, n) = context;
2149 if (!context.useless_p ())
2150 useful_context = true;
2151 }
2152
2153 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2154 {
2155 bool addr_nonzero = false;
2156 bool strict_overflow = false;
2157
2158 if (TREE_CODE (arg) == SSA_NAME
2159 && param_type
2160 && get_ptr_nonnull (arg))
2161 addr_nonzero = true;
2162 else if (tree_single_nonzero_warnv_p (arg, &strict_overflow))
2163 addr_nonzero = true;
2164
2165 if (addr_nonzero)
2166 {
2167 tree z = build_int_cst (TREE_TYPE (arg), 0);
2168 ipa_set_jfunc_vr (jfunc, VR_ANTI_RANGE, z, z);
2169 }
2170 else
2171 gcc_assert (!jfunc->m_vr);
2172 }
2173 else
2174 {
2175 wide_int min, max;
2176 value_range_kind kind;
2177 if (TREE_CODE (arg) == SSA_NAME
2178 && param_type
2179 && (kind = get_range_info (arg, &min, &max))
2180 && (kind == VR_RANGE || kind == VR_ANTI_RANGE))
2181 {
2182 value_range resvr;
2183 value_range tmpvr (wide_int_to_tree (TREE_TYPE (arg), min),
2184 wide_int_to_tree (TREE_TYPE (arg), max),
2185 kind);
2186 range_fold_unary_expr (&resvr, NOP_EXPR, param_type,
2187 &tmpvr, TREE_TYPE (arg));
2188 if (!resvr.undefined_p () && !resvr.varying_p ())
2189 ipa_set_jfunc_vr (jfunc, &resvr);
2190 else
2191 gcc_assert (!jfunc->m_vr);
2192 }
2193 else
2194 gcc_assert (!jfunc->m_vr);
2195 }
2196
2197 if (INTEGRAL_TYPE_P (TREE_TYPE (arg))
2198 && (TREE_CODE (arg) == SSA_NAME || TREE_CODE (arg) == INTEGER_CST))
2199 {
2200 if (TREE_CODE (arg) == SSA_NAME)
2201 ipa_set_jfunc_bits (jfunc, 0,
2202 widest_int::from (get_nonzero_bits (arg),
2203 TYPE_SIGN (TREE_TYPE (arg))));
2204 else
2205 ipa_set_jfunc_bits (jfunc, wi::to_widest (arg), 0);
2206 }
2207 else if (POINTER_TYPE_P (TREE_TYPE (arg)))
2208 {
2209 unsigned HOST_WIDE_INT bitpos;
2210 unsigned align;
2211
2212 get_pointer_alignment_1 (arg, &align, &bitpos);
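	  /* Known alignment translates into known low bits.  For a made-up
	     example, ALIGN of 64 bits and BITPOS of 16 mean that the three
	     least significant bits of the pointer are known; they are
	     cleared in the mask computed below and their contents (2, i.e.
	     BITPOS / BITS_PER_UNIT) are recorded in the value.  */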
2213 widest_int mask = wi::bit_and_not
2214 (wi::mask<widest_int> (TYPE_PRECISION (TREE_TYPE (arg)), false),
2215 align / BITS_PER_UNIT - 1);
2216 widest_int value = bitpos / BITS_PER_UNIT;
2217 ipa_set_jfunc_bits (jfunc, value, mask);
2218 }
2219 else
2220 gcc_assert (!jfunc->bits);
2221
2222 if (is_gimple_ip_invariant (arg)
2223 || (VAR_P (arg)
2224 && is_global_var (arg)
2225 && TREE_READONLY (arg)))
2226 ipa_set_jf_constant (jfunc, arg, cs);
2227 else if (!is_gimple_reg_type (TREE_TYPE (arg))
2228 && TREE_CODE (arg) == PARM_DECL)
2229 {
2230 int index = ipa_get_param_decl_index (info, arg);
2231
2232 	  gcc_assert (index >= 0);
2233 /* Aggregate passed by value, check for pass-through, otherwise we
2234 will attempt to fill in aggregate contents later in this
2235 	     for loop. */
2236 if (parm_preserved_before_stmt_p (fbi, index, call, arg))
2237 {
2238 ipa_set_jf_simple_pass_through (jfunc, index, false);
2239 continue;
2240 }
2241 }
2242 else if (TREE_CODE (arg) == SSA_NAME)
2243 {
2244 if (SSA_NAME_IS_DEFAULT_DEF (arg))
2245 {
2246 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
2247 if (index >= 0)
2248 {
2249 bool agg_p;
2250 agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
2251 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
2252 }
2253 }
2254 else
2255 {
2256 gimple *stmt = SSA_NAME_DEF_STMT (arg);
2257 if (is_gimple_assign (stmt))
2258 compute_complex_assign_jump_func (fbi, info, jfunc,
2259 call, stmt, arg, param_type);
2260 else if (gimple_code (stmt) == GIMPLE_PHI)
2261 compute_complex_ancestor_jump_func (fbi, info, jfunc,
2262 call,
2263 as_a <gphi *> (stmt));
2264 }
2265 }
2266
2267       /* If ARG is a pointer, we cannot use its type to determine the type of the
2268 	 aggregate passed (because type conversions are ignored in gimple).  Usually
2269 	 we can safely get the type from the function declaration, but in the case
2270 	 of K&R prototypes or variadic functions we can try our luck with the type
2271 	 of the pointer passed.  TODO: Since we look for the actual initialization
2272 	 of the memory object, we may do better to work out the type from the memory stores we find. */
2273 if (!param_type)
2274 param_type = TREE_TYPE (arg);
2275
2276 if ((jfunc->type != IPA_JF_PASS_THROUGH
2277 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
2278 && (jfunc->type != IPA_JF_ANCESTOR
2279 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
2280 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
2281 || POINTER_TYPE_P (param_type)))
2282 determine_known_aggregate_parts (fbi, call, arg, param_type, jfunc);
2283 }
2284 if (!useful_context)
2285 vec_free (args->polymorphic_call_contexts);
2286 }
2287
2288 /* Compute jump functions for all edges - both direct and indirect - outgoing
2289 from BB. */
2290
2291 static void
2292 ipa_compute_jump_functions_for_bb (struct ipa_func_body_info *fbi, basic_block bb)
2293 {
2294 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
2295 int i;
2296 struct cgraph_edge *cs;
2297
2298 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
2299 {
2300 struct cgraph_node *callee = cs->callee;
2301
2302 if (callee)
2303 {
2304 callee = callee->ultimate_alias_target ();
2305 /* We do not need to bother analyzing calls to unknown functions
2306 unless they may become known during lto/whopr. */
2307 if (!callee->definition && !flag_lto)
2308 continue;
2309 }
2310 ipa_compute_jump_functions_for_edge (fbi, cs);
2311 }
2312 }
2313
2314 /* If STMT looks like a statement loading a value from a member pointer formal
2315 parameter, return that parameter and store the offset of the field to
2316 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
2317 might be clobbered). If USE_DELTA, then we look for a use of the delta
2318 field rather than the pfn. */
2319
2320 static tree
2321 ipa_get_stmt_member_ptr_load_param (gimple *stmt, bool use_delta,
2322 HOST_WIDE_INT *offset_p)
2323 {
2324 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
2325
2326 if (!gimple_assign_single_p (stmt))
2327 return NULL_TREE;
2328
2329 rhs = gimple_assign_rhs1 (stmt);
2330 if (TREE_CODE (rhs) == COMPONENT_REF)
2331 {
2332 ref_field = TREE_OPERAND (rhs, 1);
2333 rhs = TREE_OPERAND (rhs, 0);
2334 }
2335 else
2336 ref_field = NULL_TREE;
2337 if (TREE_CODE (rhs) != MEM_REF)
2338 return NULL_TREE;
2339 rec = TREE_OPERAND (rhs, 0);
2340 if (TREE_CODE (rec) != ADDR_EXPR)
2341 return NULL_TREE;
2342 rec = TREE_OPERAND (rec, 0);
2343 if (TREE_CODE (rec) != PARM_DECL
2344 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
2345 return NULL_TREE;
2346 ref_offset = TREE_OPERAND (rhs, 1);
2347
2348 if (use_delta)
2349 fld = delta_field;
2350 else
2351 fld = ptr_field;
2352 if (offset_p)
2353 *offset_p = int_bit_position (fld);
2354
2355 if (ref_field)
2356 {
2357 if (integer_nonzerop (ref_offset))
2358 return NULL_TREE;
2359 return ref_field == fld ? rec : NULL_TREE;
2360 }
2361 else
2362 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
2363 : NULL_TREE;
2364 }
2365
2366 /* Returns true iff T is an SSA_NAME defined by a statement. */
2367
2368 static bool
2369 ipa_is_ssa_with_stmt_def (tree t)
2370 {
2371 if (TREE_CODE (t) == SSA_NAME
2372 && !SSA_NAME_IS_DEFAULT_DEF (t))
2373 return true;
2374 else
2375 return false;
2376 }
2377
2378 /* Find the indirect call graph edge corresponding to STMT and mark it as a
2379    call to the parameter with number PARAM_INDEX.  NODE is the caller.  Return
2380    the indirect call graph edge.
2381    If POLYMORPHIC is true, record it as the destination of a polymorphic call. */
2382
2383 static struct cgraph_edge *
2384 ipa_note_param_call (struct cgraph_node *node, int param_index,
2385 gcall *stmt, bool polymorphic)
2386 {
2387 struct cgraph_edge *cs;
2388
2389 cs = node->get_edge (stmt);
2390 cs->indirect_info->param_index = param_index;
2391 cs->indirect_info->agg_contents = 0;
2392 cs->indirect_info->member_ptr = 0;
2393 cs->indirect_info->guaranteed_unmodified = 0;
2394 ipa_set_param_used_by_indirect_call (IPA_NODE_REF (node),
2395 param_index, true);
2396 if (cs->indirect_info->polymorphic || polymorphic)
2397 ipa_set_param_used_by_polymorphic_call
2398 (IPA_NODE_REF (node), param_index, true);
2399 return cs;
2400 }
2401
2402 /* Analyze CALL and examine uses of formal parameters of the caller
2403    FBI->node (described by FBI->info).  Currently it checks whether the
2404    call is made through a pointer that is a formal parameter and if so,
2405    the parameter is marked with the called flag and an indirect call graph
2406    edge describing the call is created.  This is very simple for ordinary
2407    pointers represented in SSA but not-so-nice when it comes to member
2408    pointers.  The ugly part of this function does nothing more than trying
2409    to match the pattern of such a call.  An example of such a pattern is
2410    the gimple dump below, the call is on the last line:
2412
2413 <bb 2>:
2414 f$__delta_5 = f.__delta;
2415 f$__pfn_24 = f.__pfn;
2416
2417 or
2418 <bb 2>:
2419 f$__delta_5 = MEM[(struct *)&f];
2420 f$__pfn_24 = MEM[(struct *)&f + 4B];
2421
2422 and a few lines below:
2423
2424 <bb 5>
2425 D.2496_3 = (int) f$__pfn_24;
2426 D.2497_4 = D.2496_3 & 1;
2427 if (D.2497_4 != 0)
2428 goto <bb 3>;
2429 else
2430 goto <bb 4>;
2431
2432 <bb 6>:
2433 D.2500_7 = (unsigned int) f$__delta_5;
2434 D.2501_8 = &S + D.2500_7;
2435 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
2436 D.2503_10 = *D.2502_9;
2437 D.2504_12 = f$__pfn_24 + -1;
2438 D.2505_13 = (unsigned int) D.2504_12;
2439 D.2506_14 = D.2503_10 + D.2505_13;
2440 D.2507_15 = *D.2506_14;
2441 iftmp.11_16 = (String:: *) D.2507_15;
2442
2443 <bb 7>:
2444 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
2445 D.2500_19 = (unsigned int) f$__delta_5;
2446 D.2508_20 = &S + D.2500_19;
2447 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
2448
2449 Such patterns are results of simple calls to a member pointer:
2450
2451 int doprinting (int (MyString::* f)(int) const)
2452 {
2453 MyString S ("somestring");
2454
2455 return (S.*f)(4);
2456 }
2457
2458 Moreover, the function also looks for called pointers loaded from aggregates
2459 passed by value or reference. */
2460
2461 static void
2462 ipa_analyze_indirect_call_uses (struct ipa_func_body_info *fbi, gcall *call,
2463 tree target)
2464 {
2465 class ipa_node_params *info = fbi->info;
2466 HOST_WIDE_INT offset;
2467 bool by_ref;
2468
2469 if (SSA_NAME_IS_DEFAULT_DEF (target))
2470 {
2471 tree var = SSA_NAME_VAR (target);
2472 int index = ipa_get_param_decl_index (info, var);
2473 if (index >= 0)
2474 ipa_note_param_call (fbi->node, index, call, false);
2475 return;
2476 }
2477
2478 int index;
2479 gimple *def = SSA_NAME_DEF_STMT (target);
2480 bool guaranteed_unmodified;
2481 if (gimple_assign_single_p (def)
2482 && ipa_load_from_parm_agg (fbi, info->descriptors, def,
2483 gimple_assign_rhs1 (def), &index, &offset,
2484 NULL, &by_ref, &guaranteed_unmodified))
2485 {
2486 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2487 call, false);
2488 cs->indirect_info->offset = offset;
2489 cs->indirect_info->agg_contents = 1;
2490 cs->indirect_info->by_ref = by_ref;
2491 cs->indirect_info->guaranteed_unmodified = guaranteed_unmodified;
2492 return;
2493 }
2494
2495 /* Now we need to try to match the complex pattern of calling a member
2496 pointer. */
2497 if (gimple_code (def) != GIMPLE_PHI
2498 || gimple_phi_num_args (def) != 2
2499 || !POINTER_TYPE_P (TREE_TYPE (target))
2500 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
2501 return;
2502
2503 /* First, we need to check whether one of these is a load from a member
2504 pointer that is a parameter to this function. */
2505 tree n1 = PHI_ARG_DEF (def, 0);
2506 tree n2 = PHI_ARG_DEF (def, 1);
2507 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
2508 return;
2509 gimple *d1 = SSA_NAME_DEF_STMT (n1);
2510 gimple *d2 = SSA_NAME_DEF_STMT (n2);
2511
2512 tree rec;
2513 basic_block bb, virt_bb;
2514 basic_block join = gimple_bb (def);
2515 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
2516 {
2517 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
2518 return;
2519
2520 bb = EDGE_PRED (join, 0)->src;
2521 virt_bb = gimple_bb (d2);
2522 }
2523 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
2524 {
2525 bb = EDGE_PRED (join, 1)->src;
2526 virt_bb = gimple_bb (d1);
2527 }
2528 else
2529 return;
2530
2531 /* Second, we need to check that the basic blocks are laid out in the way
2532 corresponding to the pattern. */
2533
2534 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
2535 || single_pred (virt_bb) != bb
2536 || single_succ (virt_bb) != join)
2537 return;
2538
2539 /* Third, let's see that the branching is done depending on the least
2540 significant bit of the pfn. */
2541
2542 gimple *branch = last_stmt (bb);
2543 if (!branch || gimple_code (branch) != GIMPLE_COND)
2544 return;
2545
2546 if ((gimple_cond_code (branch) != NE_EXPR
2547 && gimple_cond_code (branch) != EQ_EXPR)
2548 || !integer_zerop (gimple_cond_rhs (branch)))
2549 return;
2550
2551 tree cond = gimple_cond_lhs (branch);
2552 if (!ipa_is_ssa_with_stmt_def (cond))
2553 return;
2554
2555 def = SSA_NAME_DEF_STMT (cond);
2556 if (!is_gimple_assign (def)
2557 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
2558 || !integer_onep (gimple_assign_rhs2 (def)))
2559 return;
2560
2561 cond = gimple_assign_rhs1 (def);
2562 if (!ipa_is_ssa_with_stmt_def (cond))
2563 return;
2564
2565 def = SSA_NAME_DEF_STMT (cond);
2566
2567 if (is_gimple_assign (def)
2568 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
2569 {
2570 cond = gimple_assign_rhs1 (def);
2571 if (!ipa_is_ssa_with_stmt_def (cond))
2572 return;
2573 def = SSA_NAME_DEF_STMT (cond);
2574 }
2575
2576 tree rec2;
2577 rec2 = ipa_get_stmt_member_ptr_load_param (def,
2578 (TARGET_PTRMEMFUNC_VBIT_LOCATION
2579 == ptrmemfunc_vbit_in_delta),
2580 NULL);
2581 if (rec != rec2)
2582 return;
2583
2584 index = ipa_get_param_decl_index (info, rec);
2585 if (index >= 0
2586 && parm_preserved_before_stmt_p (fbi, index, call, rec))
2587 {
2588 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2589 call, false);
2590 cs->indirect_info->offset = offset;
2591 cs->indirect_info->agg_contents = 1;
2592 cs->indirect_info->member_ptr = 1;
2593 cs->indirect_info->guaranteed_unmodified = 1;
2594 }
2595
2596 return;
2597 }
2598
2599 /* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
2600 object referenced in the expression is a formal parameter of the caller
2601 FBI->node (described by FBI->info), create a call note for the
2602 statement. */
2603
2604 static void
2605 ipa_analyze_virtual_call_uses (struct ipa_func_body_info *fbi,
2606 gcall *call, tree target)
2607 {
2608 tree obj = OBJ_TYPE_REF_OBJECT (target);
2609 int index;
2610 HOST_WIDE_INT anc_offset;
2611
2612 if (!flag_devirtualize)
2613 return;
2614
2615 if (TREE_CODE (obj) != SSA_NAME)
2616 return;
2617
2618 class ipa_node_params *info = fbi->info;
2619 if (SSA_NAME_IS_DEFAULT_DEF (obj))
2620 {
2621 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
2622 return;
2623
2624 anc_offset = 0;
2625 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
2626 gcc_assert (index >= 0);
2627 if (detect_type_change_ssa (fbi, obj, obj_type_ref_class (target),
2628 call))
2629 return;
2630 }
2631 else
2632 {
2633 gimple *stmt = SSA_NAME_DEF_STMT (obj);
2634 tree expr;
2635
2636 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
2637 if (!expr)
2638 return;
2639 index = ipa_get_param_decl_index (info,
2640 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2641 gcc_assert (index >= 0);
2642 if (detect_type_change (fbi, obj, expr, obj_type_ref_class (target),
2643 call, anc_offset))
2644 return;
2645 }
2646
2647 struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index,
2648 call, true);
2649 class cgraph_indirect_call_info *ii = cs->indirect_info;
2650 ii->offset = anc_offset;
2651 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
2652 ii->otr_type = obj_type_ref_class (target);
2653 ii->polymorphic = 1;
2654 }
2655
2656 /* Analyze call statement CALL, determining whether and how it utilizes
2657    formal parameters of the caller FBI->node (described by FBI->info). */
2659
2660 static void
2661 ipa_analyze_call_uses (struct ipa_func_body_info *fbi, gcall *call)
2662 {
2663 tree target = gimple_call_fn (call);
2664
2665 if (!target
2666 || (TREE_CODE (target) != SSA_NAME
2667 && !virtual_method_call_p (target)))
2668 return;
2669
2670 struct cgraph_edge *cs = fbi->node->get_edge (call);
2671 /* If we previously turned the call into a direct call, there is
2672 no need to analyze. */
2673 if (cs && !cs->indirect_unknown_callee)
2674 return;
2675
2676 if (cs->indirect_info->polymorphic && flag_devirtualize)
2677 {
2678 tree instance;
2679 tree target = gimple_call_fn (call);
2680 ipa_polymorphic_call_context context (current_function_decl,
2681 target, call, &instance);
2682
2683 gcc_checking_assert (cs->indirect_info->otr_type
2684 == obj_type_ref_class (target));
2685 gcc_checking_assert (cs->indirect_info->otr_token
2686 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
2687
2688 cs->indirect_info->vptr_changed
2689 = !context.get_dynamic_type (instance,
2690 OBJ_TYPE_REF_OBJECT (target),
2691 obj_type_ref_class (target), call,
2692 &fbi->aa_walk_budget);
2693 cs->indirect_info->context = context;
2694 }
2695
2696 if (TREE_CODE (target) == SSA_NAME)
2697 ipa_analyze_indirect_call_uses (fbi, call, target);
2698 else if (virtual_method_call_p (target))
2699 ipa_analyze_virtual_call_uses (fbi, call, target);
2700 }
2701
2702
2703 /* Analyze the call statement STMT with respect to formal parameters (described
2704 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2705 formal parameters are called. */
2706
2707 static void
2708 ipa_analyze_stmt_uses (struct ipa_func_body_info *fbi, gimple *stmt)
2709 {
2710 if (is_gimple_call (stmt))
2711 ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
2712 }
2713
2714 /* Callback of walk_stmt_load_store_addr_ops for visited loads, stores and
2715    address operands.  If OP is a parameter declaration, mark it as used in the
2716    info structure passed in DATA. */
2717
2718 static bool
2719 visit_ref_for_mod_analysis (gimple *, tree op, tree, void *data)
2720 {
2721 class ipa_node_params *info = (class ipa_node_params *) data;
2722
2723 op = get_base_address (op);
2724 if (op
2725 && TREE_CODE (op) == PARM_DECL)
2726 {
2727 int index = ipa_get_param_decl_index (info, op);
2728 gcc_assert (index >= 0);
2729 ipa_set_param_used (info, index, true);
2730 }
2731
2732 return false;
2733 }
2734
2735 /* Scan the statements in BB and inspect the uses of formal parameters. Store
2736 the findings in various structures of the associated ipa_node_params
2737 structure, such as parameter flags, notes etc. FBI holds various data about
2738 the function being analyzed. */
2739
2740 static void
2741 ipa_analyze_params_uses_in_bb (struct ipa_func_body_info *fbi, basic_block bb)
2742 {
2743 gimple_stmt_iterator gsi;
2744 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2745 {
2746 gimple *stmt = gsi_stmt (gsi);
2747
2748 if (is_gimple_debug (stmt))
2749 continue;
2750
2751 ipa_analyze_stmt_uses (fbi, stmt);
2752 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2753 visit_ref_for_mod_analysis,
2754 visit_ref_for_mod_analysis,
2755 visit_ref_for_mod_analysis);
2756 }
2757 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2758 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2759 visit_ref_for_mod_analysis,
2760 visit_ref_for_mod_analysis,
2761 visit_ref_for_mod_analysis);
2762 }
2763
2764 /* Calculate controlled uses of parameters of NODE. */
2765
2766 static void
2767 ipa_analyze_controlled_uses (struct cgraph_node *node)
2768 {
2769 class ipa_node_params *info = IPA_NODE_REF (node);
2770
2771 for (int i = 0; i < ipa_get_param_count (info); i++)
2772 {
2773 tree parm = ipa_get_param (info, i);
2774 int controlled_uses = 0;
2775
2776 /* For SSA regs see if parameter is used. For non-SSA we compute
2777 the flag during modification analysis. */
2778 if (is_gimple_reg (parm))
2779 {
2780 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
2781 parm);
2782 if (ddef && !has_zero_uses (ddef))
2783 {
2784 imm_use_iterator imm_iter;
2785 use_operand_p use_p;
2786
2787 ipa_set_param_used (info, i, true);
2788 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2789 if (!is_gimple_call (USE_STMT (use_p)))
2790 {
2791 if (!is_gimple_debug (USE_STMT (use_p)))
2792 {
2793 controlled_uses = IPA_UNDESCRIBED_USE;
2794 break;
2795 }
2796 }
2797 else
2798 controlled_uses++;
2799 }
2800 else
2801 controlled_uses = 0;
2802 }
2803 else
2804 controlled_uses = IPA_UNDESCRIBED_USE;
2805 ipa_set_controlled_uses (info, i, controlled_uses);
2806 }
2807 }
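/* A made-up example of the counting above: if the only uses of a parameter
   FN are the calls "qsort (a, n, sz, fn)" and "bsearch (k, a, n, sz, fn)",
   its number of controlled uses is 2; any other kind of use, e.g. storing FN
   to memory, makes it IPA_UNDESCRIBED_USE.  */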
2808
2809 /* Free the members of basic block info structure BI. */
2810
2811 static void
2812 free_ipa_bb_info (struct ipa_bb_info *bi)
2813 {
2814 bi->cg_edges.release ();
2815 bi->param_aa_statuses.release ();
2816 }
2817
2818 /* Dominator walker driving the analysis. */
2819
2820 class analysis_dom_walker : public dom_walker
2821 {
2822 public:
2823   analysis_dom_walker (struct ipa_func_body_info *fbi)
2824 : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}
2825
2826 virtual edge before_dom_children (basic_block);
2827
2828 private:
2829 struct ipa_func_body_info *m_fbi;
2830 };
2831
2832 edge
2833 analysis_dom_walker::before_dom_children (basic_block bb)
2834 {
2835 ipa_analyze_params_uses_in_bb (m_fbi, bb);
2836 ipa_compute_jump_functions_for_bb (m_fbi, bb);
2837 return NULL;
2838 }
2839
2840 /* Release body info FBI. */
2841
2842 void
2843 ipa_release_body_info (struct ipa_func_body_info *fbi)
2844 {
2845 int i;
2846 struct ipa_bb_info *bi;
2847
2848 FOR_EACH_VEC_ELT (fbi->bb_infos, i, bi)
2849 free_ipa_bb_info (bi);
2850 fbi->bb_infos.release ();
2851 }
2852
2853 /* Initialize the array describing properties of formal parameters
2854 of NODE, analyze their uses and compute jump functions associated
2855 with actual arguments of calls from within NODE. */
2856
2857 void
2858 ipa_analyze_node (struct cgraph_node *node)
2859 {
2860 struct ipa_func_body_info fbi;
2861 class ipa_node_params *info;
2862
2863 ipa_check_create_node_params ();
2864 ipa_check_create_edge_args ();
2865 info = IPA_NODE_REF_GET_CREATE (node);
2866
2867 if (info->analysis_done)
2868 return;
2869 info->analysis_done = 1;
2870
2871 if (ipa_func_spec_opts_forbid_analysis_p (node))
2872 {
2873 for (int i = 0; i < ipa_get_param_count (info); i++)
2874 {
2875 ipa_set_param_used (info, i, true);
2876 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2877 }
2878 return;
2879 }
2880
2881 struct function *func = DECL_STRUCT_FUNCTION (node->decl);
2882 push_cfun (func);
2883 calculate_dominance_info (CDI_DOMINATORS);
2884 ipa_initialize_node_params (node);
2885 ipa_analyze_controlled_uses (node);
2886
2887 fbi.node = node;
2888 fbi.info = IPA_NODE_REF (node);
2889 fbi.bb_infos = vNULL;
2890 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
2891 fbi.param_count = ipa_get_param_count (info);
2892 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
2893
2894 for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
2895 {
2896 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2897 bi->cg_edges.safe_push (cs);
2898 }
2899
2900 for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
2901 {
2902 ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
2903 bi->cg_edges.safe_push (cs);
2904 }
2905
2906 analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
2907
2908 ipa_release_body_info (&fbi);
2909 free_dominance_info (CDI_DOMINATORS);
2910 pop_cfun ();
2911 }
2912
2913 /* Update the jump functions associated with call graph edge E when the call
2914 graph edge CS is being inlined, assuming that E->caller is already (possibly
2915 indirectly) inlined into CS->callee and that E has not been inlined. */
2916
2917 static void
2918 update_jump_functions_after_inlining (struct cgraph_edge *cs,
2919 struct cgraph_edge *e)
2920 {
2921 class ipa_edge_args *top = IPA_EDGE_REF (cs);
2922 class ipa_edge_args *args = IPA_EDGE_REF (e);
2923 if (!args)
2924 return;
2925 int count = ipa_get_cs_argument_count (args);
2926 int i;
2927
2928 for (i = 0; i < count; i++)
2929 {
2930 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
2931 class ipa_polymorphic_call_context *dst_ctx
2932 = ipa_get_ith_polymorhic_call_context (args, i);
2933
2934 if (dst->agg.items)
2935 {
2936 struct ipa_agg_jf_item *item;
2937 int j;
2938
2939 FOR_EACH_VEC_ELT (*dst->agg.items, j, item)
2940 {
2941 int dst_fid;
2942 struct ipa_jump_func *src;
2943
2944 if (item->jftype != IPA_JF_PASS_THROUGH
2945 && item->jftype != IPA_JF_LOAD_AGG)
2946 continue;
2947
2948 dst_fid = item->value.pass_through.formal_id;
2949 if (!top || dst_fid >= ipa_get_cs_argument_count (top))
2950 {
2951 item->jftype = IPA_JF_UNKNOWN;
2952 continue;
2953 }
2954
2955 item->value.pass_through.formal_id = -1;
2956 src = ipa_get_ith_jump_func (top, dst_fid);
2957 if (src->type == IPA_JF_CONST)
2958 {
2959 if (item->jftype == IPA_JF_PASS_THROUGH
2960 && item->value.pass_through.operation == NOP_EXPR)
2961 {
2962 item->jftype = IPA_JF_CONST;
2963 item->value.constant = src->value.constant.value;
2964 continue;
2965 }
2966 }
2967 else if (src->type == IPA_JF_PASS_THROUGH
2968 && src->value.pass_through.operation == NOP_EXPR)
2969 {
2970 if (item->jftype == IPA_JF_PASS_THROUGH
2971 || !item->value.load_agg.by_ref
2972 || src->value.pass_through.agg_preserved)
2973 item->value.pass_through.formal_id
2974 = src->value.pass_through.formal_id;
2975 }
2976 else if (src->type == IPA_JF_ANCESTOR)
2977 {
2978 if (item->jftype == IPA_JF_PASS_THROUGH)
2979 {
2980 if (!src->value.ancestor.offset)
2981 item->value.pass_through.formal_id
2982 = src->value.ancestor.formal_id;
2983 }
2984 else if (src->value.ancestor.agg_preserved)
2985 {
2986 gcc_checking_assert (item->value.load_agg.by_ref);
2987
2988 item->value.pass_through.formal_id
2989 = src->value.ancestor.formal_id;
2990 item->value.load_agg.offset
2991 += src->value.ancestor.offset;
2992 }
2993 }
2994
2995 if (item->value.pass_through.formal_id < 0)
2996 item->jftype = IPA_JF_UNKNOWN;
2997 }
2998 }
2999
3000 if (!top)
3001 {
3002 ipa_set_jf_unknown (dst);
3003 continue;
3004 }
3005
3006 if (dst->type == IPA_JF_ANCESTOR)
3007 {
3008 struct ipa_jump_func *src;
3009 int dst_fid = dst->value.ancestor.formal_id;
3010 class ipa_polymorphic_call_context *src_ctx
3011 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3012
3013 /* Variable number of arguments can cause havoc if we try to access
3014 one that does not exist in the inlined edge. So make sure we
3015 don't. */
3016 if (dst_fid >= ipa_get_cs_argument_count (top))
3017 {
3018 ipa_set_jf_unknown (dst);
3019 continue;
3020 }
3021
3022 src = ipa_get_ith_jump_func (top, dst_fid);
3023
3024 if (src_ctx && !src_ctx->useless_p ())
3025 {
3026 class ipa_polymorphic_call_context ctx = *src_ctx;
3027
3028 /* TODO: Make type preserved safe WRT contexts. */
3029 if (!ipa_get_jf_ancestor_type_preserved (dst))
3030 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3031 ctx.offset_by (dst->value.ancestor.offset);
3032 if (!ctx.useless_p ())
3033 {
3034 if (!dst_ctx)
3035 {
3036 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3037 count);
3038 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3039 }
3040
3041 dst_ctx->combine_with (ctx);
3042 }
3043 }
3044
3045 /* Parameter and argument in ancestor jump function must be pointer
3046 type, which means access to aggregate must be by-reference. */
3047 gcc_assert (!src->agg.items || src->agg.by_ref);
3048
3049 if (src->agg.items && dst->value.ancestor.agg_preserved)
3050 {
3051 struct ipa_agg_jf_item *item;
3052 int j;
3053
3054 /* Currently we do not produce clobber aggregate jump functions,
3055 replace with merging when we do. */
3056 gcc_assert (!dst->agg.items);
3057
3058 dst->agg.items = vec_safe_copy (src->agg.items);
3059 dst->agg.by_ref = src->agg.by_ref;
3060 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
3061 item->offset -= dst->value.ancestor.offset;
3062 }
3063
3064 if (src->type == IPA_JF_PASS_THROUGH
3065 && src->value.pass_through.operation == NOP_EXPR)
3066 {
3067 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
3068 dst->value.ancestor.agg_preserved &=
3069 src->value.pass_through.agg_preserved;
3070 }
3071 else if (src->type == IPA_JF_ANCESTOR)
3072 {
3073 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
3074 dst->value.ancestor.offset += src->value.ancestor.offset;
3075 dst->value.ancestor.agg_preserved &=
3076 src->value.ancestor.agg_preserved;
3077 dst->value.ancestor.keep_null |= src->value.ancestor.keep_null;
3078 }
3079 else
3080 ipa_set_jf_unknown (dst);
3081 }
3082 else if (dst->type == IPA_JF_PASS_THROUGH)
3083 {
3084 struct ipa_jump_func *src;
3085 /* We must check range due to calls with variable number of arguments
3086 and we cannot combine jump functions with operations. */
3087 if (dst->value.pass_through.operation == NOP_EXPR
3088 && (top && dst->value.pass_through.formal_id
3089 < ipa_get_cs_argument_count (top)))
3090 {
3091 int dst_fid = dst->value.pass_through.formal_id;
3092 src = ipa_get_ith_jump_func (top, dst_fid);
3093 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
3094 class ipa_polymorphic_call_context *src_ctx
3095 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
3096
3097 if (src_ctx && !src_ctx->useless_p ())
3098 {
3099 class ipa_polymorphic_call_context ctx = *src_ctx;
3100
3101 /* TODO: Make type preserved safe WRT contexts. */
3102 if (!ipa_get_jf_pass_through_type_preserved (dst))
3103 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
3104 if (!ctx.useless_p ())
3105 {
3106 if (!dst_ctx)
3107 {
3108 vec_safe_grow_cleared (args->polymorphic_call_contexts,
3109 count);
3110 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
3111 }
3112 dst_ctx->combine_with (ctx);
3113 }
3114 }
3115 switch (src->type)
3116 {
3117 case IPA_JF_UNKNOWN:
3118 ipa_set_jf_unknown (dst);
3119 break;
3120 case IPA_JF_CONST:
3121 ipa_set_jf_cst_copy (dst, src);
3122 break;
3123
3124 case IPA_JF_PASS_THROUGH:
3125 {
3126 int formal_id = ipa_get_jf_pass_through_formal_id (src);
3127 enum tree_code operation;
3128 operation = ipa_get_jf_pass_through_operation (src);
3129
3130 if (operation == NOP_EXPR)
3131 {
3132 bool agg_p;
3133 agg_p = dst_agg_p
3134 && ipa_get_jf_pass_through_agg_preserved (src);
3135 ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
3136 }
3137 else if (TREE_CODE_CLASS (operation) == tcc_unary)
3138 ipa_set_jf_unary_pass_through (dst, formal_id, operation);
3139 else
3140 {
3141 tree operand = ipa_get_jf_pass_through_operand (src);
3142 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
3143 operation);
3144 }
3145 break;
3146 }
3147 case IPA_JF_ANCESTOR:
3148 {
3149 bool agg_p;
3150 agg_p = dst_agg_p
3151 && ipa_get_jf_ancestor_agg_preserved (src);
3152 ipa_set_ancestor_jf (dst,
3153 ipa_get_jf_ancestor_offset (src),
3154 ipa_get_jf_ancestor_formal_id (src),
3155 agg_p,
3156 ipa_get_jf_ancestor_keep_null (src));
3157 break;
3158 }
3159 default:
3160 gcc_unreachable ();
3161 }
3162
3163 if (src->agg.items
3164 && (dst_agg_p || !src->agg.by_ref))
3165 {
3166 /* Currently we do not produce clobber aggregate jump
3167 functions, replace with merging when we do. */
3168 gcc_assert (!dst->agg.items);
3169
3170 dst->agg.by_ref = src->agg.by_ref;
3171 dst->agg.items = vec_safe_copy (src->agg.items);
3172 }
3173 }
3174 else
3175 ipa_set_jf_unknown (dst);
3176 }
3177 }
3178 }
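/* A hypothetical illustration of the composition performed above: if the
   jump function of E was a simple pass-through of the first formal parameter
   of the function it was computed in, and the jump function of CS for that
   argument is itself a simple pass-through of formal 1 of CS->caller, then
   after the update the jump function of E is a simple pass-through with
   formal_id 1, i.e. it refers directly to a formal parameter of CS->caller.  */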
3179
3180 /* If TARGET is an addr_expr of a function declaration, make it the
3181    (speculative, if SPECULATIVE is true) destination of an indirect edge IE
3182    and return the edge.  Otherwise, return NULL. */
3183
3184 struct cgraph_edge *
3185 ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
3186 bool speculative)
3187 {
3188 struct cgraph_node *callee;
3189 bool unreachable = false;
3190
3191 if (TREE_CODE (target) == ADDR_EXPR)
3192 target = TREE_OPERAND (target, 0);
3193 if (TREE_CODE (target) != FUNCTION_DECL)
3194 {
3195 target = canonicalize_constructor_val (target, NULL);
3196 if (!target || TREE_CODE (target) != FUNCTION_DECL)
3197 {
3198 /* Member pointer call that goes through a VMT lookup. */
3199 if (ie->indirect_info->member_ptr
3200 	    /* Or if the target is not an invariant expression and we do not
3201 	       know whether it will evaluate to a function at runtime.
3202 	       This can happen when folding through &VAR, where &VAR
3203 	       is IP invariant, but VAR itself is not.
3204
3205 TODO: Revisit this when GCC 5 is branched. It seems that
3206 member_ptr check is not needed and that we may try to fold
3207 the expression and see if VAR is readonly. */
3208 || !is_gimple_ip_invariant (target))
3209 {
3210 if (dump_enabled_p ())
3211 {
3212 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3213 				     "discovered direct call to a non-invariant target in %s\n",
3214 ie->caller->dump_name ());
3215 }
3216 return NULL;
3217 }
3218
3219
3220 if (dump_enabled_p ())
3221 {
3222 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3223 "discovered direct call to non-function in %s, "
3224 "making it __builtin_unreachable\n",
3225 ie->caller->dump_name ());
3226 }
3227
3228 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3229 callee = cgraph_node::get_create (target);
3230 unreachable = true;
3231 }
3232 else
3233 callee = cgraph_node::get (target);
3234 }
3235 else
3236 callee = cgraph_node::get (target);
3237
3238   /* Because may-edges are not explicitly represented and the vtable may be
3239      external, we may create the first reference to the object in the unit. */
3240 if (!callee || callee->inlined_to)
3241 {
3242
3243       /* We had better make sure we can refer to it.
3244 	 In the case of static functions we are out of luck, since we have
3245 	 already removed their bodies.  In the case of public functions we may
3246 	 or may not introduce the reference. */
3247 if (!canonicalize_constructor_val (target, NULL)
3248 || !TREE_PUBLIC (target))
3249 {
3250 if (dump_file)
3251 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
3252 "(%s -> %s) but cannot refer to it. Giving up.\n",
3253 ie->caller->dump_name (),
3254 ie->callee->dump_name ());
3255 return NULL;
3256 }
3257 callee = cgraph_node::get_create (target);
3258 }
3259
3260   /* If the edge is already speculative, only dump whether the newly
       discovered target agrees with the existing speculation. */
3261 if (speculative && ie->speculative)
3262 {
3263 if (dump_file)
3264 {
3265 cgraph_edge *e2 = ie->speculative_call_for_target (callee);
3266 if (!e2)
3267 {
3268 if (dump_file)
3269 fprintf (dump_file, "ipa-prop: Discovered call to a "
3270 "speculative target (%s -> %s) but the call is "
3271 		     "already speculated to a different target. "
3272 "Giving up.\n",
3273 ie->caller->dump_name (), callee->dump_name ());
3274 }
3275 else
3276 {
3277 if (dump_file)
3278 fprintf (dump_file,
3279 "ipa-prop: Discovered call to a speculative target "
3280 		       "(%s -> %s); this agrees with the previous speculation.\n",
3281 ie->caller->dump_name (), callee->dump_name ());
3282 }
3283 }
3284 return NULL;
3285 }
3286
3287 if (!dbg_cnt (devirt))
3288 return NULL;
3289
3290 ipa_check_create_node_params ();
3291
3292   /* We cannot make edges to inline clones.  It is a bug if someone removed
3293      the cgraph node too early. */
3294 gcc_assert (!callee->inlined_to);
3295
3296 if (dump_file && !unreachable)
3297 {
3298 fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
3299 "(%s -> %s), for stmt ",
3300 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
3301 speculative ? "speculative" : "known",
3302 ie->caller->dump_name (),
3303 callee->dump_name ());
3304 if (ie->call_stmt)
3305 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
3306 else
3307 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
3308 }
3309 if (dump_enabled_p ())
3310 {
3311 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, ie->call_stmt,
3312 "converting indirect call in %s to direct call to %s\n",
3313 ie->caller->dump_name (), callee->dump_name ());
3314 }
3315 if (!speculative)
3316 {
3317 struct cgraph_edge *orig = ie;
3318 ie = cgraph_edge::make_direct (ie, callee);
3319       /* If we resolved a speculative edge, the cost is already up to date
3320 	 for the direct call (adjusted by inline_edge_duplication_hook). */
3321 if (ie == orig)
3322 {
3323 ipa_call_summary *es = ipa_call_summaries->get (ie);
3324 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
3325 - eni_size_weights.call_cost);
3326 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
3327 - eni_time_weights.call_cost);
3328 }
3329 }
3330 else
3331 {
3332 if (!callee->can_be_discarded_p ())
3333 {
3334 cgraph_node *alias;
3335 alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
3336 if (alias)
3337 callee = alias;
3338 }
3339 /* make_speculative will update ie's cost to direct call cost. */
3340 ie = ie->make_speculative
3341 (callee, ie->count.apply_scale (8, 10));
3342 }
3343
3344 return ie;
3345 }
3346
3347 /* Attempt to locate an interprocedural constant at a given REQ_OFFSET in
3348 CONSTRUCTOR and return it. Return NULL if the search fails for some
3349 reason. */
3350
3351 static tree
3352 find_constructor_constant_at_offset (tree constructor, HOST_WIDE_INT req_offset)
3353 {
3354 tree type = TREE_TYPE (constructor);
3355 if (TREE_CODE (type) != ARRAY_TYPE
3356 && TREE_CODE (type) != RECORD_TYPE)
3357 return NULL;
3358
3359 unsigned ix;
3360 tree index, val;
3361 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (constructor), ix, index, val)
3362 {
3363 HOST_WIDE_INT elt_offset;
3364 if (TREE_CODE (type) == ARRAY_TYPE)
3365 {
3366 offset_int off;
3367 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (type));
3368 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3369
3370 if (index)
3371 {
3372 if (TREE_CODE (index) == RANGE_EXPR)
3373 off = wi::to_offset (TREE_OPERAND (index, 0));
3374 else
3375 off = wi::to_offset (index);
3376 if (TYPE_DOMAIN (type) && TYPE_MIN_VALUE (TYPE_DOMAIN (type)))
3377 {
3378 tree low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
3379 gcc_assert (TREE_CODE (unit_size) == INTEGER_CST);
3380 off = wi::sext (off - wi::to_offset (low_bound),
3381 TYPE_PRECISION (TREE_TYPE (index)));
3382 }
3383 off *= wi::to_offset (unit_size);
3384 /* ??? Handle more than just the first index of a
3385 RANGE_EXPR. */
3386 }
3387 else
3388 off = wi::to_offset (unit_size) * ix;
3389
3390 off = wi::lshift (off, LOG2_BITS_PER_UNIT);
3391 if (!wi::fits_shwi_p (off) || wi::neg_p (off))
3392 continue;
3393 elt_offset = off.to_shwi ();
3394 }
3395 else if (TREE_CODE (type) == RECORD_TYPE)
3396 {
3397 gcc_checking_assert (index && TREE_CODE (index) == FIELD_DECL);
3398 if (DECL_BIT_FIELD (index))
3399 continue;
3400 elt_offset = int_bit_position (index);
3401 }
3402 else
3403 gcc_unreachable ();
3404
3405 if (elt_offset > req_offset)
3406 return NULL;
3407
3408 if (TREE_CODE (val) == CONSTRUCTOR)
3409 return find_constructor_constant_at_offset (val,
3410 req_offset - elt_offset);
3411
3412 if (elt_offset == req_offset
3413 && is_gimple_reg_type (TREE_TYPE (val))
3414 && is_gimple_ip_invariant (val))
3415 return val;
3416 }
3417 return NULL;
3418 }
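/* A small worked example (hypothetical declaration, assuming 32-bit int):
   given "static const int tbl[4] = { 10, 20, 30, 40 };", a REQ_OFFSET of 64
   bits lands exactly on the element with index 2 and the function above
   returns the constant 30, whereas a REQ_OFFSET matching no element (or one
   that points into a bit-field of a RECORD_TYPE) yields NULL.  */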
3419
3420 /* Check whether SCALAR could be used to look up an aggregate interprocedural
3421 invariant from a static constructor and if so, return it. Otherwise return
3422 NULL. */
3423
3424 static tree
3425 ipa_find_agg_cst_from_init (tree scalar, HOST_WIDE_INT offset, bool by_ref)
3426 {
3427 if (by_ref)
3428 {
3429 if (TREE_CODE (scalar) != ADDR_EXPR)
3430 return NULL;
3431 scalar = TREE_OPERAND (scalar, 0);
3432 }
3433
3434 if (!VAR_P (scalar)
3435 || !is_global_var (scalar)
3436 || !TREE_READONLY (scalar)
3437 || !DECL_INITIAL (scalar)
3438 || TREE_CODE (DECL_INITIAL (scalar)) != CONSTRUCTOR)
3439 return NULL;
3440
3441 return find_constructor_constant_at_offset (DECL_INITIAL (scalar), offset);
3442 }
3443
3444 /* Retrieve the value at the given OFFSET either from AGG, a set of known
3445    offset/value pairs describing an aggregate, or from the static initializer
3446    of SCALAR (which can be NULL); return NULL if there is none.  BY_REF
3447    specifies whether the value has to be passed by reference or by value.  If
3448    FROM_GLOBAL_CONSTANT is non-NULL, the boolean it points to is set to true
3449    if the value comes from an initializer of a constant. */
3450
3451 tree
3452 ipa_find_agg_cst_for_param (struct ipa_agg_value_set *agg, tree scalar,
3453 HOST_WIDE_INT offset, bool by_ref,
3454 bool *from_global_constant)
3455 {
3456 struct ipa_agg_value *item;
3457 int i;
3458
3459 if (scalar)
3460 {
3461 tree res = ipa_find_agg_cst_from_init (scalar, offset, by_ref);
3462 if (res)
3463 {
3464 if (from_global_constant)
3465 *from_global_constant = true;
3466 return res;
3467 }
3468 }
3469
3470 if (!agg
3471 || by_ref != agg->by_ref)
3472 return NULL;
3473
3474 FOR_EACH_VEC_ELT (agg->items, i, item)
3475 if (item->offset == offset)
3476 {
3477 /* Currently we do not have clobber values, return NULL for them once
3478 we do. */
3479 gcc_checking_assert (is_gimple_ip_invariant (item->value));
3480 if (from_global_constant)
3481 *from_global_constant = false;
3482 return item->value;
3483 }
3484 return NULL;
3485 }
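/* In other words, the lookup has two sources: the static initializer of
   SCALAR, which is tried first and, on success, sets *FROM_GLOBAL_CONSTANT
   (when non-NULL) to true, and the list of known aggregate contents in AGG,
   matched by exact offset with *FROM_GLOBAL_CONSTANT set to false.  A
   mismatch between BY_REF and AGG->by_ref makes the aggregate part of the
   search fail.  */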
3486
3487 /* Remove a reference to SYMBOL from the list of references of a node given by
3488 reference description RDESC. Return true if the reference has been
3489 successfully found and removed. */
3490
3491 static bool
3492 remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
3493 {
3494 struct ipa_ref *to_del;
3495 struct cgraph_edge *origin;
3496
3497 origin = rdesc->cs;
3498 if (!origin)
3499 return false;
3500 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3501 origin->lto_stmt_uid);
3502 if (!to_del)
3503 return false;
3504
3505 to_del->remove_reference ();
3506 if (dump_file)
3507 fprintf (dump_file, "ipa-prop: Removed a reference from %s to %s.\n",
3508 origin->caller->dump_name (), symbol->dump_name ());
3509 return true;
3510 }
3511
3512 /* If JFUNC has a reference description with refcount different from
3513 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3514 NULL. JFUNC must be a constant jump function. */
3515
3516 static struct ipa_cst_ref_desc *
3517 jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3518 {
3519 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3520 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3521 return rdesc;
3522 else
3523 return NULL;
3524 }
3525
3526 /* If the value of constant jump function JFUNC is an address of a function
3527 declaration, return the associated call graph node. Otherwise return
3528 NULL. */
3529
3530 static cgraph_node *
3531 cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3532 {
3533 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3534 tree cst = ipa_get_jf_constant (jfunc);
3535 if (TREE_CODE (cst) != ADDR_EXPR
3536 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3537 return NULL;
3538
3539 return cgraph_node::get (TREE_OPERAND (cst, 0));
3540 }
3541
3542
3543 /* If JFUNC is a constant jump function with a usable rdesc, decrement its
3544 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3545 the edge specified in the rdesc. Return false if either the symbol or the
3546 reference could not be found, otherwise return true. */
3547
3548 static bool
3549 try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3550 {
3551 struct ipa_cst_ref_desc *rdesc;
3552 if (jfunc->type == IPA_JF_CONST
3553 && (rdesc = jfunc_rdesc_usable (jfunc))
3554 && --rdesc->refcount == 0)
3555 {
3556 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
3557 if (!symbol)
3558 return false;
3559
3560 return remove_described_reference (symbol, rdesc);
3561 }
3562 return true;
3563 }
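/* Reference descriptions (ipa_cst_ref_desc) track how many described uses a
   constant ADDR_EXPR jump function still has.  When the count reaches zero,
   the corresponding IPA reference from the caller to the function can be
   dropped, which in turn may let other IPA passes remove or localize the
   referred function.  */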
3564
3565 /* Try to find a destination for indirect edge IE that corresponds to a simple
3566 call or a call of a member function pointer and where the destination is a
3567 pointer formal parameter described by jump function JFUNC. TARGET_TYPE is
3568 the type of the parameter to which the result of JFUNC is passed. If it can
3569 be determined, return the newly direct edge, otherwise return NULL.
3570 NEW_ROOT and NEW_ROOT_INFO is the node and its info that JFUNC lattices are
3571 relative to. */
3572
3573 static struct cgraph_edge *
3574 try_make_edge_direct_simple_call (struct cgraph_edge *ie,
3575 struct ipa_jump_func *jfunc, tree target_type,
3576 struct cgraph_node *new_root,
3577 class ipa_node_params *new_root_info)
3578 {
3579 struct cgraph_edge *cs;
3580 tree target;
3581 bool agg_contents = ie->indirect_info->agg_contents;
3582 tree scalar = ipa_value_from_jfunc (new_root_info, jfunc, target_type);
3583 if (agg_contents)
3584 {
3585 bool from_global_constant;
3586 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3587 new_root,
3588 &jfunc->agg);
3589 target = ipa_find_agg_cst_for_param (&agg, scalar,
3590 ie->indirect_info->offset,
3591 ie->indirect_info->by_ref,
3592 &from_global_constant);
3593 agg.release ();
3594 if (target
3595 && !from_global_constant
3596 && !ie->indirect_info->guaranteed_unmodified)
3597 return NULL;
3598 }
3599 else
3600 target = scalar;
3601 if (!target)
3602 return NULL;
3603 cs = ipa_make_edge_direct_to_target (ie, target);
3604
3605 if (cs && !agg_contents)
3606 {
3607 bool ok;
3608 gcc_checking_assert (cs->callee
3609 && (cs != ie
3610 || jfunc->type != IPA_JF_CONST
3611 || !cgraph_node_for_jfunc (jfunc)
3612 || cs->callee == cgraph_node_for_jfunc (jfunc)));
3613 ok = try_decrement_rdesc_refcount (jfunc);
3614 gcc_checking_assert (ok);
3615 }
3616
3617 return cs;
3618 }
3619
3620 /* Return the target to be used in cases of impossible devirtualization. IE
3621 and target (the latter can be NULL) are dumped when dumping is enabled. */
3622
3623 tree
3624 ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
3625 {
3626 if (dump_file)
3627 {
3628 if (target)
3629 fprintf (dump_file,
3630 "Type inconsistent devirtualization: %s->%s\n",
3631 ie->caller->dump_name (),
3632 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3633 else
3634 fprintf (dump_file,
3635 "No devirtualization target in %s\n",
3636 ie->caller->dump_name ());
3637 }
3638 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
3639 cgraph_node::get_create (new_target);
3640 return new_target;
3641 }
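/* The effect is that provably impossible virtual calls are redirected to
   __builtin_unreachable, so that later passes may treat the call path as
   unreachable instead of keeping an indirect call around.  */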
3642
3643 /* Try to find a destination for indirect edge IE that corresponds to a virtual
3644 call based on a formal parameter which is described by jump function JFUNC
3645 and if it can be determined, make it direct and return the direct edge.
3646 Otherwise, return NULL. CTX describes the polymorphic context that the
3647 parameter the call is based on brings along with it. NEW_ROOT and
3648 NEW_ROOT_INFO is the node and its info that JFUNC lattices are relative
3649 to. */
3650
3651 static struct cgraph_edge *
3652 try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
3653 struct ipa_jump_func *jfunc,
3654 class ipa_polymorphic_call_context ctx,
3655 struct cgraph_node *new_root,
3656 class ipa_node_params *new_root_info)
3657 {
3658 tree target = NULL;
3659 bool speculative = false;
3660
3661 if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
3662 return NULL;
3663
3664 gcc_assert (!ie->indirect_info->by_ref);
3665
3666 /* Try to do lookup via known virtual table pointer value. */
3667 if (!ie->indirect_info->vptr_changed
3668 || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
3669 {
3670 tree vtable;
3671 unsigned HOST_WIDE_INT offset;
3672 tree scalar = (jfunc->type == IPA_JF_CONST) ? ipa_get_jf_constant (jfunc)
3673 : NULL;
3674 ipa_agg_value_set agg = ipa_agg_value_set_from_jfunc (new_root_info,
3675 new_root,
3676 &jfunc->agg);
3677 tree t = ipa_find_agg_cst_for_param (&agg, scalar,
3678 ie->indirect_info->offset,
3679 true);
3680 agg.release ();
3681 if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
3682 {
3683 bool can_refer;
3684 t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
3685 vtable, offset, &can_refer);
3686 if (can_refer)
3687 {
3688 if (!t
3689 || fndecl_built_in_p (t, BUILT_IN_UNREACHABLE)
3690 || !possible_polymorphic_call_target_p
3691 (ie, cgraph_node::get (t)))
3692 {
3693 /* Do not speculate builtin_unreachable, it is stupid! */
3694 if (!ie->indirect_info->vptr_changed)
3695 target = ipa_impossible_devirt_target (ie, target);
3696 else
3697 target = NULL;
3698 }
3699 else
3700 {
3701 target = t;
3702 speculative = ie->indirect_info->vptr_changed;
3703 }
3704 }
3705 }
3706 }
3707
3708 ipa_polymorphic_call_context ie_context (ie);
3709 vec <cgraph_node *>targets;
3710 bool final;
3711
3712 ctx.offset_by (ie->indirect_info->offset);
3713 if (ie->indirect_info->vptr_changed)
3714 ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
3715 ie->indirect_info->otr_type);
3716 ctx.combine_with (ie_context, ie->indirect_info->otr_type);
3717 targets = possible_polymorphic_call_targets
3718 (ie->indirect_info->otr_type,
3719 ie->indirect_info->otr_token,
3720 ctx, &final);
3721 if (final && targets.length () <= 1)
3722 {
3723 speculative = false;
3724 if (targets.length () == 1)
3725 target = targets[0]->decl;
3726 else
3727 target = ipa_impossible_devirt_target (ie, NULL_TREE);
3728 }
3729 else if (!target && opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively)
3730 && !ie->speculative && ie->maybe_hot_p ())
3731 {
3732 cgraph_node *n;
3733 n = try_speculative_devirtualization (ie->indirect_info->otr_type,
3734 ie->indirect_info->otr_token,
3735 ie->indirect_info->context);
3736 if (n)
3737 {
3738 target = n->decl;
3739 speculative = true;
3740 }
3741 }
3742
3743 if (target)
3744 {
3745 if (!possible_polymorphic_call_target_p
3746 (ie, cgraph_node::get_create (target)))
3747 {
3748 if (speculative)
3749 return NULL;
3750 target = ipa_impossible_devirt_target (ie, target);
3751 }
3752 return ipa_make_edge_direct_to_target (ie, target, speculative);
3753 }
3754 else
3755 return NULL;
3756 }
3757
3758 /* Update the param called notes associated with NODE when CS is being inlined,
3759 assuming NODE is (potentially indirectly) inlined into CS->callee.
3760 Moreover, if the callee is discovered to be constant, create a new cgraph
3761 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
3762 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
3763
3764 static bool
3765 update_indirect_edges_after_inlining (struct cgraph_edge *cs,
3766 struct cgraph_node *node,
3767 vec<cgraph_edge *> *new_edges)
3768 {
3769 class ipa_edge_args *top;
3770 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
3771 struct cgraph_node *new_root;
3772 class ipa_node_params *new_root_info, *inlined_node_info;
3773 bool res = false;
3774
3775 ipa_check_create_edge_args ();
3776 top = IPA_EDGE_REF (cs);
3777 new_root = cs->caller->inlined_to
3778 ? cs->caller->inlined_to : cs->caller;
3779 new_root_info = IPA_NODE_REF (new_root);
3780 inlined_node_info = IPA_NODE_REF (cs->callee->function_symbol ());
3781
3782 for (ie = node->indirect_calls; ie; ie = next_ie)
3783 {
3784 class cgraph_indirect_call_info *ici = ie->indirect_info;
3785 struct ipa_jump_func *jfunc;
3786 int param_index;
3787
3788 next_ie = ie->next_callee;
3789
3790 if (ici->param_index == -1)
3791 continue;
3792
3793 /* We must check range due to calls with variable number of arguments: */
3794 if (!top || ici->param_index >= ipa_get_cs_argument_count (top))
3795 {
3796 ici->param_index = -1;
3797 continue;
3798 }
3799
3800 param_index = ici->param_index;
3801 jfunc = ipa_get_ith_jump_func (top, param_index);
3802
3803 auto_vec<cgraph_node *, 4> spec_targets;
3804 if (ie->speculative)
3805 for (cgraph_edge *direct = ie->first_speculative_call_target ();
3806 direct;
3807 direct = direct->next_speculative_call_target ())
3808 spec_targets.safe_push (direct->callee);
3809
3810 if (!opt_for_fn (node->decl, flag_indirect_inlining))
3811 new_direct_edge = NULL;
3812 else if (ici->polymorphic)
3813 {
3814 ipa_polymorphic_call_context ctx;
3815 ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
3816 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx,
3817 new_root,
3818 new_root_info);
3819 }
3820 else
3821 {
3822 tree target_type = ipa_get_type (inlined_node_info, param_index);
3823 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
3824 target_type,
3825 new_root,
3826 new_root_info);
3827 }
3828
3829 /* If speculation was removed, then we need to do nothing. */
3830 if (new_direct_edge && new_direct_edge != ie
3831 && spec_targets.contains (new_direct_edge->callee))
3832 {
3833 new_direct_edge->indirect_inlining_edge = 1;
3834 top = IPA_EDGE_REF (cs);
3835 res = true;
3836 if (!new_direct_edge->speculative)
3837 continue;
3838 }
3839 else if (new_direct_edge)
3840 {
3841 new_direct_edge->indirect_inlining_edge = 1;
3842 if (new_edges)
3843 {
3844 new_edges->safe_push (new_direct_edge);
3845 res = true;
3846 }
3847 top = IPA_EDGE_REF (cs);
3848 /* If speculative edge was introduced we still need to update
3849 call info of the indirect edge. */
3850 if (!new_direct_edge->speculative)
3851 continue;
3852 }
3853 if (jfunc->type == IPA_JF_PASS_THROUGH
3854 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
3855 {
3856 if (ici->agg_contents
3857 && !ipa_get_jf_pass_through_agg_preserved (jfunc)
3858 && !ici->polymorphic)
3859 ici->param_index = -1;
3860 else
3861 {
3862 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
3863 if (ici->polymorphic
3864 && !ipa_get_jf_pass_through_type_preserved (jfunc))
3865 ici->vptr_changed = true;
3866 ipa_set_param_used_by_indirect_call (new_root_info,
3867 ici->param_index, true);
3868 if (ici->polymorphic)
3869 ipa_set_param_used_by_polymorphic_call (new_root_info,
3870 ici->param_index, true);
3871 }
3872 }
3873 else if (jfunc->type == IPA_JF_ANCESTOR)
3874 {
3875 if (ici->agg_contents
3876 && !ipa_get_jf_ancestor_agg_preserved (jfunc)
3877 && !ici->polymorphic)
3878 ici->param_index = -1;
3879 else
3880 {
3881 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
3882 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
3883 if (ici->polymorphic
3884 && !ipa_get_jf_ancestor_type_preserved (jfunc))
3885 ici->vptr_changed = true;
3886 ipa_set_param_used_by_indirect_call (new_root_info,
3887 ici->param_index, true);
3888 if (ici->polymorphic)
3889 ipa_set_param_used_by_polymorphic_call (new_root_info,
3890 ici->param_index, true);
3891 }
3892 }
3893 else
3894 /* Either we can find a destination for this edge now or never. */
3895 ici->param_index = -1;
3896 }
3897
3898 return res;
3899 }
3900
3901 /* Recursively traverse subtree of NODE (including node) made of inlined
3902 cgraph_edges when CS has been inlined and invoke
3903 update_indirect_edges_after_inlining on all nodes and
3904 update_jump_functions_after_inlining on all non-inlined edges that lead out
3905 of this subtree. Newly discovered indirect edges will be added to
3906 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
3907 created. */
3908
3909 static bool
3910 propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3911 struct cgraph_node *node,
3912 vec<cgraph_edge *> *new_edges)
3913 {
3914 struct cgraph_edge *e;
3915 bool res;
3916
3917 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3918
3919 for (e = node->callees; e; e = e->next_callee)
3920 if (!e->inline_failed)
3921 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3922 else
3923 update_jump_functions_after_inlining (cs, e);
3924 for (e = node->indirect_calls; e; e = e->next_callee)
3925 update_jump_functions_after_inlining (cs, e);
3926
3927 return res;
3928 }
3929
3930 /* Combine two controlled uses counts as done during inlining. */
3931
3932 static int
3933 combine_controlled_uses_counters (int c, int d)
3934 {
3935 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3936 return IPA_UNDESCRIBED_USE;
3937 else
3938 return c + d - 1;
3939 }
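/* One way to read the formula: if the caller's parameter has 3 described
   controlled uses and the callee's corresponding parameter has 2, the
   combined count is 3 + 2 - 1 = 4, because the use constituted by the
   inlined call site itself is consumed by inlining.  IPA_UNDESCRIBED_USE
   is absorbing; combining anything with it stays undescribed.  */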
3940
3941 /* Propagate number of controlled uses from CS->callee to the new root of the
3942 tree of inlined nodes. */
3943
3944 static void
3945 propagate_controlled_uses (struct cgraph_edge *cs)
3946 {
3947 class ipa_edge_args *args = IPA_EDGE_REF (cs);
3948 if (!args)
3949 return;
3950 struct cgraph_node *new_root = cs->caller->inlined_to
3951 ? cs->caller->inlined_to : cs->caller;
3952 class ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
3953 class ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
3954 int count, i;
3955
3956 if (!old_root_info)
3957 return;
3958
3959 count = MIN (ipa_get_cs_argument_count (args),
3960 ipa_get_param_count (old_root_info));
3961 for (i = 0; i < count; i++)
3962 {
3963 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
3964 struct ipa_cst_ref_desc *rdesc;
3965
3966 if (jf->type == IPA_JF_PASS_THROUGH)
3967 {
3968 int src_idx, c, d;
3969 src_idx = ipa_get_jf_pass_through_formal_id (jf);
3970 c = ipa_get_controlled_uses (new_root_info, src_idx);
3971 d = ipa_get_controlled_uses (old_root_info, i);
3972
3973 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
3974 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
3975 c = combine_controlled_uses_counters (c, d);
3976 ipa_set_controlled_uses (new_root_info, src_idx, c);
3977 if (c == 0 && new_root_info->ipcp_orig_node)
3978 {
3979 struct cgraph_node *n;
3980 struct ipa_ref *ref;
3981 tree t = new_root_info->known_csts[src_idx];
3982
3983 if (t && TREE_CODE (t) == ADDR_EXPR
3984 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
3985 && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
3986 && (ref = new_root->find_reference (n, NULL, 0)))
3987 {
3988 if (dump_file)
3989 fprintf (dump_file, "ipa-prop: Removing cloning-created "
3990 "reference from %s to %s.\n",
3991 new_root->dump_name (),
3992 n->dump_name ());
3993 ref->remove_reference ();
3994 }
3995 }
3996 }
3997 else if (jf->type == IPA_JF_CONST
3998 && (rdesc = jfunc_rdesc_usable (jf)))
3999 {
4000 int d = ipa_get_controlled_uses (old_root_info, i);
4001 int c = rdesc->refcount;
4002 rdesc->refcount = combine_controlled_uses_counters (c, d);
4003 if (rdesc->refcount == 0)
4004 {
4005 tree cst = ipa_get_jf_constant (jf);
4006 struct cgraph_node *n;
4007 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
4008 && TREE_CODE (TREE_OPERAND (cst, 0))
4009 == FUNCTION_DECL);
4010 n = cgraph_node::get (TREE_OPERAND (cst, 0));
4011 if (n)
4012 {
4013 struct cgraph_node *clone;
4014 bool ok;
4015 ok = remove_described_reference (n, rdesc);
4016 gcc_checking_assert (ok);
4017
4018 clone = cs->caller;
4019 while (clone->inlined_to
4020 && clone->ipcp_clone
4021 && clone != rdesc->cs->caller)
4022 {
4023 struct ipa_ref *ref;
4024 ref = clone->find_reference (n, NULL, 0);
4025 if (ref)
4026 {
4027 if (dump_file)
4028 fprintf (dump_file, "ipa-prop: Removing "
4029 "cloning-created reference "
4030 "from %s to %s.\n",
4031 clone->dump_name (),
4032 n->dump_name ());
4033 ref->remove_reference ();
4034 }
4035 clone = clone->callers->caller;
4036 }
4037 }
4038 }
4039 }
4040 }
4041
4042 for (i = ipa_get_param_count (old_root_info);
4043 i < ipa_get_cs_argument_count (args);
4044 i++)
4045 {
4046 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
4047
4048 if (jf->type == IPA_JF_CONST)
4049 {
4050 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
4051 if (rdesc)
4052 rdesc->refcount = IPA_UNDESCRIBED_USE;
4053 }
4054 else if (jf->type == IPA_JF_PASS_THROUGH)
4055 ipa_set_controlled_uses (new_root_info,
4056 jf->value.pass_through.formal_id,
4057 IPA_UNDESCRIBED_USE);
4058 }
4059 }
4060
4061 /* Update jump functions and call note functions on inlining the call site CS.
4062 CS is expected to lead to a node already cloned by
4063 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
4064 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
4065 created. */
4066
4067 bool
4068 ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
4069 vec<cgraph_edge *> *new_edges)
4070 {
4071 bool changed;
4072 /* Do nothing if the preparation phase has not been carried out yet
4073 (i.e. during early inlining). */
4074 if (!ipa_node_params_sum)
4075 return false;
4076 gcc_assert (ipa_edge_args_sum);
4077
4078 propagate_controlled_uses (cs);
4079 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
4080 ipa_node_params_sum->remove (cs->callee);
4081
4082 class ipa_edge_args *args = IPA_EDGE_REF (cs);
4083 if (args)
4084 {
4085 bool ok = true;
4086 if (args->jump_functions)
4087 {
4088 struct ipa_jump_func *jf;
4089 int i;
4090 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4091 if (jf->type == IPA_JF_CONST
4092 && ipa_get_jf_constant_rdesc (jf))
4093 {
4094 ok = false;
4095 break;
4096 }
4097 }
4098 if (ok)
4099 ipa_edge_args_sum->remove (cs);
4100 }
4101 if (ipcp_transformation_sum)
4102 ipcp_transformation_sum->remove (cs->callee);
4103
4104 return changed;
4105 }
4106
4107 /* Ensure that the array of edge argument infos is big enough to accommodate a
4108 structure for all edges, and reallocate it if it is not. Also allocate the
4109 associated hash tables if they do not already exist. */
4110
4111 void
4112 ipa_check_create_edge_args (void)
4113 {
4114 if (!ipa_edge_args_sum)
4115 ipa_edge_args_sum
4116 = (new (ggc_alloc_no_dtor<ipa_edge_args_sum_t> ())
4117 ipa_edge_args_sum_t (symtab, true));
4118 if (!ipa_bits_hash_table)
4119 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4120 if (!ipa_vr_hash_table)
4121 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4122 }
4123
4124 /* Free all ipa_edge structures. */
4125
4126 void
4127 ipa_free_all_edge_args (void)
4128 {
4129 if (!ipa_edge_args_sum)
4130 return;
4131
4132 ggc_delete (ipa_edge_args_sum);
4133 ipa_edge_args_sum = NULL;
4134 }
4135
4136 /* Free all ipa_node_params structures. */
4137
4138 void
4139 ipa_free_all_node_params (void)
4140 {
4141 ggc_delete (ipa_node_params_sum);
4142 ipa_node_params_sum = NULL;
4143 }
4144
4145 /* Initialize IPA CP transformation summary and also allocate any necessary hash
4146 tables if they do not already exist. */
4147
4148 void
4149 ipcp_transformation_initialize (void)
4150 {
4151 if (!ipa_bits_hash_table)
4152 ipa_bits_hash_table = hash_table<ipa_bit_ggc_hash_traits>::create_ggc (37);
4153 if (!ipa_vr_hash_table)
4154 ipa_vr_hash_table = hash_table<ipa_vr_ggc_hash_traits>::create_ggc (37);
4155 if (ipcp_transformation_sum == NULL)
4156 ipcp_transformation_sum = ipcp_transformation_t::create_ggc (symtab);
4157 }
4158
4159 /* Release the IPA CP transformation summary. */
4160
4161 void
4162 ipcp_free_transformation_sum (void)
4163 {
4164 if (!ipcp_transformation_sum)
4165 return;
4166
4167 ipcp_transformation_sum->~function_summary<ipcp_transformation *> ();
4168 ggc_free (ipcp_transformation_sum);
4169 ipcp_transformation_sum = NULL;
4170 }
4171
4172 /* Set the aggregate replacements of NODE to be AGGVALS. */
4173
4174 void
4175 ipa_set_node_agg_value_chain (struct cgraph_node *node,
4176 struct ipa_agg_replacement_value *aggvals)
4177 {
4178 ipcp_transformation_initialize ();
4179 ipcp_transformation *s = ipcp_transformation_sum->get_create (node);
4180 s->agg_values = aggvals;
4181 }
4182
4183 /* Hook that is called by cgraph.c when an edge is removed. Adjust reference
4184 count data structures accordingly. */
4185
4186 void
4187 ipa_edge_args_sum_t::remove (cgraph_edge *cs, ipa_edge_args *args)
4188 {
4189 if (args->jump_functions)
4190 {
4191 struct ipa_jump_func *jf;
4192 int i;
4193 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
4194 {
4195 struct ipa_cst_ref_desc *rdesc;
4196 try_decrement_rdesc_refcount (jf);
4197 if (jf->type == IPA_JF_CONST
4198 && (rdesc = ipa_get_jf_constant_rdesc (jf))
4199 && rdesc->cs == cs)
4200 rdesc->cs = NULL;
4201 }
4202 }
4203 }
4204
4205 /* Method invoked when an edge is duplicated. Copy ipa_edge_args and adjust
4206 reference count data structures accordingly. */
4207
4208 void
4209 ipa_edge_args_sum_t::duplicate (cgraph_edge *src, cgraph_edge *dst,
4210 ipa_edge_args *old_args, ipa_edge_args *new_args)
4211 {
4212 unsigned int i;
4213
4214 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
4215 if (old_args->polymorphic_call_contexts)
4216 new_args->polymorphic_call_contexts
4217 = vec_safe_copy (old_args->polymorphic_call_contexts);
4218
4219 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4220 {
4221 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
4222 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
4223
4224 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
4225
4226 if (src_jf->type == IPA_JF_CONST)
4227 {
4228 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
4229
4230 if (!src_rdesc)
4231 dst_jf->value.constant.rdesc = NULL;
4232 else if (src->caller == dst->caller)
4233 {
4234 struct ipa_ref *ref;
4235 symtab_node *n = cgraph_node_for_jfunc (src_jf);
4236 gcc_checking_assert (n);
4237 ref = src->caller->find_reference (n, src->call_stmt,
4238 src->lto_stmt_uid);
4239 gcc_checking_assert (ref);
4240 dst->caller->clone_reference (ref, ref->stmt);
4241
4242 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4243 dst_rdesc->cs = dst;
4244 dst_rdesc->refcount = src_rdesc->refcount;
4245 dst_rdesc->next_duplicate = NULL;
4246 dst_jf->value.constant.rdesc = dst_rdesc;
4247 }
4248 else if (src_rdesc->cs == src)
4249 {
4250 struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
4251 dst_rdesc->cs = dst;
4252 dst_rdesc->refcount = src_rdesc->refcount;
4253 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
4254 src_rdesc->next_duplicate = dst_rdesc;
4255 dst_jf->value.constant.rdesc = dst_rdesc;
4256 }
4257 else
4258 {
4259 struct ipa_cst_ref_desc *dst_rdesc;
4260 /* This can happen during inlining, when a JFUNC can refer to a
4261 reference taken in a function up in the tree of inline clones.
4262 We need to find the duplicate that refers to our tree of
4263 inline clones. */
4264
4265 gcc_assert (dst->caller->inlined_to);
4266 for (dst_rdesc = src_rdesc->next_duplicate;
4267 dst_rdesc;
4268 dst_rdesc = dst_rdesc->next_duplicate)
4269 {
4270 struct cgraph_node *top;
4271 top = dst_rdesc->cs->caller->inlined_to
4272 ? dst_rdesc->cs->caller->inlined_to
4273 : dst_rdesc->cs->caller;
4274 if (dst->caller->inlined_to == top)
4275 break;
4276 }
4277 gcc_assert (dst_rdesc);
4278 dst_jf->value.constant.rdesc = dst_rdesc;
4279 }
4280 }
4281 else if (dst_jf->type == IPA_JF_PASS_THROUGH
4282 && src->caller == dst->caller)
4283 {
4284 struct cgraph_node *inline_root = dst->caller->inlined_to
4285 ? dst->caller->inlined_to : dst->caller;
4286 class ipa_node_params *root_info = IPA_NODE_REF (inline_root);
4287 int idx = ipa_get_jf_pass_through_formal_id (dst_jf);
4288
4289 int c = ipa_get_controlled_uses (root_info, idx);
4290 if (c != IPA_UNDESCRIBED_USE)
4291 {
4292 c++;
4293 ipa_set_controlled_uses (root_info, idx, c);
4294 }
4295 }
4296 }
4297 }
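/* To summarize the constant jump function cases above: when there is no
   usable rdesc there is nothing to copy; when the duplicate stays within
   the same caller a fresh rdesc is created and the IPA reference cloned;
   otherwise the new rdesc is either chained onto the next_duplicate list
   (when the source rdesc described the source edge itself) or looked up in
   that list to find the duplicate belonging to our tree of inline clones.  */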
4298
4299 /* Analyze a newly added function in the callgraph. */
4300
4301 static void
4302 ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
4303 {
4304 if (node->has_gimple_body_p ())
4305 ipa_analyze_node (node);
4306 }
4307
4308 /* Hook that is called by summary when a node is duplicated. */
4309
4310 void
4311 ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
4312 ipa_node_params *old_info,
4313 ipa_node_params *new_info)
4314 {
4315 ipa_agg_replacement_value *old_av, *new_av;
4316
4317 new_info->descriptors = vec_safe_copy (old_info->descriptors);
4318 new_info->lattices = NULL;
4319 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
4320 new_info->known_csts = old_info->known_csts.copy ();
4321 new_info->known_contexts = old_info->known_contexts.copy ();
4322
4323 new_info->analysis_done = old_info->analysis_done;
4324 new_info->node_enqueued = old_info->node_enqueued;
4325 new_info->versionable = old_info->versionable;
4326
4327 old_av = ipa_get_agg_replacements_for_node (src);
4328 if (old_av)
4329 {
4330 new_av = NULL;
4331 while (old_av)
4332 {
4333 struct ipa_agg_replacement_value *v;
4334
4335 v = ggc_alloc<ipa_agg_replacement_value> ();
4336 memcpy (v, old_av, sizeof (*v));
4337 v->next = new_av;
4338 new_av = v;
4339 old_av = old_av->next;
4340 }
4341 ipa_set_node_agg_value_chain (dst, new_av);
4342 }
4343 }
4344
4345 /* Duplication of ipcp transformation summaries. */
4346
4347 void
4348 ipcp_transformation_t::duplicate(cgraph_node *, cgraph_node *dst,
4349 ipcp_transformation *src_trans,
4350 ipcp_transformation *dst_trans)
4351 {
4352 /* Avoid redundant work of duplicating vectors we will never use. */
4353 if (dst->inlined_to)
4354 return;
4355 dst_trans->bits = vec_safe_copy (src_trans->bits);
4356 dst_trans->m_vr = vec_safe_copy (src_trans->m_vr);
4357 ipa_agg_replacement_value *agg = src_trans->agg_values,
4358 **aggptr = &dst_trans->agg_values;
4359 while (agg)
4360 {
4361 *aggptr = ggc_alloc<ipa_agg_replacement_value> ();
4362 **aggptr = *agg;
4363 agg = agg->next;
4364 aggptr = &(*aggptr)->next;
4365 }
4366 }
4367
4368 /* Register our cgraph hooks if they are not already there. */
4369
4370 void
4371 ipa_register_cgraph_hooks (void)
4372 {
4373 ipa_check_create_node_params ();
4374 ipa_check_create_edge_args ();
4375
4376 function_insertion_hook_holder =
4377 symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
4378 }
4379
4380 /* Unregister our cgraph hooks. */
4381
4382 static void
4383 ipa_unregister_cgraph_hooks (void)
4384 {
4385 symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
4386 function_insertion_hook_holder = NULL;
4387 }
4388
4389 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4390 longer needed after ipa-cp. */
4391
4392 void
4393 ipa_free_all_structures_after_ipa_cp (void)
4394 {
4395 if (!optimize && !in_lto_p)
4396 {
4397 ipa_free_all_edge_args ();
4398 ipa_free_all_node_params ();
4399 ipcp_sources_pool.release ();
4400 ipcp_cst_values_pool.release ();
4401 ipcp_poly_ctx_values_pool.release ();
4402 ipcp_agg_lattice_pool.release ();
4403 ipa_unregister_cgraph_hooks ();
4404 ipa_refdesc_pool.release ();
4405 }
4406 }
4407
4408 /* Free all ipa_node_params and all ipa_edge_args structures if they are no
4409 longer needed after indirect inlining. */
4410
4411 void
4412 ipa_free_all_structures_after_iinln (void)
4413 {
4414 ipa_free_all_edge_args ();
4415 ipa_free_all_node_params ();
4416 ipa_unregister_cgraph_hooks ();
4417 ipcp_sources_pool.release ();
4418 ipcp_cst_values_pool.release ();
4419 ipcp_poly_ctx_values_pool.release ();
4420 ipcp_agg_lattice_pool.release ();
4421 ipa_refdesc_pool.release ();
4422 }
4423
4424 /* Print ipa_tree_map data structures of function NODE to F. */
4426
4427 void
4428 ipa_print_node_params (FILE *f, struct cgraph_node *node)
4429 {
4430 int i, count;
4431 class ipa_node_params *info;
4432
4433 if (!node->definition)
4434 return;
4435 info = IPA_NODE_REF (node);
4436 fprintf (f, " function %s parameter descriptors:\n", node->dump_name ());
4437 if (!info)
4438 {
4439 fprintf (f, " no params return\n");
4440 return;
4441 }
4442 count = ipa_get_param_count (info);
4443 for (i = 0; i < count; i++)
4444 {
4445 int c;
4446
4447 fprintf (f, " ");
4448 ipa_dump_param (f, info, i);
4449 if (ipa_is_param_used (info, i))
4450 fprintf (f, " used");
4451 if (ipa_is_param_used_by_ipa_predicates (info, i))
4452 fprintf (f, " used_by_ipa_predicates");
4453 if (ipa_is_param_used_by_indirect_call (info, i))
4454 fprintf (f, " used_by_indirect_call");
4455 if (ipa_is_param_used_by_polymorphic_call (info, i))
4456 fprintf (f, " used_by_polymorphic_call");
4457 c = ipa_get_controlled_uses (info, i);
4458 if (c == IPA_UNDESCRIBED_USE)
4459 fprintf (f, " undescribed_use");
4460 else
4461 fprintf (f, " controlled_uses=%i", c);
4462 fprintf (f, "\n");
4463 }
4464 }
4465
4466 /* Print ipa_tree_map data structures of all functions in the
4467 callgraph to F. */
4468
4469 void
4470 ipa_print_all_params (FILE * f)
4471 {
4472 struct cgraph_node *node;
4473
4474 fprintf (f, "\nFunction parameters:\n");
4475 FOR_EACH_FUNCTION (node)
4476 ipa_print_node_params (f, node);
4477 }
4478
4479 /* Dump the AV linked list. */
4480
4481 void
4482 ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4483 {
4484 bool comma = false;
4485 fprintf (f, " Aggregate replacements:");
4486 for (; av; av = av->next)
4487 {
4488 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4489 av->index, av->offset);
4490 print_generic_expr (f, av->value);
4491 comma = true;
4492 }
4493 fprintf (f, "\n");
4494 }
4495
4496 /* Stream out jump function JUMP_FUNC to OB. */
4497
4498 static void
4499 ipa_write_jump_function (struct output_block *ob,
4500 struct ipa_jump_func *jump_func)
4501 {
4502 struct ipa_agg_jf_item *item;
4503 struct bitpack_d bp;
4504 int i, count;
4505 int flag = 0;
4506
4507 /* ADDR_EXPRs are very common IP invariants; save some streamer data
4508 as well as WPA memory by handling them specially. */
4509 if (jump_func->type == IPA_JF_CONST
4510 && TREE_CODE (jump_func->value.constant.value) == ADDR_EXPR)
4511 flag = 1;
4512
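  /* The jump function type and the ADDR_EXPR flag are packed into a single
     unsigned HWI: the least significant bit holds FLAG and the remaining
     bits hold the type, so e.g. an IPA_JF_CONST wrapping an ADDR_EXPR is
     streamed as IPA_JF_CONST * 2 + 1.  ipa_read_jump_function undoes this
     with VAL & 1 and VAL / 2.  */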
4513 streamer_write_uhwi (ob, jump_func->type * 2 + flag);
4514 switch (jump_func->type)
4515 {
4516 case IPA_JF_UNKNOWN:
4517 break;
4518 case IPA_JF_CONST:
4519 gcc_assert (
4520 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
4521 stream_write_tree (ob,
4522 flag
4523 ? TREE_OPERAND (jump_func->value.constant.value, 0)
4524 : jump_func->value.constant.value, true);
4525 break;
4526 case IPA_JF_PASS_THROUGH:
4527 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4528 if (jump_func->value.pass_through.operation == NOP_EXPR)
4529 {
4530 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4531 bp = bitpack_create (ob->main_stream);
4532 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
4533 streamer_write_bitpack (&bp);
4534 }
4535 else if (TREE_CODE_CLASS (jump_func->value.pass_through.operation)
4536 == tcc_unary)
4537 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4538 else
4539 {
4540 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
4541 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
4542 }
4543 break;
4544 case IPA_JF_ANCESTOR:
4545 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
4546 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
4547 bp = bitpack_create (ob->main_stream);
4548 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
4549 bp_pack_value (&bp, jump_func->value.ancestor.keep_null, 1);
4550 streamer_write_bitpack (&bp);
4551 break;
4552 default:
4553 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4554 }
4555
4556 count = vec_safe_length (jump_func->agg.items);
4557 streamer_write_uhwi (ob, count);
4558 if (count)
4559 {
4560 bp = bitpack_create (ob->main_stream);
4561 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
4562 streamer_write_bitpack (&bp);
4563 }
4564
4565 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
4566 {
4567 stream_write_tree (ob, item->type, true);
4568 streamer_write_uhwi (ob, item->offset);
4569 streamer_write_uhwi (ob, item->jftype);
4570 switch (item->jftype)
4571 {
4572 case IPA_JF_UNKNOWN:
4573 break;
4574 case IPA_JF_CONST:
4575 stream_write_tree (ob, item->value.constant, true);
4576 break;
4577 case IPA_JF_PASS_THROUGH:
4578 case IPA_JF_LOAD_AGG:
4579 streamer_write_uhwi (ob, item->value.pass_through.operation);
4580 streamer_write_uhwi (ob, item->value.pass_through.formal_id);
4581 if (TREE_CODE_CLASS (item->value.pass_through.operation)
4582 != tcc_unary)
4583 stream_write_tree (ob, item->value.pass_through.operand, true);
4584 if (item->jftype == IPA_JF_LOAD_AGG)
4585 {
4586 stream_write_tree (ob, item->value.load_agg.type, true);
4587 streamer_write_uhwi (ob, item->value.load_agg.offset);
4588 bp = bitpack_create (ob->main_stream);
4589 bp_pack_value (&bp, item->value.load_agg.by_ref, 1);
4590 streamer_write_bitpack (&bp);
4591 }
4592 break;
4593 default:
4594 fatal_error (UNKNOWN_LOCATION,
4595 "invalid jump function in LTO stream");
4596 }
4597 }
4598
4599 bp = bitpack_create (ob->main_stream);
4600 bp_pack_value (&bp, !!jump_func->bits, 1);
4601 streamer_write_bitpack (&bp);
4602 if (jump_func->bits)
4603 {
4604 streamer_write_widest_int (ob, jump_func->bits->value);
4605 streamer_write_widest_int (ob, jump_func->bits->mask);
4606 }
4607 bp_pack_value (&bp, !!jump_func->m_vr, 1);
4608 streamer_write_bitpack (&bp);
4609 if (jump_func->m_vr)
4610 {
4611 streamer_write_enum (ob->main_stream, value_rang_type,
4612 VR_LAST, jump_func->m_vr->kind ());
4613 stream_write_tree (ob, jump_func->m_vr->min (), true);
4614 stream_write_tree (ob, jump_func->m_vr->max (), true);
4615 }
4616 }
4617
4618 /* Read in jump function JUMP_FUNC from IB. */
4619
4620 static void
4621 ipa_read_jump_function (class lto_input_block *ib,
4622 struct ipa_jump_func *jump_func,
4623 struct cgraph_edge *cs,
4624 class data_in *data_in,
4625 bool prevails)
4626 {
4627 enum jump_func_type jftype;
4628 enum tree_code operation;
4629 int i, count;
4630 int val = streamer_read_uhwi (ib);
4631 bool flag = val & 1;
4632
4633 jftype = (enum jump_func_type) (val / 2);
4634 switch (jftype)
4635 {
4636 case IPA_JF_UNKNOWN:
4637 ipa_set_jf_unknown (jump_func);
4638 break;
4639 case IPA_JF_CONST:
4640 {
4641 tree t = stream_read_tree (ib, data_in);
4642 if (flag && prevails)
4643 t = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (t)), t);
4644 ipa_set_jf_constant (jump_func, t, cs);
4645 }
4646 break;
4647 case IPA_JF_PASS_THROUGH:
4648 operation = (enum tree_code) streamer_read_uhwi (ib);
4649 if (operation == NOP_EXPR)
4650 {
4651 int formal_id = streamer_read_uhwi (ib);
4652 struct bitpack_d bp = streamer_read_bitpack (ib);
4653 bool agg_preserved = bp_unpack_value (&bp, 1);
4654 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
4655 }
4656 else if (TREE_CODE_CLASS (operation) == tcc_unary)
4657 {
4658 int formal_id = streamer_read_uhwi (ib);
4659 ipa_set_jf_unary_pass_through (jump_func, formal_id, operation);
4660 }
4661 else
4662 {
4663 tree operand = stream_read_tree (ib, data_in);
4664 int formal_id = streamer_read_uhwi (ib);
4665 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4666 operation);
4667 }
4668 break;
4669 case IPA_JF_ANCESTOR:
4670 {
4671 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4672 int formal_id = streamer_read_uhwi (ib);
4673 struct bitpack_d bp = streamer_read_bitpack (ib);
4674 bool agg_preserved = bp_unpack_value (&bp, 1);
4675 bool keep_null = bp_unpack_value (&bp, 1);
4676 ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved,
4677 keep_null);
4678 break;
4679 }
4680 default:
4681 fatal_error (UNKNOWN_LOCATION, "invalid jump function in LTO stream");
4682 }
4683
4684 count = streamer_read_uhwi (ib);
4685 if (prevails)
4686 vec_alloc (jump_func->agg.items, count);
4687 if (count)
4688 {
4689 struct bitpack_d bp = streamer_read_bitpack (ib);
4690 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4691 }
4692 for (i = 0; i < count; i++)
4693 {
4694 struct ipa_agg_jf_item item;
4695 item.type = stream_read_tree (ib, data_in);
4696 item.offset = streamer_read_uhwi (ib);
4697 item.jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4698
4699 switch (item.jftype)
4700 {
4701 case IPA_JF_UNKNOWN:
4702 break;
4703 case IPA_JF_CONST:
4704 item.value.constant = stream_read_tree (ib, data_in);
4705 break;
4706 case IPA_JF_PASS_THROUGH:
4707 case IPA_JF_LOAD_AGG:
4708 operation = (enum tree_code) streamer_read_uhwi (ib);
4709 item.value.pass_through.operation = operation;
4710 item.value.pass_through.formal_id = streamer_read_uhwi (ib);
4711 if (TREE_CODE_CLASS (operation) == tcc_unary)
4712 item.value.pass_through.operand = NULL_TREE;
4713 else
4714 item.value.pass_through.operand = stream_read_tree (ib, data_in);
4715 if (item.jftype == IPA_JF_LOAD_AGG)
4716 {
4717 struct bitpack_d bp;
4718 item.value.load_agg.type = stream_read_tree (ib, data_in);
4719 item.value.load_agg.offset = streamer_read_uhwi (ib);
4720 bp = streamer_read_bitpack (ib);
4721 item.value.load_agg.by_ref = bp_unpack_value (&bp, 1);
4722 }
4723 break;
4724 default:
4725 fatal_error (UNKNOWN_LOCATION,
4726 "invalid jump function in LTO stream");
4727 }
4728 if (prevails)
4729 jump_func->agg.items->quick_push (item);
4730 }
4731
4732 struct bitpack_d bp = streamer_read_bitpack (ib);
4733 bool bits_known = bp_unpack_value (&bp, 1);
4734 if (bits_known)
4735 {
4736 widest_int value = streamer_read_widest_int (ib);
4737 widest_int mask = streamer_read_widest_int (ib);
4738 if (prevails)
4739 ipa_set_jfunc_bits (jump_func, value, mask);
4740 }
4741 else
4742 jump_func->bits = NULL;
4743
4744 struct bitpack_d vr_bp = streamer_read_bitpack (ib);
4745 bool vr_known = bp_unpack_value (&vr_bp, 1);
4746 if (vr_known)
4747 {
4748 enum value_range_kind type = streamer_read_enum (ib, value_range_kind,
4749 VR_LAST);
4750 tree min = stream_read_tree (ib, data_in);
4751 tree max = stream_read_tree (ib, data_in);
4752 if (prevails)
4753 ipa_set_jfunc_vr (jump_func, type, min, max);
4754 }
4755 else
4756 jump_func->m_vr = NULL;
4757 }
4758
4759 /* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4760 relevant to indirect inlining to OB. */
4761
4762 static void
4763 ipa_write_indirect_edge_info (struct output_block *ob,
4764 struct cgraph_edge *cs)
4765 {
4766 class cgraph_indirect_call_info *ii = cs->indirect_info;
4767 struct bitpack_d bp;
4768
4769 streamer_write_hwi (ob, ii->param_index);
4770 bp = bitpack_create (ob->main_stream);
4771 bp_pack_value (&bp, ii->polymorphic, 1);
4772 bp_pack_value (&bp, ii->agg_contents, 1);
4773 bp_pack_value (&bp, ii->member_ptr, 1);
4774 bp_pack_value (&bp, ii->by_ref, 1);
4775 bp_pack_value (&bp, ii->guaranteed_unmodified, 1);
4776 bp_pack_value (&bp, ii->vptr_changed, 1);
4777 streamer_write_bitpack (&bp);
4778 if (ii->agg_contents || ii->polymorphic)
4779 streamer_write_hwi (ob, ii->offset);
4780 else
4781 gcc_assert (ii->offset == 0);
4782
4783 if (ii->polymorphic)
4784 {
4785 streamer_write_hwi (ob, ii->otr_token);
4786 stream_write_tree (ob, ii->otr_type, true);
4787 ii->context.stream_out (ob);
4788 }
4789 }
4790
4791 /* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4792 relevant to indirect inlining from IB. */
4793
4794 static void
4795 ipa_read_indirect_edge_info (class lto_input_block *ib,
4796 class data_in *data_in,
4797 struct cgraph_edge *cs,
4798 class ipa_node_params *info)
4799 {
4800 class cgraph_indirect_call_info *ii = cs->indirect_info;
4801 struct bitpack_d bp;
4802
4803 ii->param_index = (int) streamer_read_hwi (ib);
4804 bp = streamer_read_bitpack (ib);
4805 ii->polymorphic = bp_unpack_value (&bp, 1);
4806 ii->agg_contents = bp_unpack_value (&bp, 1);
4807 ii->member_ptr = bp_unpack_value (&bp, 1);
4808 ii->by_ref = bp_unpack_value (&bp, 1);
4809 ii->guaranteed_unmodified = bp_unpack_value (&bp, 1);
4810 ii->vptr_changed = bp_unpack_value (&bp, 1);
4811 if (ii->agg_contents || ii->polymorphic)
4812 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
4813 else
4814 ii->offset = 0;
4815 if (ii->polymorphic)
4816 {
4817 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
4818 ii->otr_type = stream_read_tree (ib, data_in);
4819 ii->context.stream_in (ib, data_in);
4820 }
4821 if (info && ii->param_index >= 0)
4822 {
4823 if (ii->polymorphic)
4824 ipa_set_param_used_by_polymorphic_call (info,
4825 ii->param_index , true);
4826 ipa_set_param_used_by_indirect_call (info,
4827 ii->param_index, true);
4828 }
4829 }
4830
4831 /* Stream out NODE info to OB. */
4832
4833 static void
4834 ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4835 {
4836 int node_ref;
4837 lto_symtab_encoder_t encoder;
4838 class ipa_node_params *info = IPA_NODE_REF (node);
4839 int j;
4840 struct cgraph_edge *e;
4841 struct bitpack_d bp;
4842
4843 encoder = ob->decl_state->symtab_node_encoder;
4844 node_ref = lto_symtab_encoder_encode (encoder, node);
4845 streamer_write_uhwi (ob, node_ref);
4846
4847 streamer_write_uhwi (ob, ipa_get_param_count (info));
4848 for (j = 0; j < ipa_get_param_count (info); j++)
4849 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
4850 bp = bitpack_create (ob->main_stream);
4851 gcc_assert (info->analysis_done
4852 || ipa_get_param_count (info) == 0);
4853 gcc_assert (!info->node_enqueued);
4854 gcc_assert (!info->ipcp_orig_node);
4855 for (j = 0; j < ipa_get_param_count (info); j++)
4856 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
4857 streamer_write_bitpack (&bp);
4858 for (j = 0; j < ipa_get_param_count (info); j++)
4859 {
4860 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
4861 stream_write_tree (ob, ipa_get_type (info, j), true);
4862 }
4863 for (e = node->callees; e; e = e->next_callee)
4864 {
4865 class ipa_edge_args *args = IPA_EDGE_REF (e);
4866
4867 if (!args)
4868 {
4869 streamer_write_uhwi (ob, 0);
4870 continue;
4871 }
4872
4873 streamer_write_uhwi (ob,
4874 ipa_get_cs_argument_count (args) * 2
4875 + (args->polymorphic_call_contexts != NULL));
4876 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4877 {
4878 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4879 if (args->polymorphic_call_contexts != NULL)
4880 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4881 }
4882 }
4883 for (e = node->indirect_calls; e; e = e->next_callee)
4884 {
4885 class ipa_edge_args *args = IPA_EDGE_REF (e);
4886 if (!args)
4887 streamer_write_uhwi (ob, 0);
4888 else
4889 {
4890 streamer_write_uhwi (ob,
4891 ipa_get_cs_argument_count (args) * 2
4892 + (args->polymorphic_call_contexts != NULL));
4893 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4894 {
4895 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4896 if (args->polymorphic_call_contexts != NULL)
4897 ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
4898 }
4899 }
4900 ipa_write_indirect_edge_info (ob, e);
4901 }
4902 }
4903
4904 /* Stream in edge E from IB. */
4905
4906 static void
4907 ipa_read_edge_info (class lto_input_block *ib,
4908 class data_in *data_in,
4909 struct cgraph_edge *e, bool prevails)
4910 {
4911 int count = streamer_read_uhwi (ib);
4912 bool contexts_computed = count & 1;
4913
4914 count /= 2;
4915 if (!count)
4916 return;
4917 if (prevails && e->possibly_call_in_translation_unit_p ())
4918 {
4919 class ipa_edge_args *args = IPA_EDGE_REF_GET_CREATE (e);
4920 vec_safe_grow_cleared (args->jump_functions, count);
4921 if (contexts_computed)
4922 vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
4923 for (int k = 0; k < count; k++)
4924 {
4925 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4926 data_in, prevails);
4927 if (contexts_computed)
4928 ipa_get_ith_polymorhic_call_context (args, k)->stream_in
4929 (ib, data_in);
4930 }
4931 }
4932 else
4933 {
4934 for (int k = 0; k < count; k++)
4935 {
4936 struct ipa_jump_func dummy;
4937 ipa_read_jump_function (ib, &dummy, e,
4938 data_in, prevails);
4939 if (contexts_computed)
4940 {
4941 class ipa_polymorphic_call_context ctx;
4942 ctx.stream_in (ib, data_in);
4943 }
4944 }
4945 }
4946 }
4947
4948 /* Stream in NODE info from IB. */
4949
4950 static void
4951 ipa_read_node_info (class lto_input_block *ib, struct cgraph_node *node,
4952 class data_in *data_in)
4953 {
4954 int k;
4955 struct cgraph_edge *e;
4956 struct bitpack_d bp;
4957 bool prevails = node->prevailing_p ();
4958 class ipa_node_params *info = prevails
4959 ? IPA_NODE_REF_GET_CREATE (node) : NULL;
4960
4961 int param_count = streamer_read_uhwi (ib);
4962 if (prevails)
4963 {
4964 ipa_alloc_node_params (node, param_count);
4965 for (k = 0; k < param_count; k++)
4966 (*info->descriptors)[k].move_cost = streamer_read_uhwi (ib);
4967 if (ipa_get_param_count (info) != 0)
4968 info->analysis_done = true;
4969 info->node_enqueued = false;
4970 }
4971 else
4972 for (k = 0; k < param_count; k++)
4973 streamer_read_uhwi (ib);
4974
4975 bp = streamer_read_bitpack (ib);
4976 for (k = 0; k < param_count; k++)
4977 {
4978 bool used = bp_unpack_value (&bp, 1);
4979
4980 if (prevails)
4981 ipa_set_param_used (info, k, used);
4982 }
4983 for (k = 0; k < param_count; k++)
4984 {
4985 int nuses = streamer_read_hwi (ib);
4986 tree type = stream_read_tree (ib, data_in);
4987
4988 if (prevails)
4989 {
4990 ipa_set_controlled_uses (info, k, nuses);
4991 (*info->descriptors)[k].decl_or_type = type;
4992 }
4993 }
4994 for (e = node->callees; e; e = e->next_callee)
4995 ipa_read_edge_info (ib, data_in, e, prevails);
4996 for (e = node->indirect_calls; e; e = e->next_callee)
4997 {
4998 ipa_read_edge_info (ib, data_in, e, prevails);
4999 ipa_read_indirect_edge_info (ib, data_in, e, info);
5000 }
5001 }
5002
5003 /* Write jump functions for nodes in SET. */
5004
5005 void
5006 ipa_prop_write_jump_functions (void)
5007 {
5008 struct cgraph_node *node;
5009 struct output_block *ob;
5010 unsigned int count = 0;
5011 lto_symtab_encoder_iterator lsei;
5012 lto_symtab_encoder_t encoder;
5013
5014 if (!ipa_node_params_sum || !ipa_edge_args_sum)
5015 return;
5016
5017 ob = create_output_block (LTO_section_jump_functions);
5018 encoder = ob->decl_state->symtab_node_encoder;
5019 ob->symbol = NULL;
5020 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5021 lsei_next_function_in_partition (&lsei))
5022 {
5023 node = lsei_cgraph_node (lsei);
5024 if (node->has_gimple_body_p ()
5025 && IPA_NODE_REF (node) != NULL)
5026 count++;
5027 }
5028
5029 streamer_write_uhwi (ob, count);
5030
5031 /* Process all of the functions. */
5032 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5033 lsei_next_function_in_partition (&lsei))
5034 {
5035 node = lsei_cgraph_node (lsei);
5036 if (node->has_gimple_body_p ()
5037 && IPA_NODE_REF (node) != NULL)
5038 ipa_write_node_info (ob, node);
5039 }
5040 streamer_write_char_stream (ob->main_stream, 0);
5041 produce_asm (ob, NULL);
5042 destroy_output_block (ob);
5043 }
5044
5045 /* Read section in file FILE_DATA of length LEN with data DATA. */
5046
5047 static void
5048 ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
5049 size_t len)
5050 {
5051 const struct lto_function_header *header =
5052 (const struct lto_function_header *) data;
5053 const int cfg_offset = sizeof (struct lto_function_header);
5054 const int main_offset = cfg_offset + header->cfg_size;
5055 const int string_offset = main_offset + header->main_size;
5056 class data_in *data_in;
5057 unsigned int i;
5058 unsigned int count;
5059
5060 lto_input_block ib_main ((const char *) data + main_offset,
5061 header->main_size, file_data->mode_table);
5062
5063 data_in =
5064 lto_data_in_create (file_data, (const char *) data + string_offset,
5065 header->string_size, vNULL);
5066 count = streamer_read_uhwi (&ib_main);
5067
5068 for (i = 0; i < count; i++)
5069 {
5070 unsigned int index;
5071 struct cgraph_node *node;
5072 lto_symtab_encoder_t encoder;
5073
5074 index = streamer_read_uhwi (&ib_main);
5075 encoder = file_data->symtab_node_encoder;
5076 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5077 index));
5078 gcc_assert (node->definition);
5079 ipa_read_node_info (&ib_main, node, data_in);
5080 }
5081 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5082 len);
5083 lto_data_in_delete (data_in);
5084 }
5085
5086 /* Read ipcp jump functions. */
5087
5088 void
5089 ipa_prop_read_jump_functions (void)
5090 {
5091 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5092 struct lto_file_decl_data *file_data;
5093 unsigned int j = 0;
5094
5095 ipa_check_create_node_params ();
5096 ipa_check_create_edge_args ();
5097 ipa_register_cgraph_hooks ();
5098
5099 while ((file_data = file_data_vec[j++]))
5100 {
5101 size_t len;
5102 const char *data
5103 = lto_get_summary_section_data (file_data, LTO_section_jump_functions,
5104 &len);
5105 if (data)
5106 ipa_prop_read_section (file_data, data, len);
5107 }
5108 }
5109
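/* Stream out the IPA-CP transformation summary of NODE to OB: the chain of
   aggregate replacement values followed by the per-parameter value ranges
   and known-bits information.  */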
5110 void
5111 write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
5112 {
5113 int node_ref;
5114 unsigned int count = 0;
5115 lto_symtab_encoder_t encoder;
5116 struct ipa_agg_replacement_value *aggvals, *av;
5117
5118 aggvals = ipa_get_agg_replacements_for_node (node);
5119 encoder = ob->decl_state->symtab_node_encoder;
5120 node_ref = lto_symtab_encoder_encode (encoder, node);
5121 streamer_write_uhwi (ob, node_ref);
5122
5123 for (av = aggvals; av; av = av->next)
5124 count++;
5125 streamer_write_uhwi (ob, count);
5126
5127 for (av = aggvals; av; av = av->next)
5128 {
5129 struct bitpack_d bp;
5130
5131 streamer_write_uhwi (ob, av->offset);
5132 streamer_write_uhwi (ob, av->index);
5133 stream_write_tree (ob, av->value, true);
5134
5135 bp = bitpack_create (ob->main_stream);
5136 bp_pack_value (&bp, av->by_ref, 1);
5137 streamer_write_bitpack (&bp);
5138 }
5139
5140 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5141 if (ts && vec_safe_length (ts->m_vr) > 0)
5142 {
5143 count = ts->m_vr->length ();
5144 streamer_write_uhwi (ob, count);
5145 for (unsigned i = 0; i < count; ++i)
5146 {
5147 struct bitpack_d bp;
5148 ipa_vr *parm_vr = &(*ts->m_vr)[i];
5149 bp = bitpack_create (ob->main_stream);
5150 bp_pack_value (&bp, parm_vr->known, 1);
5151 streamer_write_bitpack (&bp);
5152 if (parm_vr->known)
5153 {
5154 streamer_write_enum (ob->main_stream, value_rang_type,
5155 VR_LAST, parm_vr->type);
5156 streamer_write_wide_int (ob, parm_vr->min);
5157 streamer_write_wide_int (ob, parm_vr->max);
5158 }
5159 }
5160 }
5161 else
5162 streamer_write_uhwi (ob, 0);
5163
5164 if (ts && vec_safe_length (ts->bits) > 0)
5165 {
5166 count = ts->bits->length ();
5167 streamer_write_uhwi (ob, count);
5168
5169 for (unsigned i = 0; i < count; ++i)
5170 {
5171 const ipa_bits *bits_jfunc = (*ts->bits)[i];
5172 struct bitpack_d bp = bitpack_create (ob->main_stream);
5173 bp_pack_value (&bp, !!bits_jfunc, 1);
5174 streamer_write_bitpack (&bp);
5175 if (bits_jfunc)
5176 {
5177 streamer_write_widest_int (ob, bits_jfunc->value);
5178 streamer_write_widest_int (ob, bits_jfunc->mask);
5179 }
5180 }
5181 }
5182 else
5183 streamer_write_uhwi (ob, 0);
5184 }
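/* For reference, the per-node record produced above is roughly (the
   streamed representation is defined by the streamer calls, not by this
   sketch):

     uhwi  node_ref
     uhwi  number of aggregate replacement values
       per value: uhwi offset, uhwi index, tree value, bitpack (by_ref)
     uhwi  number of value ranges (0 if none)
       per range: bitpack (known); if known: enum type, wide_int min, max
     uhwi  number of ipa_bits entries (0 if none)
       per entry: bitpack (present); if present: widest_int value, mask

   read_ipcp_transformation_info below must consume the data in exactly
   this order.  */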
5185
5186 /* Stream in the aggregate value replacement chain, value range and bits
5187
5188 static void
5189 read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
5190 data_in *data_in)
5191 {
5192 struct ipa_agg_replacement_value *aggvals = NULL;
5193 unsigned int count, i;
5194
5195 count = streamer_read_uhwi (ib);
5196 for (i = 0; i < count; i++)
5197 {
5198 struct ipa_agg_replacement_value *av;
5199 struct bitpack_d bp;
5200
5201 av = ggc_alloc<ipa_agg_replacement_value> ();
5202 av->offset = streamer_read_uhwi (ib);
5203 av->index = streamer_read_uhwi (ib);
5204 av->value = stream_read_tree (ib, data_in);
5205 bp = streamer_read_bitpack (ib);
5206 av->by_ref = bp_unpack_value (&bp, 1);
5207 av->next = aggvals;
5208 aggvals = av;
5209 }
5210 ipa_set_node_agg_value_chain (node, aggvals);
5211
5212 count = streamer_read_uhwi (ib);
5213 if (count > 0)
5214 {
5215 ipcp_transformation_initialize ();
5216 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5217 vec_safe_grow_cleared (ts->m_vr, count);
5218 for (i = 0; i < count; i++)
5219 {
5220 ipa_vr *parm_vr;
5221 parm_vr = &(*ts->m_vr)[i];
5222 struct bitpack_d bp;
5223 bp = streamer_read_bitpack (ib);
5224 parm_vr->known = bp_unpack_value (&bp, 1);
5225 if (parm_vr->known)
5226 {
5227 parm_vr->type = streamer_read_enum (ib, value_range_kind,
5228 VR_LAST);
5229 parm_vr->min = streamer_read_wide_int (ib);
5230 parm_vr->max = streamer_read_wide_int (ib);
5231 }
5232 }
5233 }
5234 count = streamer_read_uhwi (ib);
5235 if (count > 0)
5236 {
5237 ipcp_transformation_initialize ();
5238 ipcp_transformation *ts = ipcp_transformation_sum->get_create (node);
5239 vec_safe_grow_cleared (ts->bits, count);
5240
5241 for (i = 0; i < count; i++)
5242 {
5243 struct bitpack_d bp = streamer_read_bitpack (ib);
5244 bool known = bp_unpack_value (&bp, 1);
5245 if (known)
5246 {
5247 const widest_int value = streamer_read_widest_int (ib);
5248 const widest_int mask = streamer_read_widest_int (ib);
5249 ipa_bits *bits
5250 = ipa_get_ipa_bits_for_value (value, mask);
5251 (*ts->bits)[i] = bits;
5252 }
5253 }
5254 }
5255 }
5256
5257 /* Write all aggregate replacements for nodes in the set. */
5258
5259 void
5260 ipcp_write_transformation_summaries (void)
5261 {
5262 struct cgraph_node *node;
5263 struct output_block *ob;
5264 unsigned int count = 0;
5265 lto_symtab_encoder_iterator lsei;
5266 lto_symtab_encoder_t encoder;
5267
5268 ob = create_output_block (LTO_section_ipcp_transform);
5269 encoder = ob->decl_state->symtab_node_encoder;
5270 ob->symbol = NULL;
5271 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5272 lsei_next_function_in_partition (&lsei))
5273 {
5274 node = lsei_cgraph_node (lsei);
5275 if (node->has_gimple_body_p ())
5276 count++;
5277 }
5278
5279 streamer_write_uhwi (ob, count);
5280
5281 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5282 lsei_next_function_in_partition (&lsei))
5283 {
5284 node = lsei_cgraph_node (lsei);
5285 if (node->has_gimple_body_p ())
5286 write_ipcp_transformation_info (ob, node);
5287 }
5288 streamer_write_char_stream (ob->main_stream, 0);
5289 produce_asm (ob, NULL);
5290 destroy_output_block (ob);
5291 }
5292
5293 /* Read replacements section in file FILE_DATA of length LEN with data
5294 DATA. */
5295
5296 static void
5297 read_replacements_section (struct lto_file_decl_data *file_data,
5298 const char *data,
5299 size_t len)
5300 {
5301 const struct lto_function_header *header =
5302 (const struct lto_function_header *) data;
5303 const int cfg_offset = sizeof (struct lto_function_header);
5304 const int main_offset = cfg_offset + header->cfg_size;
5305 const int string_offset = main_offset + header->main_size;
5306 class data_in *data_in;
5307 unsigned int i;
5308 unsigned int count;
5309
5310 lto_input_block ib_main ((const char *) data + main_offset,
5311 header->main_size, file_data->mode_table);
5312
5313 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
5314 header->string_size, vNULL);
5315 count = streamer_read_uhwi (&ib_main);
5316
5317 for (i = 0; i < count; i++)
5318 {
5319 unsigned int index;
5320 struct cgraph_node *node;
5321 lto_symtab_encoder_t encoder;
5322
5323 index = streamer_read_uhwi (&ib_main);
5324 encoder = file_data->symtab_node_encoder;
5325 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5326 index));
5327 gcc_assert (node->definition);
5328 read_ipcp_transformation_info (&ib_main, node, data_in);
5329 }
5330 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5331 len);
5332 lto_data_in_delete (data_in);
5333 }
5334
5335 /* Read IPA-CP aggregate replacements. */
5336
5337 void
5338 ipcp_read_transformation_summaries (void)
5339 {
5340 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5341 struct lto_file_decl_data *file_data;
5342 unsigned int j = 0;
5343
5344 while ((file_data = file_data_vec[j++]))
5345 {
5346 size_t len;
5347 const char *data
5348 = lto_get_summary_section_data (file_data, LTO_section_ipcp_transform,
5349 &len);
5350 if (data)
5351 read_replacements_section (file_data, data, len);
5352 }
5353 }
5354
5355 /* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5356 NODE. */
5357
5358 static void
5359 adjust_agg_replacement_values (struct cgraph_node *node,
5360 struct ipa_agg_replacement_value *aggval)
5361 {
5362 struct ipa_agg_replacement_value *v;
5363
5364 if (!node->clone.param_adjustments)
5365 return;
5366
5367 auto_vec<int, 16> new_indices;
5368 node->clone.param_adjustments->get_updated_indices (&new_indices);
5369 for (v = aggval; v; v = v->next)
5370 {
5371 gcc_checking_assert (v->index >= 0);
5372
5373 if ((unsigned) v->index < new_indices.length ())
5374 v->index = new_indices[v->index];
5375 else
5376 /* This can happen if we know about a constant that is passed by
5377 reference through an argument which is never actually used for
5378 anything, let alone for loading that constant. */
5379 v->index = -1;
5380 }
5381 }
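/* Purely illustrative example of the remapping above: if the original node
   had parameters (p0, p1, p2) and the clone dropped p1,
   get_updated_indices would typically yield {0 -> 0, 1 -> -1, 2 -> 1}.  A
   replacement recorded for index 2 is then retargeted to index 1, while
   one for the removed parameter gets index -1 and is never matched by the
   modification phase.  */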
5382
5383 /* Dominator walker driving the ipcp modification phase. */
5384
5385 class ipcp_modif_dom_walker : public dom_walker
5386 {
5387 public:
5388 ipcp_modif_dom_walker (struct ipa_func_body_info *fbi,
5389 vec<ipa_param_descriptor, va_gc> *descs,
5390 struct ipa_agg_replacement_value *av,
5391 bool *sc, bool *cc)
5392 : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
5393 m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}
5394
5395 virtual edge before_dom_children (basic_block);
5396
5397 private:
5398 struct ipa_func_body_info *m_fbi;
5399 vec<ipa_param_descriptor, va_gc> *m_descriptors;
5400 struct ipa_agg_replacement_value *m_aggval;
5401 bool *m_something_changed, *m_cfg_changed;
5402 };
5403
5404 edge
5405 ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5406 {
5407 gimple_stmt_iterator gsi;
5408 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5409 {
5410 struct ipa_agg_replacement_value *v;
5411 gimple *stmt = gsi_stmt (gsi);
5412 tree rhs, val, t;
5413 HOST_WIDE_INT offset;
5414 poly_int64 size;
5415 int index;
5416 bool by_ref, vce;
5417
5418 if (!gimple_assign_load_p (stmt))
5419 continue;
5420 rhs = gimple_assign_rhs1 (stmt);
5421 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5422 continue;
5423
5424 vce = false;
5425 t = rhs;
5426 while (handled_component_p (t))
5427 {
5428 /* V_C_E can do things like convert an array of integers into one
5429 bigger integer and similar things that we do not handle below. */
5430 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
5431 {
5432 vce = true;
5433 break;
5434 }
5435 t = TREE_OPERAND (t, 0);
5436 }
5437 if (vce)
5438 continue;
5439
5440 if (!ipa_load_from_parm_agg (m_fbi, m_descriptors, stmt, rhs, &index,
5441 &offset, &size, &by_ref))
5442 continue;
5443 for (v = m_aggval; v; v = v->next)
5444 if (v->index == index
5445 && v->offset == offset)
5446 break;
5447 if (!v
5448 || v->by_ref != by_ref
5449 || maybe_ne (tree_to_poly_int64 (TYPE_SIZE (TREE_TYPE (v->value))),
5450 size))
5451 continue;
5452
5453 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5454 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5455 {
5456 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5457 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5458 else if (TYPE_SIZE (TREE_TYPE (rhs))
5459 == TYPE_SIZE (TREE_TYPE (v->value)))
5460 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5461 else
5462 {
5463 if (dump_file)
5464 {
5465 fprintf (dump_file, " const ");
5466 print_generic_expr (dump_file, v->value);
5467 fprintf (dump_file, " can't be converted to type of ");
5468 print_generic_expr (dump_file, rhs);
5469 fprintf (dump_file, "\n");
5470 }
5471 continue;
5472 }
5473 }
5474 else
5475 val = v->value;
5476
5477 if (dump_file && (dump_flags & TDF_DETAILS))
5478 {
5479 fprintf (dump_file, "Modifying stmt:\n ");
5480 print_gimple_stmt (dump_file, stmt, 0);
5481 }
5482 gimple_assign_set_rhs_from_tree (&gsi, val);
5483 update_stmt (stmt);
5484
5485 if (dump_file && (dump_flags & TDF_DETAILS))
5486 {
5487 fprintf (dump_file, "into:\n ");
5488 print_gimple_stmt (dump_file, stmt, 0);
5489 fprintf (dump_file, "\n");
5490 }
5491
5492 *m_something_changed = true;
5493 if (maybe_clean_eh_stmt (stmt)
5494 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5495 *m_cfg_changed = true;
5496 }
5497 return NULL;
5498 }
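/* As an illustration of the rewrite above: given an aggregate replacement
   saying that the memory pointed to by parameter 0 contains the constant 4
   at some offset, a load such as

     x_1 = p_2(D)->field;

   whose (index, offset, size) matches that replacement is turned into

     x_1 = 4;

   possibly via a NOP_EXPR or VIEW_CONVERT_EXPR when the types differ but
   are convertible.  This is only an example of the effect, not additional
   functionality.  */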
5499
5500 /* Return true if we have recorded VALUE and MASK about PARM.
5501 Set VALUE and MASK accordingly. */
5502
5503 bool
5504 ipcp_get_parm_bits (tree parm, tree *value, widest_int *mask)
5505 {
5506 cgraph_node *cnode = cgraph_node::get (current_function_decl);
5507 ipcp_transformation *ts = ipcp_get_transformation_summary (cnode);
5508 if (!ts || vec_safe_length (ts->bits) == 0)
5509 return false;
5510
5511 int i = 0;
5512 for (tree p = DECL_ARGUMENTS (current_function_decl);
5513 p != parm; p = DECL_CHAIN (p))
5514 {
5515 i++;
5516 /* Ignore static chain. */
5517 if (!p)
5518 return false;
5519 }
5520
5521 if (cnode->clone.param_adjustments)
5522 {
5523 i = cnode->clone.param_adjustments->get_original_index (i);
5524 if (i < 0)
5525 return false;
5526 }
5527
5528 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5529 if (!bits[i])
5530 return false;
5531 *mask = bits[i]->mask;
5532 *value = wide_int_to_tree (TREE_TYPE (parm), bits[i]->value);
5533 return true;
5534 }
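/* A minimal usage sketch of the function above (caller and variable names
   are only illustrative):

     tree value;
     widest_int mask;
     if (ipcp_get_parm_bits (parm, &value, &mask))
       {
	 /* Bits set in MASK are unknown; the remaining bits of PARM are
	    given by the corresponding bits of VALUE, consistently with how
	    ipcp_update_bits below combines them.  */
       }

   PARM must be a PARM_DECL of current_function_decl; the lookup walks
   DECL_ARGUMENTS and remaps the index for clones.  */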
5535
5536
5537 /* Update bits info of formal parameters as described in
5538 ipcp_transformation. */
5539
5540 static void
5541 ipcp_update_bits (struct cgraph_node *node)
5542 {
5543 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5544
5545 if (!ts || vec_safe_length (ts->bits) == 0)
5546 return;
5547 vec<ipa_bits *, va_gc> &bits = *ts->bits;
5548 unsigned count = bits.length ();
5549 if (!count)
5550 return;
5551
5552 auto_vec<int, 16> new_indices;
5553 bool need_remapping = false;
5554 if (node->clone.param_adjustments)
5555 {
5556 node->clone.param_adjustments->get_updated_indices (&new_indices);
5557 need_remapping = true;
5558 }
5559 auto_vec <tree, 16> parm_decls;
5560 push_function_arg_decls (&parm_decls, node->decl);
5561
5562 for (unsigned i = 0; i < count; ++i)
5563 {
5564 tree parm;
5565 if (need_remapping)
5566 {
5567 if (i >= new_indices.length ())
5568 continue;
5569 int idx = new_indices[i];
5570 if (idx < 0)
5571 continue;
5572 parm = parm_decls[idx];
5573 }
5574 else
5575 parm = parm_decls[i];
5576 gcc_checking_assert (parm);
5577
5578
5579 if (!bits[i]
5580 || !(INTEGRAL_TYPE_P (TREE_TYPE (parm))
5581 || POINTER_TYPE_P (TREE_TYPE (parm)))
5582 || !is_gimple_reg (parm))
5583 continue;
5584
5585 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5586 if (!ddef)
5587 continue;
5588
5589 if (dump_file)
5590 {
5591 fprintf (dump_file, "Adjusting mask for param %u to ", i);
5592 print_hex (bits[i]->mask, dump_file);
5593 fprintf (dump_file, "\n");
5594 }
5595
5596 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5597 {
5598 unsigned prec = TYPE_PRECISION (TREE_TYPE (ddef));
5599 signop sgn = TYPE_SIGN (TREE_TYPE (ddef));
5600
5601 wide_int nonzero_bits = wide_int::from (bits[i]->mask, prec, UNSIGNED)
5602 | wide_int::from (bits[i]->value, prec, sgn);
5603 set_nonzero_bits (ddef, nonzero_bits);
5604 }
5605 else
5606 {
5607 unsigned tem = bits[i]->mask.to_uhwi ();
5608 unsigned HOST_WIDE_INT bitpos = bits[i]->value.to_uhwi ();
5609 unsigned align = tem & -tem;
5610 unsigned misalign = bitpos & (align - 1);
5611
5612 if (align > 1)
5613 {
5614 if (dump_file)
5615 fprintf (dump_file, "Adjusting align: %u, misalign: %u\n", align, misalign);
5616
5617 unsigned old_align, old_misalign;
5618 struct ptr_info_def *pi = get_ptr_info (ddef);
5619 bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
5620
5621 if (old_known
5622 && old_align > align)
5623 {
5624 if (dump_file)
5625 {
5626 fprintf (dump_file, "But alignment was already %u.\n", old_align);
5627 if ((old_misalign & (align - 1)) != misalign)
5628 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5629 old_misalign, misalign);
5630 }
5631 continue;
5632 }
5633
5634 if (old_known
5635 && ((misalign & (old_align - 1)) != old_misalign)
5636 && dump_file)
5637 fprintf (dump_file, "old_misalign (%u) and misalign (%u) mismatch\n",
5638 old_misalign, misalign);
5639
5640 set_ptr_info_alignment (pi, align, misalign);
5641 }
5642 }
5643 }
5644 }
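/* Worked example for the pointer case above: if the mask is 0x...fff8
   (the three least significant bits are known) and the known value bits
   end in 0b100, then tem & -tem gives align == 8 and misalign == 4, i.e.
   the pointer is known to be congruent to 4 modulo 8.  Purely
   illustrative; the computation above is authoritative.  */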
5645
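/* Return true if the recorded range guarantees a non-zero value for an
   object of EXPR_TYPE, i.e. it is either the anti-range ~[0, 0] or, for
   an unsigned EXPR_TYPE, the range [1, TYPE_MAX].  */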
5646 bool
5647 ipa_vr::nonzero_p (tree expr_type) const
5648 {
5649 if (type == VR_ANTI_RANGE && wi::eq_p (min, 0) && wi::eq_p (max, 0))
5650 return true;
5651
5652 unsigned prec = TYPE_PRECISION (expr_type);
5653 return (type == VR_RANGE
5654 && TYPE_UNSIGNED (expr_type)
5655 && wi::eq_p (min, wi::one (prec))
5656 && wi::eq_p (max, wi::max_value (prec, TYPE_SIGN (expr_type))));
5657 }
5658
5659 /* Update value range of formal parameters as described in
5660 ipcp_transformation. */
5661
5662 static void
5663 ipcp_update_vr (struct cgraph_node *node)
5664 {
5665 ipcp_transformation *ts = ipcp_get_transformation_summary (node);
5666 if (!ts || vec_safe_length (ts->m_vr) == 0)
5667 return;
5668 const vec<ipa_vr, va_gc> &vr = *ts->m_vr;
5669 unsigned count = vr.length ();
5670 if (!count)
5671 return;
5672
5673 auto_vec<int, 16> new_indices;
5674 bool need_remapping = false;
5675 if (node->clone.param_adjustments)
5676 {
5677 node->clone.param_adjustments->get_updated_indices (&new_indices);
5678 need_remapping = true;
5679 }
5680 auto_vec <tree, 16> parm_decls;
5681 push_function_arg_decls (&parm_decls, node->decl);
5682
5683 for (unsigned i = 0; i < count; ++i)
5684 {
5685 tree parm;
5686 int remapped_idx;
5687 if (need_remapping)
5688 {
5689 if (i >= new_indices.length ())
5690 continue;
5691 remapped_idx = new_indices[i];
5692 if (remapped_idx < 0)
5693 continue;
5694 }
5695 else
5696 remapped_idx = i;
5697
5698 parm = parm_decls[remapped_idx];
5699
5700 gcc_checking_assert (parm);
5701 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
5702
5703 if (!ddef || !is_gimple_reg (parm))
5704 continue;
5705
5706 if (vr[i].known
5707 && (vr[i].type == VR_RANGE || vr[i].type == VR_ANTI_RANGE))
5708 {
5709 tree type = TREE_TYPE (ddef);
5710 unsigned prec = TYPE_PRECISION (type);
5711 if (INTEGRAL_TYPE_P (TREE_TYPE (ddef)))
5712 {
5713 if (dump_file)
5714 {
5715 fprintf (dump_file, "Setting value range of param %u "
5716 "(now %i) ", i, remapped_idx);
5717 fprintf (dump_file, "%s[",
5718 (vr[i].type == VR_ANTI_RANGE) ? "~" : "");
5719 print_decs (vr[i].min, dump_file);
5720 fprintf (dump_file, ", ");
5721 print_decs (vr[i].max, dump_file);
5722 fprintf (dump_file, "]\n");
5723 }
5724 set_range_info (ddef, vr[i].type,
5725 wide_int_storage::from (vr[i].min, prec,
5726 TYPE_SIGN (type)),
5727 wide_int_storage::from (vr[i].max, prec,
5728 TYPE_SIGN (type)));
5729 }
5730 else if (POINTER_TYPE_P (TREE_TYPE (ddef))
5731 && vr[i].nonzero_p (TREE_TYPE (ddef)))
5732 {
5733 if (dump_file)
5734 fprintf (dump_file, "Setting nonnull for %u\n", i);
5735 set_ptr_nonnull (ddef);
5736 }
5737 }
5738 }
5739 }
5740
5741 /* IPCP transformation phase doing propagation of aggregate values. */
5742
5743 unsigned int
5744 ipcp_transform_function (struct cgraph_node *node)
5745 {
5746 vec<ipa_param_descriptor, va_gc> *descriptors = NULL;
5747 struct ipa_func_body_info fbi;
5748 struct ipa_agg_replacement_value *aggval;
5749 int param_count;
5750 bool cfg_changed = false, something_changed = false;
5751
5752 gcc_checking_assert (cfun);
5753 gcc_checking_assert (current_function_decl);
5754
5755 if (dump_file)
5756 fprintf (dump_file, "Modification phase of node %s\n",
5757 node->dump_name ());
5758
5759 ipcp_update_bits (node);
5760 ipcp_update_vr (node);
5761 aggval = ipa_get_agg_replacements_for_node (node);
5762 if (!aggval)
5763 return 0;
5764 param_count = count_formal_params (node->decl);
5765 if (param_count == 0)
5766 return 0;
5767 adjust_agg_replacement_values (node, aggval);
5768 if (dump_file)
5769 ipa_dump_agg_replacement_values (dump_file, aggval);
5770
5771 fbi.node = node;
5772 fbi.info = NULL;
5773 fbi.bb_infos = vNULL;
5774 fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
5775 fbi.param_count = param_count;
5776 fbi.aa_walk_budget = opt_for_fn (node->decl, param_ipa_max_aa_steps);
5777
5778 vec_safe_grow_cleared (descriptors, param_count);
5779 ipa_populate_param_decls (node, *descriptors);
5780 calculate_dominance_info (CDI_DOMINATORS);
5781 ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
5782 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
5783
5784 int i;
5785 struct ipa_bb_info *bi;
5786 FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
5787 free_ipa_bb_info (bi);
5788 fbi.bb_infos.release ();
5789 free_dominance_info (CDI_DOMINATORS);
5790
5791 ipcp_transformation *s = ipcp_transformation_sum->get (node);
5792 s->agg_values = NULL;
5793 s->bits = NULL;
5794 s->m_vr = NULL;
5795
5796 vec_free (descriptors);
5797
5798 if (!something_changed)
5799 return 0;
5800
5801 if (cfg_changed)
5802 delete_unreachable_blocks_update_callgraph (node, false);
5803
5804 return TODO_update_ssa_only_virtuals;
5805 }
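/* Rough outline of the flow above, for reference: known bits and value
   ranges recorded by IPA-CP are first applied to the default definitions
   of the parameters; then, if aggregate replacement values exist, a
   dominator walk replaces matching loads from parameter-pointed memory
   with the known constants.  TODO_update_ssa_only_virtuals is requested
   only when at least one statement was actually modified.  */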
5806
5807
5808 /* Return true if OTHER describes the same aggregate value. */
5809 bool
5810 ipa_agg_value::equal_to (const ipa_agg_value &other)
5811 {
5812 return offset == other.offset
5813 && operand_equal_p (value, other.value, 0);
5814 }
5815 #include "gt-ipa-prop.h"
5816