1 /* Manipulation of formal and actual parameters of functions and function
2 calls.
3 Copyright (C) 2017-2022 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "ssa.h"
28 #include "cgraph.h"
29 #include "fold-const.h"
30 #include "tree-eh.h"
31 #include "stor-layout.h"
32 #include "gimplify.h"
33 #include "gimple-iterator.h"
34 #include "gimplify-me.h"
35 #include "tree-cfg.h"
36 #include "tree-dfa.h"
37 #include "ipa-param-manipulation.h"
38 #include "print-tree.h"
39 #include "gimple-pretty-print.h"
40 #include "builtins.h"
41 #include "tree-ssa.h"
42 #include "tree-inline.h"
43 #include "alloc-pool.h"
44 #include "symbol-summary.h"
45 #include "symtab-clones.h"
46 #include "tree-phinodes.h"
47 #include "cfgexpand.h"
48 #include "attribs.h"
49
50
51 /* Actual prefixes of different newly synthesized parameters.  Keep in sync
52 with IPA_PARAM_PREFIX_* defines. */
53
54 static const char *ipa_param_prefixes[IPA_PARAM_PREFIX_COUNT]
55 = {"SYNTH",
56 "ISRA",
57 "simd",
58 "mask"};
59
60 /* Names of parameters for dumping. Keep in sync with enum ipa_parm_op. */
61
62 static const char *ipa_param_op_names[IPA_PARAM_PREFIX_COUNT]
63 = {"IPA_PARAM_OP_UNDEFINED",
64 "IPA_PARAM_OP_COPY",
65 "IPA_PARAM_OP_NEW",
66 "IPA_PARAM_OP_SPLIT"};
67
68 /* Structure to hold declarations representing pass-through IPA-SRA splits. In
69 essence, it tells new index for a combination of original index and
70 offset. */
71
72 struct pass_through_split_map
73 {
74 /* Original argument index. */
75 unsigned base_index;
76 /* Offset of the split part in the original argument. */
77 unsigned unit_offset;
78 /* Index of the split part in the call statement - where clone
79 materialization put it. */
80 int new_index;
81 };
82
83 /* Information about some call statements that needs to be conveyed from clone
84 materialization to edge redirection. */
85
86 class ipa_edge_modification_info
87 {
88 public:
89   ipa_edge_modification_info ()
90 {}
91
92 /* Mapping of original argument indices to where those arguments sit in the
93 call statement now or to a negative index if they were removed. */
94 auto_vec<int> index_map;
95 /* Information about ISRA replacements put into the call statement at the
96 clone materialization stages. */
97 auto_vec<pass_through_split_map> pass_through_map;
98 /* Necessary adjustment to ipa_param_adjustments::m_always_copy_start when
99 redirecting the call. */
100 int always_copy_delta = 0;
101 };
102
103 /* Class for storing and retrieving summaries about call statement
104 modifications. */
105
106 class ipa_edge_modification_sum
107 : public call_summary <ipa_edge_modification_info *>
108 {
109 public:
110   ipa_edge_modification_sum (symbol_table *table)
111 : call_summary<ipa_edge_modification_info *> (table)
112 {
113 }
114
115 /* Hook that is called by summary when an edge is duplicated. */
116
117   virtual void duplicate (cgraph_edge *,
118 cgraph_edge *,
119 ipa_edge_modification_info *old_info,
120 ipa_edge_modification_info *new_info)
121 {
122 new_info->index_map.safe_splice (old_info->index_map);
123 new_info->pass_through_map.safe_splice (old_info->pass_through_map);
124 new_info->always_copy_delta = old_info->always_copy_delta;
125 }
126 };
127
128 /* Call summary to store information about edges which have had their arguments
129 partially modified already. */
130
131 static ipa_edge_modification_sum *ipa_edge_modifications;
132
133 /* Fail compilation if CS has any summary associated with it in
134 ipa_edge_modifications. */
135
136 DEBUG_FUNCTION void
137 ipa_verify_edge_has_no_modifications (cgraph_edge *cs)
138 {
139 gcc_assert (!ipa_edge_modifications || !ipa_edge_modifications->get (cs));
140 }
141
142 /* Fill an empty vector ARGS with PARM_DECLs representing formal parameters of
143 FNDECL. The function should not be called during LTO WPA phase except for
144 thunks (or functions with bodies streamed in). */
145
146 void
147 push_function_arg_decls (vec<tree> *args, tree fndecl)
148 {
149 int count;
150 tree parm;
151
152 /* Safety check that we do not attempt to use the function in WPA, except
153 when the function is a thunk and then we have DECL_ARGUMENTS or when we
154 have already explicitly loaded its body. */
155 gcc_assert (!flag_wpa
156 || DECL_ARGUMENTS (fndecl)
157 || gimple_has_body_p (fndecl));
158 count = 0;
159 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
160 count++;
161
162 args->reserve_exact (count);
163 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
164 args->quick_push (parm);
165 }
166
167 /* Fill an empty vector TYPES with trees representing formal parameters of
168 function type FNTYPE. */
169
170 void
171 push_function_arg_types (vec<tree> *types, tree fntype)
172 {
173 int count = 0;
174 tree t;
175
176 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
177 count++;
178
179 types->reserve_exact (count);
180 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
181 types->quick_push (TREE_VALUE (t));
182 }
183
184 /* Dump the adjustments in the vector ADJ_PARAMS to file F in a
185 human-friendly way. */
186
187 void
188 ipa_dump_adjusted_parameters (FILE *f,
189 vec<ipa_adjusted_param, va_gc> *adj_params)
190 {
191 unsigned i, len = vec_safe_length (adj_params);
192 bool first = true;
193
194 if (!len)
195 return;
196
197 fprintf (f, " IPA adjusted parameters: ");
198 for (i = 0; i < len; i++)
199 {
200 struct ipa_adjusted_param *apm;
201 apm = &(*adj_params)[i];
202
203 if (!first)
204 fprintf (f, " ");
205 else
206 first = false;
207
208 fprintf (f, "%i. %s %s", i, ipa_param_op_names[apm->op],
209 apm->prev_clone_adjustment ? "prev_clone_adjustment " : "");
210 switch (apm->op)
211 {
212 case IPA_PARAM_OP_UNDEFINED:
213 break;
214
215 case IPA_PARAM_OP_COPY:
216 fprintf (f, ", base_index: %u", apm->base_index);
217 fprintf (f, ", prev_clone_index: %u", apm->prev_clone_index);
218 break;
219
220 case IPA_PARAM_OP_SPLIT:
221 fprintf (f, ", offset: %u", apm->unit_offset);
222 /* fall-through */
223 case IPA_PARAM_OP_NEW:
224 fprintf (f, ", base_index: %u", apm->base_index);
225 fprintf (f, ", prev_clone_index: %u", apm->prev_clone_index);
226 print_node_brief (f, ", type: ", apm->type, 0);
227 print_node_brief (f, ", alias type: ", apm->alias_ptr_type, 0);
228 fprintf (f, " prefix: %s",
229 ipa_param_prefixes[apm->param_prefix_index]);
230 if (apm->reverse)
231 fprintf (f, ", reverse");
232 break;
233 }
234 fprintf (f, "\n");
235 }
236 }
237
238 /* Fill NEW_TYPES with types of a function after its current OTYPES have been
239 modified as described in ADJ_PARAMS.  When USE_PREV_INDICES is true, use
240 prev_clone_index from ADJ_PARAMS to identify original parameters, otherwise
241 use base_index. */
242
243 static void
244 fill_vector_of_new_param_types (vec<tree> *new_types, vec<tree> *otypes,
245 vec<ipa_adjusted_param, va_gc> *adj_params,
246 bool use_prev_indices)
247 {
248 unsigned adj_len = vec_safe_length (adj_params);
249 new_types->reserve_exact (adj_len);
250 for (unsigned i = 0; i < adj_len ; i++)
251 {
252 ipa_adjusted_param *apm = &(*adj_params)[i];
253 if (apm->op == IPA_PARAM_OP_COPY)
254 {
255 unsigned index
256 = use_prev_indices ? apm->prev_clone_index : apm->base_index;
257 /* The following needs to be handled gracefully because of type
258 mismatches. This happens with LTO but apparently also in Fortran
259 with -fcoarray=lib -O2 -lcaf_single -latomic. */
260 if (index >= otypes->length ())
261 continue;
262 new_types->quick_push ((*otypes)[index]);
263 }
264 else if (apm->op == IPA_PARAM_OP_NEW
265 || apm->op == IPA_PARAM_OP_SPLIT)
266 {
267 tree ntype = apm->type;
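	  /* For new parameters of gimple register type, make sure the type has
	     the natural alignment of its mode; build an aligned variant if the
	     original alignment differs.  */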
268 if (is_gimple_reg_type (ntype)
269 && TYPE_MODE (ntype) != BLKmode)
270 {
271 unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ntype));
272 if (TYPE_ALIGN (ntype) != malign)
273 ntype = build_aligned_type (ntype, malign);
274 }
275 new_types->quick_push (ntype);
276 }
277 else
278 gcc_unreachable ();
279 }
280 }
281
282 /* Return false if given attribute should prevent type adjustments. */
283
284 bool
285 ipa_param_adjustments::type_attribute_allowed_p (tree name)
286 {
287 if ((is_attribute_p ("fn spec", name) && flag_ipa_modref)
288 || is_attribute_p ("access", name)
289 || is_attribute_p ("returns_nonnull", name)
290 || is_attribute_p ("assume_aligned", name)
291 || is_attribute_p ("nocf_check", name)
292 || is_attribute_p ("warn_unused_result", name))
293 return true;
294 return false;
295 }
296
297 /* Return true if attribute should be dropped if parameter changed. */
298
299 static bool
300 drop_type_attribute_if_params_changed_p (tree name)
301 {
302 if (is_attribute_p ("fn spec", name)
303 || is_attribute_p ("access", name))
304 return true;
305 return false;
306 }
307
308 /* Build and return a function type just like ORIG_TYPE but with parameter
309 types given in NEW_PARAM_TYPES - which can be NULL if, but only if,
310 ORIG_TYPE itself has NULL TYPE_ARG_TYPES.  If METHOD2FUNC is true, also make
311 it a FUNCTION_TYPE instead of a METHOD_TYPE.
312 If ARGS_MODIFIED is true, drop attributes that are no longer up to date. */
313
314 static tree
315 build_adjusted_function_type (tree orig_type, vec<tree> *new_param_types,
316 bool method2func, bool skip_return,
317 bool args_modified)
318 {
319 tree new_arg_types = NULL;
320 if (TYPE_ARG_TYPES (orig_type))
321 {
322 gcc_checking_assert (new_param_types);
323 bool last_parm_void = (TREE_VALUE (tree_last (TYPE_ARG_TYPES (orig_type)))
324 == void_type_node);
325 unsigned len = new_param_types->length ();
326 for (unsigned i = 0; i < len; i++)
327 new_arg_types = tree_cons (NULL_TREE, (*new_param_types)[i],
328 new_arg_types);
329
330 tree new_reversed = nreverse (new_arg_types);
331 if (last_parm_void)
332 {
333 if (new_reversed)
334 TREE_CHAIN (new_arg_types) = void_list_node;
335 else
336 new_reversed = void_list_node;
337 }
338 new_arg_types = new_reversed;
339 }
340
341 /* Use build_distinct_type_copy to preserve as much as possible from original
342 type (debug info, attribute lists etc.). The one exception is
343 METHOD_TYPEs which must have a THIS argument; when we are asked to remove
344 it, we need to build a new FUNCTION_TYPE instead. */
345 tree new_type = NULL;
346 if (method2func)
347 {
348 tree ret_type;
349 if (skip_return)
350 ret_type = void_type_node;
351 else
352 ret_type = TREE_TYPE (orig_type);
353
354 new_type
355 = build_distinct_type_copy (build_function_type (ret_type,
356 new_arg_types));
357 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
358 }
359 else
360 {
361 new_type = build_distinct_type_copy (orig_type);
362 TYPE_ARG_TYPES (new_type) = new_arg_types;
363 if (skip_return)
364 TREE_TYPE (new_type) = void_type_node;
365 }
366 if (args_modified && TYPE_ATTRIBUTES (new_type))
367 {
368 tree t = TYPE_ATTRIBUTES (new_type);
369 tree *last = &TYPE_ATTRIBUTES (new_type);
370 TYPE_ATTRIBUTES (new_type) = NULL;
371 for (;t; t = TREE_CHAIN (t))
372 if (!drop_type_attribute_if_params_changed_p
373 (get_attribute_name (t)))
374 {
375 *last = copy_node (t);
376 TREE_CHAIN (*last) = NULL;
377 last = &TREE_CHAIN (*last);
378 }
379 }
380
381 return new_type;
382 }
383
384 /* Return the maximum index in any IPA_PARAM_OP_COPY adjustment or -1 if there
385 is none. */
386
387 int
388 ipa_param_adjustments::get_max_base_index ()
389 {
390 unsigned adj_len = vec_safe_length (m_adj_params);
391 int max_index = -1;
392 for (unsigned i = 0; i < adj_len ; i++)
393 {
394 ipa_adjusted_param *apm = &(*m_adj_params)[i];
395 if (apm->op == IPA_PARAM_OP_COPY
396 && max_index < apm->base_index)
397 max_index = apm->base_index;
398 }
399 return max_index;
400 }
401
402
403 /* Fill SURVIVING_PARAMS with an array of bools where each one says whether a
404 parameter that originally was at that position still survives in the given
405 clone or is removed/replaced. If the final array is smaller than an index
406 of an original parameter, that parameter also did not survive. That a
407 parameter survives does not mean it has the same index as before. */
408
409 void
410 ipa_param_adjustments::get_surviving_params (vec<bool> *surviving_params)
411 {
412 unsigned adj_len = vec_safe_length (m_adj_params);
413 int max_index = get_max_base_index ();
414
415 if (max_index < 0)
416 return;
417 surviving_params->reserve_exact (max_index + 1);
418 surviving_params->quick_grow_cleared (max_index + 1);
419 for (unsigned i = 0; i < adj_len ; i++)
420 {
421 ipa_adjusted_param *apm = &(*m_adj_params)[i];
422 if (apm->op == IPA_PARAM_OP_COPY)
423 (*surviving_params)[apm->base_index] = true;
424 }
425 }
426
427 /* Fill NEW_INDICES with new indices of each surviving parameter or -1 for
428 those which do not survive. Any parameter beyond the length of the vector
429 does not survive. There is currently no support for a parameter to be
430 copied to two distinct new parameters. */
431
432 void
433 ipa_param_adjustments::get_updated_indices (vec<int> *new_indices)
434 {
435 unsigned adj_len = vec_safe_length (m_adj_params);
436 int max_index = get_max_base_index ();
437
438 if (max_index < 0)
439 return;
440 unsigned res_len = max_index + 1;
441 new_indices->reserve_exact (res_len);
442 for (unsigned i = 0; i < res_len ; i++)
443 new_indices->quick_push (-1);
444 for (unsigned i = 0; i < adj_len ; i++)
445 {
446 ipa_adjusted_param *apm = &(*m_adj_params)[i];
447 if (apm->op == IPA_PARAM_OP_COPY)
448 (*new_indices)[apm->base_index] = i;
449 }
450 }
451
452 /* If a parameter with original INDEX has survived intact, return its new
453 index. Otherwise return -1. In that case, if it has been split and there
454 is a new parameter representing a portion at unit OFFSET for which a value
455 of a TYPE can be substituted, store its new index into SPLIT_INDEX,
456 otherwise store -1 there. */
457 int
458 ipa_param_adjustments::get_updated_index_or_split (int index,
459 unsigned unit_offset,
460 tree type, int *split_index)
461 {
462 unsigned adj_len = vec_safe_length (m_adj_params);
463 for (unsigned i = 0; i < adj_len ; i++)
464 {
465 ipa_adjusted_param *apm = &(*m_adj_params)[i];
466 if (apm->base_index != index)
467 continue;
468 if (apm->op == IPA_PARAM_OP_COPY)
469 return i;
470 if (apm->op == IPA_PARAM_OP_SPLIT
471 && apm->unit_offset == unit_offset)
472 {
473 if (useless_type_conversion_p (apm->type, type))
474 *split_index = i;
475 else
476 *split_index = -1;
477 return -1;
478 }
479 }
480
481 *split_index = -1;
482 return -1;
483 }
484
485 /* Return the original index for the given new parameter index. Return a
486 negative number if not available. */
487
488 int
489 ipa_param_adjustments::get_original_index (int newidx)
490 {
491 const ipa_adjusted_param *adj = &(*m_adj_params)[newidx];
492 if (adj->op != IPA_PARAM_OP_COPY)
493 return -1;
494 return adj->base_index;
495 }
496
497 /* Return true if the first parameter (assuming there was one) survives the
498 transformation intact and remains the first one. */
499
500 bool
501 ipa_param_adjustments::first_param_intact_p ()
502 {
503 return (!vec_safe_is_empty (m_adj_params)
504 && (*m_adj_params)[0].op == IPA_PARAM_OP_COPY
505 && (*m_adj_params)[0].base_index == 0);
506 }
507
508 /* Return true if we have to change what has formerly been a method into a
509 function. */
510
511 bool
512 ipa_param_adjustments::method2func_p (tree orig_type)
513 {
514 return ((TREE_CODE (orig_type) == METHOD_TYPE) && !first_param_intact_p ());
515 }
516
517 /* Given function type OLD_TYPE, return a new type derived from it after
518 performing all stored modifications. TYPE_ORIGINAL_P should be true when
519 OLD_TYPE refers to the type before any IPA transformations, as opposed to a
520 type that can be an intermediate one in between various IPA
521 transformations. */
522
523 tree
524 ipa_param_adjustments::build_new_function_type (tree old_type,
525 bool type_original_p)
526 {
527 auto_vec<tree,16> new_param_types, *new_param_types_p;
528 if (prototype_p (old_type))
529 {
530 auto_vec<tree, 16> otypes;
531 push_function_arg_types (&otypes, old_type);
532 fill_vector_of_new_param_types (&new_param_types, &otypes, m_adj_params,
533 !type_original_p);
534 new_param_types_p = &new_param_types;
535 }
536 else
537 new_param_types_p = NULL;
538
539 /* Check whether any parameters that the type cares about are modified. In
540 that case we will need to drop some type attributes. */
541 bool modified = false;
542 size_t index = 0;
543 if (m_adj_params)
544 for (tree t = TYPE_ARG_TYPES (old_type);
545 t && (int)index < m_always_copy_start && !modified;
546 t = TREE_CHAIN (t), index++)
547 if (index >= m_adj_params->length ()
548 || get_original_index (index) != (int)index)
549 modified = true;
550
551
552 return build_adjusted_function_type (old_type, new_param_types_p,
553 method2func_p (old_type), m_skip_return,
554 modified);
555 }
556
557 /* Build variant of function decl ORIG_DECL which has no return value if
558 M_SKIP_RETURN is true and, if ORIG_DECL's parameter types are known, has
559 this type adjusted as indicated in M_ADJ_PARAMS. Arguments from
560 DECL_ARGUMENTS list are not processed now, since they are linked by
561 TREE_CHAIN directly and not accessible in LTO during WPA. The caller is
562 responsible for eliminating them when clones are properly materialized. */
563
564 tree
565 ipa_param_adjustments::adjust_decl (tree orig_decl)
566 {
567 tree new_decl = copy_node (orig_decl);
568 tree orig_type = TREE_TYPE (orig_decl);
569 if (prototype_p (orig_type)
570 || (m_skip_return && !VOID_TYPE_P (TREE_TYPE (orig_type))))
571 {
572 tree new_type = build_new_function_type (orig_type, false);
573 TREE_TYPE (new_decl) = new_type;
574 }
575 if (method2func_p (orig_type))
576 DECL_VINDEX (new_decl) = NULL_TREE;
577
578 /* When signature changes, we need to clear builtin info. */
579 if (fndecl_built_in_p (new_decl))
580 set_decl_built_in_function (new_decl, NOT_BUILT_IN, 0);
581
582 DECL_VIRTUAL_P (new_decl) = 0;
583 DECL_LANG_SPECIFIC (new_decl) = NULL;
584
585 /* Drop MALLOC attribute for a void function. */
586 if (m_skip_return)
587 DECL_IS_MALLOC (new_decl) = 0;
588
589 return new_decl;
590 }
591
592 /* Wrapper around get_base_ref_and_offset for cases interesting for IPA-SRA
593 transformations. Return true if EXPR has an interesting form and fill in
594 *BASE_P and *UNIT_OFFSET_P with the appropriate info. */
595
596 static bool
597 isra_get_ref_base_and_offset (tree expr, tree *base_p, unsigned *unit_offset_p)
598 {
599 HOST_WIDE_INT offset, size;
600 bool reverse;
601 tree base
602 = get_ref_base_and_extent_hwi (expr, &offset, &size, &reverse);
603 if (!base || size < 0)
604 return false;
605
606 if ((offset % BITS_PER_UNIT) != 0)
607 return false;
608
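  /* If the base is a MEM_REF, fold its constant offset into OFFSET and
     continue with its pointer operand as the base; give up on non-constant
     offsets.  */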
609 if (TREE_CODE (base) == MEM_REF)
610 {
611 poly_int64 plmoff = mem_ref_offset (base).force_shwi ();
612 HOST_WIDE_INT moff;
613 bool is_cst = plmoff.is_constant (&moff);
614 if (!is_cst)
615 return false;
616 offset += moff * BITS_PER_UNIT;
617 base = TREE_OPERAND (base, 0);
618 }
619
620 if (offset < 0 || (offset / BITS_PER_UNIT) > UINT_MAX)
621 return false;
622
623 *base_p = base;
624 *unit_offset_p = offset / BITS_PER_UNIT;
625 return true;
626 }
627
628 /* Remove all statements that use NAME directly or indirectly. KILLED_SSAS
629 contains the SSA_NAMEs that are already being or have been processed and new
630 ones need to be added to it. The function only has to process situations
631 handled by ssa_name_only_returned_p in ipa-sra.cc with the exception that it
632 can assume it must never reach a use in a return statement. */
633
634 static void
635 purge_all_uses (tree name, hash_set <tree> *killed_ssas)
636 {
637 imm_use_iterator imm_iter;
638 gimple *stmt;
639 auto_vec <tree, 4> worklist;
640
641 worklist.safe_push (name);
642 while (!worklist.is_empty ())
643 {
644 tree cur_name = worklist.pop ();
645 FOR_EACH_IMM_USE_STMT (stmt, imm_iter, cur_name)
646 {
647 if (gimple_debug_bind_p (stmt))
648 {
649 /* When running within tree-inline, we will never end up here but
650 adding the SSAs to killed_ssas will do the trick in this case
651 and the respective debug statements will get reset. */
652 gimple_debug_bind_reset_value (stmt);
653 update_stmt (stmt);
654 continue;
655 }
656
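	  /* Any other use must be an assignment or a PHI defining an SSA name
	     with no virtual definition; if its result has not been processed
	     yet, queue it and remove the statement.  */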
657 tree lhs = NULL_TREE;
658 if (is_gimple_assign (stmt))
659 lhs = gimple_assign_lhs (stmt);
660 else if (gimple_code (stmt) == GIMPLE_PHI)
661 lhs = gimple_phi_result (stmt);
662 gcc_assert (lhs
663 && (TREE_CODE (lhs) == SSA_NAME)
664 && !gimple_vdef (stmt));
665 if (!killed_ssas->add (lhs))
666 {
667 worklist.safe_push (lhs);
668 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
669 gsi_remove (&gsi, true);
670 }
671 }
672 }
673 }
674
675 /* Modify actual arguments of a function call in statement currently belonging
676 to CS, and make it call CS->callee->decl. Return the new statement that
677 replaced the old one. When invoked, cfun and current_function_decl have to
678 be set to the caller. When called from within tree-inline, KILLED_SSAs has
679 to contain the pointer to killed_new_ssa_names within the copy_body_data
680 structure and SSAs discovered to be useless (if LHS is removed) will be
681 added to it, otherwise it needs to be NULL. */
682
683 gcall *
684 ipa_param_adjustments::modify_call (cgraph_edge *cs,
685 bool update_references,
686 hash_set <tree> *killed_ssas)
687 {
688 gcall *stmt = cs->call_stmt;
689 tree callee_decl = cs->callee->decl;
690
691 ipa_edge_modification_info *mod_info
692 = ipa_edge_modifications ? ipa_edge_modifications->get (cs) : NULL;
693 if (mod_info && symtab->dump_file)
694 {
695 fprintf (symtab->dump_file, "Information about pre-existing "
696 "modifications.\n Index map:");
697 unsigned idx_len = mod_info->index_map.length ();
698 for (unsigned i = 0; i < idx_len; i++)
699 fprintf (symtab->dump_file, " %i", mod_info->index_map[i]);
700 fprintf (symtab->dump_file, "\n Pass-through split map: ");
701 unsigned ptm_len = mod_info->pass_through_map.length ();
702 for (unsigned i = 0; i < ptm_len; i++)
703 fprintf (symtab->dump_file,
704 " (base_index: %u, offset: %u, new_index: %i)",
705 mod_info->pass_through_map[i].base_index,
706 mod_info->pass_through_map[i].unit_offset,
707 mod_info->pass_through_map[i].new_index);
708 fprintf (symtab->dump_file, "\n Always-copy delta: %i\n",
709 mod_info->always_copy_delta);
710 }
711
712 unsigned len = vec_safe_length (m_adj_params);
713 auto_vec<tree, 16> vargs (len);
714 unsigned old_nargs = gimple_call_num_args (stmt);
715 unsigned orig_nargs = mod_info ? mod_info->index_map.length () : old_nargs;
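  /* KEPT[i] records whether the i-th argument of the original call is still
     passed to the new call; it is consulted below when emitting debug
     bindings for arguments that are dropped.  */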
716 auto_vec<bool, 16> kept (old_nargs);
717 kept.quick_grow_cleared (old_nargs);
718
719 cgraph_node *current_node = cgraph_node::get (current_function_decl);
720 if (update_references)
721 current_node->remove_stmt_references (stmt);
722
723 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
724 gimple_stmt_iterator prev_gsi = gsi;
725 gsi_prev (&prev_gsi);
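  /* Go over the descriptions of the new parameters and build the argument
     vector: copy surviving arguments and construct replacements or loads
     for split ones.  */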
726 for (unsigned i = 0; i < len; i++)
727 {
728 ipa_adjusted_param *apm = &(*m_adj_params)[i];
729 if (apm->op == IPA_PARAM_OP_COPY)
730 {
731 int index = apm->base_index;
732 if ((unsigned) index >= orig_nargs)
733 /* Can happen if the original call has argument mismatch,
734 ignore. */
735 continue;
736 if (mod_info)
737 {
738 index = mod_info->index_map[apm->base_index];
739 gcc_assert (index >= 0);
740 }
741
742 tree arg = gimple_call_arg (stmt, index);
743
744 vargs.quick_push (arg);
745 kept[index] = true;
746 continue;
747 }
748
749 /* At the moment the only user of IPA_PARAM_OP_NEW modifies calls itself.
750 If we ever want to support it during WPA IPA stage, we'll need a
751 mechanism to call into the IPA passes that introduced them. Currently
752 we simply mandate that IPA infrastructure understands all argument
753 modifications. Remember, edge redirection/modification is done only
754 once, not in steps for each pass modifying the callee like clone
755 materialization. */
756 gcc_assert (apm->op == IPA_PARAM_OP_SPLIT);
757
758 /* We have to handle pass-through changes differently using the map
759 clone materialization might have left behind. */
760 tree repl = NULL_TREE;
761 unsigned ptm_len = mod_info ? mod_info->pass_through_map.length () : 0;
762 for (unsigned j = 0; j < ptm_len; j++)
763 if (mod_info->pass_through_map[j].base_index == apm->base_index
764 && mod_info->pass_through_map[j].unit_offset == apm->unit_offset)
765 {
766 int repl_idx = mod_info->pass_through_map[j].new_index;
767 gcc_assert (repl_idx >= 0);
768 repl = gimple_call_arg (stmt, repl_idx);
769 break;
770 }
771 if (repl)
772 {
773 if (!useless_type_conversion_p(apm->type, repl->typed.type))
774 {
775 repl = force_value_to_type (apm->type, repl);
776 repl = force_gimple_operand_gsi (&gsi, repl,
777 true, NULL, true, GSI_SAME_STMT);
778 }
779 vargs.quick_push (repl);
780 continue;
781 }
782
783 int index = apm->base_index;
784 if ((unsigned) index >= orig_nargs)
785 /* Can happen if the original call has argument mismatch, ignore. */
786 continue;
787 if (mod_info)
788 {
789 index = mod_info->index_map[apm->base_index];
790 gcc_assert (index >= 0);
791 }
792 tree base = gimple_call_arg (stmt, index);
793
794 /* When we create a new parameter out of the value of the old one, we can
795 do the following kinds of transformations:
796
797 - A scalar passed by reference, potentially as a part of a larger
798 aggregate, is converted to a scalar passed by value.
799
800 - A part of an aggregate is passed instead of the whole aggregate. */
801
802 location_t loc = gimple_location (stmt);
803 tree off;
804 bool deref_base = false;
805 unsigned int deref_align = 0;
806 if (TREE_CODE (base) != ADDR_EXPR
807 && is_gimple_reg_type (TREE_TYPE (base)))
808 {
809 /* Detect type mismatches in calls in invalid programs and make a
810 poor attempt to gracefully convert them so that we don't ICE. */
811 if (!POINTER_TYPE_P (TREE_TYPE (base)))
812 base = force_value_to_type (ptr_type_node, base);
813
814 off = build_int_cst (apm->alias_ptr_type, apm->unit_offset);
815 }
816 else
817 {
818 bool addrof;
819 if (TREE_CODE (base) == ADDR_EXPR)
820 {
821 base = TREE_OPERAND (base, 0);
822 addrof = true;
823 }
824 else
825 addrof = false;
826
827 tree prev_base = base;
828 poly_int64 base_offset;
829 base = get_addr_base_and_unit_offset (base, &base_offset);
830
831 /* Aggregate arguments can have non-invariant addresses. */
832 if (!base)
833 {
834 base = build_fold_addr_expr (prev_base);
835 off = build_int_cst (apm->alias_ptr_type, apm->unit_offset);
836 }
837 else if (TREE_CODE (base) == MEM_REF)
838 {
839 if (!addrof)
840 {
841 deref_base = true;
842 deref_align = TYPE_ALIGN (TREE_TYPE (base));
843 }
844 off = build_int_cst (apm->alias_ptr_type,
845 base_offset + apm->unit_offset);
846 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
847 off);
848 base = TREE_OPERAND (base, 0);
849 }
850 else
851 {
852 off = build_int_cst (apm->alias_ptr_type,
853 base_offset + apm->unit_offset);
854 base = build_fold_addr_expr (base);
855 }
856 }
857
858 tree type = apm->type;
859 unsigned int align;
860 unsigned HOST_WIDE_INT misalign;
861
862 if (deref_base)
863 {
864 align = deref_align;
865 misalign = 0;
866 }
867 else
868 {
869 get_pointer_alignment_1 (base, &align, &misalign);
870 /* All users must make sure that we can be optimistic when it
871 comes to alignment in this case (by inspecting the final users
872 of these new parameters). */
873 if (TYPE_ALIGN (type) > align)
874 align = TYPE_ALIGN (type);
875 }
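      /* Fold the constant offset into the misalignment and, if the resulting
	 alignment is lower than that of TYPE, switch to a correspondingly
	 less aligned variant of the type.  */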
876 misalign
877 += (offset_int::from (wi::to_wide (off), SIGNED).to_short_addr ()
878 * BITS_PER_UNIT);
879 misalign = misalign & (align - 1);
880 if (misalign != 0)
881 align = least_bit_hwi (misalign);
882 if (align < TYPE_ALIGN (type))
883 type = build_aligned_type (type, align);
884 base = force_gimple_operand_gsi (&gsi, base,
885 true, NULL, true, GSI_SAME_STMT);
886 tree expr = fold_build2_loc (loc, MEM_REF, type, base, off);
887 REF_REVERSE_STORAGE_ORDER (expr) = apm->reverse;
888 /* If expr is not a valid gimple call argument emit
889 a load into a temporary. */
890 if (is_gimple_reg_type (TREE_TYPE (expr)))
891 {
892 gimple *tem = gimple_build_assign (NULL_TREE, expr);
893 if (gimple_in_ssa_p (cfun))
894 {
895 gimple_set_vuse (tem, gimple_vuse (stmt));
896 expr = make_ssa_name (TREE_TYPE (expr), tem);
897 }
898 else
899 expr = create_tmp_reg (TREE_TYPE (expr));
900 gimple_assign_set_lhs (tem, expr);
901 gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
902 }
903 vargs.quick_push (expr);
904 }
905
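  /* Arguments at positions from m_always_copy_start onwards (adjusted by any
     delta recorded during previous modifications of this call) are appended
     unchanged.  */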
906 if (m_always_copy_start >= 0)
907 {
908 int always_copy_start = m_always_copy_start;
909 if (mod_info)
910 {
911 always_copy_start += mod_info->always_copy_delta;
912 gcc_assert (always_copy_start >= 0);
913 }
914 for (unsigned i = always_copy_start; i < old_nargs; i++)
915 vargs.safe_push (gimple_call_arg (stmt, i));
916 }
917
918 /* For optimized away parameters, add on the caller side
919 before the call
920 DEBUG D#X => parm_Y(D)
921 stmts and associate D#X with parm in decl_debug_args_lookup
922 vector to say for debug info that if parameter parm had been passed,
923 it would have value parm_Y(D). */
924 tree old_decl = gimple_call_fndecl (stmt);
925 if (MAY_HAVE_DEBUG_BIND_STMTS && old_decl && callee_decl)
926 {
927 vec<tree, va_gc> **debug_args = NULL;
928 unsigned i = 0;
929 cgraph_node *callee_node = cgraph_node::get (callee_decl);
930
931 /* FIXME: we don't seem to be able to insert debug args before clone
932 is materialized. Materializing them early leads to extra memory
933 use. */
934 if (callee_node->clone_of)
935 callee_node->get_untransformed_body ();
936 for (tree old_parm = DECL_ARGUMENTS (old_decl);
937 old_parm && i < old_nargs && ((int) i) < m_always_copy_start;
938 old_parm = DECL_CHAIN (old_parm), i++)
939 {
940 if (!is_gimple_reg (old_parm) || kept[i])
941 continue;
942 tree arg;
943 if (mod_info)
944 {
945 if (mod_info->index_map[i] < 0)
946 continue;
947 arg = gimple_call_arg (stmt, mod_info->index_map[i]);
948 }
949 else
950 arg = gimple_call_arg (stmt, i);
951
952 tree origin = DECL_ORIGIN (old_parm);
953 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
954 {
955 if (!fold_convertible_p (TREE_TYPE (origin), arg))
956 continue;
957 tree rhs1;
958 if (TREE_CODE (arg) == SSA_NAME
959 && gimple_assign_cast_p (SSA_NAME_DEF_STMT (arg))
960 && (rhs1
961 = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (arg)))
962 && useless_type_conversion_p (TREE_TYPE (origin),
963 TREE_TYPE (rhs1)))
964 arg = rhs1;
965 else
966 arg = fold_convert_loc (gimple_location (stmt),
967 TREE_TYPE (origin), arg);
968 }
969 if (debug_args == NULL)
970 debug_args = decl_debug_args_insert (callee_decl);
971 unsigned int ix;
972 tree ddecl = NULL_TREE;
973 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
974 if (ddecl == origin)
975 {
976 ddecl = (**debug_args)[ix + 1];
977 break;
978 }
979 if (ddecl == NULL)
980 {
981 ddecl = build_debug_expr_decl (TREE_TYPE (origin));
982 /* FIXME: Is setting the mode really necessary? */
983 SET_DECL_MODE (ddecl, DECL_MODE (origin));
984
985 vec_safe_push (*debug_args, origin);
986 vec_safe_push (*debug_args, ddecl);
987 }
988 gimple *def_temp = gimple_build_debug_bind (ddecl,
989 unshare_expr (arg), stmt);
990 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
991 }
992 }
993
994 if (dump_file && (dump_flags & TDF_DETAILS))
995 {
996 fprintf (dump_file, "replacing stmt:");
997 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0);
998 }
999
1000 gcall *new_stmt = gimple_build_call_vec (callee_decl, vargs);
1001
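  /* If the return value is removed, the SSA name holding the result of the
     original call and all statements that merely consumed it must be purged;
     otherwise simply transfer the LHS to the new call.  */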
1002 hash_set <tree> *ssas_to_remove = NULL;
1003 if (tree lhs = gimple_call_lhs (stmt))
1004 {
1005 if (!m_skip_return)
1006 gimple_call_set_lhs (new_stmt, lhs);
1007 else if (TREE_CODE (lhs) == SSA_NAME)
1008 {
1009 if (!killed_ssas)
1010 {
1011 ssas_to_remove = new hash_set<tree> (8);
1012 killed_ssas = ssas_to_remove;
1013 }
1014 killed_ssas->add (lhs);
1015 purge_all_uses (lhs, killed_ssas);
1016 }
1017 }
1018
1019 gimple_set_block (new_stmt, gimple_block (stmt));
1020 if (gimple_has_location (stmt))
1021 gimple_set_location (new_stmt, gimple_location (stmt));
1022 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
1023 gimple_call_copy_flags (new_stmt, stmt);
1024 if (gimple_in_ssa_p (cfun))
1025 gimple_move_vops (new_stmt, stmt);
1026
1027 if (dump_file && (dump_flags & TDF_DETAILS))
1028 {
1029 fprintf (dump_file, "with stmt:");
1030 print_gimple_stmt (dump_file, new_stmt, 0);
1031 fprintf (dump_file, "\n");
1032 }
1033 gsi_replace (&gsi, new_stmt, true);
1034 if (ssas_to_remove)
1035 {
1036 ipa_release_ssas_in_hash (ssas_to_remove);
1037 delete ssas_to_remove;
1038 }
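  /* Re-record references for the new call statement and for any statements
     that were inserted in front of it above.  */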
1039 if (update_references)
1040 do
1041 {
1042 current_node->record_stmt_references (gsi_stmt (gsi));
1043 gsi_prev (&gsi);
1044 }
1045 while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
1046
1047 if (mod_info)
1048 ipa_edge_modifications->remove (cs);
1049 return new_stmt;
1050 }
1051
1052 /* Dump information contained in the object in textual form to F. */
1053
1054 void
1055 ipa_param_adjustments::dump (FILE *f)
1056 {
1057 fprintf (f, " m_always_copy_start: %i\n", m_always_copy_start);
1058 ipa_dump_adjusted_parameters (f, m_adj_params);
1059 if (m_skip_return)
1060 fprintf (f, " Will SKIP return.\n");
1061 }
1062
1063 /* Dump information contained in the object in textual form to stderr. */
1064
1065 void
1066 ipa_param_adjustments::debug ()
1067 {
1068 dump (stderr);
1069 }
1070
1071 /* Register that REPLACEMENT should replace parameter described in APM. */
1072
1073 void
1074 ipa_param_body_adjustments::register_replacement (ipa_adjusted_param *apm,
1075 tree replacement)
1076 {
1077 gcc_checking_assert (apm->op == IPA_PARAM_OP_SPLIT
1078 || apm->op == IPA_PARAM_OP_NEW);
1079 gcc_checking_assert (!apm->prev_clone_adjustment);
1080 ipa_param_body_replacement psr;
1081 psr.base = m_oparms[apm->prev_clone_index];
1082 psr.repl = replacement;
1083 psr.dummy = NULL_TREE;
1084 psr.unit_offset = apm->unit_offset;
1085 m_replacements.safe_push (psr);
1086 }
1087
1088 /* Copy or not, as appropriate given m_id and decl context, a pre-existing
1089 PARM_DECL T so that it can be included in the parameters of the modified
1090 function. */
1091
1092 tree
1093 ipa_param_body_adjustments::carry_over_param (tree t)
1094 {
1095 tree new_parm;
1096 if (m_id)
1097 {
1098 new_parm = remap_decl (t, m_id);
1099 if (TREE_CODE (new_parm) != PARM_DECL)
1100 new_parm = m_id->copy_decl (t, m_id);
1101 }
1102 else if (DECL_CONTEXT (t) != m_fndecl)
1103 {
1104 new_parm = copy_node (t);
1105 DECL_CONTEXT (new_parm) = m_fndecl;
1106 }
1107 else
1108 new_parm = t;
1109 return new_parm;
1110 }
1111
1112 /* Populate m_dead_stmts given that DEAD_PARAM is going to be removed without
1113 any replacement or splitting. Push all removed SSA names that are used
1114 within debug statements to DEBUGSTACK. */
1116
1117 void
1118 ipa_param_body_adjustments::mark_dead_statements (tree dead_param,
1119 vec<tree> *debugstack)
1120 {
1121 /* Current IPA analyses which remove unused parameters never remove
1122 non-gimple-register ones which have any use except as parameters in other
1123 calls, so we can safely leave them as they are. */
1124 if (!is_gimple_reg (dead_param))
1125 return;
1126 tree parm_ddef = ssa_default_def (m_id->src_cfun, dead_param);
1127 if (!parm_ddef || has_zero_uses (parm_ddef))
1128 return;
1129
1130 auto_vec<tree, 4> stack;
1131 hash_set<tree> used_in_debug;
1132 m_dead_ssas.add (parm_ddef);
1133 stack.safe_push (parm_ddef);
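  /* Walk all transitive uses of the default definition, adding the SSA names
     and statements that will become dead and noting which SSA names appear
     in debug statements.  */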
1134 while (!stack.is_empty ())
1135 {
1136 imm_use_iterator imm_iter;
1137 use_operand_p use_p;
1138 tree t = stack.pop ();
1139
1140 insert_decl_map (m_id, t, error_mark_node);
1141 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, t)
1142 {
1143 gimple *stmt = USE_STMT (use_p);
1144
1145 /* Calls containing dead arguments cannot be deleted,
1146 modify_call_stmt will instead remove just the argument later on.
1147 If isra_track_scalar_value_uses in ipa-sra.cc is extended to look
1148 through const functions, we will need to do so here too. */
1149 if (is_gimple_call (stmt)
1150 || (m_id->blocks_to_copy
1151 && !bitmap_bit_p (m_id->blocks_to_copy,
1152 gimple_bb (stmt)->index)))
1153 continue;
1154
1155 if (is_gimple_debug (stmt))
1156 {
1157 m_dead_stmts.add (stmt);
1158 gcc_assert (gimple_debug_bind_p (stmt));
1159 if (!used_in_debug.contains (t))
1160 {
1161 used_in_debug.add (t);
1162 debugstack->safe_push (t);
1163 }
1164 }
1165 else if (gimple_code (stmt) == GIMPLE_PHI)
1166 {
1167 gphi *phi = as_a <gphi *> (stmt);
1168 int ix = PHI_ARG_INDEX_FROM_USE (use_p);
1169
1170 if (!m_id->blocks_to_copy
1171 || bitmap_bit_p (m_id->blocks_to_copy,
1172 gimple_phi_arg_edge (phi, ix)->src->index))
1173 {
1174 m_dead_stmts.add (phi);
1175 tree res = gimple_phi_result (phi);
1176 if (!m_dead_ssas.add (res))
1177 stack.safe_push (res);
1178 }
1179 }
1180 else if (is_gimple_assign (stmt))
1181 {
1182 m_dead_stmts.add (stmt);
1183 if (!gimple_clobber_p (stmt))
1184 {
1185 tree lhs = gimple_assign_lhs (stmt);
1186 gcc_assert (TREE_CODE (lhs) == SSA_NAME);
1187 if (!m_dead_ssas.add (lhs))
1188 stack.safe_push (lhs);
1189 }
1190 }
1191 else
1192 /* IPA-SRA does not analyze other types of statements. */
1193 gcc_unreachable ();
1194 }
1195 }
1196
1197 if (!MAY_HAVE_DEBUG_STMTS)
1198 {
1199 gcc_assert (debugstack->is_empty ());
1200 return;
1201 }
1202
1203 tree dp_ddecl = build_debug_expr_decl (TREE_TYPE (dead_param));
1204 /* FIXME: Is setting the mode really necessary? */
1205 SET_DECL_MODE (dp_ddecl, DECL_MODE (dead_param));
1206 m_dead_ssa_debug_equiv.put (parm_ddef, dp_ddecl);
1207 }
1208
1209 /* Callback to walk_tree. If REMAP is an SSA_NAME that is present in hash_map
1210 passed in DATA, replace it with unshared version of what it was mapped to.
1211 If an SSA argument would be remapped to NULL, the whole operation needs to
1212 abort which is signaled by returning error_mark_node. */
1213
1214 static tree
1215 replace_with_mapped_expr (tree *remap, int *walk_subtrees, void *data)
1216 {
1217 if (TYPE_P (*remap))
1218 {
1219 *walk_subtrees = 0;
1220 return 0;
1221 }
1222 if (TREE_CODE (*remap) != SSA_NAME)
1223 return 0;
1224
1225 *walk_subtrees = 0;
1226
1227 hash_map<tree, tree> *equivs = (hash_map<tree, tree> *) data;
1228 if (tree *p = equivs->get (*remap))
1229 {
1230 if (!*p)
1231 return error_mark_node;
1232 *remap = unshare_expr (*p);
1233 }
1234 return 0;
1235 }
1236
1237 /* Replace all occurrences of SSAs in m_dead_ssa_debug_equiv in *T with what
1238 they are mapped to. */
1239
1240 void
1241 ipa_param_body_adjustments::remap_with_debug_expressions (tree *t)
1242 {
1243 /* If *t is an SSA_NAME which should have its debug statements reset, it is
1244 mapped to NULL in the hash_map.
1245
1246 It is perhaps simpler to handle the SSA_NAME cases directly and only
1247 invoke walk_tree on more complex expressions. When
1248 remap_with_debug_expressions is called from tree-inline.cc, a to-be-reset
1249 SSA_NAME can be an operand to such expressions and the entire debug
1250 variable we are remapping should be reset. This is signaled by walk_tree
1251 returning error_mark_node and done by setting *t to NULL. */
1252 if (TREE_CODE (*t) == SSA_NAME)
1253 {
1254 if (tree *p = m_dead_ssa_debug_equiv.get (*t))
1255 *t = *p;
1256 }
1257 else if (walk_tree (t, replace_with_mapped_expr,
1258 &m_dead_ssa_debug_equiv, NULL) == error_mark_node)
1259 *t = NULL_TREE;
1260 }
1261
1262 /* For an SSA_NAME DEAD_SSA which is about to be DCEd because it is based on a
1263 useless parameter, prepare an expression that should represent it in
1264 debug_binds in the cloned function and add a mapping from DEAD_SSA to
1265 m_dead_ssa_debug_equiv. That mapping is to NULL when the associated
1266 debug_statement has to be reset instead. In such case return false,
1267 ottherwise return true. If DEAD_SSA comes from a basic block which is not
1268 about to be copied, ignore it and return true. */
1269
1270 bool
1271 ipa_param_body_adjustments::prepare_debug_expressions (tree dead_ssa)
1272 {
1273 gcc_checking_assert (m_dead_ssas.contains (dead_ssa));
1274 if (tree *d = m_dead_ssa_debug_equiv.get (dead_ssa))
1275 return (*d != NULL_TREE);
1276
1277 gcc_assert (!SSA_NAME_IS_DEFAULT_DEF (dead_ssa));
1278 gimple *def = SSA_NAME_DEF_STMT (dead_ssa);
1279 if (m_id->blocks_to_copy
1280 && !bitmap_bit_p (m_id->blocks_to_copy, gimple_bb (def)->index))
1281 return true;
1282
1283 if (gimple_code (def) == GIMPLE_PHI)
1284 {
1285 /* In theory, we could ignore all SSAs coming from BBs not in
1286 m_id->blocks_to_copy but at the time of writing this code that
1287 should never really be the case because only fnsplit uses that bitmap,
1288 so don't bother. */
1289 tree value = degenerate_phi_result (as_a <gphi *> (def));
1290 if (!value
1291 || (m_dead_ssas.contains (value)
1292 && !prepare_debug_expressions (value)))
1293 {
1294 m_dead_ssa_debug_equiv.put (dead_ssa, NULL_TREE);
1295 return false;
1296 }
1297
1298 gcc_assert (TREE_CODE (value) == SSA_NAME);
1299 tree *d = m_dead_ssa_debug_equiv.get (value);
1300 m_dead_ssa_debug_equiv.put (dead_ssa, *d);
1301 return true;
1302 }
1303
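  /* Make sure all dead SSA operands of the defining statement have usable
     debug expressions themselves; if any of them had to be reset, this one
     must be reset too.  */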
1304 bool lost = false;
1305 use_operand_p use_p;
1306 ssa_op_iter oi;
1307 FOR_EACH_PHI_OR_STMT_USE (use_p, def, oi, SSA_OP_USE)
1308 {
1309 tree use = USE_FROM_PTR (use_p);
1310 if (m_dead_ssas.contains (use)
1311 && !prepare_debug_expressions (use))
1312 {
1313 lost = true;
1314 break;
1315 }
1316 }
1317
1318 if (lost)
1319 {
1320 m_dead_ssa_debug_equiv.put (dead_ssa, NULL_TREE);
1321 return false;
1322 }
1323
1324 if (is_gimple_assign (def))
1325 {
1326 gcc_assert (!gimple_clobber_p (def));
1327 if (gimple_assign_copy_p (def)
1328 && TREE_CODE (gimple_assign_rhs1 (def)) == SSA_NAME)
1329 {
1330 tree d = *m_dead_ssa_debug_equiv.get (gimple_assign_rhs1 (def));
1331 gcc_assert (d);
1332 m_dead_ssa_debug_equiv.put (dead_ssa, d);
1333 return true;
1334 }
1335
1336 tree val
1337 = unshare_expr_without_location (gimple_assign_rhs_to_tree (def));
1338 remap_with_debug_expressions (&val);
1339
1340 tree vexpr = build_debug_expr_decl (TREE_TYPE (val));
1341 m_dead_stmt_debug_equiv.put (def, val);
1342 m_dead_ssa_debug_equiv.put (dead_ssa, vexpr);
1343 return true;
1344 }
1345 else
1346 gcc_unreachable ();
1347 }
1348
1349 /* Common initialization performed by all ipa_param_body_adjustments
1350 constructors. OLD_FNDECL is the declaration we take original arguments
1351 from, (it may be the same as M_FNDECL). VARS, if non-NULL, is a pointer to
1352 a chained list of new local variables. TREE_MAP is the IPA-CP produced
1353 mapping of trees to constants.
1354
1355 The function is rather long but it really only initializes all data members
1356 of the class: it creates new param DECLs and finds their new types. */
1357
1358 void
1359 ipa_param_body_adjustments::common_initialization (tree old_fndecl,
1360 tree *vars,
1361 vec<ipa_replace_map *,
1362 va_gc> *tree_map)
1363 {
1364 push_function_arg_decls (&m_oparms, old_fndecl);
1365 auto_vec<tree,16> otypes;
1366 if (TYPE_ARG_TYPES (TREE_TYPE (old_fndecl)) != NULL_TREE)
1367 push_function_arg_types (&otypes, TREE_TYPE (old_fndecl));
1368 else
1369 {
1370 auto_vec<tree,16> oparms;
1371 push_function_arg_decls (&oparms, old_fndecl);
1372 unsigned ocount = oparms.length ();
1373 otypes.reserve_exact (ocount);
1374 for (unsigned i = 0; i < ocount; i++)
1375 otypes.quick_push (TREE_TYPE (oparms[i]));
1376 }
1377 fill_vector_of_new_param_types (&m_new_types, &otypes, m_adj_params, true);
1378
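  /* KEPT and SPLIT track, by original parameter index, which parameters are
     carried over to the clone and which are replaced by their components.  */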
1379 auto_vec<bool, 16> kept;
1380 kept.reserve_exact (m_oparms.length ());
1381 kept.quick_grow_cleared (m_oparms.length ());
1382 auto_vec<bool, 16> split;
1383 split.reserve_exact (m_oparms.length ());
1384 split.quick_grow_cleared (m_oparms.length ());
1385
1386 unsigned adj_len = vec_safe_length (m_adj_params);
1387 m_method2func = ((TREE_CODE (TREE_TYPE (m_fndecl)) == METHOD_TYPE)
1388 && (adj_len == 0
1389 || (*m_adj_params)[0].op != IPA_PARAM_OP_COPY
1390 || (*m_adj_params)[0].base_index != 0));
1391
1392 /* The main job of this function is to go over the vector of adjusted
1393 parameters and create declarations or find corresponding old ones and push
1394 them to m_new_decls. For IPA-SRA replacements it also creates
1395 corresponding m_id->dst_node->clone.performed_splits entries. */
1396
1397 m_new_decls.reserve_exact (adj_len);
1398 for (unsigned i = 0; i < adj_len ; i++)
1399 {
1400 ipa_adjusted_param *apm = &(*m_adj_params)[i];
1401 unsigned prev_index = apm->prev_clone_index;
1402 tree new_parm;
1403 if (apm->op == IPA_PARAM_OP_COPY
1404 || apm->prev_clone_adjustment)
1405 {
1406 kept[prev_index] = true;
1407 new_parm = carry_over_param (m_oparms[prev_index]);
1408 m_new_decls.quick_push (new_parm);
1409 }
1410 else if (apm->op == IPA_PARAM_OP_NEW
1411 || apm->op == IPA_PARAM_OP_SPLIT)
1412 {
1413 tree new_type = m_new_types[i];
1414 gcc_checking_assert (new_type);
1415 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
1416 new_type);
1417 const char *prefix = ipa_param_prefixes[apm->param_prefix_index];
1418 DECL_NAME (new_parm) = create_tmp_var_name (prefix);
1419 DECL_ARTIFICIAL (new_parm) = 1;
1420 DECL_ARG_TYPE (new_parm) = new_type;
1421 DECL_CONTEXT (new_parm) = m_fndecl;
1422 TREE_USED (new_parm) = 1;
1423 DECL_IGNORED_P (new_parm) = 1;
1424 layout_decl (new_parm, 0);
1425 m_new_decls.quick_push (new_parm);
1426
1427 if (apm->op == IPA_PARAM_OP_SPLIT)
1428 {
1429 m_split_modifications_p = true;
1430 split[prev_index] = true;
1431 register_replacement (apm, new_parm);
1432 }
1433 }
1434 else
1435 gcc_unreachable ();
1436 }
1437
1438 if (tree_map)
1439 {
1440 /* Do not treat parameters which were replaced with a constant as
1441 completely vanished. */
1442 auto_vec <int, 16> index_mapping;
1443 bool need_remap = false;
1444
1445 if (m_id)
1446 {
1447 clone_info *cinfo = clone_info::get (m_id->src_node);
1448 if (cinfo && cinfo->param_adjustments)
1449 {
1450 cinfo->param_adjustments->get_updated_indices (&index_mapping);
1451 need_remap = true;
1452 }
1453 }
1454
1455 for (unsigned i = 0; i < tree_map->length (); i++)
1456 {
1457 int parm_num = (*tree_map)[i]->parm_num;
1458 gcc_assert (parm_num >= 0);
1459 if (need_remap)
1460 parm_num = index_mapping[parm_num];
1461 kept[parm_num] = true;
1462 }
1463 }
1464
1465 /* As part of body modifications, we will also have to replace remaining uses
1466 of removed PARM_DECLs (which do not however use the initial value) with
1467 their VAR_DECL copies.
1468
1469 We do this differently with and without m_id. With m_id, we rely on its
1470 mapping and create a replacement straight away. Without it, we have our
1471 own mechanism for which we have to populate m_removed_decls vector. Just
1472 don't mix them, that is why you should not call
1473 replace_removed_params_ssa_names or perform_cfun_body_modifications when
1474 you construct with ID not equal to NULL. */
1475
1476 auto_vec<tree, 8> ssas_to_process_debug;
1477 unsigned op_len = m_oparms.length ();
1478 for (unsigned i = 0; i < op_len; i++)
1479 if (!kept[i])
1480 {
1481 if (m_id)
1482 {
1483 gcc_assert (!m_id->decl_map->get (m_oparms[i]));
1484 tree var = copy_decl_to_var (m_oparms[i], m_id);
1485 insert_decl_map (m_id, m_oparms[i], var);
1486 /* Declare this new variable. */
1487 DECL_CHAIN (var) = *vars;
1488 *vars = var;
1489
1490 /* If this is not a split but a real removal, init hash sets
1491 that will guide what not to copy to the new body. */
1492 if (!split[i])
1493 mark_dead_statements (m_oparms[i], &ssas_to_process_debug);
1494 if (MAY_HAVE_DEBUG_STMTS
1495 && is_gimple_reg (m_oparms[i]))
1496 m_reset_debug_decls.safe_push (m_oparms[i]);
1497 }
1498 else
1499 {
1500 m_removed_decls.safe_push (m_oparms[i]);
1501 m_removed_map.put (m_oparms[i], m_removed_decls.length () - 1);
1502 if (MAY_HAVE_DEBUG_STMTS
1503 && !kept[i]
1504 && is_gimple_reg (m_oparms[i]))
1505 m_reset_debug_decls.safe_push (m_oparms[i]);
1506 }
1507 }
1508
1509 while (!ssas_to_process_debug.is_empty ())
1510 prepare_debug_expressions (ssas_to_process_debug.pop ());
1511 }
1512
1513 /* Constructor of ipa_param_body_adjustments from a simple list of
1514 modifications to parameters listed in ADJ_PARAMS which will prepare ground
1515 for modification of parameters of FNDECL. The return value of the function
1516 will not be removed and the object will assume it does not run as a part of
1517 tree_function_versioning. */
1518
1519 ipa_param_body_adjustments
1520 ::ipa_param_body_adjustments (vec<ipa_adjusted_param, va_gc> *adj_params,
1521 tree fndecl)
1522 : m_adj_params (adj_params), m_adjustments (NULL), m_reset_debug_decls (),
1523 m_split_modifications_p (false), m_dead_stmts (), m_dead_ssas (),
1524 m_dead_ssa_debug_equiv (), m_dead_stmt_debug_equiv (), m_fndecl (fndecl),
1525 m_id (NULL), m_oparms (), m_new_decls (), m_new_types (), m_replacements (),
1526 m_removed_decls (), m_removed_map (), m_method2func (false)
1527 {
1528 common_initialization (fndecl, NULL, NULL);
1529 }
1530
1531 /* Constructor of ipa_param_body_adjustments from ipa_param_adjustments in
1532 ADJUSTMENTS which will prepare ground for modification of parameters of
1533 fndecl. The object will assume it does not run as a part of
1534 tree_function_versioning. */
1535
1536 ipa_param_body_adjustments
1537 ::ipa_param_body_adjustments (ipa_param_adjustments *adjustments,
1538 tree fndecl)
1539 : m_adj_params (adjustments->m_adj_params), m_adjustments (adjustments),
1540 m_reset_debug_decls (), m_split_modifications_p (false), m_dead_stmts (),
1541 m_dead_ssas (), m_dead_ssa_debug_equiv (), m_dead_stmt_debug_equiv (),
1542 m_fndecl (fndecl), m_id (NULL), m_oparms (), m_new_decls (),
1543 m_new_types (), m_replacements (), m_removed_decls (), m_removed_map (),
1544 m_method2func (false)
1545 {
1546 common_initialization (fndecl, NULL, NULL);
1547 }
1548
1549 /* Constructor of ipa_param_body_adjustments which sets it up as a part of
1550 running tree_function_versioning. Planned modifications to the function are
1551 in ADJUSTMENTS. FNDECL designates the new function clone which is being
1552 modified. OLD_FNDECL is the function of which FNDECL is a clone (and which
1553 at the time of invocation still share DECL_ARGUMENTS). ID is the
1554 copy_body_data structure driving the whole body copying process. VARS is a
1555 pointer to the head of the list of new local variables, TREE_MAP is the map
1556 that drives tree substitution in the cloning process. */
1557
1558 ipa_param_body_adjustments
1559 ::ipa_param_body_adjustments (ipa_param_adjustments *adjustments,
1560 tree fndecl, tree old_fndecl,
1561 copy_body_data *id, tree *vars,
1562 vec<ipa_replace_map *, va_gc> *tree_map)
1563 : m_adj_params (adjustments->m_adj_params), m_adjustments (adjustments),
1564 m_reset_debug_decls (), m_split_modifications_p (false), m_dead_stmts (),
1565 m_dead_ssas (), m_dead_ssa_debug_equiv (), m_dead_stmt_debug_equiv (),
1566 m_fndecl (fndecl), m_id (id), m_oparms (), m_new_decls (), m_new_types (),
1567 m_replacements (), m_removed_decls (), m_removed_map (),
1568 m_method2func (false)
1569 {
1570 common_initialization (old_fndecl, vars, tree_map);
1571 }
1572
1573 /* Chain new param decls up and return them. */
1574
1575 tree
1576 ipa_param_body_adjustments::get_new_param_chain ()
1577 {
1578 tree result;
1579 tree *link = &result;
1580
1581 unsigned len = vec_safe_length (m_adj_params);
1582 for (unsigned i = 0; i < len; i++)
1583 {
1584 tree new_decl = m_new_decls[i];
1585 *link = new_decl;
1586 link = &DECL_CHAIN (new_decl);
1587 }
1588 *link = NULL_TREE;
1589 return result;
1590 }
1591
1592 /* Modify the function parameters FNDECL and its type according to the plan in
1593 ADJUSTMENTS. This function needs to be called when the decl has not already
1594 been processed with ipa_param_adjustments::adjust_decl, otherwise just
1595 setting DECL_ARGUMENTS to whatever get_new_param_chain returns is enough. */
1596
1597 void
1598 ipa_param_body_adjustments::modify_formal_parameters ()
1599 {
1600 tree orig_type = TREE_TYPE (m_fndecl);
1601 DECL_ARGUMENTS (m_fndecl) = get_new_param_chain ();
1602
1603 /* When signature changes, we need to clear builtin info. */
1604 if (fndecl_built_in_p (m_fndecl))
1605 set_decl_built_in_function (m_fndecl, NOT_BUILT_IN, 0);
1606
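  /* Check whether any parameters the original type cares about have changed
     position or disappeared; if so, stale type attributes will be dropped by
     build_adjusted_function_type.  */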
1607 bool modified = false;
1608 size_t index = 0;
1609 if (m_adj_params)
1610 for (tree t = TYPE_ARG_TYPES (orig_type);
1611 t && !modified;
1612 t = TREE_CHAIN (t), index++)
1613 if (index >= m_adj_params->length ()
1614 || (*m_adj_params)[index].op != IPA_PARAM_OP_COPY
1615 || (*m_adj_params)[index].base_index != index)
1616 modified = true;
1617
1618 /* At this point, removing return value is only implemented when going
1619 through tree_function_versioning, not when modifying function body
1620 directly. */
1621 gcc_assert (!m_adjustments || !m_adjustments->m_skip_return);
1622 tree new_type = build_adjusted_function_type (orig_type, &m_new_types,
1623 m_method2func, false, modified);
1624
1625 TREE_TYPE (m_fndecl) = new_type;
1626 DECL_VIRTUAL_P (m_fndecl) = 0;
1627 DECL_LANG_SPECIFIC (m_fndecl) = NULL;
1628 if (m_method2func)
1629 DECL_VINDEX (m_fndecl) = NULL_TREE;
1630 }
1631
1632 /* Given BASE and UNIT_OFFSET, find the corresponding record among replacement
1633 structures. */
1634
1635 ipa_param_body_replacement *
1636 ipa_param_body_adjustments::lookup_replacement_1 (tree base,
1637 unsigned unit_offset)
1638 {
1639 unsigned int len = m_replacements.length ();
1640 for (unsigned i = 0; i < len; i++)
1641 {
1642 ipa_param_body_replacement *pbr = &m_replacements[i];
1643
1644 if (pbr->base == base
1645 && (pbr->unit_offset == unit_offset))
1646 return pbr;
1647 }
1648 return NULL;
1649 }
1650
1651 /* Given BASE and UNIT_OFFSET, find the corresponding replacement expression
1652 and return it, assuming it is known it does not hold value by reference or
1653 in reverse storage order. */
1654
1655 tree
1656 ipa_param_body_adjustments::lookup_replacement (tree base, unsigned unit_offset)
1657 {
1658 ipa_param_body_replacement *pbr = lookup_replacement_1 (base, unit_offset);
1659 if (!pbr)
1660 return NULL;
1661 return pbr->repl;
1662 }
1663
1664 /* If T is an SSA_NAME, return NULL if it is not a default def or
1665 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
1666 the base variable is always returned, regardless if it is a default
1667 def. Return T if it is not an SSA_NAME. */
1668
1669 static tree
1670 get_ssa_base_param (tree t, bool ignore_default_def)
1671 {
1672 if (TREE_CODE (t) == SSA_NAME)
1673 {
1674 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
1675 return SSA_NAME_VAR (t);
1676 else
1677 return NULL_TREE;
1678 }
1679 return t;
1680 }
1681
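/* For instance, given a default-definition SSA name such as p_1(D) of a
   PARM_DECL p, the helper above returns p; for a non-default definition like
   p_5 it returns NULL_TREE unless IGNORE_DEFAULT_DEF is set; and anything
   that is not an SSA_NAME at all, e.g. the PARM_DECL itself, is returned
   unchanged.  The names used here are purely illustrative.  */
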
1682 /* Given an expression, return the structure describing how it should be
1683 replaced if it accesses a part of a split parameter or NULL otherwise.
1684
1685 Do not free the result, it will be deallocated when the object is destroyed.
1686
1687 If IGNORE_DEFAULT_DEF is cleared, consider only SSA_NAMEs of PARM_DECLs
1688 which are default definitions; if it is set, consider all SSA_NAMEs of
1689 PARM_DECLs. */
1690
1691 ipa_param_body_replacement *
1692 ipa_param_body_adjustments::get_expr_replacement (tree expr,
1693 bool ignore_default_def)
1694 {
1695 tree base;
1696 unsigned unit_offset;
1697
1698 if (!isra_get_ref_base_and_offset (expr, &base, &unit_offset))
1699 return NULL;
1700
1701 base = get_ssa_base_param (base, ignore_default_def);
1702 if (!base || TREE_CODE (base) != PARM_DECL)
1703 return NULL;
1704 return lookup_replacement_1 (base, unit_offset);
1705 }
1706
1707 /* Given OLD_DECL, which is a PARM_DECL of a parameter that is being removed
1708 (which includes it being split or replaced), return a new variable that
1709 should be used for any SSA names that will remain in the function that
1710 previously belonged to OLD_DECL. */
1711
1712 tree
1713 ipa_param_body_adjustments::get_replacement_ssa_base (tree old_decl)
1714 {
1715 unsigned *idx = m_removed_map.get (old_decl);
1716 if (!idx)
1717 return NULL;
1718
1719 tree repl;
1720 if (TREE_CODE (m_removed_decls[*idx]) == PARM_DECL)
1721 {
1722 gcc_assert (m_removed_decls[*idx] == old_decl);
1723 repl = copy_var_decl (old_decl, DECL_NAME (old_decl),
1724 TREE_TYPE (old_decl));
1725 m_removed_decls[*idx] = repl;
1726 }
1727 else
1728 repl = m_removed_decls[*idx];
1729 return repl;
1730 }
1731
1732 /* If OLD_NAME, which is being defined by statement STMT, is an SSA_NAME of a
1733 parameter which is to be removed because its value is not used, create a new
1734 SSA_NAME relating to a replacement VAR_DECL, replace all uses of the
1735 original with it and return it. If there is no need to re-map, return NULL.
1736 ADJUSTMENTS is a pointer to a vector of IPA-SRA adjustments. */
1737
1738 tree
1739 ipa_param_body_adjustments::replace_removed_params_ssa_names (tree old_name,
1740 gimple *stmt)
1741 {
1742 gcc_assert (!m_id);
1743 if (TREE_CODE (old_name) != SSA_NAME)
1744 return NULL;
1745
1746 tree decl = SSA_NAME_VAR (old_name);
1747 if (decl == NULL_TREE
1748 || TREE_CODE (decl) != PARM_DECL)
1749 return NULL;
1750
1751 tree repl = get_replacement_ssa_base (decl);
1752 if (!repl)
1753 return NULL;
1754
1755 tree new_name = make_ssa_name (repl, stmt);
1756 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_name)
1757 = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (old_name);
1758
1759 if (dump_file && (dump_flags & TDF_DETAILS))
1760 {
1761 fprintf (dump_file, "replacing an SSA name of a removed param ");
1762 print_generic_expr (dump_file, old_name);
1763 fprintf (dump_file, " with ");
1764 print_generic_expr (dump_file, new_name);
1765 fprintf (dump_file, "\n");
1766 }
1767
1768 replace_uses_by (old_name, new_name);
1769 return new_name;
1770 }
1771
1772 /* If the expression *EXPR_P should be replaced, do so. CONVERT specifies
1773 whether the function should care about type incompatibility of the current
1774 and new expressions. If it is false, the function will leave
1775 incompatibility issues to the caller - note that when the function
1776 encounters a BIT_FIELD_REF, IMAGPART_EXPR or REALPART_EXPR, it will modify
1777 their bases instead of the expressions themselves and then also perform any
1778 necessary conversions. */
1779
1780 bool
1781 ipa_param_body_adjustments::modify_expression (tree *expr_p, bool convert)
1782 {
1783 tree expr = *expr_p;
1784
1785 if (TREE_CODE (expr) == BIT_FIELD_REF
1786 || TREE_CODE (expr) == IMAGPART_EXPR
1787 || TREE_CODE (expr) == REALPART_EXPR)
1788 {
1789 expr_p = &TREE_OPERAND (expr, 0);
1790 expr = *expr_p;
1791 convert = true;
1792 }
1793
1794 ipa_param_body_replacement *pbr = get_expr_replacement (expr, false);
1795 if (!pbr)
1796 return false;
1797
1798 tree repl = pbr->repl;
1799 if (dump_file && (dump_flags & TDF_DETAILS))
1800 {
1801 fprintf (dump_file, "About to replace expr ");
1802 print_generic_expr (dump_file, expr);
1803 fprintf (dump_file, " with ");
1804 print_generic_expr (dump_file, repl);
1805 fprintf (dump_file, "\n");
1806 }
1807
1808 if (convert && !useless_type_conversion_p (TREE_TYPE (expr),
1809 TREE_TYPE (repl)))
1810 {
1811 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expr), repl);
1812 *expr_p = vce;
1813 }
1814 else
1815 *expr_p = repl;
1816 return true;
1817 }
1818
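/* A sketch of what the replacement above can look like in practice: an
   aggregate access based on a split parameter, say a load from p_1(D) at a
   known unit offset, may be rewritten to use a synthesized scalar such as
   ISRA.2, and when useless_type_conversion_p does not consider the original
   and replacement types interchangeable, the result is additionally wrapped
   as VIEW_CONVERT_EXPR <original type> (ISRA.2).  All names here are
   hypothetical.  */
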
1819 /* If the assignment statement STMT contains any expressions that need to be
1820 replaced with a different one as noted by ADJUSTMENTS, do so. Handle any
1821 potential type incompatibilities. If any conversion statements have to be
1822 pre-pended to STMT, they will be added to EXTRA_STMTS. Return true iff the
1823 statement was modified. */
1824
1825 bool
1826 ipa_param_body_adjustments::modify_assignment (gimple *stmt,
1827 gimple_seq *extra_stmts)
1828 {
1829 tree *lhs_p, *rhs_p;
1830 bool any;
1831
1832 if (!gimple_assign_single_p (stmt))
1833 return false;
1834
1835 rhs_p = gimple_assign_rhs1_ptr (stmt);
1836 lhs_p = gimple_assign_lhs_ptr (stmt);
1837
1838 any = modify_expression (lhs_p, false);
1839 any |= modify_expression (rhs_p, false);
1840 if (any
1841 && !useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
1842 {
1843 if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
1844 {
1845 /* V_C_Es of constructors can cause trouble (PR 42714). */
1846 if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
1847 *rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
1848 else
1849 *rhs_p = build_constructor (TREE_TYPE (*lhs_p),
1850 NULL);
1851 }
1852 else
1853 {
1854 tree new_rhs = fold_build1_loc (gimple_location (stmt),
1855 VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
1856 *rhs_p);
1857 tree tmp = force_gimple_operand (new_rhs, extra_stmts, true,
1858 NULL_TREE);
1859 gimple_assign_set_rhs1 (stmt, tmp);
1860 }
1861 return true;
1862 }
1863
1864 return any;
1865 }
1866
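/* Note on the special case above, purely descriptive: when the rewritten RHS
   is a CONSTRUCTOR whose type no longer matches the LHS, it is not wrapped in
   a VIEW_CONVERT_EXPR (see PR 42714); instead a zero constant of the LHS type
   is used when that type is a register type, and an empty CONSTRUCTOR of the
   LHS type otherwise.  */
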
1867 /* Record information about what modifications to call arguments have already
1868 been done by clone materialization into a summary describing CS. The
1869 information is stored in NEW_INDEX_MAP, NEW_PT_MAP and NEW_ALWAYS_COPY_DELTA
1870 which correspond to the equivalent fields in ipa_edge_modification_info. Return
1871 the edge summary. */
1872
1873 static ipa_edge_modification_info *
1874 record_argument_state_1 (cgraph_edge *cs, const vec<int> &new_index_map,
1875 const vec<pass_through_split_map> &new_pt_map,
1876 int new_always_copy_delta)
1877
1878 {
1879 ipa_edge_modification_info *sum = ipa_edge_modifications->get_create (cs);
1880
1881 unsigned len = sum->pass_through_map.length ();
1882 for (unsigned i = 0; i < len; i++)
1883 {
1884 unsigned oldnew = sum->pass_through_map[i].new_index;
1885 sum->pass_through_map[i].new_index = new_index_map[oldnew];
1886 }
1887
1888 len = sum->index_map.length ();
1889 if (len > 0)
1890 {
1891 unsigned nptlen = new_pt_map.length ();
1892 for (unsigned j = 0; j < nptlen; j++)
1893 {
1894 int inverse = -1;
1895 for (unsigned i = 0; i < len ; i++)
1896 if ((unsigned) sum->index_map[i] == new_pt_map[j].base_index)
1897 {
1898 inverse = i;
1899 break;
1900 }
1901 gcc_assert (inverse >= 0);
1902 pass_through_split_map ptm_item;
1903
1904 ptm_item.base_index = inverse;
1905 ptm_item.unit_offset = new_pt_map[j].unit_offset;
1906 ptm_item.new_index = new_pt_map[j].new_index;
1907 sum->pass_through_map.safe_push (ptm_item);
1908 }
1909
1910 for (unsigned i = 0; i < len; i++)
1911 {
1912 int idx = sum->index_map[i];
1913 if (idx < 0)
1914 continue;
1915 sum->index_map[i] = new_index_map[idx];
1916 }
1917 }
1918 else
1919 {
1920 sum->pass_through_map.safe_splice (new_pt_map);
1921 sum->index_map.safe_splice (new_index_map);
1922 }
1923 sum->always_copy_delta += new_always_copy_delta;
1924 return sum;
1925 }
1926
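/* A made-up example of the composition performed above: if an earlier
   materialization left SUM->index_map as {0, -1, 2} (original argument 1
   already dropped) and the current materialization passes NEW_INDEX_MAP
   {1, 0, -1} describing how those previous positions moved again, the stored
   map becomes {1, -1, -1}: original argument 0 now sits at position 1,
   argument 1 stays removed and argument 2 has been dropped this time.  */
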
1927 /* Record information about what modifications to call arguments have already
1928 been done by clone materialization into a summary of an edge describing the
1929 call in this clone and all its clones. NEW_INDEX_MAP, NEW_PT_MAP and
1930 NEW_ALWAYS_COPY_DELTA have the same meaning as in record_argument_state_1.
1931
1932 In order to associate the info with the right edge summaries, we need the
1933 address of the ORIG_STMT in the function from which we are cloning (because
1934 the edges have not yet been re-assigned to the new statement that has just
1935 been created) and ID, the structure governing function body copying. */
1936
1937 static void
1938 record_argument_state (copy_body_data *id, gimple *orig_stmt,
1939 const vec<int> &new_index_map,
1940 const vec<pass_through_split_map> &new_pt_map,
1941 int new_always_copy_delta)
1942 {
1943 if (!ipa_edge_modifications)
1944 ipa_edge_modifications = new ipa_edge_modification_sum (symtab);
1945
1946 struct cgraph_node *this_node = id->dst_node;
1947 ipa_edge_modification_info *first_sum = NULL;
1948 cgraph_edge *cs = this_node->get_edge (orig_stmt);
1949 if (cs)
1950 first_sum = record_argument_state_1 (cs, new_index_map, new_pt_map,
1951 new_always_copy_delta);
1952 else
1953 gcc_assert (this_node->clones);
1954
1955 if (!this_node->clones)
1956 return;
1957 for (cgraph_node *subclone = this_node->clones; subclone != this_node;)
1958 {
1959 cs = subclone->get_edge (orig_stmt);
1960 if (cs)
1961 {
1962 if (!first_sum)
1963 first_sum = record_argument_state_1 (cs, new_index_map, new_pt_map,
1964 new_always_copy_delta);
1965 else
1966 {
1967 ipa_edge_modification_info *s2
1968 = ipa_edge_modifications->get_create (cs);
1969 s2->index_map.truncate (0);
1970 s2->index_map.safe_splice (first_sum->index_map);
1971 s2->pass_through_map.truncate (0);
1972 s2->pass_through_map.safe_splice (first_sum->pass_through_map);
1973 s2->always_copy_delta = first_sum->always_copy_delta;
1974 }
1975 }
1976 else
1977 gcc_assert (subclone->clones);
1978
1979 if (subclone->clones)
1980 subclone = subclone->clones;
1981 else if (subclone->next_sibling_clone)
1982 subclone = subclone->next_sibling_clone;
1983 else
1984 {
1985 while (subclone != this_node && !subclone->next_sibling_clone)
1986 subclone = subclone->clone_of;
1987 if (subclone != this_node)
1988 subclone = subclone->next_sibling_clone;
1989 }
1990 }
1991 }
1992
1993 /* If the call statement pointed at by STMT_P contains any expressions that
1994 need to be replaced with a different one as noted by ADJUSTMENTS, do so. If the
1995 statement needs to be rebuilt, do so. Return true if any modifications have
1996 been performed. ORIG_STMT, if not NULL, is the original statement in the
1997 function that is being cloned from, which at this point can be used to look
1998 up call_graph edges.
1999
2000 If the method is invoked as a part of IPA clone materialization and if any
2001 parameter split is pass-through, i.e. it applies to the function that is
2002 being modified and also to the callee of the statement, replace the
2003 parameter passed to the old callee with all of the replacements a callee might
2004 possibly want and record the performed argument modifications in
2005 ipa_edge_modifications. Likewise if any argument has already been left out
2006 because it is not necessary. */
2007
2008 bool
2009 ipa_param_body_adjustments::modify_call_stmt (gcall **stmt_p,
2010 gimple *orig_stmt)
2011 {
2012 auto_vec <unsigned, 4> pass_through_args;
2013 auto_vec <unsigned, 4> pass_through_pbr_indices;
2014 auto_vec <HOST_WIDE_INT, 4> pass_through_offsets;
2015 gcall *stmt = *stmt_p;
2016 unsigned nargs = gimple_call_num_args (stmt);
2017 bool recreate = false;
2018
2019 for (unsigned i = 0; i < gimple_call_num_args (stmt); i++)
2020 {
2021 tree t = gimple_call_arg (stmt, i);
2022 gcc_assert (TREE_CODE (t) != BIT_FIELD_REF
2023 && TREE_CODE (t) != IMAGPART_EXPR
2024 && TREE_CODE (t) != REALPART_EXPR);
2025
2026 if (TREE_CODE (t) == SSA_NAME
2027 && m_dead_ssas.contains (t))
2028 recreate = true;
2029
2030 if (!m_split_modifications_p)
2031 continue;
2032
2033 tree base;
2034 unsigned agg_arg_offset;
2035 if (!isra_get_ref_base_and_offset (t, &base, &agg_arg_offset))
2036 continue;
2037
2038 bool by_ref = false;
2039 if (TREE_CODE (base) == SSA_NAME)
2040 {
2041 if (!SSA_NAME_IS_DEFAULT_DEF (base))
2042 continue;
2043 base = SSA_NAME_VAR (base);
2044 gcc_checking_assert (base);
2045 by_ref = true;
2046 }
2047 if (TREE_CODE (base) != PARM_DECL)
2048 continue;
2049
2050 bool base_among_replacements = false;
2051 unsigned j, repl_list_len = m_replacements.length ();
2052 for (j = 0; j < repl_list_len; j++)
2053 {
2054 ipa_param_body_replacement *pbr = &m_replacements[j];
2055 if (pbr->base == base)
2056 {
2057 base_among_replacements = true;
2058 break;
2059 }
2060 }
2061 if (!base_among_replacements)
2062 continue;
2063
2064 /* We still have to distinguish between an end-use that we have to
2065 transform now and a pass-through, which happens in the following
2066 two cases. */
2067
2068 /* TODO: After we adjust ptr_parm_has_nonarg_uses to also consider
2069 &MEM_REF[ssa_name + offset], we will also have to detect that case
2070 here. */
2071
2072 if (TREE_CODE (t) == SSA_NAME
2073 && SSA_NAME_IS_DEFAULT_DEF (t)
2074 && SSA_NAME_VAR (t)
2075 && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL)
2076 {
2077 /* This must be a by_reference pass-through. */
2078 recreate = true;
2079 gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
2080 pass_through_args.safe_push (i);
2081 pass_through_pbr_indices.safe_push (j);
2082 pass_through_offsets.safe_push (agg_arg_offset);
2083 }
2084 else if (!by_ref && AGGREGATE_TYPE_P (TREE_TYPE (t)))
2085 {
2086 /* Currently IPA-SRA guarantees the aggregate access type
2087 exactly matches in this case. So if it does not match, it is
2088 a pass-through argument that will be sorted out at edge
2089 redirection time. */
2090 ipa_param_body_replacement *pbr
2091 = lookup_replacement_1 (base, agg_arg_offset);
2092
2093 if (!pbr
2094 || (TYPE_MAIN_VARIANT (TREE_TYPE (t))
2095 != TYPE_MAIN_VARIANT (TREE_TYPE (pbr->repl))))
2096 {
2097 recreate = true;
2098 pass_through_args.safe_push (i);
2099 pass_through_pbr_indices.safe_push (j);
2100 pass_through_offsets.safe_push (agg_arg_offset);
2101 }
2102 }
2103 }
2104
2105 if (!recreate)
2106 {
2107 /* No need to rebuild the statement, let's just modify arguments
2108 and the LHS if/as appropriate. */
2109 bool modified = false;
2110 for (unsigned i = 0; i < nargs; i++)
2111 {
2112 tree *t = gimple_call_arg_ptr (stmt, i);
2113 modified |= modify_expression (t, true);
2114 }
2115 if (gimple_call_lhs (stmt))
2116 {
2117 tree *t = gimple_call_lhs_ptr (stmt);
2118 modified |= modify_expression (t, false);
2119 }
2120 return modified;
2121 }
2122
2123 auto_vec<int, 16> index_map;
2124 auto_vec<pass_through_split_map, 4> pass_through_map;
2125 auto_vec<tree, 16> vargs;
2126 int always_copy_delta = 0;
2127 unsigned pt_idx = 0;
2128 int new_arg_idx = 0;
2129 for (unsigned i = 0; i < nargs; i++)
2130 {
2131 if (pt_idx < pass_through_args.length ()
2132 && i == pass_through_args[pt_idx])
2133 {
2134 unsigned j = pass_through_pbr_indices[pt_idx];
2135 unsigned agg_arg_offset = pass_through_offsets[pt_idx];
2136 pt_idx++;
2137 always_copy_delta--;
2138 tree base = m_replacements[j].base;
2139
2140 /* In order to be put into SSA form, we have to push all replacements
2141 pertaining to this parameter as parameters to the call statement.
2142 Edge redirection will need to use edge summary to weed out the
2143 unnecessary ones. */
2144 unsigned repl_list_len = m_replacements.length ();
2145 for (; j < repl_list_len; j++)
2146 {
2147 if (m_replacements[j].base != base)
2148 break;
2149 if (m_replacements[j].unit_offset < agg_arg_offset)
2150 continue;
2151 pass_through_split_map pt_map;
2152 pt_map.base_index = i;
2153 pt_map.unit_offset
2154 = m_replacements[j].unit_offset - agg_arg_offset;
2155 pt_map.new_index = new_arg_idx;
2156 pass_through_map.safe_push (pt_map);
2157 vargs.safe_push (m_replacements[j].repl);
2158 new_arg_idx++;
2159 always_copy_delta++;
2160 }
2161 index_map.safe_push (-1);
2162 }
2163 else
2164 {
2165 tree t = gimple_call_arg (stmt, i);
2166 if (TREE_CODE (t) == SSA_NAME
2167 && m_dead_ssas.contains (t))
2168 {
2169 always_copy_delta--;
2170 index_map.safe_push (-1);
2171 }
2172 else
2173 {
2174 modify_expression (&t, true);
2175 vargs.safe_push (t);
2176 index_map.safe_push (new_arg_idx);
2177 new_arg_idx++;
2178 }
2179 }
2180 }
2181
2182 gcall *new_stmt = gimple_build_call_vec (gimple_call_fn (stmt), vargs);
2183 if (gimple_has_location (stmt))
2184 gimple_set_location (new_stmt, gimple_location (stmt));
2185 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
2186 gimple_call_copy_flags (new_stmt, stmt);
2187 if (tree lhs = gimple_call_lhs (stmt))
2188 {
2189 modify_expression (&lhs, false);
2190 /* Avoid adjusting SSA_NAME_DEF_STMT of an SSA lhs; SSA names
2191 have not yet been remapped. */
2192 *gimple_call_lhs_ptr (new_stmt) = lhs;
2193 }
2194 *stmt_p = new_stmt;
2195
2196 if (orig_stmt)
2197 record_argument_state (m_id, orig_stmt, index_map, pass_through_map,
2198 always_copy_delta);
2199 return true;
2200 }
2201
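/* A sketch of the pass-through handling above, with entirely hypothetical
   names: for a call bar (p_1(D)) in a function whose parameter P has been
   split into replacements at unit offsets 0 and 8, the statement is rebuilt
   as bar (ISRA.3_5, ISRA.4_6) and the edge summary records index_map {-1}
   together with two pass_through_split_map entries
   { base_index 0, unit_offset 0, new_index 0 } and
   { base_index 0, unit_offset 8, new_index 1 }, so that later edge
   redirection can weed out whichever replacement the particular callee does
   not actually need.  */
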
2202 /* If the statement STMT contains any expressions that need to be replaced with
2203 a different one as noted by ADJUSTMENTS, do so. Handle any potential type
2204 incompatibilities. If any conversion statements have to be pre-pended to
2205 STMT, they will be added to EXTRA_STMTS. Return true iff the statement was
2206 modified. */
2207
2208 bool
2209 ipa_param_body_adjustments::modify_gimple_stmt (gimple **stmt,
2210 gimple_seq *extra_stmts,
2211 gimple *orig_stmt)
2212 {
2213 bool modified = false;
2214 tree *t;
2215
2216 switch (gimple_code (*stmt))
2217 {
2218 case GIMPLE_RETURN:
2219 t = gimple_return_retval_ptr (as_a <greturn *> (*stmt));
2220 if (m_adjustments && m_adjustments->m_skip_return)
2221 *t = NULL_TREE;
2222 else if (*t != NULL_TREE)
2223 modified |= modify_expression (t, true);
2224 break;
2225
2226 case GIMPLE_ASSIGN:
2227 modified |= modify_assignment (*stmt, extra_stmts);
2228 break;
2229
2230 case GIMPLE_CALL:
2231 modified |= modify_call_stmt ((gcall **) stmt, orig_stmt);
2232 break;
2233
2234 case GIMPLE_ASM:
2235 {
2236 gasm *asm_stmt = as_a <gasm *> (*stmt);
2237 for (unsigned i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
2238 {
2239 t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
2240 modified |= modify_expression (t, true);
2241 }
2242 for (unsigned i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
2243 {
2244 t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
2245 modified |= modify_expression (t, false);
2246 }
2247 }
2248 break;
2249
2250 default:
2251 break;
2252 }
2253 return modified;
2254 }
2255
2256
2257 /* Traverse body of the current function and perform the requested adjustments
2258 on its statements. Return true iff the CFG has been changed. */
2259
2260 bool
2261 ipa_param_body_adjustments::modify_cfun_body ()
2262 {
2263 bool cfg_changed = false;
2264 basic_block bb;
2265
2266 FOR_EACH_BB_FN (bb, cfun)
2267 {
2268 gimple_stmt_iterator gsi;
2269
2270 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2271 {
2272 gphi *phi = as_a <gphi *> (gsi_stmt (gsi));
2273 tree new_lhs, old_lhs = gimple_phi_result (phi);
2274 new_lhs = replace_removed_params_ssa_names (old_lhs, phi);
2275 if (new_lhs)
2276 {
2277 gimple_phi_set_result (phi, new_lhs);
2278 release_ssa_name (old_lhs);
2279 }
2280 }
2281
2282 gsi = gsi_start_bb (bb);
2283 while (!gsi_end_p (gsi))
2284 {
2285 gimple *stmt = gsi_stmt (gsi);
2286 gimple *stmt_copy = stmt;
2287 gimple_seq extra_stmts = NULL;
2288 bool modified = modify_gimple_stmt (&stmt, &extra_stmts, NULL);
2289 if (stmt != stmt_copy)
2290 {
2291 gcc_checking_assert (modified);
2292 gsi_replace (&gsi, stmt, false);
2293 }
2294 if (!gimple_seq_empty_p (extra_stmts))
2295 gsi_insert_seq_before (&gsi, extra_stmts, GSI_SAME_STMT);
2296
2297 def_operand_p defp;
2298 ssa_op_iter iter;
2299 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
2300 {
2301 tree old_def = DEF_FROM_PTR (defp);
2302 if (tree new_def = replace_removed_params_ssa_names (old_def,
2303 stmt))
2304 {
2305 SET_DEF (defp, new_def);
2306 release_ssa_name (old_def);
2307 modified = true;
2308 }
2309 }
2310
2311 if (modified)
2312 {
2313 update_stmt (stmt);
2314 if (maybe_clean_eh_stmt (stmt)
2315 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
2316 cfg_changed = true;
2317 }
2318 gsi_next (&gsi);
2319 }
2320 }
2321
2322 return cfg_changed;
2323 }
2324
2325 /* Call gimple_debug_bind_reset_value on all debug statements describing
2326 gimple register parameters that are being removed or replaced. */
2327
2328 void
2329 ipa_param_body_adjustments::reset_debug_stmts ()
2330 {
2331 int i, len;
2332 gimple_stmt_iterator *gsip = NULL, gsi;
2333
2334 if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
2335 {
2336 gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
2337 gsip = &gsi;
2338 }
2339 len = m_reset_debug_decls.length ();
2340 for (i = 0; i < len; i++)
2341 {
2342 imm_use_iterator ui;
2343 gimple *stmt;
2344 gdebug *def_temp;
2345 tree name, vexpr, copy = NULL_TREE;
2346 use_operand_p use_p;
2347 tree decl = m_reset_debug_decls[i];
2348
2349 gcc_checking_assert (is_gimple_reg (decl));
2350 name = ssa_default_def (cfun, decl);
2351 vexpr = NULL;
2352 if (name)
2353 FOR_EACH_IMM_USE_STMT (stmt, ui, name)
2354 {
2355 if (gimple_clobber_p (stmt))
2356 {
2357 gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
2358 unlink_stmt_vdef (stmt);
2359 gsi_remove (&cgsi, true);
2360 release_defs (stmt);
2361 continue;
2362 }
2363 /* All other users must have been removed by function body
2364 modification. */
2365 gcc_assert (is_gimple_debug (stmt));
2366 if (vexpr == NULL && gsip != NULL)
2367 {
2368 vexpr = build_debug_expr_decl (TREE_TYPE (name));
2369 /* FIXME: Is setting the mode really necessary? */
2370 SET_DECL_MODE (vexpr, DECL_MODE (decl));
2371 def_temp = gimple_build_debug_source_bind (vexpr, decl, NULL);
2372 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
2373 }
2374 if (vexpr)
2375 {
2376 FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
2377 SET_USE (use_p, vexpr);
2378 }
2379 else
2380 gimple_debug_bind_reset_value (stmt);
2381 update_stmt (stmt);
2382 }
2383 /* Create a VAR_DECL for debug info purposes. */
2384 if (!DECL_IGNORED_P (decl))
2385 {
2386 copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2387 VAR_DECL, DECL_NAME (decl),
2388 TREE_TYPE (decl));
2389 if (DECL_PT_UID_SET_P (decl))
2390 SET_DECL_PT_UID (copy, DECL_PT_UID (decl));
2391 TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
2392 TREE_READONLY (copy) = TREE_READONLY (decl);
2393 TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
2394 DECL_NOT_GIMPLE_REG_P (copy) = DECL_NOT_GIMPLE_REG_P (decl);
2395 DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (decl);
2396 DECL_IGNORED_P (copy) = DECL_IGNORED_P (decl);
2397 DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (decl);
2398 DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
2399 SET_DECL_RTL (copy, 0);
2400 TREE_USED (copy) = 1;
2401 DECL_CONTEXT (copy) = current_function_decl;
2402 add_local_decl (cfun, copy);
2403 DECL_CHAIN (copy)
2404 = BLOCK_VARS (DECL_INITIAL (current_function_decl));
2405 BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
2406 }
2407 if (gsip != NULL && copy && target_for_debug_bind (decl))
2408 {
2409 gcc_assert (TREE_CODE (decl) == PARM_DECL);
2410 if (vexpr)
2411 def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
2412 else
2413 def_temp = gimple_build_debug_source_bind (copy, decl,
2414 NULL);
2415 gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
2416 }
2417 }
2418 }
2419
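/* Illustratively, for a removed gimple-register parameter P whose debug uses
   survive, the code above may end up emitting statements along the lines of

     # DEBUG D#7 s=> p
     # DEBUG p => D#7

   i.e. a debug expression source-bound to the original parameter and a
   user-visible variable bound to that expression, so the debug info can still
   describe P even though nothing computes it any more.  The dump syntax shown
   is only approximate.  */
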
2420 /* Perform all necessary changes to the signature, body and debug info of the
2421 current function according to the adjustments passed at construction. Return
2422 true if the CFG was changed in any way. This is the main entry point for
2423 modifying standalone functions, i.e. outside of IPA clone materialization. */
2424
2425 bool
2426 ipa_param_body_adjustments::perform_cfun_body_modifications ()
2427 {
2428 bool cfg_changed;
2429 modify_formal_parameters ();
2430 cfg_changed = modify_cfun_body ();
2431 reset_debug_stmts ();
2432
2433 return cfg_changed;
2434 }
2435
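/* A minimal usage sketch, hypothetical and heavily simplified, of driving the
   in-place modification path above from an IPA pass.  The two-argument
   constructor used is one of the overloads declared in
   ipa-param-manipulation.h; pass bookkeeping and error handling are omitted:

     push_cfun (DECL_STRUCT_FUNCTION (fndecl));
     ipa_param_body_adjustments body_adj (adjustments, fndecl);
     bool cfg_changed = body_adj.perform_cfun_body_modifications ();
     pop_cfun ();
*/
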
2436
2437 /* Deallocate summaries which otherwise stay alive until the end of
2438 compilation. */
2439
2440 void
2441 ipa_edge_modifications_finalize ()
2442 {
2443 if (!ipa_edge_modifications)
2444 return;
2445 delete ipa_edge_modifications;
2446 ipa_edge_modifications = NULL;
2447 }
2448
2449 /* Helper used to sort a vector of SSA_NAMES. */
2450
2451 static int
2452 compare_ssa_versions (const void *va, const void *vb)
2453 {
2454 const_tree const a = *(const_tree const*)va;
2455 const_tree const b = *(const_tree const*)vb;
2456
2457 if (SSA_NAME_VERSION (a) < SSA_NAME_VERSION (b))
2458 return -1;
2459 if (SSA_NAME_VERSION (a) > SSA_NAME_VERSION (b))
2460 return 1;
2461 return 0;
2462 }
2463
2464 /* Call release_ssa_name on all elements in KILLED_SSAS in a defined order. */
2465
2466 void
2467 ipa_release_ssas_in_hash (hash_set <tree> *killed_ssas)
2468 {
2469 auto_vec<tree, 16> ssas_to_release;
2470 for (tree sn : *killed_ssas)
2471 ssas_to_release.safe_push (sn);
2472 ssas_to_release.qsort (compare_ssa_versions);
2473 for (tree sn : ssas_to_release)
2474 release_ssa_name (sn);
2475 }
2476