xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-sra.c (revision b7b7574d3bf8eeb51a1fa3977b59142ec6434a55)
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2    references into scalar references, exposing them to the scalar
3    optimizers.
4    Copyright (C) 2008, 2009, 2010 Free Software Foundation, Inc.
5    Contributed by Martin Jambor <mjambor@suse.cz>
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 /* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
24    twice, once in the early stages of compilation (early SRA) and once in the
25    late stages (late SRA).  The aim of both is to turn references to scalar
26    parts of aggregates into uses of independent scalar variables.
27 
28    The two passes are nearly identical; the only difference is that early SRA
29    does not scalarize unions which are used as the result in a GIMPLE_RETURN
30    statement because together with inlining this can lead to weird type
31    conversions.
32 
33    Both passes operate in four stages:
34 
35    1. The declarations that have properties which make them candidates for
36       scalarization are identified in function find_var_candidates().  The
37       candidates are stored in candidate_bitmap.
38 
39    2. The function body is scanned.  In the process, declarations which are
40       used in a manner that prevents their scalarization are removed from the
41       candidate bitmap.  More importantly, for every access into an aggregate,
42       an access structure (struct access) is created by create_access() and
43       stored in a vector associated with the aggregate.  Among other
44       information, the aggregate declaration, the offset and size of the access
45       and its type are stored in the structure.
46 
47       On a related note, assign_link structures are created for every assign
48       statement between candidate aggregates and attached to the related
49       accesses.
50 
51    3. The vectors of accesses are analyzed.  They are first sorted according to
52       their offset and size and then scanned for partially overlapping accesses
53       (i.e. those which overlap but one is not entirely within another).  Such
54       an access disqualifies the whole aggregate from being scalarized.
55 
56       If there is no such inhibiting overlap, a representative access structure
57       is chosen for every unique combination of offset and size.  Afterwards,
58       the pass builds a set of trees from these structures, in which children
59       of an access are within their parent (in terms of offset and size).
60 
61       Then accesses are propagated whenever possible (i.e. in cases when it
62       does not create a partially overlapping access) across assign_links from
63       the right hand side to the left hand side.
64 
65       Then the set of trees for each declaration is traversed again and those
66       accesses which should be replaced by a scalar are identified.
67 
68    4. The function is traversed again, and for every reference into an
69       aggregate that has some component which is about to be scalarized,
70       statements are amended and new statements are created as necessary.
71       Finally, if a parameter got scalarized, the scalar replacements are
72       initialized with values from respective parameter aggregates.  */
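
/* As an illustrative sketch (hypothetical user code, not taken from the GCC
   sources or testsuite), consider:

     struct S { int i; float f; };

     int
     foo (struct S s)
     {
       struct S t;
       t = s;
       return t.i;
     }

   Intraprocedural SRA can represent the accesses to T by two independent
   scalar replacements, conceptually:

     int t$i;
     float t$f;

     t$i = s.i;
     t$f = s.f;
     return t$i;

   after which T itself no longer needs to be kept in memory and the aggregate
   assignment can be removed.  */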
73 
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "alloc-pool.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "expr.h"
81 #include "gimple.h"
82 #include "cgraph.h"
83 #include "tree-flow.h"
84 #include "ipa-prop.h"
85 #include "diagnostic.h"
86 #include "statistics.h"
87 #include "tree-dump.h"
88 #include "timevar.h"
89 #include "params.h"
90 #include "target.h"
91 #include "flags.h"
92 #include "tree-inline.h"
93 
94 /* Enumeration of all aggregate reductions we can do.  */
95 enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
96 		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
97 		SRA_MODE_INTRA };     /* late intraprocedural SRA */
98 
99 /* Global variable describing which aggregate reduction we are performing at
100    the moment.  */
101 static enum sra_mode sra_mode;
102 
103 struct assign_link;
104 
105 /* ACCESS represents each access to an aggregate variable (as a whole or a
106    part).  It can also represent a group of accesses that refer to exactly the
107    same fragment of an aggregate (i.e. those that have exactly the same offset
108    and size).  Such representatives for a single aggregate, once determined,
109    are linked in a linked list and have the group fields set.
110 
111    Moreover, when doing intraprocedural SRA, a tree is built from those
112    representatives (by the means of first_child and next_sibling pointers), in
113    which all items in a subtree are "within" the root, i.e. their offset is
114    greater or equal to offset of the root and offset+size is smaller or equal
115    to offset+size of the root.  Children of an access are sorted by offset.
116 
117    Note that accesses to parts of vector and complex number types are always
118    represented by an access to the whole complex number or vector.  It is the
119    duty of the modifying functions to replace them appropriately.  */
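
/* For example (an editor's sketch, assuming 32-bit int and no padding), for

     struct S { struct { int a; int b; } in; int c; } s;

   accessed as s.in, s.in.a, s.in.b and s.c, the group representatives could
   form the following access trees:

     s.in   (offset 0,  size 64)          s.c (offset 64, size 32)
       s.in.a (offset 0,  size 32)
       s.in.b (offset 32, size 32)

   The roots are chained through next_grp, children hang off their parent via
   first_child and are linked to each other by next_sibling.  */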
120 
121 struct access
122 {
123   /* Values returned by `get_ref_base_and_extent' for each component reference.
124      If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
125      `SIZE = TYPE_SIZE (TREE_TYPE (expr))'.  */
126   HOST_WIDE_INT offset;
127   HOST_WIDE_INT size;
128   tree base;
129 
130   /* Expression.  It is context dependent so do not use it to create new
131      expressions to access the original aggregate.  See PR 42154 for a
132      testcase.  */
133   tree expr;
134   /* Type.  */
135   tree type;
136 
137   /* The statement this access belongs to.  */
138   gimple stmt;
139 
140   /* Next group representative for this aggregate. */
141   struct access *next_grp;
142 
143   /* Pointer to the group representative.  Pointer to itself if the struct is
144      the representative.  */
145   struct access *group_representative;
146 
147   /* If this access has any children (in terms of the definition above), this
148      points to the first one.  */
149   struct access *first_child;
150 
151   /* In intraprocedural SRA, pointer to the next sibling in the access tree as
152      described above.  In IPA-SRA this is a pointer to the next access
153      belonging to the same group (having the same representative).  */
154   struct access *next_sibling;
155 
156   /* Pointers to the first and last element in the linked list of assign
157      links.  */
158   struct assign_link *first_link, *last_link;
159 
160   /* Pointer to the next access in the work queue.  */
161   struct access *next_queued;
162 
163   /* Replacement variable for this access "region."  Never to be accessed
164      directly, always only by the means of get_access_replacement() and only
165      when grp_to_be_replaced flag is set.  */
166   tree replacement_decl;
167 
168   /* Is this particular access a write access? */
169   unsigned write : 1;
170 
171   /* Is this access an artificial one created to scalarize some record
172      entirely? */
173   unsigned total_scalarization : 1;
174 
175   /* Is this access currently in the work queue?  */
176   unsigned grp_queued : 1;
177 
178   /* Does this group contain a write access?  This flag is propagated down the
179      access tree.  */
180   unsigned grp_write : 1;
181 
182   /* Does this group contain a read access?  This flag is propagated down the
183      access tree.  */
184   unsigned grp_read : 1;
185 
186   /* Does this group contain a read access that comes from an assignment
187      statement?  This flag is propagated down the access tree.  */
188   unsigned grp_assignment_read : 1;
189 
190   /* Other passes of the analysis use this bit to make function
191      analyze_access_subtree create scalar replacements for this group if
192      possible.  */
193   unsigned grp_hint : 1;
194 
195   /* Is the subtree rooted in this access fully covered by scalar
196      replacements?  */
197   unsigned grp_covered : 1;
198 
199   /* If set to true, this access and all below it in an access tree must not be
200      scalarized.  */
201   unsigned grp_unscalarizable_region : 1;
202 
203   /* Whether data have been written to parts of the aggregate covered by this
204      access which are not to be scalarized.  This flag is propagated up in the
205      access tree.  */
206   unsigned grp_unscalarized_data : 1;
207 
208   /* Does this access and/or group contain a write access through a
209      BIT_FIELD_REF?  */
210   unsigned grp_partial_lhs : 1;
211 
212   /* Set when a scalar replacement should be created for this variable.  We
213      make the decision and create the replacement at different places because
214      create_tmp_var cannot be called from within FOR_EACH_REFERENCED_VAR.  */
215   unsigned grp_to_be_replaced : 1;
216 
217   /* Is it possible that the group refers to data which might be (directly or
218      otherwise) modified?  */
219   unsigned grp_maybe_modified : 1;
220 
221   /* Set when this is a representative of a pointer to scalar (i.e. by
222      reference) parameter which we consider for turning into a plain scalar
223      (i.e. a by value parameter).  */
224   unsigned grp_scalar_ptr : 1;
225 
226   /* Set when we discover that this pointer is not safe to dereference in the
227      caller.  */
228   unsigned grp_not_necessarilly_dereferenced : 1;
229 };
230 
231 typedef struct access *access_p;
232 
233 DEF_VEC_P (access_p);
234 DEF_VEC_ALLOC_P (access_p, heap);
235 
236 /* Alloc pool for allocating access structures.  */
237 static alloc_pool access_pool;
238 
239 /* A structure linking lhs and rhs accesses from an aggregate assignment.  They
240    are used to propagate subaccesses from rhs to lhs as long as they don't
241    conflict with what is already there.  */
242 struct assign_link
243 {
244   struct access *lacc, *racc;
245   struct assign_link *next;
246 };
247 
248 /* Alloc pool for allocating assign link structures.  */
249 static alloc_pool link_pool;
250 
251 /* Base (tree) -> Vector (VEC(access_p,heap) *) map.  */
252 static struct pointer_map_t *base_access_vec;
253 
254 /* Bitmap of candidates.  */
255 static bitmap candidate_bitmap;
256 
257 /* Bitmap of candidates which we should try to entirely scalarize away and
258    those which cannot be (because they are, and need to be, used as a whole).  */
259 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
260 
261 /* Obstack for creation of fancy names.  */
262 static struct obstack name_obstack;
263 
264 /* Head of a linked list of accesses that need to have their subaccesses
265    propagated to their assignment counterparts.  */
266 static struct access *work_queue_head;
267 
268 /* Number of parameters of the analyzed function when doing early ipa SRA.  */
269 static int func_param_count;
270 
271 /* scan_function sets the following to true if it encounters a call to
272    __builtin_apply_args.  */
273 static bool encountered_apply_args;
274 
275 /* Set by scan_function when it finds a recursive call with fewer actual
276    arguments than formal parameters.  */
277 static bool encountered_unchangable_recursive_call;
278 
279 /* Set by scan_function when it changes the control flow graph.  */
280 static bool cfg_changed;
281 
282 /* This is a table in which, for each basic block and parameter, there is a
283    distance (offset + size) in that parameter which is dereferenced and
284    accessed in that BB.  */
285 static HOST_WIDE_INT *bb_dereferences;
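
/* For instance (a sketch): the entry for parameter number I in basic block BB
   lives at bb_dereferences[BB->index * func_param_count + I] and holds the
   largest offset + size that has been dereferenced through that parameter in
   BB.  */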
286 /* Bitmap of BBs that can cause the function to "stop" progressing by
287    returning, throwing externally, looping infinitely or calling a function
288    which might abort, etc.  */
289 static bitmap final_bbs;
290 
291 /* Representative of no accesses at all. */
292 static struct access  no_accesses_representant;
293 
294 /* Predicate to test the special value.  */
295 
296 static inline bool
297 no_accesses_p (struct access *access)
298 {
299   return access == &no_accesses_representant;
300 }
301 
302 /* Counters of the various transformations performed by this pass.  They are
303    gathered while the pass runs and are used for the statistics dumped at the
304    end.  */
305 
306 static struct
307 {
308   /* Number of processed aggregates is readily available in
309      analyze_all_variable_accesses and so is not stored here.  */
310 
311   /* Number of created scalar replacements.  */
312   int replacements;
313 
314   /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
315      expression.  */
316   int exprs;
317 
318   /* Number of statements created by generate_subtree_copies.  */
319   int subtree_copies;
320 
321   /* Number of statements created by load_assign_lhs_subreplacements.  */
322   int subreplacements;
323 
324   /* Number of times sra_modify_assign has deleted a statement.  */
325   int deleted;
326 
327   /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
328      RHS separately due to type conversions or nonexistent matching
329      references.  */
330   int separate_lhs_rhs_handling;
331 
332   /* Number of parameters that were removed because they were unused.  */
333   int deleted_unused_parameters;
334 
335   /* Number of scalars passed as parameters by reference that have been
336      converted to be passed by value.  */
337   int scalar_by_ref_to_by_val;
338 
339   /* Number of aggregate parameters that were replaced by one or more of their
340      components.  */
341   int aggregate_params_reduced;
342 
343   /* Number of components created when splitting aggregate parameters.  */
344   int param_reductions_created;
345 } sra_stats;
346 
/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

347 static void
348 dump_access (FILE *f, struct access *access, bool grp)
349 {
350   fprintf (f, "access { ");
351   fprintf (f, "base = (%d)'", DECL_UID (access->base));
352   print_generic_expr (f, access->base, 0);
353   fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
354   fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
355   fprintf (f, ", expr = ");
356   print_generic_expr (f, access->expr, 0);
357   fprintf (f, ", type = ");
358   print_generic_expr (f, access->type, 0);
359   if (grp)
360     fprintf (f, ", grp_write = %d, total_scalarization = %d, "
361 	     "grp_read = %d, grp_hint = %d, grp_assignment_read = %d, "
362 	     "grp_covered = %d, grp_unscalarizable_region = %d, "
363 	     "grp_unscalarized_data = %d, grp_partial_lhs = %d, "
364 	     "grp_to_be_replaced = %d, grp_maybe_modified = %d, "
365 	     "grp_not_necessarilly_dereferenced = %d\n",
366 	     access->grp_write, access->total_scalarization,
367 	     access->grp_read, access->grp_hint, access->grp_assignment_read,
368 	     access->grp_covered, access->grp_unscalarizable_region,
369 	     access->grp_unscalarized_data, access->grp_partial_lhs,
370 	     access->grp_to_be_replaced, access->grp_maybe_modified,
371 	     access->grp_not_necessarilly_dereferenced);
372   else
373     fprintf (f, ", write = %d, total_scalarization = %d, "
374 	     "grp_partial_lhs = %d\n",
375 	     access->write, access->total_scalarization,
376 	     access->grp_partial_lhs);
377 }
378 
379 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */
380 
381 static void
382 dump_access_tree_1 (FILE *f, struct access *access, int level)
383 {
384   do
385     {
386       int i;
387 
388       for (i = 0; i < level; i++)
389 	fputs ("* ", f);
390 
391       dump_access (f, access, true);
392 
393       if (access->first_child)
394 	dump_access_tree_1 (f, access->first_child, level + 1);
395 
396       access = access->next_sibling;
397     }
398   while (access);
399 }
400 
401 /* Dump all access trees for a variable, given the pointer to the first root in
402    ACCESS.  */
403 
404 static void
405 dump_access_tree (FILE *f, struct access *access)
406 {
407   for (; access; access = access->next_grp)
408     dump_access_tree_1 (f, access, 0);
409 }
410 
411 /* Return true iff ACC is non-NULL and has subaccesses.  */
412 
413 static inline bool
414 access_has_children_p (struct access *acc)
415 {
416   return acc && acc->first_child;
417 }
418 
419 /* Return a vector of pointers to accesses for the variable given in BASE or
420    NULL if there is none.  */
421 
422 static VEC (access_p, heap) *
423 get_base_access_vector (tree base)
424 {
425   void **slot;
426 
427   slot = pointer_map_contains (base_access_vec, base);
428   if (!slot)
429     return NULL;
430   else
431     return *(VEC (access_p, heap) **) slot;
432 }
433 
434 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
435    in ACCESS.  Return NULL if it cannot be found.  */
436 
437 static struct access *
438 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
439 			HOST_WIDE_INT size)
440 {
441   while (access && (access->offset != offset || access->size != size))
442     {
443       struct access *child = access->first_child;
444 
445       while (child && (child->offset + child->size <= offset))
446 	child = child->next_sibling;
447       access = child;
448     }
449 
450   return access;
451 }
452 
453 /* Return the first group representative for BASE or NULL if none exists.  */
454 
455 static struct access *
456 get_first_repr_for_decl (tree base)
457 {
458   VEC (access_p, heap) *access_vec;
459 
460   access_vec = get_base_access_vector (base);
461   if (!access_vec)
462     return NULL;
463 
464   return VEC_index (access_p, access_vec, 0);
465 }
466 
467 /* Find an access representative for the variable BASE and given OFFSET and
468    SIZE.  Requires that access trees have already been built.  Return NULL if
469    it cannot be found.  */
470 
471 static struct access *
472 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
473 				 HOST_WIDE_INT size)
474 {
475   struct access *access;
476 
477   access = get_first_repr_for_decl (base);
478   while (access && (access->offset + access->size <= offset))
479     access = access->next_grp;
480   if (!access)
481     return NULL;
482 
483   return find_access_in_subtree (access, offset, size);
484 }
485 
486 /* Add LINK to the linked list of assign links of RACC.  */
487 static void
488 add_link_to_rhs (struct access *racc, struct assign_link *link)
489 {
490   gcc_assert (link->racc == racc);
491 
492   if (!racc->first_link)
493     {
494       gcc_assert (!racc->last_link);
495       racc->first_link = link;
496     }
497   else
498     racc->last_link->next = link;
499 
500   racc->last_link = link;
501   link->next = NULL;
502 }
503 
504 /* Move all link structures in their linked list in OLD_RACC to the linked list
505    in NEW_RACC.  */
506 static void
507 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
508 {
509   if (!old_racc->first_link)
510     {
511       gcc_assert (!old_racc->last_link);
512       return;
513     }
514 
515   if (new_racc->first_link)
516     {
517       gcc_assert (!new_racc->last_link->next);
518       gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
519 
520       new_racc->last_link->next = old_racc->first_link;
521       new_racc->last_link = old_racc->last_link;
522     }
523   else
524     {
525       gcc_assert (!new_racc->last_link);
526 
527       new_racc->first_link = old_racc->first_link;
528       new_racc->last_link = old_racc->last_link;
529     }
530   old_racc->first_link = old_racc->last_link = NULL;
531 }
532 
533 /* Add ACCESS to the work queue (which is actually a stack).  */
534 
535 static void
536 add_access_to_work_queue (struct access *access)
537 {
538   if (!access->grp_queued)
539     {
540       gcc_assert (!access->next_queued);
541       access->next_queued = work_queue_head;
542       access->grp_queued = 1;
543       work_queue_head = access;
544     }
545 }
546 
547 /* Pop an access from the work queue, and return it, assuming there is one.  */
548 
549 static struct access *
550 pop_access_from_work_queue (void)
551 {
552   struct access *access = work_queue_head;
553 
554   work_queue_head = access->next_queued;
555   access->next_queued = NULL;
556   access->grp_queued = 0;
557   return access;
558 }
559 
560 
561 /* Allocate necessary structures.  */
562 
563 static void
564 sra_initialize (void)
565 {
566   candidate_bitmap = BITMAP_ALLOC (NULL);
567   should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
568   cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
569   gcc_obstack_init (&name_obstack);
570   access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
571   link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
572   base_access_vec = pointer_map_create ();
573   memset (&sra_stats, 0, sizeof (sra_stats));
574   encountered_apply_args = false;
575   encountered_unchangable_recursive_call = false;
576   cfg_changed = false;
577 }
578 
579 /* Hook fed to pointer_map_traverse, deallocate stored vectors.  */
580 
581 static bool
582 delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
583 		     void *data ATTRIBUTE_UNUSED)
584 {
585   VEC (access_p, heap) *access_vec;
586   access_vec = (VEC (access_p, heap) *) *value;
587   VEC_free (access_p, heap, access_vec);
588 
589   return true;
590 }
591 
592 /* Deallocate all general structures.  */
593 
594 static void
595 sra_deinitialize (void)
596 {
597   BITMAP_FREE (candidate_bitmap);
598   BITMAP_FREE (should_scalarize_away_bitmap);
599   BITMAP_FREE (cannot_scalarize_away_bitmap);
600   free_alloc_pool (access_pool);
601   free_alloc_pool (link_pool);
602   obstack_free (&name_obstack, NULL);
603 
604   pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
605   pointer_map_destroy (base_access_vec);
606 }
607 
608 /* Remove DECL from candidates for SRA and write REASON to the dump file if
609    there is one.  */
610 static void
611 disqualify_candidate (tree decl, const char *reason)
612 {
613   bitmap_clear_bit (candidate_bitmap, DECL_UID (decl));
614 
615   if (dump_file && (dump_flags & TDF_DETAILS))
616     {
617       fprintf (dump_file, "! Disqualifying ");
618       print_generic_expr (dump_file, decl, 0);
619       fprintf (dump_file, " - %s\n", reason);
620     }
621 }
622 
623 /* Return true iff the type contains a field or an element which does not allow
624    scalarization.  */
625 
626 static bool
627 type_internals_preclude_sra_p (tree type)
628 {
629   tree fld;
630   tree et;
631 
632   switch (TREE_CODE (type))
633     {
634     case RECORD_TYPE:
635     case UNION_TYPE:
636     case QUAL_UNION_TYPE:
637       for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
638 	if (TREE_CODE (fld) == FIELD_DECL)
639 	  {
640 	    tree ft = TREE_TYPE (fld);
641 
642 	    if (TREE_THIS_VOLATILE (fld)
643 		|| !DECL_FIELD_OFFSET (fld) || !DECL_SIZE (fld)
644 		|| !host_integerp (DECL_FIELD_OFFSET (fld), 1)
645 		|| !host_integerp (DECL_SIZE (fld), 1))
646 	      return true;
647 
648 	    if (AGGREGATE_TYPE_P (ft)
649 		&& type_internals_preclude_sra_p (ft))
650 	      return true;
651 	  }
652 
653       return false;
654 
655     case ARRAY_TYPE:
656       et = TREE_TYPE (type);
657 
658       if (AGGREGATE_TYPE_P (et))
659 	return type_internals_preclude_sra_p (et);
660       else
661 	return false;
662 
663     default:
664       return false;
665     }
666 }
667 
668 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
669    base variable if it is.  Return T if it is not an SSA_NAME.  */
670 
671 static tree
672 get_ssa_base_param (tree t)
673 {
674   if (TREE_CODE (t) == SSA_NAME)
675     {
676       if (SSA_NAME_IS_DEFAULT_DEF (t))
677 	return SSA_NAME_VAR (t);
678       else
679 	return NULL_TREE;
680     }
681   return t;
682 }
683 
684 /* Mark a dereference of BASE of distance DIST in the basic block that STMT
685    belongs to, unless the BB has already been marked as potentially
686    final.  */
687 
688 static void
689 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
690 {
691   basic_block bb = gimple_bb (stmt);
692   int idx, parm_index = 0;
693   tree parm;
694 
695   if (bitmap_bit_p (final_bbs, bb->index))
696     return;
697 
698   for (parm = DECL_ARGUMENTS (current_function_decl);
699        parm && parm != base;
700        parm = TREE_CHAIN (parm))
701     parm_index++;
702 
703   gcc_assert (parm_index < func_param_count);
704 
705   idx = bb->index * func_param_count + parm_index;
706   if (bb_dereferences[idx] < dist)
707     bb_dereferences[idx] = dist;
708 }
709 
710 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
711    the three fields.  Also add it to the vector of accesses corresponding to
712    the base.  Finally, return the new access.  */
713 
714 static struct access *
715 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
716 {
717   VEC (access_p, heap) *vec;
718   struct access *access;
719   void **slot;
720 
721   access = (struct access *) pool_alloc (access_pool);
722   memset (access, 0, sizeof (struct access));
723   access->base = base;
724   access->offset = offset;
725   access->size = size;
726 
727   slot = pointer_map_contains (base_access_vec, base);
728   if (slot)
729     vec = (VEC (access_p, heap) *) *slot;
730   else
731     vec = VEC_alloc (access_p, heap, 32);
732 
733   VEC_safe_push (access_p, heap, vec, access);
734 
735   *((struct VEC (access_p,heap) **)
736 	pointer_map_insert (base_access_vec, base)) = vec;
737 
738   return access;
739 }
740 
741 /* Create and insert an access for EXPR.  Return the created access, or NULL
742    if it is not possible.  */
743 
744 static struct access *
745 create_access (tree expr, gimple stmt, bool write)
746 {
747   struct access *access;
748   HOST_WIDE_INT offset, size, max_size;
749   tree base = expr;
750   bool ptr, unscalarizable_region = false;
751 
752   base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
753 
754   if (sra_mode == SRA_MODE_EARLY_IPA && INDIRECT_REF_P (base))
755     {
756       base = get_ssa_base_param (TREE_OPERAND (base, 0));
757       if (!base)
758 	return NULL;
759       ptr = true;
760     }
761   else
762     ptr = false;
763 
764   if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
765     return NULL;
766 
767   if (sra_mode == SRA_MODE_EARLY_IPA)
768     {
769       if (size < 0 || size != max_size)
770 	{
771 	  disqualify_candidate (base, "Encountered a variable sized access.");
772 	  return NULL;
773 	}
774       if (TREE_CODE (expr) == COMPONENT_REF
775 	  && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
776 	{
777 	  disqualify_candidate (base, "Encountered a bit-field access.");
778 	  return NULL;
779 	}
780       gcc_assert ((offset % BITS_PER_UNIT) == 0);
781 
782       if (ptr)
783 	mark_parm_dereference (base, offset + size, stmt);
784     }
785   else
786     {
787       if (size != max_size)
788 	{
789 	  size = max_size;
790 	  unscalarizable_region = true;
791 	}
792       if (size < 0)
793 	{
794 	  disqualify_candidate (base, "Encountered an unconstrained access.");
795 	  return NULL;
796 	}
797     }
798 
799   access = create_access_1 (base, offset, size);
800   access->expr = expr;
801   access->type = TREE_TYPE (expr);
802   access->write = write;
803   access->grp_unscalarizable_region = unscalarizable_region;
804   access->stmt = stmt;
805 
806   return access;
807 }
808 
809 
810 /* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
811    register types or (recursively) records with only these two kinds of fields.
812    It also returns false if any of these records has a zero-size field as its
813    last field.  */
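
/* For instance (hypothetical types), the predicate holds for

     struct ok  { int i; struct { float f; double d; } nested; };

   but not for

     struct bad { int a[4]; };

   because the array member is neither a gimple register type nor a
   record.  */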
814 
815 static bool
816 type_consists_of_records_p (tree type)
817 {
818   tree fld;
819   bool last_fld_has_zero_size = false;
820 
821   if (TREE_CODE (type) != RECORD_TYPE)
822     return false;
823 
824   for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
825     if (TREE_CODE (fld) == FIELD_DECL)
826       {
827 	tree ft = TREE_TYPE (fld);
828 
829 	if (!is_gimple_reg_type (ft)
830 	    && !type_consists_of_records_p (ft))
831 	  return false;
832 
833 	last_fld_has_zero_size = tree_low_cst (DECL_SIZE (fld), 1) == 0;
834       }
835 
836   if (last_fld_has_zero_size)
837     return false;
838 
839   return true;
840 }
841 
842 /* Create total_scalarization accesses for all scalar type fields in DECL,
843    which must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
844    must be the top-most VAR_DECL representing the variable, OFFSET must be the
845    offset of DECL within BASE.  */
846 
847 static void
848 completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset)
849 {
850   tree fld, decl_type = TREE_TYPE (decl);
851 
852   for (fld = TYPE_FIELDS (decl_type); fld; fld = TREE_CHAIN (fld))
853     if (TREE_CODE (fld) == FIELD_DECL)
854       {
855 	HOST_WIDE_INT pos = offset + int_bit_position (fld);
856 	tree ft = TREE_TYPE (fld);
857 
858 	if (is_gimple_reg_type (ft))
859 	  {
860 	    struct access *access;
861 	    HOST_WIDE_INT size;
862 	    tree expr;
863 	    bool ok;
864 
865 	    size = tree_low_cst (DECL_SIZE (fld), 1);
866 	    expr = base;
867 	    ok = build_ref_for_offset (&expr, TREE_TYPE (base), pos,
868 				       ft, false);
869 	    gcc_assert (ok);
870 
871 	    access = create_access_1 (base, pos, size);
872 	    access->expr = expr;
873 	    access->type = ft;
874 	    access->total_scalarization = 1;
875 	    /* Accesses for intraprocedural SRA can have their stmt NULL.  */
876 	  }
877 	else
878 	  completely_scalarize_record (base, fld, pos);
879       }
880 }
881 
882 
883 /* Search the given tree for a declaration by skipping handled components and
884    exclude it from the candidates.  */
885 
886 static void
887 disqualify_base_of_expr (tree t, const char *reason)
888 {
889   while (handled_component_p (t))
890     t = TREE_OPERAND (t, 0);
891 
892   if (sra_mode == SRA_MODE_EARLY_IPA)
893     {
894       if (INDIRECT_REF_P (t))
895 	t = TREE_OPERAND (t, 0);
896       t = get_ssa_base_param (t);
897     }
898 
899   if (t && DECL_P (t))
900     disqualify_candidate (t, reason);
901 }
902 
903 /* Scan expression EXPR and create access structures for all accesses to
904    candidates for scalarization.  Return the created access or NULL if none is
905    created.  */
906 
907 static struct access *
908 build_access_from_expr_1 (tree *expr_ptr, gimple stmt, bool write)
909 {
910   struct access *ret = NULL;
911   tree expr = *expr_ptr;
912   bool partial_ref;
913 
914   if (TREE_CODE (expr) == BIT_FIELD_REF
915       || TREE_CODE (expr) == IMAGPART_EXPR
916       || TREE_CODE (expr) == REALPART_EXPR)
917     {
918       expr = TREE_OPERAND (expr, 0);
919       partial_ref = true;
920     }
921   else
922     partial_ref = false;
923 
924   /* We need to dive through V_C_Es in order to get the size of the operand
925      and not of the result type.  Ada produces such statements.  We are also
926      capable of handling the topmost V_C_E but not any of those buried in other
927      handled components.  */
928   if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
929     expr = TREE_OPERAND (expr, 0);
930 
931   if (contains_view_convert_expr_p (expr))
932     {
933       disqualify_base_of_expr (expr, "V_C_E under a different handled "
934 			       "component.");
935       return NULL;
936     }
937 
938   switch (TREE_CODE (expr))
939     {
940     case INDIRECT_REF:
941       if (sra_mode != SRA_MODE_EARLY_IPA)
942 	return NULL;
943       /* fall through */
944     case VAR_DECL:
945     case PARM_DECL:
946     case RESULT_DECL:
947     case COMPONENT_REF:
948     case ARRAY_REF:
949     case ARRAY_RANGE_REF:
950       ret = create_access (expr, stmt, write);
951       break;
952 
953     default:
954       break;
955     }
956 
957   if (write && partial_ref && ret)
958     ret->grp_partial_lhs = 1;
959 
960   return ret;
961 }
962 
963 /* Callback of scan_function.  Scan expression EXPR and create access
964    structures for all accesses to candidates for scalarization.  Return true if
965    any access has been inserted.  */
966 
967 static bool
968 build_access_from_expr (tree *expr_ptr,
969 			gimple_stmt_iterator *gsi, bool write,
970 			void *data ATTRIBUTE_UNUSED)
971 {
972   struct access *access;
973 
974   access = build_access_from_expr_1 (expr_ptr, gsi_stmt (*gsi), write);
975   if (access)
976     {
977       /* This means the aggregate is accessed as a whole in a way other than an
978 	 assign statement and thus cannot be removed even if we had a scalar
979 	 replacement for everything.  */
980       if (cannot_scalarize_away_bitmap)
981 	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
982       return true;
983     }
984   return false;
985 }
986 
987 /* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
988    modes in which it matters, return true iff they have been disqualified.  RHS
989    may be NULL, in that case ignore it.  If we scalarize an aggregate in
990    intra-SRA we may need to add statements after each statement.  This is not
991    possible if a statement unconditionally has to end the basic block.  */
992 static bool
993 disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
994 {
995   if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
996       && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
997     {
998       disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
999       if (rhs)
1000 	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1001       return true;
1002     }
1003   return false;
1004 }
1005 
1006 
1007 /* Result code for scan_assign callback for scan_function.  */
1008 enum scan_assign_result { SRA_SA_NONE,       /* nothing done for the stmt */
1009 			  SRA_SA_PROCESSED,  /* stmt analyzed/changed */
1010 			  SRA_SA_REMOVED };  /* stmt redundant and eliminated */
1011 
1012 
1013 /* Callback of scan_function.  Scan expressions occurring in the statement
1014    pointed to by STMT_PTR, create access structures for all accesses to
1015    candidates for scalarization and remove those candidates which occur in
1016    statements or expressions that prevent them from being split apart.  Return
1017    true if any access has been inserted.  */
1018 
1019 static enum scan_assign_result
1020 build_accesses_from_assign (gimple *stmt_ptr,
1021 			    gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
1022 			    void *data ATTRIBUTE_UNUSED)
1023 {
1024   gimple stmt = *stmt_ptr;
1025   tree *lhs_ptr, *rhs_ptr;
1026   struct access *lacc, *racc;
1027 
1028   if (!gimple_assign_single_p (stmt))
1029     return SRA_SA_NONE;
1030 
1031   lhs_ptr = gimple_assign_lhs_ptr (stmt);
1032   rhs_ptr = gimple_assign_rhs1_ptr (stmt);
1033 
1034   if (disqualify_ops_if_throwing_stmt (stmt, *lhs_ptr, *rhs_ptr))
1035     return SRA_SA_NONE;
1036 
1037   racc = build_access_from_expr_1 (rhs_ptr, stmt, false);
1038   lacc = build_access_from_expr_1 (lhs_ptr, stmt, true);
1039 
1040   if (racc)
1041     {
1042       racc->grp_assignment_read = 1;
1043       if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1044 	  && !is_gimple_reg_type (racc->type))
1045 	bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1046     }
1047 
1048   if (lacc && racc
1049       && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1050       && !lacc->grp_unscalarizable_region
1051       && !racc->grp_unscalarizable_region
1052       && AGGREGATE_TYPE_P (TREE_TYPE (*lhs_ptr))
1053       /* FIXME: Turn the following line into an assert after PR 40058 is
1054 	 fixed.  */
1055       && lacc->size == racc->size
1056       && useless_type_conversion_p (lacc->type, racc->type))
1057     {
1058       struct assign_link *link;
1059 
1060       link = (struct assign_link *) pool_alloc (link_pool);
1061       memset (link, 0, sizeof (struct assign_link));
1062 
1063       link->lacc = lacc;
1064       link->racc = racc;
1065 
1066       add_link_to_rhs (racc, link);
1067     }
1068 
1069   return (lacc || racc) ? SRA_SA_PROCESSED : SRA_SA_NONE;
1070 }
1071 
1072 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1073    GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */
1074 
1075 static bool
1076 asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
1077 		void *data ATTRIBUTE_UNUSED)
1078 {
1079   if (DECL_P (op))
1080     disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1081 
1082   return false;
1083 }
1084 
1085 /* Return true iff callsite CALL has at least as many actual arguments as there
1086    are formal parameters of the function currently processed by IPA-SRA.  */
1087 
1088 static inline bool
1089 callsite_has_enough_arguments_p (gimple call)
1090 {
1091   return gimple_call_num_args (call) >= (unsigned) func_param_count;
1092 }
1093 
1094 /* Scan function and look for interesting statements. Return true if any has
1095    been found or processed, as indicated by callbacks.  SCAN_EXPR is a callback
1096    called on all expressions within statements except assign statements and
1097    those deemed entirely unsuitable for some reason (all operands in such
1098    statements and expressions are removed from candidate_bitmap).  SCAN_ASSIGN
1099    is a callback called on all assign statements, HANDLE_SSA_DEFS is a callback
1100    called on assign statements and those call statements which have a lhs; it
1101    can be NULL.  ANALYSIS_STAGE is true when running in the analysis stage of a
1102    pass and thus no statement is being modified.  DATA is a pointer passed to
1103    all callbacks.  If any single callback returns true, this function also
1104    returns true, otherwise it returns false.  */
1105 
1106 static bool
1107 scan_function (bool (*scan_expr) (tree *, gimple_stmt_iterator *, bool, void *),
1108 	       enum scan_assign_result (*scan_assign) (gimple *,
1109 						       gimple_stmt_iterator *,
1110 						       void *),
1111 	       bool (*handle_ssa_defs)(gimple, void *),
1112 	       bool analysis_stage, void *data)
1113 {
1114   gimple_stmt_iterator gsi;
1115   basic_block bb;
1116   unsigned i;
1117   tree *t;
1118   bool ret = false;
1119 
1120   FOR_EACH_BB (bb)
1121     {
1122       if (handle_ssa_defs)
1123 	for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1124 	  ret |= handle_ssa_defs (gsi_stmt (gsi), data);
1125 
1126       gsi = gsi_start_bb (bb);
1127       while (!gsi_end_p (gsi))
1128 	{
1129 	  gimple stmt = gsi_stmt (gsi);
1130 	  enum scan_assign_result assign_result;
1131 	  bool any = false, deleted = false;
1132 
1133 	  if (analysis_stage && final_bbs && stmt_can_throw_external (stmt))
1134 	    bitmap_set_bit (final_bbs, bb->index);
1135 	  switch (gimple_code (stmt))
1136 	    {
1137 	    case GIMPLE_RETURN:
1138 	      t = gimple_return_retval_ptr (stmt);
1139 	      if (*t != NULL_TREE)
1140 		any |= scan_expr (t, &gsi, false, data);
1141 	      if (analysis_stage && final_bbs)
1142 		bitmap_set_bit (final_bbs, bb->index);
1143 	      break;
1144 
1145 	    case GIMPLE_ASSIGN:
1146 	      assign_result = scan_assign (&stmt, &gsi, data);
1147 	      any |= assign_result == SRA_SA_PROCESSED;
1148 	      deleted = assign_result == SRA_SA_REMOVED;
1149 	      if (handle_ssa_defs && assign_result != SRA_SA_REMOVED)
1150 		any |= handle_ssa_defs (stmt, data);
1151 	      break;
1152 
1153 	    case GIMPLE_CALL:
1154 	      /* Operands must be processed before the lhs.  */
1155 	      for (i = 0; i < gimple_call_num_args (stmt); i++)
1156 		{
1157 		  tree *argp = gimple_call_arg_ptr (stmt, i);
1158 		  any |= scan_expr (argp, &gsi, false, data);
1159 		}
1160 
1161 	      if (analysis_stage && sra_mode == SRA_MODE_EARLY_IPA)
1162 		{
1163 		  tree dest = gimple_call_fndecl (stmt);
1164 		  int flags = gimple_call_flags (stmt);
1165 
1166 		  if (dest)
1167 		    {
1168 		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1169 			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1170 			encountered_apply_args = true;
1171 		      if (cgraph_get_node (dest)
1172 			  == cgraph_get_node (current_function_decl)
1173 			  && !callsite_has_enough_arguments_p (stmt))
1174 			encountered_unchangable_recursive_call = true;
1175 		    }
1176 
1177 		  if (final_bbs
1178 		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
1179 		    bitmap_set_bit (final_bbs, bb->index);
1180 		}
1181 
1182 	      if (gimple_call_lhs (stmt))
1183 		{
1184 		  tree *lhs_ptr = gimple_call_lhs_ptr (stmt);
1185 		  if (!analysis_stage
1186 		      || !disqualify_ops_if_throwing_stmt (stmt,
1187 							   *lhs_ptr, NULL))
1188 		    {
1189 		      any |= scan_expr (lhs_ptr, &gsi, true, data);
1190 		      if (handle_ssa_defs)
1191 			any |= handle_ssa_defs (stmt, data);
1192 		    }
1193 		}
1194 	      break;
1195 
1196 	    case GIMPLE_ASM:
1197 	      if (analysis_stage)
1198 		{
1199 		  walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
1200 						 asm_visit_addr);
1201 		  if (final_bbs)
1202 		    bitmap_set_bit (final_bbs, bb->index);
1203 		}
1204 	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
1205 		{
1206 		  tree *op = &TREE_VALUE (gimple_asm_input_op (stmt, i));
1207 		  any |= scan_expr (op, &gsi, false, data);
1208 		}
1209 	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
1210 		{
1211 		  tree *op = &TREE_VALUE (gimple_asm_output_op (stmt, i));
1212 		  any |= scan_expr (op, &gsi, true, data);
1213 		}
1214 	      break;
1215 
1216 	    default:
1217 	      break;
1218 	    }
1219 
1220 	  if (any)
1221 	    {
1222 	      ret = true;
1223 
1224 	      if (!analysis_stage)
1225 		{
1226 		  update_stmt (stmt);
1227 		  if (maybe_clean_eh_stmt (stmt)
1228 		      && gimple_purge_dead_eh_edges (bb))
1229 		    cfg_changed = true;
1230 		}
1231 	    }
1232 	  if (!deleted)
1233 	    gsi_next (&gsi);
1234 	}
1235     }
1236 
1237   return ret;
1238 }
1239 
1240 /* Helper of the qsort function.  There are pointers to accesses in the array.
1241    An access is considered smaller than another if it has a smaller offset or
1242    if the offsets are the same but its size is bigger.  */
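
/* For example (a sketch): accesses at (offset 0, size 64), (offset 0, size 32)
   and (offset 32, size 32) sort in exactly that order, so an enclosing access
   always precedes the accesses contained within it.  */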
1243 
1244 static int
1245 compare_access_positions (const void *a, const void *b)
1246 {
1247   const access_p *fp1 = (const access_p *) a;
1248   const access_p *fp2 = (const access_p *) b;
1249   const access_p f1 = *fp1;
1250   const access_p f2 = *fp2;
1251 
1252   if (f1->offset != f2->offset)
1253     return f1->offset < f2->offset ? -1 : 1;
1254 
1255   if (f1->size == f2->size)
1256     {
1257       if (f1->type == f2->type)
1258 	return 0;
1259       /* Put any non-aggregate type before any aggregate type.  */
1260       else if (!is_gimple_reg_type (f1->type)
1261 	  && is_gimple_reg_type (f2->type))
1262 	return 1;
1263       else if (is_gimple_reg_type (f1->type)
1264 	       && !is_gimple_reg_type (f2->type))
1265 	return -1;
1266       /* Put any complex or vector type before any other scalar type.  */
1267       else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1268 	       && TREE_CODE (f1->type) != VECTOR_TYPE
1269 	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
1270 		   || TREE_CODE (f2->type) == VECTOR_TYPE))
1271 	return 1;
1272       else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1273 		|| TREE_CODE (f1->type) == VECTOR_TYPE)
1274 	       && TREE_CODE (f2->type) != COMPLEX_TYPE
1275 	       && TREE_CODE (f2->type) != VECTOR_TYPE)
1276 	return -1;
1277       /* Put the integral type with the bigger precision first.  */
1278       else if (INTEGRAL_TYPE_P (f1->type)
1279 	       && INTEGRAL_TYPE_P (f2->type))
1280 	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1281       /* Put any integral type with non-full precision last.  */
1282       else if (INTEGRAL_TYPE_P (f1->type)
1283 	       && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1284 		   != TYPE_PRECISION (f1->type)))
1285 	return 1;
1286       else if (INTEGRAL_TYPE_P (f2->type)
1287 	       && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1288 		   != TYPE_PRECISION (f2->type)))
1289 	return -1;
1290       /* Stabilize the sort.  */
1291       return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1292     }
1293 
1294   /* We want the bigger accesses first, thus the opposite operator in the next
1295      line: */
1296   return f1->size > f2->size ? -1 : 1;
1297 }
1298 
1299 
1300 /* Append a name of the declaration to the name obstack.  A helper function for
1301    make_fancy_name.  */
1302 
1303 static void
1304 make_fancy_decl_name (tree decl)
1305 {
1306   char buffer[32];
1307 
1308   tree name = DECL_NAME (decl);
1309   if (name)
1310     obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1311 		  IDENTIFIER_LENGTH (name));
1312   else
1313     {
1314       sprintf (buffer, "D%u", DECL_UID (decl));
1315       obstack_grow (&name_obstack, buffer, strlen (buffer));
1316     }
1317 }
1318 
1319 /* Helper for make_fancy_name.  */
1320 
1321 static void
1322 make_fancy_name_1 (tree expr)
1323 {
1324   char buffer[32];
1325   tree index;
1326 
1327   if (DECL_P (expr))
1328     {
1329       make_fancy_decl_name (expr);
1330       return;
1331     }
1332 
1333   switch (TREE_CODE (expr))
1334     {
1335     case COMPONENT_REF:
1336       make_fancy_name_1 (TREE_OPERAND (expr, 0));
1337       obstack_1grow (&name_obstack, '$');
1338       make_fancy_decl_name (TREE_OPERAND (expr, 1));
1339       break;
1340 
1341     case ARRAY_REF:
1342       make_fancy_name_1 (TREE_OPERAND (expr, 0));
1343       obstack_1grow (&name_obstack, '$');
1344       /* Arrays with only one element may not have a constant as their
1345 	 index. */
1346       index = TREE_OPERAND (expr, 1);
1347       if (TREE_CODE (index) != INTEGER_CST)
1348 	break;
1349       sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1350       obstack_grow (&name_obstack, buffer, strlen (buffer));
1351 
1352       break;
1353 
1354     case BIT_FIELD_REF:
1355     case REALPART_EXPR:
1356     case IMAGPART_EXPR:
1357       gcc_unreachable (); 	/* we treat these as scalars.  */
1358       break;
1359     default:
1360       break;
1361     }
1362 }
1363 
1364 /* Create a human readable name for a replacement variable based on EXPR.  */
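
/* For instance (a hypothetical access expression), an access to a.data[3].x
   yields the name "a$data$3$x"; create_access_replacement later uses such
   names to label replacement variables in dumps and debug information.  */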
1365 
1366 static char *
1367 make_fancy_name (tree expr)
1368 {
1369   make_fancy_name_1 (expr);
1370   obstack_1grow (&name_obstack, '\0');
1371   return XOBFINISH (&name_obstack, char *);
1372 }
1373 
1374 /* Helper function for build_ref_for_offset.  */
1375 
1376 static bool
1377 build_ref_for_offset_1 (tree *res, tree type, HOST_WIDE_INT offset,
1378 			tree exp_type)
1379 {
1380   while (1)
1381     {
1382       tree fld;
1383       tree tr_size, index, minidx;
1384       HOST_WIDE_INT el_size;
1385 
1386       if (offset == 0 && exp_type
1387 	  && types_compatible_p (exp_type, type))
1388 	return true;
1389 
1390       switch (TREE_CODE (type))
1391 	{
1392 	case UNION_TYPE:
1393 	case QUAL_UNION_TYPE:
1394 	case RECORD_TYPE:
1395 	  for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
1396 	    {
1397 	      HOST_WIDE_INT pos, size;
1398 	      tree expr, *expr_ptr;
1399 
1400 	      if (TREE_CODE (fld) != FIELD_DECL)
1401 		continue;
1402 
1403 	      pos = int_bit_position (fld);
1404 	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1405 	      tr_size = DECL_SIZE (fld);
1406 	      if (!tr_size || !host_integerp (tr_size, 1))
1407 		continue;
1408 	      size = tree_low_cst (tr_size, 1);
1409 	      if (size == 0)
1410 		{
1411 		  if (pos != offset)
1412 		    continue;
1413 		}
1414 	      else if (pos > offset || (pos + size) <= offset)
1415 		continue;
1416 
1417 	      if (res)
1418 		{
1419 		  expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1420 				 NULL_TREE);
1421 		  expr_ptr = &expr;
1422 		}
1423 	      else
1424 		expr_ptr = NULL;
1425 	      if (build_ref_for_offset_1 (expr_ptr, TREE_TYPE (fld),
1426 					  offset - pos, exp_type))
1427 		{
1428 		  if (res)
1429 		    *res = expr;
1430 		  return true;
1431 		}
1432 	    }
1433 	  return false;
1434 
1435 	case ARRAY_TYPE:
1436 	  tr_size = TYPE_SIZE (TREE_TYPE (type));
1437 	  if (!tr_size || !host_integerp (tr_size, 1))
1438 	    return false;
1439 	  el_size = tree_low_cst (tr_size, 1);
1440 
1441 	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1442 	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1443 	    return false;
1444 	  if (res)
1445 	    {
1446 	      index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1447 	      if (!integer_zerop (minidx))
1448 		index = int_const_binop (PLUS_EXPR, index, minidx, 0);
1449 	      *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1450 			     NULL_TREE, NULL_TREE);
1451 	    }
1452 	  offset = offset % el_size;
1453 	  type = TREE_TYPE (type);
1454 	  break;
1455 
1456 	default:
1457 	  if (offset != 0)
1458 	    return false;
1459 
1460 	  if (exp_type)
1461 	    return false;
1462 	  else
1463 	    return true;
1464 	}
1465     }
1466 }
1467 
1468 /* Construct an expression that would reference a part of aggregate *EXPR of
1469    type TYPE at the given OFFSET of the type EXP_TYPE.  If EXPR is NULL, the
1470    function only determines whether it can build such a reference without
1471    actually doing it, otherwise, the tree it points to is unshared first and
1472    then used as a base for further sub-references.
1473 
1474    FIXME: Eventually this should be replaced with
1475    maybe_fold_offset_to_reference() from tree-ssa-ccp.c but that requires a
1476    minor rewrite of fold_stmt.
1477  */
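
/* A sketch of the intended use (hypothetical types, assuming 32-bit int):
   given

     struct P { int x; int y; };
     struct Q { struct P p; } *q;

   calling this function on the expression *q with TYPE being struct Q, OFFSET
   being 32 and EXP_TYPE being the integer type rewrites the expression into
   (*q).p.y, whereas passing a NULL EXPR merely checks that such a reference
   could be built.  */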
1478 
1479 bool
1480 build_ref_for_offset (tree *expr, tree type, HOST_WIDE_INT offset,
1481 		      tree exp_type, bool allow_ptr)
1482 {
1483   location_t loc = expr ? EXPR_LOCATION (*expr) : UNKNOWN_LOCATION;
1484 
1485   if (expr)
1486     *expr = unshare_expr (*expr);
1487 
1488   if (allow_ptr && POINTER_TYPE_P (type))
1489     {
1490       type = TREE_TYPE (type);
1491       if (expr)
1492 	*expr = fold_build1_loc (loc, INDIRECT_REF, type, *expr);
1493     }
1494 
1495   return build_ref_for_offset_1 (expr, type, offset, exp_type);
1496 }
1497 
1498 /* Return true iff TYPE is stdarg va_list type.  */
1499 
1500 static inline bool
1501 is_va_list_type (tree type)
1502 {
1503   return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1504 }
1505 
1506 /* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
1507    those variables whose type is suitable for scalarization.  */
1508 
1509 static bool
1510 find_var_candidates (void)
1511 {
1512   tree var, type;
1513   referenced_var_iterator rvi;
1514   bool ret = false;
1515 
1516   FOR_EACH_REFERENCED_VAR (var, rvi)
1517     {
1518       if (TREE_CODE (var) != VAR_DECL && TREE_CODE (var) != PARM_DECL)
1519         continue;
1520       type = TREE_TYPE (var);
1521 
1522       if (!AGGREGATE_TYPE_P (type)
1523 	  || needs_to_live_in_memory (var)
1524 	  || TREE_THIS_VOLATILE (var)
1525 	  || !COMPLETE_TYPE_P (type)
1526 	  || !host_integerp (TYPE_SIZE (type), 1)
1527           || tree_low_cst (TYPE_SIZE (type), 1) == 0
1528 	  || type_internals_preclude_sra_p (type)
1529 	  /* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
1530 	      we also want to schedule it rather late.  Thus we ignore it in
1531 	      the early pass. */
1532 	  || (sra_mode == SRA_MODE_EARLY_INTRA
1533 	      && is_va_list_type (type)))
1534 	continue;
1535 
1536       bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1537 
1538       if (dump_file && (dump_flags & TDF_DETAILS))
1539 	{
1540 	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1541 	  print_generic_expr (dump_file, var, 0);
1542 	  fprintf (dump_file, "\n");
1543 	}
1544       ret = true;
1545     }
1546 
1547   return ret;
1548 }
1549 
1550 /* Sort all accesses for the given variable, check for partial overlaps and
1551    return NULL if there are any.  If there are none, pick a representative for
1552    each combination of offset and size and create a linked list out of them.
1553    Return the pointer to the first representative and make sure it is the first
1554    one in the vector of accesses.  */
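
/* For example (a sketch): accesses at (offset 0, size 64) and (offset 32,
   size 64) partially overlap, so the whole variable is rejected; accesses at
   (offset 0, size 64) and (offset 32, size 32) nest and are kept.  */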
1555 
1556 static struct access *
1557 sort_and_splice_var_accesses (tree var)
1558 {
1559   int i, j, access_count;
1560   struct access *res, **prev_acc_ptr = &res;
1561   VEC (access_p, heap) *access_vec;
1562   bool first = true;
1563   HOST_WIDE_INT low = -1, high = 0;
1564 
1565   access_vec = get_base_access_vector (var);
1566   if (!access_vec)
1567     return NULL;
1568   access_count = VEC_length (access_p, access_vec);
1569 
1570   /* Sort by <OFFSET, SIZE>.  */
1571   qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
1572 	 compare_access_positions);
1573 
1574   i = 0;
1575   while (i < access_count)
1576     {
1577       struct access *access = VEC_index (access_p, access_vec, i);
1578       bool grp_write = access->write;
1579       bool grp_read = !access->write;
1580       bool grp_assignment_read = access->grp_assignment_read;
1581       bool multiple_reads = false;
1582       bool total_scalarization = access->total_scalarization;
1583       bool grp_partial_lhs = access->grp_partial_lhs;
1584       bool first_scalar = is_gimple_reg_type (access->type);
1585       bool unscalarizable_region = access->grp_unscalarizable_region;
1586 
1587       if (first || access->offset >= high)
1588 	{
1589 	  first = false;
1590 	  low = access->offset;
1591 	  high = access->offset + access->size;
1592 	}
1593       else if (access->offset > low && access->offset + access->size > high)
1594 	return NULL;
1595       else
1596 	gcc_assert (access->offset >= low
1597 		    && access->offset + access->size <= high);
1598 
1599       j = i + 1;
1600       while (j < access_count)
1601 	{
1602 	  struct access *ac2 = VEC_index (access_p, access_vec, j);
1603 	  if (ac2->offset != access->offset || ac2->size != access->size)
1604 	    break;
1605 	  if (ac2->write)
1606 	    grp_write = true;
1607 	  else
1608 	    {
1609 	      if (grp_read)
1610 		multiple_reads = true;
1611 	      else
1612 		grp_read = true;
1613 	    }
1614 	  grp_assignment_read |= ac2->grp_assignment_read;
1615 	  grp_partial_lhs |= ac2->grp_partial_lhs;
1616 	  unscalarizable_region |= ac2->grp_unscalarizable_region;
1617 	  total_scalarization |= ac2->total_scalarization;
1618 	  relink_to_new_repr (access, ac2);
1619 
1620 	  /* If there are both aggregate-type and scalar-type accesses with
1621 	     this combination of size and offset, the comparison function
1622 	     should have put the scalars first.  */
1623 	  gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1624 	  ac2->group_representative = access;
1625 	  j++;
1626 	}
1627 
1628       i = j;
1629 
1630       access->group_representative = access;
1631       access->grp_write = grp_write;
1632       access->grp_read = grp_read;
1633       access->grp_assignment_read = grp_assignment_read;
1634       access->grp_hint = multiple_reads || total_scalarization;
1635       access->grp_partial_lhs = grp_partial_lhs;
1636       access->grp_unscalarizable_region = unscalarizable_region;
1637       if (access->first_link)
1638 	add_access_to_work_queue (access);
1639 
1640       *prev_acc_ptr = access;
1641       prev_acc_ptr = &access->next_grp;
1642     }
1643 
1644   gcc_assert (res == VEC_index (access_p, access_vec, 0));
1645   return res;
1646 }
1647 
1648 /* Create a variable for the given ACCESS which determines the type, name and a
1649    few other properties.  Return the variable declaration; it is up to the
1650    callers to store it in ACCESS->replacement_decl.  */
1651 
1652 static tree
1653 create_access_replacement (struct access *access, bool rename)
1654 {
1655   tree repl;
1656 
1657   repl = create_tmp_var (access->type, "SR");
1658   get_var_ann (repl);
1659   add_referenced_var (repl);
1660   if (rename)
1661     mark_sym_for_renaming (repl);
1662 
1663   if (!access->grp_partial_lhs
1664       && (TREE_CODE (access->type) == COMPLEX_TYPE
1665 	  || TREE_CODE (access->type) == VECTOR_TYPE))
1666     DECL_GIMPLE_REG_P (repl) = 1;
1667 
1668   DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1669   DECL_ARTIFICIAL (repl) = 1;
1670   DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1671 
1672   if (DECL_NAME (access->base)
1673       && !DECL_IGNORED_P (access->base)
1674       && !DECL_ARTIFICIAL (access->base))
1675     {
1676       char *pretty_name = make_fancy_name (access->expr);
1677 
1678       DECL_NAME (repl) = get_identifier (pretty_name);
1679       obstack_free (&name_obstack, pretty_name);
1680 
1681       SET_DECL_DEBUG_EXPR (repl, access->expr);
1682       DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
1683       TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
1684     }
1685   else
1686     TREE_NO_WARNING (repl) = 1;
1687 
1688   if (dump_file)
1689     {
1690       fprintf (dump_file, "Created a replacement for ");
1691       print_generic_expr (dump_file, access->base, 0);
1692       fprintf (dump_file, " offset: %u, size: %u: ",
1693 	       (unsigned) access->offset, (unsigned) access->size);
1694       print_generic_expr (dump_file, repl, 0);
1695       fprintf (dump_file, "\n");
1696     }
1697   sra_stats.replacements++;
1698 
1699   return repl;
1700 }
1701 
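/* For instance, a to-be-replaced access to s.f of type float typically gets
   a new temporary such as SR.5 from the function above; because `s' is a
   user variable, the temporary also receives a debug expression pointing
   back at s.f and a pretty name along the lines of "s$f", so that debug
   information keeps referring to the original field.  (SR.5 is only a
   made-up example name; the actual number is assigned by create_tmp_var.)  */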
1702 /* Return ACCESS scalar replacement, create it if it does not exist yet.  */
1703 
1704 static inline tree
1705 get_access_replacement (struct access *access)
1706 {
1707   gcc_assert (access->grp_to_be_replaced);
1708 
1709   if (!access->replacement_decl)
1710     access->replacement_decl = create_access_replacement (access, true);
1711   return access->replacement_decl;
1712 }
1713 
1714 /* Return ACCESS scalar replacement, create it if it does not exist yet but do
1715    not mark it for renaming.  */
1716 
1717 static inline tree
1718 get_unrenamed_access_replacement (struct access *access)
1719 {
1720   gcc_assert (!access->grp_to_be_replaced);
1721 
1722   if (!access->replacement_decl)
1723     access->replacement_decl = create_access_replacement (access, false);
1724   return access->replacement_decl;
1725 }
1726 
1727 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
1728    linked list along the way.  Stop when *ACCESS is NULL or the access it points
1729    to is not "within" the root.  Return false iff some accesses partially
1730    overlap.  */
1731 
1732 static bool
1733 build_access_subtree (struct access **access)
1734 {
1735   struct access *root = *access, *last_child = NULL;
1736   HOST_WIDE_INT limit = root->offset + root->size;
1737 
1738   *access = (*access)->next_grp;
1739   while (*access && (*access)->offset + (*access)->size <= limit)
1740     {
1741       if (!last_child)
1742 	root->first_child = *access;
1743       else
1744 	last_child->next_sibling = *access;
1745       last_child = *access;
1746 
1747       if (!build_access_subtree (access))
1748 	return false;
1749     }
1750 
1751   if (*access && (*access)->offset < limit)
1752     return false;
1753 
1754   return true;
1755 }
1756 
1757 /* Build a tree of access representatives, ACCESS is the pointer to the first
1758    one, others are linked in a list by the next_grp field.  Return false iff
1759    some accesses partially overlap.  */
1760 
1761 static bool
1762 build_access_trees (struct access *access)
1763 {
1764   while (access)
1765     {
1766       struct access *root = access;
1767 
1768       if (!build_access_subtree (&access))
1769 	return false;
1770       root->next_grp = access;
1771     }
1772   return true;
1773 }
1774 
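/* As an example of the trees built above: representatives sorted as
   <0, 64>, <0, 32> and <32, 32> (offset, size) yield a single tree whose
   root is the <0, 64> access, with the two smaller accesses as its children
   linked through next_sibling, while the next disjoint representative, if
   any, continues the next_grp chain of roots.  */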
1775 /* Return true if EXPR contains some ARRAY_REFs into a variable bounded
1776    array.  */
1777 
1778 static bool
1779 expr_with_var_bounded_array_refs_p (tree expr)
1780 {
1781   while (handled_component_p (expr))
1782     {
1783       if (TREE_CODE (expr) == ARRAY_REF
1784 	  && !host_integerp (array_ref_low_bound (expr), 0))
1785 	return true;
1786       expr = TREE_OPERAND (expr, 0);
1787     }
1788   return false;
1789 }
1790 
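/* Such variably bounded ARRAY_REFs typically come from languages like
   Fortran or Ada, where the lower bound of an array need not be a compile
   time constant; since the offset of such a reference cannot be determined
   statically, accesses containing them are not given scalar replacements.  */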
1791 enum mark_read_status { SRA_MR_NOT_READ, SRA_MR_READ, SRA_MR_ASSIGN_READ};
1792 
1793 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
1794    doing so seems beneficial and ALLOW_REPLACEMENTS allows it.  Also set all
1795    sorts of access flags appropriately along the way, notably always setting
1796    grp_read and grp_assignment_read according to MARK_READ and grp_write when
1797    MARK_WRITE is true.  */
1798 
1799 static bool
1800 analyze_access_subtree (struct access *root, bool allow_replacements,
1801 			enum mark_read_status mark_read, bool mark_write)
1802 {
1803   struct access *child;
1804   HOST_WIDE_INT limit = root->offset + root->size;
1805   HOST_WIDE_INT covered_to = root->offset;
1806   bool scalar = is_gimple_reg_type (root->type);
1807   bool hole = false, sth_created = false;
1808   bool direct_read = root->grp_read;
1809 
1810   if (mark_read == SRA_MR_ASSIGN_READ)
1811     {
1812       root->grp_read = 1;
1813       root->grp_assignment_read = 1;
1814     }
1815   if (mark_read == SRA_MR_READ)
1816     root->grp_read = 1;
1817   else if (root->grp_assignment_read)
1818     mark_read = SRA_MR_ASSIGN_READ;
1819   else if (root->grp_read)
1820     mark_read = SRA_MR_READ;
1821 
1822   if (mark_write)
1823     root->grp_write = true;
1824   else if (root->grp_write)
1825     mark_write = true;
1826 
1827   if (root->grp_unscalarizable_region)
1828     allow_replacements = false;
1829 
1830   if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
1831     allow_replacements = false;
1832 
1833   for (child = root->first_child; child; child = child->next_sibling)
1834     {
1835       if (!hole && child->offset < covered_to)
1836 	hole = true;
1837       else
1838 	covered_to += child->size;
1839 
1840       sth_created |= analyze_access_subtree (child,
1841 					     allow_replacements && !scalar,
1842 					     mark_read, mark_write);
1843 
1844       root->grp_unscalarized_data |= child->grp_unscalarized_data;
1845       hole |= !child->grp_covered;
1846     }
1847 
1848   if (allow_replacements && scalar && !root->first_child
1849       && (root->grp_hint
1850 	  || (root->grp_write && (direct_read || root->grp_assignment_read)))
1851       /* We must not ICE later on when trying to build an access to the
1852 	 original data within the aggregate even when it is impossible to do in
1853 	 a defined way like in the PR 42703 testcase.  Therefore we check
1854 	 pre-emptively here that we will be able to do that.  */
1855       && build_ref_for_offset (NULL, TREE_TYPE (root->base), root->offset,
1856 			       root->type, false))
1857     {
1858       bool new_integer_type;
1859       if (TREE_CODE (root->type) == ENUMERAL_TYPE)
1860 	{
1861 	  tree rt = root->type;
1862 	  root->type = build_nonstandard_integer_type (TYPE_PRECISION (rt),
1863 						       TYPE_UNSIGNED (rt));
1864 	  new_integer_type = true;
1865 	}
1866       else
1867 	new_integer_type = false;
1868 
1869       if (dump_file && (dump_flags & TDF_DETAILS))
1870 	{
1871 	  fprintf (dump_file, "Marking ");
1872 	  print_generic_expr (dump_file, root->base, 0);
1873 	  fprintf (dump_file, " offset: %u, size: %u ",
1874 		   (unsigned) root->offset, (unsigned) root->size);
1875 	  fprintf (dump_file, " to be replaced%s.\n",
1876 		   new_integer_type ? " with an integer": "");
1877 	}
1878 
1879       root->grp_to_be_replaced = 1;
1880       sth_created = true;
1881       hole = false;
1882     }
1883   else if (covered_to < limit)
1884     hole = true;
1885 
1886   if (sth_created && !hole)
1887     {
1888       root->grp_covered = 1;
1889       return true;
1890     }
1891   if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
1892     root->grp_unscalarized_data = 1; /* not covered and written to */
1893   if (sth_created)
1894     return true;
1895   return false;
1896 }
1897 
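/* Roughly speaking, a scalar leaf access is scheduled for replacement above
   when it is hinted (read repeatedly or part of a total scalarization), or
   when it is both written and read either directly or through an aggregate
   assignment.  For example, in an aggregate where only one field is ever
   accessed, that field may get a replacement while the remaining fields
   leave a hole; the root then is not grp_covered and, if the aggregate is
   also written as a whole, grp_unscalarized_data is set so that the original
   variable is kept up to date.  */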
1898 /* Analyze all access trees linked by next_grp by means of
1899    analyze_access_subtree.  */
1900 static bool
1901 analyze_access_trees (struct access *access)
1902 {
1903   bool ret = false;
1904 
1905   while (access)
1906     {
1907       if (analyze_access_subtree (access, true, SRA_MR_NOT_READ, false))
1908 	ret = true;
1909       access = access->next_grp;
1910     }
1911 
1912   return ret;
1913 }
1914 
1915 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with
1916    SIZE would conflict with an already existing one.  If exactly such a child
1917    already exists in LACC, store a pointer to it in EXACT_MATCH.  */
1918 
1919 static bool
1920 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
1921 			      HOST_WIDE_INT size, struct access **exact_match)
1922 {
1923   struct access *child;
1924 
1925   for (child = lacc->first_child; child; child = child->next_sibling)
1926     {
1927       if (child->offset == norm_offset && child->size == size)
1928 	{
1929 	  *exact_match = child;
1930 	  return true;
1931 	}
1932 
1933       if (child->offset < norm_offset + size
1934 	  && child->offset + child->size > norm_offset)
1935 	return true;
1936     }
1937 
1938   return false;
1939 }
1940 
1941 /* Create a new child access of PARENT, with all properties just like MODEL
1942    except for its offset, and with its grp_write set and grp_read cleared.
1943    Return the new access or NULL if it cannot be created.  Note that this access
1944    is created long after all splicing and sorting; it is not located in any
1945    access vector and is automatically a representative of its group.  */
1946 
1947 static struct access *
1948 create_artificial_child_access (struct access *parent, struct access *model,
1949 				HOST_WIDE_INT new_offset)
1950 {
1951   struct access *access;
1952   struct access **child;
1953   tree expr = parent->base;
1954 
1955   gcc_assert (!model->grp_unscalarizable_region);
1956 
1957   if (!build_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
1958 			     model->type, false))
1959     return NULL;
1960 
1961   access = (struct access *) pool_alloc (access_pool);
1962   memset (access, 0, sizeof (struct access));
1963   access->base = parent->base;
1964   access->expr = expr;
1965   access->offset = new_offset;
1966   access->size = model->size;
1967   access->type = model->type;
1968   access->grp_write = true;
1969   access->grp_read = false;
1970 
1971   child = &parent->first_child;
1972   while (*child && (*child)->offset < new_offset)
1973     child = &(*child)->next_sibling;
1974 
1975   access->next_sibling = *child;
1976   *child = access;
1977 
1978   return access;
1979 }
1980 
1981 
1982 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
1983    true if any new subaccess was created.  Additionally, if RACC is a scalar
1984    access but LACC is not, change the type of the latter, if possible.  */
1985 
1986 static bool
1987 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
1988 {
1989   struct access *rchild;
1990   HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
1991   bool ret = false;
1992 
1993   if (is_gimple_reg_type (lacc->type)
1994       || lacc->grp_unscalarizable_region
1995       || racc->grp_unscalarizable_region)
1996     return false;
1997 
1998   if (!lacc->first_child && !racc->first_child
1999       && is_gimple_reg_type (racc->type))
2000     {
2001       tree t = lacc->base;
2002 
2003       if (build_ref_for_offset (&t, TREE_TYPE (t), lacc->offset, racc->type,
2004 				false))
2005 	{
2006 	  lacc->expr = t;
2007 	  lacc->type = racc->type;
2008 	}
2009       return false;
2010     }
2011 
2012   for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2013     {
2014       struct access *new_acc = NULL;
2015       HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2016 
2017       if (rchild->grp_unscalarizable_region)
2018 	continue;
2019 
2020       if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2021 					&new_acc))
2022 	{
2023 	  if (new_acc)
2024 	    {
2025 	      rchild->grp_hint = 1;
2026 	      new_acc->grp_hint |= new_acc->grp_read;
2027 	      if (rchild->first_child)
2028 		ret |= propagate_subaccesses_across_link (new_acc, rchild);
2029 	    }
2030 	  continue;
2031 	}
2032 
2033       /* If (a part of) a union field is on the RHS of an assignment, it can
2034 	 have sub-accesses which do not make sense on the LHS (PR 40351).
2035 	 Check that this is not the case.  */
2036       if (!build_ref_for_offset (NULL, TREE_TYPE (lacc->base), norm_offset,
2037 				 rchild->type, false))
2038 	continue;
2039 
2040       rchild->grp_hint = 1;
2041       new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2042       if (new_acc)
2043 	{
2044 	  ret = true;
2045 	  if (racc->first_child)
2046 	    propagate_subaccesses_across_link (new_acc, rchild);
2047 	}
2048     }
2049 
2050   return ret;
2051 }
2052 
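/* For example, for an assignment `dst = src;' where the representative of
   src has sub-accesses at <0, 32> and <32, 32> but dst so far has none, two
   artificial children of dst are created at the corresponding normalized
   offsets (write-only, as they will only be stored to when the copy is
   scalarized), so that the transformation phase can later move the data
   field by field through scalar replacements.  A conflicting, partially
   overlapping child already present on the LHS simply stops the propagation
   of that particular sub-access.  */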
2053 /* Propagate all subaccesses across assignment links.  */
2054 
2055 static void
2056 propagate_all_subaccesses (void)
2057 {
2058   while (work_queue_head)
2059     {
2060       struct access *racc = pop_access_from_work_queue ();
2061       struct assign_link *link;
2062 
2063       gcc_assert (racc->first_link);
2064 
2065       for (link = racc->first_link; link; link = link->next)
2066 	{
2067 	  struct access *lacc = link->lacc;
2068 
2069 	  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2070 	    continue;
2071 	  lacc = lacc->group_representative;
2072 	  if (propagate_subaccesses_across_link (lacc, racc)
2073 	      && lacc->first_link)
2074 	    add_access_to_work_queue (lacc);
2075 	}
2076     }
2077 }
2078 
2079 /* Go through all accesses collected throughout the (intraprocedural) analysis
2080    stage, exclude overlapping ones, identify representatives and build trees
2081    out of them, making decisions about scalarization on the way.  Return true
2082    iff there are any to-be-scalarized variables after this stage. */
2083 
2084 static bool
2085 analyze_all_variable_accesses (void)
2086 {
2087   int res = 0;
2088   bitmap tmp = BITMAP_ALLOC (NULL);
2089   bitmap_iterator bi;
2090   unsigned i, max_total_scalarization_size;
2091 
2092   max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2093     * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2094 
2095   EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2096     if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2097 	&& !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2098       {
2099 	tree var = referenced_var (i);
2100 
2101 	if (TREE_CODE (var) == VAR_DECL
2102 	    && ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2103 		<= max_total_scalarization_size)
2104 	    && type_consists_of_records_p (TREE_TYPE (var)))
2105 	  {
2106 	    completely_scalarize_record (var, var, 0);
2107 	    if (dump_file && (dump_flags & TDF_DETAILS))
2108 	      {
2109 		fprintf (dump_file, "Will attempt to totally scalarize ");
2110 		print_generic_expr (dump_file, var, 0);
2111 		fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2112 	      }
2113 	  }
2114       }
2115 
2116   bitmap_copy (tmp, candidate_bitmap);
2117   EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2118     {
2119       tree var = referenced_var (i);
2120       struct access *access;
2121 
2122       access = sort_and_splice_var_accesses (var);
2123       if (!access || !build_access_trees (access))
2124 	disqualify_candidate (var,
2125 			      "No or inhibitingly overlapping accesses.");
2126     }
2127 
2128   propagate_all_subaccesses ();
2129 
2130   bitmap_copy (tmp, candidate_bitmap);
2131   EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2132     {
2133       tree var = referenced_var (i);
2134       struct access *access = get_first_repr_for_decl (var);
2135 
2136       if (analyze_access_trees (access))
2137 	{
2138 	  res++;
2139 	  if (dump_file && (dump_flags & TDF_DETAILS))
2140 	    {
2141 	      fprintf (dump_file, "\nAccess trees for ");
2142 	      print_generic_expr (dump_file, var, 0);
2143 	      fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2144 	      dump_access_tree (dump_file, access);
2145 	      fprintf (dump_file, "\n");
2146 	    }
2147 	}
2148       else
2149 	disqualify_candidate (var, "No scalar replacements to be created.");
2150     }
2151 
2152   BITMAP_FREE (tmp);
2153 
2154   if (res)
2155     {
2156       statistics_counter_event (cfun, "Scalarized aggregates", res);
2157       return true;
2158     }
2159   else
2160     return false;
2161 }
2162 
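/* The total scalarization cap computed above is UNITS_PER_WORD * BITS_PER_UNIT
   * MOVE_RATIO; for instance, on a target with 8-byte words and a MOVE_RATIO
   of 4 that is 256 bits, so a sufficiently small record consisting only of
   (nested) records and scalar fields can have all of its fields given
   accesses up front by completely_scalarize_record.  */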
2163 /* Return true iff a reference into aggregate AGG can be built for
2164    every single to-be-replaced access that is a child of ACCESS, its sibling
2165    or a child of its sibling.  TOP_OFFSET is the offset of the processed
2166    access subtree that has to be subtracted from the offset of each access.  */
2167 
2168 static bool
2169 ref_expr_for_all_replacements_p (struct access *access, tree agg,
2170 				 HOST_WIDE_INT top_offset)
2171 {
2172   do
2173     {
2174       if (access->grp_to_be_replaced
2175 	  && !build_ref_for_offset (NULL, TREE_TYPE (agg),
2176 				    access->offset - top_offset,
2177 				    access->type, false))
2178 	return false;
2179 
2180       if (access->first_child
2181 	  && !ref_expr_for_all_replacements_p (access->first_child, agg,
2182 					       top_offset))
2183 	return false;
2184 
2185       access = access->next_sibling;
2186     }
2187   while (access);
2188 
2189   return true;
2190 }
2191 
2192 /* Generate statements copying scalar replacements of accesses within a subtree
2193    into or out of AGG.  ACCESS is the first child of the root of the subtree to
2194    be processed.  AGG is an aggregate type expression (can be a declaration but
2195    does not have to be, it can for example also be an indirect_ref).
2196    TOP_OFFSET is the offset of the processed subtree which has to be subtracted
2197    from offsets of individual accesses to get corresponding offsets for AGG.
2198    If CHUNK_SIZE is non-null, copy only replacements in the interval
2199    <start_offset, start_offset + chunk_size>, otherwise copy all.  GSI is a
2200    statement iterator used to place the new statements.  WRITE should be true
2201    when the statements should write from AGG to the replacement and false if
2202    vice versa.  If INSERT_AFTER is true, new statements will be added after the
2203    current statement in GSI; otherwise they will be added before that
2204    statement.  */
2205 
2206 static void
2207 generate_subtree_copies (struct access *access, tree agg,
2208 			 HOST_WIDE_INT top_offset,
2209 			 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2210 			 gimple_stmt_iterator *gsi, bool write,
2211 			 bool insert_after)
2212 {
2213   do
2214     {
2215       tree expr = agg;
2216 
2217       if (chunk_size && access->offset >= start_offset + chunk_size)
2218 	return;
2219 
2220       if (access->grp_to_be_replaced
2221 	  && (chunk_size == 0
2222 	      || access->offset + access->size > start_offset))
2223 	{
2224 	  tree repl = get_access_replacement (access);
2225 	  bool ref_found;
2226 	  gimple stmt;
2227 
2228 	  ref_found = build_ref_for_offset (&expr, TREE_TYPE (agg),
2229 					     access->offset - top_offset,
2230 					     access->type, false);
2231 	  gcc_assert (ref_found);
2232 
2233 	  if (write)
2234 	    {
2235 	      if (access->grp_partial_lhs)
2236 		expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2237 						 !insert_after,
2238 						 insert_after ? GSI_NEW_STMT
2239 						 : GSI_SAME_STMT);
2240 	      stmt = gimple_build_assign (repl, expr);
2241 	    }
2242 	  else
2243 	    {
2244 	      TREE_NO_WARNING (repl) = 1;
2245 	      if (access->grp_partial_lhs)
2246 		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2247 						 !insert_after,
2248 						 insert_after ? GSI_NEW_STMT
2249 						 : GSI_SAME_STMT);
2250 	      stmt = gimple_build_assign (expr, repl);
2251 	    }
2252 
2253 	  if (insert_after)
2254 	    gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2255 	  else
2256 	    gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2257 	  update_stmt (stmt);
2258 	  sra_stats.subtree_copies++;
2259 	}
2260 
2261       if (access->first_child)
2262 	generate_subtree_copies (access->first_child, agg, top_offset,
2263 				 start_offset, chunk_size, gsi,
2264 				 write, insert_after);
2265 
2266       access = access->next_sibling;
2267     }
2268   while (access);
2269 }
2270 
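/* As an illustration, for an aggregate `d' whose to-be-replaced children
   cover d.x and d.y, calling the above with WRITE set produces roughly

     SR.1 = d.x;
     SR.2 = d.y;

   whereas with WRITE unset it produces the opposite stores d.x = SR.1;
   d.y = SR.2; (SR.1 and SR.2 standing for whatever replacement temporaries
   the accesses have been given).  */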
2271 /* Assign zero to all scalar replacements in an access subtree.  ACCESS is the
2272    root of the subtree to be processed.  GSI is the statement iterator used
2273    for inserting statements which are added after the current statement if
2274    INSERT_AFTER is true or before it otherwise.  */
2275 
2276 static void
2277 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2278 			bool insert_after)
2279 
2280 {
2281   struct access *child;
2282 
2283   if (access->grp_to_be_replaced)
2284     {
2285       gimple stmt;
2286 
2287       stmt = gimple_build_assign (get_access_replacement (access),
2288 				  fold_convert (access->type,
2289 						integer_zero_node));
2290       if (insert_after)
2291 	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2292       else
2293 	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2294       update_stmt (stmt);
2295     }
2296 
2297   for (child = access->first_child; child; child = child->next_sibling)
2298     init_subtree_with_zero (child, gsi, insert_after);
2299 }
2300 
2301 /* Search for an access representative for the given expression EXPR and
2302    return it or NULL if it cannot be found.  */
2303 
2304 static struct access *
2305 get_access_for_expr (tree expr)
2306 {
2307   HOST_WIDE_INT offset, size, max_size;
2308   tree base;
2309 
2310   /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2311      a different size than the size of its argument and we need the latter
2312      one.  */
2313   if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2314     expr = TREE_OPERAND (expr, 0);
2315 
2316   base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2317   if (max_size == -1 || !DECL_P (base))
2318     return NULL;
2319 
2320   if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2321     return NULL;
2322 
2323   return get_var_base_offset_size_access (base, offset, max_size);
2324 }
2325 
2326 /* Callback for scan_function.  Replace the expression EXPR with a scalar
2327    replacement if there is one and generate other statements to do type
2328    conversion or subtree copying if necessary.  GSI is used to place newly
2329    created statements, WRITE is true if the expression is being written to (it
2330    is on a LHS of a statement or output in an assembly statement).  */
2331 
2332 static bool
2333 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write,
2334 		 void *data ATTRIBUTE_UNUSED)
2335 {
2336   struct access *access;
2337   tree type, bfr;
2338 
2339   if (TREE_CODE (*expr) == BIT_FIELD_REF)
2340     {
2341       bfr = *expr;
2342       expr = &TREE_OPERAND (*expr, 0);
2343     }
2344   else
2345     bfr = NULL_TREE;
2346 
2347   if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2348     expr = &TREE_OPERAND (*expr, 0);
2349   access = get_access_for_expr (*expr);
2350   if (!access)
2351     return false;
2352   type = TREE_TYPE (*expr);
2353 
2354   if (access->grp_to_be_replaced)
2355     {
2356       tree repl = get_access_replacement (access);
2357       /* If we replace a non-register typed access simply use the original
2358          access expression to extract the scalar component afterwards.
2359 	 This happens if scalarizing a function return value or parameter
2360 	 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2361 	 gcc.c-torture/compile/20011217-1.c.
2362 
2363          We also want to use this when accessing a complex or vector which can
2364          be accessed as a different type too, potentially creating a need for
2365          type conversion (see PR42196) and when scalarized unions are involved
2366          in assembler statements (see PR42398).  */
2367       if (!useless_type_conversion_p (type, access->type))
2368 	{
2369 	  tree ref = access->base;
2370 	  bool ok;
2371 
2372 	  ok = build_ref_for_offset (&ref, TREE_TYPE (ref),
2373 				     access->offset, access->type, false);
2374 	  gcc_assert (ok);
2375 
2376 	  if (write)
2377 	    {
2378 	      gimple stmt;
2379 
2380 	      if (access->grp_partial_lhs)
2381 		ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2382 						 false, GSI_NEW_STMT);
2383 	      stmt = gimple_build_assign (repl, ref);
2384 	      gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2385 	    }
2386 	  else
2387 	    {
2388 	      gimple stmt;
2389 
2390 	      if (access->grp_partial_lhs)
2391 		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2392 						 true, GSI_SAME_STMT);
2393 	      stmt = gimple_build_assign (ref, repl);
2394 	      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2395 	    }
2396 	}
2397       else
2398 	*expr = repl;
2399       sra_stats.exprs++;
2400     }
2401 
2402   if (access->first_child)
2403     {
2404       HOST_WIDE_INT start_offset, chunk_size;
2405       if (bfr
2406 	  && host_integerp (TREE_OPERAND (bfr, 1), 1)
2407 	  && host_integerp (TREE_OPERAND (bfr, 2), 1))
2408 	{
2409 	  chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2410 	  start_offset = access->offset
2411 	    + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2412 	}
2413       else
2414 	start_offset = chunk_size = 0;
2415 
2416       generate_subtree_copies (access->first_child, access->base, 0,
2417 			       start_offset, chunk_size, gsi, write, write);
2418     }
2419   return true;
2420 }
2421 
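/* In the common case a load such as `tmp = p.f' whose access is marked
   grp_to_be_replaced is simply rewritten to use the replacement, e.g.
   `tmp = SR.3'.  When the expression's type does not trivially match the
   access type (complex/vector components, scalarized unions in asm
   statements and the like), an extra assignment through a reference to the
   original aggregate is inserted before or after the statement instead.  */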
2422 /* Where scalar replacements of the RHS have been written to when a replacement
2423    of the LHS of an assignment cannot be directly loaded from a replacement of
2424    the RHS.  */
2425 enum unscalarized_data_handling { SRA_UDH_NONE,  /* Nothing done so far. */
2426 				  SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2427 				  SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2428 
2429 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2430    base aggregate if there are unscalarized data or directly to LHS
2431    otherwise.  */
2432 
2433 static enum unscalarized_data_handling
2434 handle_unscalarized_data_in_subtree (struct access *top_racc, tree lhs,
2435 				     gimple_stmt_iterator *gsi)
2436 {
2437   if (top_racc->grp_unscalarized_data)
2438     {
2439       generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2440 			       gsi, false, false);
2441       return SRA_UDH_RIGHT;
2442     }
2443   else
2444     {
2445       generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2446 			       0, 0, gsi, false, false);
2447       return SRA_UDH_LEFT;
2448     }
2449 }
2450 
2451 
2452 /* Try to generate statements to load all sub-replacements in an access
2453    (sub)tree (LACC is the first child) from scalar replacements in the TOP_RACC
2454    (sub)tree.  If that is not possible, refresh the TOP_RACC base aggregate and
2455    load the accesses from it.  LEFT_OFFSET is the offset of the left whole
2456    subtree being copied, RIGHT_OFFSET is the same thing for the right subtree.
2457    NEW_GSI is stmt iterator used for statement insertions after the original
2458    assignment, OLD_GSI is used to insert statements before the assignment.
2459    *REFRESHED keeps the information whether we have needed to refresh
2460    replacements of the LHS and from which side of the assignment this takes
2461    place.  */
2462 
2463 static void
2464 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2465 				 HOST_WIDE_INT left_offset,
2466 				 HOST_WIDE_INT right_offset,
2467 				 gimple_stmt_iterator *old_gsi,
2468 				 gimple_stmt_iterator *new_gsi,
2469 				 enum unscalarized_data_handling *refreshed,
2470 				 tree lhs)
2471 {
2472   location_t loc = EXPR_LOCATION (lacc->expr);
2473   do
2474     {
2475       if (lacc->grp_to_be_replaced)
2476 	{
2477 	  struct access *racc;
2478 	  HOST_WIDE_INT offset = lacc->offset - left_offset + right_offset;
2479 	  gimple stmt;
2480 	  tree rhs;
2481 
2482 	  racc = find_access_in_subtree (top_racc, offset, lacc->size);
2483 	  if (racc && racc->grp_to_be_replaced)
2484 	    {
2485 	      rhs = get_access_replacement (racc);
2486 	      if (!useless_type_conversion_p (lacc->type, racc->type))
2487 		rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2488 	    }
2489 	  else
2490 	    {
2491 	      /* No suitable access on the right hand side, need to load from
2492 		 the aggregate.  See if we have to update it first... */
2493 	      if (*refreshed == SRA_UDH_NONE)
2494 		*refreshed = handle_unscalarized_data_in_subtree (top_racc,
2495 								  lhs, old_gsi);
2496 
2497 	      if (*refreshed == SRA_UDH_LEFT)
2498 		{
2499 		  bool repl_found;
2500 
2501 		  rhs = lacc->base;
2502 		  repl_found = build_ref_for_offset (&rhs, TREE_TYPE (rhs),
2503 						     lacc->offset, lacc->type,
2504 						     false);
2505 		  gcc_assert (repl_found);
2506 		}
2507 	      else
2508 		{
2509 		  bool repl_found;
2510 
2511 		  rhs = top_racc->base;
2512 		  repl_found = build_ref_for_offset (&rhs,
2513 						     TREE_TYPE (top_racc->base),
2514 						     offset, lacc->type, false);
2515 		  gcc_assert (repl_found);
2516 		}
2517 	    }
2518 
2519 	  stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2520 	  gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2521 	  update_stmt (stmt);
2522 	  sra_stats.subreplacements++;
2523 	}
2524       else if (*refreshed == SRA_UDH_NONE
2525 	       && lacc->grp_read && !lacc->grp_covered)
2526 	*refreshed = handle_unscalarized_data_in_subtree (top_racc, lhs,
2527 							  old_gsi);
2528 
2529       if (lacc->first_child)
2530 	load_assign_lhs_subreplacements (lacc->first_child, top_racc,
2531 					 left_offset, right_offset,
2532 					 old_gsi, new_gsi, refreshed, lhs);
2533       lacc = lacc->next_sibling;
2534     }
2535   while (lacc);
2536 }
2537 
2538 /* Modify assignments with a CONSTRUCTOR on their RHS.  STMT contains a pointer
2539    to the assignment and GSI is the statement iterator pointing at it.  Returns
2540    the same values as sra_modify_assign.  */
2541 
2542 static enum scan_assign_result
2543 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2544 {
2545   tree lhs = gimple_assign_lhs (*stmt);
2546   struct access *acc;
2547 
2548   acc = get_access_for_expr (lhs);
2549   if (!acc)
2550     return SRA_SA_NONE;
2551 
2552   if (VEC_length (constructor_elt,
2553 		  CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2554     {
2555       /* I have never seen this code path trigger but if it can happen the
2556 	 following should handle it gracefully.  */
2557       if (access_has_children_p (acc))
2558 	generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2559 				 true, true);
2560       return SRA_SA_PROCESSED;
2561     }
2562 
2563   if (acc->grp_covered)
2564     {
2565       init_subtree_with_zero (acc, gsi, false);
2566       unlink_stmt_vdef (*stmt);
2567       gsi_remove (gsi, true);
2568       return SRA_SA_REMOVED;
2569     }
2570   else
2571     {
2572       init_subtree_with_zero (acc, gsi, true);
2573       return SRA_SA_PROCESSED;
2574     }
2575 }
2576 
2577 /* Create a new suitable default definition SSA_NAME and replace all uses of
2578    SSA with it.  RACC is the access describing the uninitialized part of an
2579    aggregate that is being loaded.  */
2580 
2581 static void
2582 replace_uses_with_default_def_ssa_name (tree ssa, struct access *racc)
2583 {
2584   tree repl, decl;
2585 
2586   decl = get_unrenamed_access_replacement (racc);
2587 
2588   repl = gimple_default_def (cfun, decl);
2589   if (!repl)
2590     {
2591       repl = make_ssa_name (decl, gimple_build_nop ());
2592       set_default_def (decl, repl);
2593     }
2594 
2595   replace_uses_by (ssa, repl);
2596 }
2597 
2598 /* Callback of scan_function to process assign statements.  It examines both
2599    sides of the statement, replaces them with a scalar replacement if there is
2600    one and generates copying of replacements if scalarized aggregates have been
2601    used in the assignment.  STMT is a pointer to the assign statement, GSI is
2602    used to hold generated statements for type conversions and subtree
2603    copying.  */
2604 
2605 static enum scan_assign_result
2606 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
2607 		   void *data ATTRIBUTE_UNUSED)
2608 {
2609   struct access *lacc, *racc;
2610   tree lhs, rhs;
2611   bool modify_this_stmt = false;
2612   bool force_gimple_rhs = false;
2613   location_t loc = gimple_location (*stmt);
2614   gimple_stmt_iterator orig_gsi = *gsi;
2615 
2616   if (!gimple_assign_single_p (*stmt))
2617     return SRA_SA_NONE;
2618   lhs = gimple_assign_lhs (*stmt);
2619   rhs = gimple_assign_rhs1 (*stmt);
2620 
2621   if (TREE_CODE (rhs) == CONSTRUCTOR)
2622     return sra_modify_constructor_assign (stmt, gsi);
2623 
2624   if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
2625       || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
2626       || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
2627     {
2628       modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
2629 					  gsi, false, data);
2630       modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
2631 					   gsi, true, data);
2632       return modify_this_stmt ? SRA_SA_PROCESSED : SRA_SA_NONE;
2633     }
2634 
2635   lacc = get_access_for_expr (lhs);
2636   racc = get_access_for_expr (rhs);
2637   if (!lacc && !racc)
2638     return SRA_SA_NONE;
2639 
2640   if (lacc && lacc->grp_to_be_replaced)
2641     {
2642       lhs = get_access_replacement (lacc);
2643       gimple_assign_set_lhs (*stmt, lhs);
2644       modify_this_stmt = true;
2645       if (lacc->grp_partial_lhs)
2646 	force_gimple_rhs = true;
2647       sra_stats.exprs++;
2648     }
2649 
2650   if (racc && racc->grp_to_be_replaced)
2651     {
2652       rhs = get_access_replacement (racc);
2653       modify_this_stmt = true;
2654       if (racc->grp_partial_lhs)
2655 	force_gimple_rhs = true;
2656       sra_stats.exprs++;
2657     }
2658 
2659   if (modify_this_stmt)
2660     {
2661       if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2662 	{
2663 	  /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
2664 	     ???  This should move to fold_stmt which we simply should
2665 	     call after building a VIEW_CONVERT_EXPR here.  */
2666 	  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
2667 	      && !access_has_children_p (lacc))
2668 	    {
2669 	      tree expr = lhs;
2670 	      if (build_ref_for_offset (&expr, TREE_TYPE (lhs), 0,
2671 					TREE_TYPE (rhs), false))
2672 		{
2673 		  lhs = expr;
2674 		  gimple_assign_set_lhs (*stmt, expr);
2675 		}
2676 	    }
2677 	  else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
2678 		   && !access_has_children_p (racc))
2679 	    {
2680 	      tree expr = rhs;
2681 	      if (build_ref_for_offset (&expr, TREE_TYPE (rhs), 0,
2682 					TREE_TYPE (lhs), false))
2683 		rhs = expr;
2684 	    }
2685 	  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
2686 	    {
2687 	      rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
2688 	      if (is_gimple_reg_type (TREE_TYPE (lhs))
2689 		  && TREE_CODE (lhs) != SSA_NAME)
2690 		force_gimple_rhs = true;
2691 	    }
2692 	}
2693     }
2694 
2695   /* From this point on, the function deals with assignments in between
2696      aggregates when at least one has scalar reductions of some of its
2697      components.  There are three possible scenarios: 1) both the LHS and RHS have
2698      to-be-scalarized components, 2) only the RHS does or 3) only the LHS does.
2699 
2700      In the first case, we would like to load the LHS components from RHS
2701      components whenever possible.  If that is not possible, we would like to
2702      read it directly from the RHS (after updating it by storing in it its own
2703      components).  If there are some necessary unscalarized data in the LHS,
2704      those will be loaded by the original assignment too.  If neither of these
2705      cases happen, the original statement can be removed.  Most of this is done
2706      by load_assign_lhs_subreplacements.
2707 
2708      In the second case, we would like to store all RHS scalarized components
2709      directly into LHS and if they cover the aggregate completely, remove the
2710      statement too.  In the third case, we want the LHS components to be loaded
2711      directly from the RHS (DSE will remove the original statement if it
2712      becomes redundant).
2713 
2714      This is a bit complex but manageable when types match and when unions do
2715      not cause confusion in a way that we cannot really load a component of LHS
2716      from the RHS or vice versa (the access representing this level can have
2717      subaccesses that are accessible only through a different union field at a
2718      higher level - different from the one used in the examined expression).
2719      Unions are fun.
2720 
2721      Therefore, I specially handle a fourth case, happening when there is a
2722      specific type cast or it is impossible to locate a scalarized subaccess on
2723      the other side of the expression.  If that happens, I simply "refresh" the
2724      RHS by storing its scalarized components in it, leave the original statement
2725      there to do the copying and then load the scalar replacements of the LHS.
2726      This is what the first branch does.  */
2727 
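  /* As an example of the first branch below, an assignment between
     aggregates performed through a VIEW_CONVERT_EXPR is handled by storing
     the RHS scalar replacements back into the RHS aggregate before the
     statement, keeping the original aggregate copy, and re-loading the LHS
     replacements from the LHS aggregate after it.  */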
2728   if (gimple_has_volatile_ops (*stmt)
2729       || contains_view_convert_expr_p (rhs)
2730       || contains_view_convert_expr_p (lhs)
2731       || (access_has_children_p (racc)
2732 	  && !ref_expr_for_all_replacements_p (racc, lhs, racc->offset))
2733       || (access_has_children_p (lacc)
2734 	  && !ref_expr_for_all_replacements_p (lacc, rhs, lacc->offset)))
2735     {
2736       if (access_has_children_p (racc))
2737 	generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
2738 				 gsi, false, false);
2739       if (access_has_children_p (lacc))
2740 	generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
2741 				 gsi, true, true);
2742       sra_stats.separate_lhs_rhs_handling++;
2743     }
2744   else
2745     {
2746       if (access_has_children_p (lacc)
2747 	  && access_has_children_p (racc)
2748 	  /* When an access represents an unscalarizable region, it usually
2749 	     represents accesses with variable offset and thus must not be used
2750 	     to generate new memory accesses.  */
2751 	  && !lacc->grp_unscalarizable_region
2752 	  && !racc->grp_unscalarizable_region)
2753 	{
2754 	  gimple_stmt_iterator orig_gsi = *gsi;
2755 	  enum unscalarized_data_handling refreshed;
2756 
2757 	  if (lacc->grp_read && !lacc->grp_covered)
2758 	    refreshed = handle_unscalarized_data_in_subtree (racc, lhs, gsi);
2759 	  else
2760 	    refreshed = SRA_UDH_NONE;
2761 
2762 	  load_assign_lhs_subreplacements (lacc->first_child, racc,
2763 					   lacc->offset, racc->offset,
2764 					   &orig_gsi, gsi, &refreshed, lhs);
2765 	  if (refreshed != SRA_UDH_RIGHT)
2766 	    {
2767 	      gsi_next (gsi);
2768 	      unlink_stmt_vdef (*stmt);
2769 	      gsi_remove (&orig_gsi, true);
2770 	      sra_stats.deleted++;
2771 	      return SRA_SA_REMOVED;
2772 	    }
2773 	}
2774       else
2775 	{
2776 	  if (racc)
2777 	    {
2778 	      if (!racc->grp_to_be_replaced && !racc->grp_unscalarized_data)
2779 		{
2780 		  if (racc->first_child)
2781 		    generate_subtree_copies (racc->first_child, lhs,
2782 					     racc->offset, 0, 0, gsi,
2783 					     false, false);
2784 		  gcc_assert (*stmt == gsi_stmt (*gsi));
2785 		  if (TREE_CODE (lhs) == SSA_NAME)
2786 		    replace_uses_with_default_def_ssa_name (lhs, racc);
2787 
2788 		  unlink_stmt_vdef (*stmt);
2789 		  gsi_remove (gsi, true);
2790 		  sra_stats.deleted++;
2791 		  return SRA_SA_REMOVED;
2792 		}
2793 	      else if (racc->first_child)
2794 		generate_subtree_copies (racc->first_child, lhs,
2795 					 racc->offset, 0, 0, gsi, false, true);
2796 	    }
2797 	  if (access_has_children_p (lacc))
2798 	    generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
2799 				     0, 0, gsi, true, true);
2800 	}
2801     }
2802 
2803   /* This gimplification must be done after generate_subtree_copies, lest we
2804      insert the subtree copies in the middle of the gimplified sequence.  */
2805   if (force_gimple_rhs)
2806     rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
2807 				    true, GSI_SAME_STMT);
2808   if (gimple_assign_rhs1 (*stmt) != rhs)
2809     {
2810       gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
2811       gcc_assert (*stmt == gsi_stmt (orig_gsi));
2812     }
2813 
2814   return modify_this_stmt ? SRA_SA_PROCESSED : SRA_SA_NONE;
2815 }
2816 
2817 /* Generate statements initializing scalar replacements of parts of function
2818    parameters.  */
2819 
2820 static void
2821 initialize_parameter_reductions (void)
2822 {
2823   gimple_stmt_iterator gsi;
2824   gimple_seq seq = NULL;
2825   tree parm;
2826 
2827   for (parm = DECL_ARGUMENTS (current_function_decl);
2828        parm;
2829        parm = TREE_CHAIN (parm))
2830     {
2831       VEC (access_p, heap) *access_vec;
2832       struct access *access;
2833 
2834       if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
2835 	continue;
2836       access_vec = get_base_access_vector (parm);
2837       if (!access_vec)
2838 	continue;
2839 
2840       if (!seq)
2841 	{
2842 	  seq = gimple_seq_alloc ();
2843 	  gsi = gsi_start (seq);
2844 	}
2845 
2846       for (access = VEC_index (access_p, access_vec, 0);
2847 	   access;
2848 	   access = access->next_grp)
2849 	generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true);
2850     }
2851 
2852   if (seq)
2853     gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
2854 }
2855 
2856 /* The "main" function of intraprocedural SRA passes.  Runs the analysis and if
2857    it reveals there are components of some aggregates to be scalarized, it runs
2858    the required transformations.  */
2859 static unsigned int
2860 perform_intra_sra (void)
2861 {
2862   int ret = 0;
2863   sra_initialize ();
2864 
2865   if (!find_var_candidates ())
2866     goto out;
2867 
2868   if (!scan_function (build_access_from_expr, build_accesses_from_assign, NULL,
2869 		      true, NULL))
2870     goto out;
2871 
2872   if (!analyze_all_variable_accesses ())
2873     goto out;
2874 
2875   scan_function (sra_modify_expr, sra_modify_assign, NULL, false, NULL);
2876   initialize_parameter_reductions ();
2877 
2878   statistics_counter_event (cfun, "Scalar replacements created",
2879 			    sra_stats.replacements);
2880   statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
2881   statistics_counter_event (cfun, "Subtree copy stmts",
2882 			    sra_stats.subtree_copies);
2883   statistics_counter_event (cfun, "Subreplacement stmts",
2884 			    sra_stats.subreplacements);
2885   statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
2886   statistics_counter_event (cfun, "Separate LHS and RHS handling",
2887 			    sra_stats.separate_lhs_rhs_handling);
2888 
2889   if (cfg_changed)
2890     ret = TODO_update_ssa | TODO_cleanup_cfg;
2891   else
2892     ret = TODO_update_ssa;
2893 
2894  out:
2895   sra_deinitialize ();
2896   return ret;
2897 }
2898 
2899 /* Perform early intraprocedural SRA.  */
2900 static unsigned int
2901 early_intra_sra (void)
2902 {
2903   sra_mode = SRA_MODE_EARLY_INTRA;
2904   return perform_intra_sra ();
2905 }
2906 
2907 /* Perform "late" intraprocedural SRA.  */
2908 static unsigned int
2909 late_intra_sra (void)
2910 {
2911   sra_mode = SRA_MODE_INTRA;
2912   return perform_intra_sra ();
2913 }
2914 
2915 
2916 static bool
2917 gate_intra_sra (void)
2918 {
2919   return flag_tree_sra != 0;
2920 }
2921 
2922 
2923 struct gimple_opt_pass pass_sra_early =
2924 {
2925  {
2926   GIMPLE_PASS,
2927   "esra",	 			/* name */
2928   gate_intra_sra,			/* gate */
2929   early_intra_sra,			/* execute */
2930   NULL,					/* sub */
2931   NULL,					/* next */
2932   0,					/* static_pass_number */
2933   TV_TREE_SRA,				/* tv_id */
2934   PROP_cfg | PROP_ssa,                  /* properties_required */
2935   0,					/* properties_provided */
2936   0,					/* properties_destroyed */
2937   0,					/* todo_flags_start */
2938   TODO_dump_func
2939   | TODO_update_ssa
2940   | TODO_ggc_collect
2941   | TODO_verify_ssa			/* todo_flags_finish */
2942  }
2943 };
2944 
2945 struct gimple_opt_pass pass_sra =
2946 {
2947  {
2948   GIMPLE_PASS,
2949   "sra",	 			/* name */
2950   gate_intra_sra,			/* gate */
2951   late_intra_sra,			/* execute */
2952   NULL,					/* sub */
2953   NULL,					/* next */
2954   0,					/* static_pass_number */
2955   TV_TREE_SRA,				/* tv_id */
2956   PROP_cfg | PROP_ssa,                  /* properties_required */
2957   0,					/* properties_provided */
2958   0,					/* properties_destroyed */
2959   TODO_update_address_taken,		/* todo_flags_start */
2960   TODO_dump_func
2961   | TODO_update_ssa
2962   | TODO_ggc_collect
2963   | TODO_verify_ssa			/* todo_flags_finish */
2964  }
2965 };
2966 
2967 
2968 /* Return true iff PARM (which must be a PARM_DECL) is an unused scalar
2969    parameter.  */
2970 
2971 static bool
2972 is_unused_scalar_param (tree parm)
2973 {
2974   tree name;
2975   return (is_gimple_reg (parm)
2976 	  && (!(name = gimple_default_def (cfun, parm))
2977 	      || has_zero_uses (name)));
2978 }
2979 
2980 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
2981    examine whether there are any direct or otherwise infeasible ones.  If so,
2982    return true, otherwise return false.  PARM must be a gimple register with a
2983    non-NULL default definition.  */
2984 
2985 static bool
2986 ptr_parm_has_direct_uses (tree parm)
2987 {
2988   imm_use_iterator ui;
2989   gimple stmt;
2990   tree name = gimple_default_def (cfun, parm);
2991   bool ret = false;
2992 
2993   FOR_EACH_IMM_USE_STMT (stmt, ui, name)
2994     {
2995       int uses_ok = 0;
2996       use_operand_p use_p;
2997 
2998       if (is_gimple_debug (stmt))
2999 	continue;
3000 
3001       /* Valid uses include dereferences on the lhs and the rhs.  */
3002       if (gimple_has_lhs (stmt))
3003 	{
3004 	  tree lhs = gimple_get_lhs (stmt);
3005 	  while (handled_component_p (lhs))
3006 	    lhs = TREE_OPERAND (lhs, 0);
3007 	  if (INDIRECT_REF_P (lhs)
3008 	      && TREE_OPERAND (lhs, 0) == name)
3009 	    uses_ok++;
3010 	}
3011       if (gimple_assign_single_p (stmt))
3012 	{
3013 	  tree rhs = gimple_assign_rhs1 (stmt);
3014 	  while (handled_component_p (rhs))
3015 	    rhs = TREE_OPERAND (rhs, 0);
3016 	  if (INDIRECT_REF_P (rhs)
3017 	      && TREE_OPERAND (rhs, 0) == name)
3018 	    uses_ok++;
3019 	}
3020       else if (is_gimple_call (stmt))
3021 	{
3022 	  unsigned i;
3023 	  for (i = 0; i < gimple_call_num_args (stmt); ++i)
3024 	    {
3025 	      tree arg = gimple_call_arg (stmt, i);
3026 	      while (handled_component_p (arg))
3027 		arg = TREE_OPERAND (arg, 0);
3028 	      if (INDIRECT_REF_P (arg)
3029 		  && TREE_OPERAND (arg, 0) == name)
3030 		uses_ok++;
3031 	    }
3032 	}
3033 
3034       /* If the number of valid uses does not match the number of
3035          uses in this stmt there is an unhandled use.  */
3036       FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3037 	--uses_ok;
3038 
3039       if (uses_ok != 0)
3040 	ret = true;
3041 
3042       if (ret)
3043 	BREAK_FROM_IMM_USE_STMT (ui);
3044     }
3045 
3046   return ret;
3047 }
3048 
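/* For a parameter such as `int *p', a use like `*p' on either side of an
   assignment or as a dereferenced call argument is considered fine by the
   function above, whereas passing `p' itself to another function, copying or
   comparing it, or doing pointer arithmetic on it counts as a direct use and
   makes the parameter unsuitable for this transformation.  */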
3049 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3050    them in candidate_bitmap.  Note that these do not necessarily include
3051    parameters which are unused and thus can be removed.  Return true iff any
3052    such candidate has been found.  */
3053 
3054 static bool
3055 find_param_candidates (void)
3056 {
3057   tree parm;
3058   int count = 0;
3059   bool ret = false;
3060 
3061   for (parm = DECL_ARGUMENTS (current_function_decl);
3062        parm;
3063        parm = TREE_CHAIN (parm))
3064     {
3065       tree type = TREE_TYPE (parm);
3066 
3067       count++;
3068 
3069       if (TREE_THIS_VOLATILE (parm)
3070 	  || TREE_ADDRESSABLE (parm)
3071 	  || is_va_list_type (type))
3072 	continue;
3073 
3074       if (is_unused_scalar_param (parm))
3075 	{
3076 	  ret = true;
3077 	  continue;
3078 	}
3079 
3080       if (POINTER_TYPE_P (type))
3081 	{
3082 	  type = TREE_TYPE (type);
3083 
3084 	  if (TREE_CODE (type) == FUNCTION_TYPE
3085 	      || TYPE_VOLATILE (type)
3086 	      || !is_gimple_reg (parm)
3087 	      || is_va_list_type (type)
3088 	      || ptr_parm_has_direct_uses (parm))
3089 	    continue;
3090 	}
3091       else if (!AGGREGATE_TYPE_P (type))
3092 	continue;
3093 
3094       if (!COMPLETE_TYPE_P (type)
3095 	  || !host_integerp (TYPE_SIZE (type), 1)
3096           || tree_low_cst (TYPE_SIZE (type), 1) == 0
3097 	  || (AGGREGATE_TYPE_P (type)
3098 	      && type_internals_preclude_sra_p (type)))
3099 	continue;
3100 
3101       bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3102       ret = true;
3103       if (dump_file && (dump_flags & TDF_DETAILS))
3104 	{
3105 	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3106 	  print_generic_expr (dump_file, parm, 0);
3107 	  fprintf (dump_file, "\n");
3108 	}
3109     }
3110 
3111   func_param_count = count;
3112   return ret;
3113 }
3114 
3115 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3116    maybe_modified. */
3117 
3118 static bool
3119 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3120 		     void *data)
3121 {
3122   struct access *repr = (struct access *) data;
3123 
3124   repr->grp_maybe_modified = 1;
3125   return true;
3126 }
3127 
3128 /* Analyze what representatives (in linked lists accessible from
3129    REPRESENTATIVES) can be modified by side effects of statements in the
3130    current function.  */
3131 
3132 static void
3133 analyze_modified_params (VEC (access_p, heap) *representatives)
3134 {
3135   int i;
3136 
3137   for (i = 0; i < func_param_count; i++)
3138     {
3139       struct access *repr;
3140 
3141       for (repr = VEC_index (access_p, representatives, i);
3142 	   repr;
3143 	   repr = repr->next_grp)
3144 	{
3145 	  struct access *access;
3146 	  bitmap visited;
3147 	  ao_ref ar;
3148 
3149 	  if (no_accesses_p (repr))
3150 	    continue;
3151 	  if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3152 	      || repr->grp_maybe_modified)
3153 	    continue;
3154 
3155 	  ao_ref_init (&ar, repr->expr);
3156 	  visited = BITMAP_ALLOC (NULL);
3157 	  for (access = repr; access; access = access->next_sibling)
3158 	    {
3159 	      /* All accesses are read ones, otherwise grp_maybe_modified would
3160 		 be trivially set.  */
3161 	      walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3162 				  mark_maybe_modified, repr, &visited);
3163 	      if (repr->grp_maybe_modified)
3164 		break;
3165 	    }
3166 	  BITMAP_FREE (visited);
3167 	}
3168     }
3169 }
3170 
3171 /* Propagate distances in bb_dereferences in the opposite direction to the
3172    control flow edges, in each step storing the maximum of the current value
3173    and the minimum of all successors.  These steps are repeated until the table
3174    stabilizes.  Note that BBs which might terminate the function (according to
3175    the final_bbs bitmap) are never updated in this way.  */
3176 
3177 static void
3178 propagate_dereference_distances (void)
3179 {
3180   VEC (basic_block, heap) *queue;
3181   basic_block bb;
3182 
3183   queue = VEC_alloc (basic_block, heap, last_basic_block_for_function (cfun));
3184   VEC_quick_push (basic_block, queue, ENTRY_BLOCK_PTR);
3185   FOR_EACH_BB (bb)
3186     {
3187       VEC_quick_push (basic_block, queue, bb);
3188       bb->aux = bb;
3189     }
3190 
3191   while (!VEC_empty (basic_block, queue))
3192     {
3193       edge_iterator ei;
3194       edge e;
3195       bool change = false;
3196       int i;
3197 
3198       bb = VEC_pop (basic_block, queue);
3199       bb->aux = NULL;
3200 
3201       if (bitmap_bit_p (final_bbs, bb->index))
3202 	continue;
3203 
3204       for (i = 0; i < func_param_count; i++)
3205 	{
3206 	  int idx = bb->index * func_param_count + i;
3207 	  bool first = true;
3208 	  HOST_WIDE_INT inh = 0;
3209 
3210 	  FOR_EACH_EDGE (e, ei, bb->succs)
3211 	  {
3212 	    int succ_idx = e->dest->index * func_param_count + i;
3213 
3214 	    if (e->src == EXIT_BLOCK_PTR)
3215 	      continue;
3216 
3217 	    if (first)
3218 	      {
3219 		first = false;
3220 		inh = bb_dereferences [succ_idx];
3221 	      }
3222 	    else if (bb_dereferences [succ_idx] < inh)
3223 	      inh = bb_dereferences [succ_idx];
3224 	  }
3225 
3226 	  if (!first && bb_dereferences[idx] < inh)
3227 	    {
3228 	      bb_dereferences[idx] = inh;
3229 	      change = true;
3230 	    }
3231 	}
3232 
3233       if (change && !bitmap_bit_p (final_bbs, bb->index))
3234 	FOR_EACH_EDGE (e, ei, bb->preds)
3235 	  {
3236 	    if (e->src->aux)
3237 	      continue;
3238 
3239 	    e->src->aux = e->src;
3240 	    VEC_quick_push (basic_block, queue, e->src);
3241 	  }
3242     }
3243 
3244   VEC_free (basic_block, heap, queue);
3245 }
3246 
3247 /* Dump a dereferences TABLE with heading STR to file F.  */
3248 
3249 static void
3250 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3251 {
3252   basic_block bb;
3253 
3254   fprintf (f, "%s", str);
3255   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3256     {
3257       fprintf (f, "%4i  %i   ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3258       if (bb != EXIT_BLOCK_PTR)
3259 	{
3260 	  int i;
3261 	  for (i = 0; i < func_param_count; i++)
3262 	    {
3263 	      int idx = bb->index * func_param_count + i;
3264 	      fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3265 	    }
3266 	}
3267       fprintf (f, "\n");
3268     }
3269   fprintf (f, "\n");
3270 }
3271 
3272 /* Determine which (parts of) parameters passed by reference and not
3273    assigned to are not certainly dereferenced in this function, and whose
3274    dereferencing thus cannot be safely moved to the caller without potentially
3275    introducing a segfault.  Mark such REPRESENTATIVES as
3276    grp_not_necessarilly_dereferenced.
3277 
3278    The maximum dereferenced "distance," i.e. the offset + size of the accessed
3279    part, is calculated for each pointer parameter rather than a simple boolean,
3280    to handle cases when only a fraction of the whole
3281    aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3282    an example).
3283 
3284    The maximum dereference distances for each pointer parameter and BB are
3285    already stored in bb_dereferences.  This routine simply propagates these
3286    values upwards by propagate_dereference_distances and then compares the
3287    distances of individual parameters in the ENTRY BB to the equivalent
3288    distances of each representative of a (fraction of a) parameter.  */
3289 
3290 static void
3291 analyze_caller_dereference_legality (VEC (access_p, heap) *representatives)
3292 {
3293   int i;
3294 
3295   if (dump_file && (dump_flags & TDF_DETAILS))
3296     dump_dereferences_table (dump_file,
3297 			     "Dereference table before propagation:\n",
3298 			     bb_dereferences);
3299 
3300   propagate_dereference_distances ();
3301 
3302   if (dump_file && (dump_flags & TDF_DETAILS))
3303     dump_dereferences_table (dump_file,
3304 			     "Dereference table after propagation:\n",
3305 			     bb_dereferences);
3306 
3307   for (i = 0; i < func_param_count; i++)
3308     {
3309       struct access *repr = VEC_index (access_p, representatives, i);
3310       int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3311 
3312       if (!repr || no_accesses_p (repr))
3313 	continue;
3314 
3315       do
3316 	{
3317 	  if ((repr->offset + repr->size) > bb_dereferences[idx])
3318 	    repr->grp_not_necessarilly_dereferenced = 1;
3319 	  repr = repr->next_grp;
3320 	}
3321       while (repr);
3322     }
3323 }
3324 
3325 /* Return the representative access for the parameter declaration PARM if it is
3326    a scalar passed by reference which is not written to and the pointer value
3327    is not used directly.  Thus, if it is legal to dereference it in the caller
3328    and we can rule out modifications through aliases, such a parameter should
3329    be turned into one passed by value.  Return NULL otherwise.  */
3330 
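/* An illustrative sketch (hypothetical input, not quoted from any testcase)
   of a parameter the function below accepts:

       static long
       callee (const long *p)
       {
         return *p + 1;
       }

   The pointee is only read and the pointer itself is never compared, stored
   or passed on, so callers could pass the long directly by value once
   dereferencing it in them is known to be safe.  */
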
3331 static struct access *
3332 unmodified_by_ref_scalar_representative (tree parm)
3333 {
3334   int i, access_count;
3335   struct access *repr;
3336   VEC (access_p, heap) *access_vec;
3337 
3338   access_vec = get_base_access_vector (parm);
3339   gcc_assert (access_vec);
3340   repr = VEC_index (access_p, access_vec, 0);
3341   if (repr->write)
3342     return NULL;
3343   repr->group_representative = repr;
3344 
3345   access_count = VEC_length (access_p, access_vec);
3346   for (i = 1; i < access_count; i++)
3347     {
3348       struct access *access = VEC_index (access_p, access_vec, i);
3349       if (access->write)
3350 	return NULL;
3351       access->group_representative = repr;
3352       access->next_sibling = repr->next_sibling;
3353       repr->next_sibling = access;
3354     }
3355 
3356   repr->grp_read = 1;
3357   repr->grp_scalar_ptr = 1;
3358   return repr;
3359 }
3360 
3361 /* Return true iff this access precludes IPA-SRA of the parameter it is
3362    associated with. */
3363 
3364 static bool
3365 access_precludes_ipa_sra_p (struct access *access)
3366 {
3367   /* Avoid issues such as the second simple testcase in PR 42025.  The problem
3368      is incompatible assign in a call statement (and possibly even in asm
3369      statements).  This can be relaxed by using a new temporary but only for
3370      non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3371      intraprocedural SRA we deal with this by keeping the old aggregate around,
3372      something we cannot do in IPA-SRA.)  */
3373   if (access->write
3374       && (is_gimple_call (access->stmt)
3375 	  || gimple_code (access->stmt) == GIMPLE_ASM))
3376     return true;
3377 
3378   return false;
3379 }
3380 
3381 
3382 /* Sort collected accesses for parameter PARM, identify representatives for
3383    each accessed region and link them together.  Return NULL if there are
3384    different but overlapping accesses, return the special pointer value meaning
3385    there are no accesses for this parameter if that is the case, and return the
3386    first representative otherwise.  Set *RO_GRP if there is a group of accesses
3387    with only read (i.e. no write) accesses.  */
3388 
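/* A hypothetical example of the "all or nothing" overlap rule enforced below:

       union u { long long l; int i[2]; };

       static int
       callee (union u *p)
       {
         return p->i[0] + (int) p->l;
       }

   The access to p->i[0] (offset 0, size 32 bits) and the access to p->l
   (offset 0, size 64 bits) overlap without being identical, so splicing
   fails and the parameter is left alone, whereas two disjoint accesses such
   as p->i[0] and p->i[1] would each get their own representative.  */
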
3389 static struct access *
3390 splice_param_accesses (tree parm, bool *ro_grp)
3391 {
3392   int i, j, access_count, group_count;
3393   int agg_size, total_size = 0;
3394   struct access *access, *res, **prev_acc_ptr = &res;
3395   VEC (access_p, heap) *access_vec;
3396 
3397   access_vec = get_base_access_vector (parm);
3398   if (!access_vec)
3399     return &no_accesses_representant;
3400   access_count = VEC_length (access_p, access_vec);
3401 
3402   qsort (VEC_address (access_p, access_vec), access_count, sizeof (access_p),
3403 	 compare_access_positions);
3404 
3405   i = 0;
3406   total_size = 0;
3407   group_count = 0;
3408   while (i < access_count)
3409     {
3410       bool modification;
3411       access = VEC_index (access_p, access_vec, i);
3412       modification = access->write;
3413       if (access_precludes_ipa_sra_p (access))
3414 	return NULL;
3415 
3416       /* Access is about to become group representative unless we find some
3417 	 nasty overlap which would preclude us from breaking this parameter
3418 	 apart. */
3419 
3420       j = i + 1;
3421       while (j < access_count)
3422 	{
3423 	  struct access *ac2 = VEC_index (access_p, access_vec, j);
3424 	  if (ac2->offset != access->offset)
3425 	    {
3426 	      /* All or nothing law for parameters. */
3427 	      if (access->offset + access->size > ac2->offset)
3428 		return NULL;
3429 	      else
3430 		break;
3431 	    }
3432 	  else if (ac2->size != access->size)
3433 	    return NULL;
3434 
3435 	  if (access_precludes_ipa_sra_p (ac2)
3436 	      || (ac2->type != access->type
3437 		  && (TREE_ADDRESSABLE (ac2->type)
3438 		      || TREE_ADDRESSABLE (access->type))))
3439 	    return NULL;
3440 
3441 	  modification |= ac2->write;
3442 	  ac2->group_representative = access;
3443 	  ac2->next_sibling = access->next_sibling;
3444 	  access->next_sibling = ac2;
3445 	  j++;
3446 	}
3447 
3448       group_count++;
3449       access->grp_maybe_modified = modification;
3450       if (!modification)
3451 	*ro_grp = true;
3452       *prev_acc_ptr = access;
3453       prev_acc_ptr = &access->next_grp;
3454       total_size += access->size;
3455       i = j;
3456     }
3457 
3458   if (POINTER_TYPE_P (TREE_TYPE (parm)))
3459     agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3460   else
3461     agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3462   if (total_size >= agg_size)
3463     return NULL;
3464 
3465   gcc_assert (group_count > 0);
3466   return res;
3467 }
3468 
3469 /* Decide whether the parameter with representative accesses given by REPR
3470    should be reduced into components; return their number, or zero if not.  */
3471 
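/* A worked example of the heuristic below, with hypothetical numbers,
   assuming an LP64 target and the default --param ipa-sra-ptr-growth-factor
   of 2.  For

       struct s { double a, b, c, d; };          sizeof is 256 bits
       static double f (struct s *p);            cur_parm_size is 64 bits

   where only p->a and p->c are read and certainly dereferenced, total_size
   is 64 + 64 = 128 bits and agg_size is 256 bits.  When optimizing for
   speed, parm_size_limit = 2 * 64 = 128, so both 128 < 256 and 128 <= 128
   hold and the parameter is split into two double components.  When
   optimizing for size, parm_size_limit is only 64 bits and the split is
   rejected.  */
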
3472 static int
3473 decide_one_param_reduction (struct access *repr)
3474 {
3475   int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
3476   bool by_ref;
3477   tree parm;
3478 
3479   parm = repr->base;
3480   cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3481   gcc_assert (cur_parm_size > 0);
3482 
3483   if (POINTER_TYPE_P (TREE_TYPE (parm)))
3484     {
3485       by_ref = true;
3486       agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3487     }
3488   else
3489     {
3490       by_ref = false;
3491       agg_size = cur_parm_size;
3492     }
3493 
3494   if (dump_file)
3495     {
3496       struct access *acc;
3497       fprintf (dump_file, "Evaluating PARAM group sizes for ");
3498       print_generic_expr (dump_file, parm, 0);
3499       fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
3500       for (acc = repr; acc; acc = acc->next_grp)
3501 	dump_access (dump_file, acc, true);
3502     }
3503 
3504   total_size = 0;
3505   new_param_count = 0;
3506 
3507   for (; repr; repr = repr->next_grp)
3508     {
3509       gcc_assert (parm == repr->base);
3510       new_param_count++;
3511 
3512       if (!by_ref || (!repr->grp_maybe_modified
3513 		      && !repr->grp_not_necessarilly_dereferenced))
3514 	total_size += repr->size;
3515       else
3516 	total_size += cur_parm_size;
3517     }
3518 
3519   gcc_assert (new_param_count > 0);
3520 
3521   if (optimize_function_for_size_p (cfun))
3522     parm_size_limit = cur_parm_size;
3523   else
3524     parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
3525                        * cur_parm_size);
3526 
3527   if (total_size < agg_size
3528       && total_size <= parm_size_limit)
3529     {
3530       if (dump_file)
3531 	fprintf (dump_file, "    ....will be split into %i components\n",
3532 		 new_param_count);
3533       return new_param_count;
3534     }
3535   else
3536     return 0;
3537 }
3538 
3539 /* The order of the following enumerators is important; we need to do extra
3540    work for UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
3541 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
3542 			  MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
3543 
3544 /* Identify representatives of all accesses to all candidate parameters for
3545    IPA-SRA.  Return a result based on what representatives have been found.  */
3546 
3547 static enum ipa_splicing_result
3548 splice_all_param_accesses (VEC (access_p, heap) **representatives)
3549 {
3550   enum ipa_splicing_result result = NO_GOOD_ACCESS;
3551   tree parm;
3552   struct access *repr;
3553 
3554   *representatives = VEC_alloc (access_p, heap, func_param_count);
3555 
3556   for (parm = DECL_ARGUMENTS (current_function_decl);
3557        parm;
3558        parm = TREE_CHAIN (parm))
3559     {
3560       if (is_unused_scalar_param (parm))
3561 	{
3562 	  VEC_quick_push (access_p, *representatives,
3563 			  &no_accesses_representant);
3564 	  if (result == NO_GOOD_ACCESS)
3565 	    result = UNUSED_PARAMS;
3566 	}
3567       else if (POINTER_TYPE_P (TREE_TYPE (parm))
3568 	       && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
3569 	       && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3570 	{
3571 	  repr = unmodified_by_ref_scalar_representative (parm);
3572 	  VEC_quick_push (access_p, *representatives, repr);
3573 	  if (repr)
3574 	    result = UNMODIF_BY_REF_ACCESSES;
3575 	}
3576       else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3577 	{
3578 	  bool ro_grp = false;
3579 	  repr = splice_param_accesses (parm, &ro_grp);
3580 	  VEC_quick_push (access_p, *representatives, repr);
3581 
3582 	  if (repr && !no_accesses_p (repr))
3583 	    {
3584 	      if (POINTER_TYPE_P (TREE_TYPE (parm)))
3585 		{
3586 		  if (ro_grp)
3587 		    result = UNMODIF_BY_REF_ACCESSES;
3588 		  else if (result < MODIF_BY_REF_ACCESSES)
3589 		    result = MODIF_BY_REF_ACCESSES;
3590 		}
3591 	      else if (result < BY_VAL_ACCESSES)
3592 		result = BY_VAL_ACCESSES;
3593 	    }
3594 	  else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
3595 	    result = UNUSED_PARAMS;
3596 	}
3597       else
3598 	VEC_quick_push (access_p, *representatives, NULL);
3599     }
3600 
3601   if (result == NO_GOOD_ACCESS)
3602     {
3603       VEC_free (access_p, heap, *representatives);
3604       *representatives = NULL;
3605       return NO_GOOD_ACCESS;
3606     }
3607 
3608   return result;
3609 }
3610 
3611 /* Return the index of BASE in PARMS.  Abort if it is not found.  */
3612 
3613 static inline int
3614 get_param_index (tree base, VEC(tree, heap) *parms)
3615 {
3616   int i, len;
3617 
3618   len = VEC_length (tree, parms);
3619   for (i = 0; i < len; i++)
3620     if (VEC_index (tree, parms, i) == base)
3621       return i;
3622   gcc_unreachable ();
3623 }
3624 
3625 /* Convert the decisions made at the representative level into compact
3626    parameter adjustments.  REPRESENTATIVES are pointers to the first
3627    representatives of each parameter's accesses; ADJUSTMENTS_COUNT is the
3628    expected final number of adjustments.  */
3629 
3630 static ipa_parm_adjustment_vec
3631 turn_representatives_into_adjustments (VEC (access_p, heap) *representatives,
3632 				       int adjustments_count)
3633 {
3634   VEC (tree, heap) *parms;
3635   ipa_parm_adjustment_vec adjustments;
3636   tree parm;
3637   int i;
3638 
3639   gcc_assert (adjustments_count > 0);
3640   parms = ipa_get_vector_of_formal_parms (current_function_decl);
3641   adjustments = VEC_alloc (ipa_parm_adjustment_t, heap, adjustments_count);
3642   parm = DECL_ARGUMENTS (current_function_decl);
3643   for (i = 0; i < func_param_count; i++, parm = TREE_CHAIN (parm))
3644     {
3645       struct access *repr = VEC_index (access_p, representatives, i);
3646 
3647       if (!repr || no_accesses_p (repr))
3648 	{
3649 	  struct ipa_parm_adjustment *adj;
3650 
3651 	  adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3652 	  memset (adj, 0, sizeof (*adj));
3653 	  adj->base_index = get_param_index (parm, parms);
3654 	  adj->base = parm;
3655 	  if (!repr)
3656 	    adj->copy_param = 1;
3657 	  else
3658 	    adj->remove_param = 1;
3659 	}
3660       else
3661 	{
3662 	  struct ipa_parm_adjustment *adj;
3663 	  int index = get_param_index (parm, parms);
3664 
3665 	  for (; repr; repr = repr->next_grp)
3666 	    {
3667 	      adj = VEC_quick_push (ipa_parm_adjustment_t, adjustments, NULL);
3668 	      memset (adj, 0, sizeof (*adj));
3669 	      gcc_assert (repr->base == parm);
3670 	      adj->base_index = index;
3671 	      adj->base = repr->base;
3672 	      adj->type = repr->type;
3673 	      adj->offset = repr->offset;
3674 	      adj->by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
3675 			     && (repr->grp_maybe_modified
3676 				 || repr->grp_not_necessarilly_dereferenced));
3677 
3678 	    }
3679 	}
3680     }
3681   VEC_free (tree, heap, parms);
3682   return adjustments;
3683 }
3684 
3685 /* Analyze the collected accesses and produce a plan, in the form of
3686    adjustments, for what to do with the parameters; NULL means no changes.  */
3687 
3688 static ipa_parm_adjustment_vec
3689 analyze_all_param_acesses (void)
3690 {
3691   enum ipa_splicing_result repr_state;
3692   bool proceed = false;
3693   int i, adjustments_count = 0;
3694   VEC (access_p, heap) *representatives;
3695   ipa_parm_adjustment_vec adjustments;
3696 
3697   repr_state = splice_all_param_accesses (&representatives);
3698   if (repr_state == NO_GOOD_ACCESS)
3699     return NULL;
3700 
3701   /* If there are any parameters passed by reference which are not modified
3702      directly, we need to check whether they can be modified indirectly.  */
3703   if (repr_state == UNMODIF_BY_REF_ACCESSES)
3704     {
3705       analyze_caller_dereference_legality (representatives);
3706       analyze_modified_params (representatives);
3707     }
3708 
3709   for (i = 0; i < func_param_count; i++)
3710     {
3711       struct access *repr = VEC_index (access_p, representatives, i);
3712 
3713       if (repr && !no_accesses_p (repr))
3714 	{
3715 	  if (repr->grp_scalar_ptr)
3716 	    {
3717 	      adjustments_count++;
3718 	      if (repr->grp_not_necessarilly_dereferenced
3719 		  || repr->grp_maybe_modified)
3720 		VEC_replace (access_p, representatives, i, NULL);
3721 	      else
3722 		{
3723 		  proceed = true;
3724 		  sra_stats.scalar_by_ref_to_by_val++;
3725 		}
3726 	    }
3727 	  else
3728 	    {
3729 	      int new_components = decide_one_param_reduction (repr);
3730 
3731 	      if (new_components == 0)
3732 		{
3733 		  VEC_replace (access_p, representatives, i, NULL);
3734 		  adjustments_count++;
3735 		}
3736 	      else
3737 		{
3738 		  adjustments_count += new_components;
3739 		  sra_stats.aggregate_params_reduced++;
3740 		  sra_stats.param_reductions_created += new_components;
3741 		  proceed = true;
3742 		}
3743 	    }
3744 	}
3745       else
3746 	{
3747 	  if (no_accesses_p (repr))
3748 	    {
3749 	      proceed = true;
3750 	      sra_stats.deleted_unused_parameters++;
3751 	    }
3752 	  adjustments_count++;
3753 	}
3754     }
3755 
3756   if (!proceed && dump_file)
3757     fprintf (dump_file, "NOT proceeding to change params.\n");
3758 
3759   if (proceed)
3760     adjustments = turn_representatives_into_adjustments (representatives,
3761 							 adjustments_count);
3762   else
3763     adjustments = NULL;
3764 
3765   VEC_free (access_p, heap, representatives);
3766   return adjustments;
3767 }
3768 
3769 /* If a parameter replacement identified by ADJ does not yet exist in the form
3770    of a declaration, create and record it; otherwise return the previously
3771    created one.  */
3772 
3773 static tree
3774 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
3775 {
3776   tree repl;
3777   if (!adj->new_ssa_base)
3778     {
3779       char *pretty_name = make_fancy_name (adj->base);
3780 
3781       repl = create_tmp_var (TREE_TYPE (adj->base), "ISR");
3782       if (TREE_CODE (TREE_TYPE (repl)) == COMPLEX_TYPE
3783 	  || TREE_CODE (TREE_TYPE (repl)) == VECTOR_TYPE)
3784 	DECL_GIMPLE_REG_P (repl) = 1;
3785       DECL_NAME (repl) = get_identifier (pretty_name);
3786       obstack_free (&name_obstack, pretty_name);
3787 
3788       get_var_ann (repl);
3789       add_referenced_var (repl);
3790       adj->new_ssa_base = repl;
3791     }
3792   else
3793     repl = adj->new_ssa_base;
3794   return repl;
3795 }
3796 
3797 /* Find the first adjustment for a particular parameter BASE in a vector of
3798    ADJUSTMENTS which is not a copy_param adjustment.  Return NULL if there is
3799    no such adjustment.  */
3800 
3801 static struct ipa_parm_adjustment *
3802 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
3803 {
3804   int i, len;
3805 
3806   len = VEC_length (ipa_parm_adjustment_t, adjustments);
3807   for (i = 0; i < len; i++)
3808     {
3809       struct ipa_parm_adjustment *adj;
3810 
3811       adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
3812       if (!adj->copy_param && adj->base == base)
3813 	return adj;
3814     }
3815 
3816   return NULL;
3817 }
3818 
3819 /* Callback for scan_function.  If the statement STMT defines an SSA_NAME of a
3820    parameter which is to be removed because its value is not used, replace the
3821    SSA_NAME with one relating to a created VAR_DECL, replace all of its uses
3822    as well, and return true (update_stmt is then issued for the statement by
3823    the caller).  DATA is a pointer to an adjustments vector.  */
3824 
3825 static bool
3826 replace_removed_params_ssa_names (gimple stmt, void *data)
3827 {
3828   VEC (ipa_parm_adjustment_t, heap) *adjustments;
3829   struct ipa_parm_adjustment *adj;
3830   tree lhs, decl, repl, name;
3831 
3832   adjustments = (VEC (ipa_parm_adjustment_t, heap) *) data;
3833   if (gimple_code (stmt) == GIMPLE_PHI)
3834     lhs = gimple_phi_result (stmt);
3835   else if (is_gimple_assign (stmt))
3836     lhs = gimple_assign_lhs (stmt);
3837   else if (is_gimple_call (stmt))
3838     lhs = gimple_call_lhs (stmt);
3839   else
3840     gcc_unreachable ();
3841 
3842   if (TREE_CODE (lhs) != SSA_NAME)
3843     return false;
3844   decl = SSA_NAME_VAR (lhs);
3845   if (TREE_CODE (decl) != PARM_DECL)
3846     return false;
3847 
3848   adj = get_adjustment_for_base (adjustments, decl);
3849   if (!adj)
3850     return false;
3851 
3852   repl = get_replaced_param_substitute (adj);
3853   name = make_ssa_name (repl, stmt);
3854 
3855   if (dump_file)
3856     {
3857       fprintf (dump_file, "replacing an SSA name of a removed param ");
3858       print_generic_expr (dump_file, lhs, 0);
3859       fprintf (dump_file, " with ");
3860       print_generic_expr (dump_file, name, 0);
3861       fprintf (dump_file, "\n");
3862     }
3863 
3864   if (is_gimple_assign (stmt))
3865     gimple_assign_set_lhs (stmt, name);
3866   else if (is_gimple_call (stmt))
3867     gimple_call_set_lhs (stmt, name);
3868   else
3869     gimple_phi_set_result (stmt, name);
3870 
3871   replace_uses_by (lhs, name);
3872   release_ssa_name (lhs);
3873   return true;
3874 }
3875 
3876 /* Callback for scan_function and helper to sra_ipa_modify_assign.  If the
3877    expression *EXPR should be replaced by a reduction of a parameter, do so.
3878    DATA is a pointer to a vector of adjustments.  DONT_CONVERT specifies
3879    whether the function should care about type incompatibility between the
3880    current and new expressions.  If it is true, the function will leave
3881    incompatibility issues to the caller.
3882 
3883    When called directly by scan_function, DONT_CONVERT is true when the EXPR is
3884    a write (LHS) expression.  */
3885 
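/* A hypothetical sketch of the rewrite performed below (the replacement
   names are invented).  If "struct s *p" had its int component at offset 0
   turned into a new by-value parameter whose reduction decl is ISRA.3, a use
   such as

       tmp_2 = p_1(D)->a;

   becomes

       tmp_2 = ISRA.3_4(D);

   If the component is still passed by reference (because grp_maybe_modified
   or grp_not_necessarilly_dereferenced was set), *ISRA.3 is substituted
   instead, and a VIEW_CONVERT_EXPR is wrapped around the replacement only
   when the types are incompatible and DONT_CONVERT is false.  */
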
3886 static bool
3887 sra_ipa_modify_expr (tree *expr, gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED,
3888 		     bool dont_convert, void *data)
3889 {
3890   ipa_parm_adjustment_vec adjustments;
3891   int i, len;
3892   struct ipa_parm_adjustment *adj, *cand = NULL;
3893   HOST_WIDE_INT offset, size, max_size;
3894   tree base, src;
3895 
3896   adjustments = (VEC (ipa_parm_adjustment_t, heap) *) data;
3897   len = VEC_length (ipa_parm_adjustment_t, adjustments);
3898 
3899   if (TREE_CODE (*expr) == BIT_FIELD_REF
3900       || TREE_CODE (*expr) == IMAGPART_EXPR
3901       || TREE_CODE (*expr) == REALPART_EXPR)
3902     {
3903       expr = &TREE_OPERAND (*expr, 0);
3904       dont_convert = false;
3905     }
3906 
3907   base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
3908   if (!base || size == -1 || max_size == -1)
3909     return false;
3910 
3911   if (INDIRECT_REF_P (base))
3912     base = TREE_OPERAND (base, 0);
3913 
3914   base = get_ssa_base_param (base);
3915   if (!base || TREE_CODE (base) != PARM_DECL)
3916     return false;
3917 
3918   for (i = 0; i < len; i++)
3919     {
3920       adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
3921 
3922       if (adj->base == base &&
3923 	  (adj->offset == offset || adj->remove_param))
3924 	{
3925 	  cand = adj;
3926 	  break;
3927 	}
3928     }
3929   if (!cand || cand->copy_param || cand->remove_param)
3930     return false;
3931 
3932   if (cand->by_ref)
3933     {
3934       tree folded;
3935       src = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (cand->reduction)),
3936 		    cand->reduction);
3937       folded = gimple_fold_indirect_ref (src);
3938       if (folded)
3939         src = folded;
3940     }
3941   else
3942     src = cand->reduction;
3943 
3944   if (dump_file && (dump_flags & TDF_DETAILS))
3945     {
3946       fprintf (dump_file, "About to replace expr ");
3947       print_generic_expr (dump_file, *expr, 0);
3948       fprintf (dump_file, " with ");
3949       print_generic_expr (dump_file, src, 0);
3950       fprintf (dump_file, "\n");
3951     }
3952 
3953   if (!dont_convert
3954       && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
3955     {
3956       tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
3957       *expr = vce;
3958     }
3959   else
3960     *expr = src;
3961   return true;
3962 }
3963 
3964 /* Callback for scan_function to process assign statements.  Performs
3965    essentially the same function as sra_ipa_modify_expr.  */
3966 
3967 static enum scan_assign_result
3968 sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi, void *data)
3969 {
3970   gimple stmt = *stmt_ptr;
3971   tree *lhs_p, *rhs_p;
3972   bool any;
3973 
3974   if (!gimple_assign_single_p (stmt))
3975     return SRA_SA_NONE;
3976 
3977   rhs_p = gimple_assign_rhs1_ptr (stmt);
3978   lhs_p = gimple_assign_lhs_ptr (stmt);
3979 
3980   any = sra_ipa_modify_expr (rhs_p, gsi, true, data);
3981   any |= sra_ipa_modify_expr (lhs_p, gsi, true, data);
3982   if (any)
3983     {
3984       tree new_rhs = NULL_TREE;
3985 
3986       if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
3987 	{
3988 	  if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
3989 	    {
3990 	      /* V_C_Es of constructors can cause trouble (PR 42714).  */
3991 	      if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
3992 		*rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node);
3993 	      else
3994 		*rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
3995 	    }
3996 	  else
3997 	    new_rhs = fold_build1_loc (gimple_location (stmt),
3998 				       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
3999 				       *rhs_p);
4000 	}
4001       else if (REFERENCE_CLASS_P (*rhs_p)
4002 	       && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4003 	       && !is_gimple_reg (*lhs_p))
4004 	/* This can happen when an assignment in between two single field
4005 	   structures is turned into an assignment in between two pointers to
4006 	   scalars (PR 42237).  */
4007 	new_rhs = *rhs_p;
4008 
4009       if (new_rhs)
4010 	{
4011 	  tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4012 					       true, GSI_SAME_STMT);
4013 
4014 	  gimple_assign_set_rhs_from_tree (gsi, tmp);
4015 	}
4016 
4017       return SRA_SA_PROCESSED;
4018     }
4019 
4020   return SRA_SA_NONE;
4021 }
4022 
4023 /* Call gimple_debug_bind_reset_value on all debug statements describing
4024    gimple register parameters that are being removed or replaced.  */
4025 
4026 static void
4027 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4028 {
4029   int i, len;
4030 
4031   len = VEC_length (ipa_parm_adjustment_t, adjustments);
4032   for (i = 0; i < len; i++)
4033     {
4034       struct ipa_parm_adjustment *adj;
4035       imm_use_iterator ui;
4036       gimple stmt;
4037       tree name;
4038 
4039       adj = VEC_index (ipa_parm_adjustment_t, adjustments, i);
4040       if (adj->copy_param || !is_gimple_reg (adj->base))
4041 	continue;
4042       name = gimple_default_def (cfun, adj->base);
4043       if (!name)
4044 	continue;
4045       FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4046 	{
4047 	  /* All other users must have been removed by scan_function.  */
4048 	  gcc_assert (is_gimple_debug (stmt));
4049 	  gimple_debug_bind_reset_value (stmt);
4050 	  update_stmt (stmt);
4051 	}
4052     }
4053 }
4054 
4055 /* Return true iff all callers have at least as many actual arguments as there
4056    are formal parameters in the current function.  */
4057 
4058 static bool
4059 all_callers_have_enough_arguments_p (struct cgraph_node *node)
4060 {
4061   struct cgraph_edge *cs;
4062   for (cs = node->callers; cs; cs = cs->next_caller)
4063     if (!callsite_has_enough_arguments_p (cs->call_stmt))
4064       return false;
4065 
4066   return true;
4067 }
4068 
4069 
4070 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */
4071 
4072 static void
4073 convert_callers (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4074 {
4075   tree old_cur_fndecl = current_function_decl;
4076   struct cgraph_edge *cs;
4077   bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4078 
4079   for (cs = node->callers; cs; cs = cs->next_caller)
4080     {
4081       current_function_decl = cs->caller->decl;
4082       push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
4083 
4084       if (dump_file)
4085 	fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
4086 		 cs->caller->uid, cs->callee->uid,
4087 		 cgraph_node_name (cs->caller),
4088 		 cgraph_node_name (cs->callee));
4089 
4090       ipa_modify_call_arguments (cs, cs->call_stmt, adjustments);
4091 
4092       pop_cfun ();
4093     }
4094 
4095   for (cs = node->callers; cs; cs = cs->next_caller)
4096     if (cs->caller != node
4097 	&& !bitmap_bit_p (recomputed_callers, cs->caller->uid))
4098       {
4099 	compute_inline_parameters (cs->caller);
4100 	bitmap_set_bit (recomputed_callers, cs->caller->uid);
4101       }
4102   BITMAP_FREE (recomputed_callers);
4103 
4104   current_function_decl = old_cur_fndecl;
4105   return;
4106 }
4107 
4108 /* Perform all the modifications required in IPA-SRA for NODE to have
4109    parameters as given in ADJUSTMENTS.  */
4110 
4111 static void
4112 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4113 {
4114   struct cgraph_node *new_node;
4115   struct cgraph_edge *cs;
4116   VEC (cgraph_edge_p, heap) * redirect_callers;
4117   int node_callers;
4118 
4119   node_callers = 0;
4120   for (cs = node->callers; cs != NULL; cs = cs->next_caller)
4121     node_callers++;
4122   redirect_callers = VEC_alloc (cgraph_edge_p, heap, node_callers);
4123   for (cs = node->callers; cs != NULL; cs = cs->next_caller)
4124     VEC_quick_push (cgraph_edge_p, redirect_callers, cs);
4125 
4126   rebuild_cgraph_edges ();
4127   pop_cfun ();
4128   current_function_decl = NULL_TREE;
4129 
4130   new_node = cgraph_function_versioning (node, redirect_callers, NULL, NULL);
4131   current_function_decl = new_node->decl;
4132   push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
4133 
4134   ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
4135   scan_function (sra_ipa_modify_expr, sra_ipa_modify_assign,
4136 		 replace_removed_params_ssa_names, false, adjustments);
4137   sra_ipa_reset_debug_stmts (adjustments);
4138   convert_callers (new_node, adjustments);
4139   cgraph_make_node_local (new_node);
4140   return;
4141 }
4142 
4143 /* Return false if the function is apparently unsuitable for IPA-SRA based on
4144    its attributes; return true otherwise.  NODE is the cgraph node of the
4145    current function.  */
4146 
4147 static bool
4148 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4149 {
4150   if (!cgraph_node_can_be_local_p (node))
4151     {
4152       if (dump_file)
4153 	fprintf (dump_file, "Function not local to this compilation unit.\n");
4154       return false;
4155     }
4156 
4157   if (!tree_versionable_function_p (node->decl))
4158     {
4159       if (dump_file)
4160 	fprintf (dump_file, "Function is not versionable.\n");
4161       return false;
4162     }
4163 
4164   if (DECL_VIRTUAL_P (current_function_decl))
4165     {
4166       if (dump_file)
4167 	fprintf (dump_file, "Function is a virtual method.\n");
4168       return false;
4169     }
4170 
4171   if ((DECL_COMDAT (node->decl) || DECL_EXTERNAL (node->decl))
4172       && node->global.size >= MAX_INLINE_INSNS_AUTO)
4173     {
4174       if (dump_file)
4175 	fprintf (dump_file, "Function too big to be made truly local.\n");
4176       return false;
4177     }
4178 
4179   if (!node->callers)
4180     {
4181       if (dump_file)
4182 	fprintf (dump_file,
4183 		 "Function has no callers in this compilation unit.\n");
4184       return false;
4185     }
4186 
4187   if (cfun->stdarg)
4188     {
4189       if (dump_file)
4190 	fprintf (dump_file, "Function uses stdarg.\n");
4191       return false;
4192     }
4193 
4194   if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
4195     return false;
4196 
4197   return true;
4198 }
4199 
4200 /* Perform early interprocedural SRA.  */
4201 
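/* For orientation, a hypothetical source-level view of what the pass driven
   from here achieves (the real transformation is done on GIMPLE, and the
   names below are invented):

       struct point { int x, y; };
       static int get_x (struct point *p) { return p->x; }
       int use (struct point pt) { return get_x (&pt); }

   may effectively become

       static int get_x (int x) { return x; }
       int use (struct point pt) { return get_x (pt.x); }

   i.e. the aggregate pointer parameter is replaced by the scalar component
   that is actually used and every caller in this compilation unit is
   adjusted accordingly.  */
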
4202 static unsigned int
4203 ipa_early_sra (void)
4204 {
4205   struct cgraph_node *node = cgraph_node (current_function_decl);
4206   ipa_parm_adjustment_vec adjustments;
4207   int ret = 0;
4208 
4209   if (!ipa_sra_preliminary_function_checks (node))
4210     return 0;
4211 
4212   sra_initialize ();
4213   sra_mode = SRA_MODE_EARLY_IPA;
4214 
4215   if (!find_param_candidates ())
4216     {
4217       if (dump_file)
4218 	fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
4219       goto simple_out;
4220     }
4221 
4222   if (!all_callers_have_enough_arguments_p (node))
4223     {
4224       if (dump_file)
4225 	fprintf (dump_file, "There are callers with insufficient number of "
4226 		 "arguments.\n");
4227       goto simple_out;
4228     }
4229 
4230   bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4231 				 func_param_count
4232 				 * last_basic_block_for_function (cfun));
4233   final_bbs = BITMAP_ALLOC (NULL);
4234 
4235   scan_function (build_access_from_expr, build_accesses_from_assign,
4236 		 NULL, true, NULL);
4237   if (encountered_apply_args)
4238     {
4239       if (dump_file)
4240 	fprintf (dump_file, "Function calls __builtin_apply_args().\n");
4241       goto out;
4242     }
4243 
4244   if (encountered_unchangable_recursive_call)
4245     {
4246       if (dump_file)
4247 	fprintf (dump_file, "Function calls itself with insufficient "
4248 		 "number of arguments.\n");
4249       goto out;
4250     }
4251 
4252   adjustments = analyze_all_param_acesses ();
4253   if (!adjustments)
4254     goto out;
4255   if (dump_file)
4256     ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
4257 
4258   modify_function (node, adjustments);
4259   VEC_free (ipa_parm_adjustment_t, heap, adjustments);
4260   if (cfg_changed)
4261     ret = TODO_update_ssa | TODO_cleanup_cfg;
4262   else
4263     ret = TODO_update_ssa;
4264 
4265   statistics_counter_event (cfun, "Unused parameters deleted",
4266 			    sra_stats.deleted_unused_parameters);
4267   statistics_counter_event (cfun, "Scalar parameters converted to by-value",
4268 			    sra_stats.scalar_by_ref_to_by_val);
4269   statistics_counter_event (cfun, "Aggregate parameters broken up",
4270 			    sra_stats.aggregate_params_reduced);
4271   statistics_counter_event (cfun, "Aggregate parameter components created",
4272 			    sra_stats.param_reductions_created);
4273 
4274  out:
4275   BITMAP_FREE (final_bbs);
4276   free (bb_dereferences);
4277  simple_out:
4278   sra_deinitialize ();
4279   return ret;
4280 }
4281 
4282 /* Return true iff early IPA-SRA shall be performed.  */
4283 static bool
4284 ipa_early_sra_gate (void)
4285 {
4286   return flag_ipa_sra;
4287 }
4288 
4289 struct gimple_opt_pass pass_early_ipa_sra =
4290 {
4291  {
4292   GIMPLE_PASS,
4293   "eipa_sra",	 			/* name */
4294   ipa_early_sra_gate,			/* gate */
4295   ipa_early_sra,			/* execute */
4296   NULL,					/* sub */
4297   NULL,					/* next */
4298   0,					/* static_pass_number */
4299   TV_IPA_SRA,				/* tv_id */
4300   0,	                                /* properties_required */
4301   0,					/* properties_provided */
4302   0,					/* properties_destroyed */
4303   0,					/* todo_flags_start */
4304   TODO_dump_func | TODO_dump_cgraph 	/* todo_flags_finish */
4305  }
4306 };
4307 
4308 
4309