xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-sra.c (revision cef8759bd76c1b621f8eab8faa6f208faabc2e15)
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2    references into scalar references, exposing them to the scalar
3    optimizers.
4    Copyright (C) 2008-2017 Free Software Foundation, Inc.
5    Contributed by Martin Jambor <mjambor@suse.cz>
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 /* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
24    twice, once in the early stages of compilation (early SRA) and once in the
25    late stages (late SRA).  The aim of both is to turn references to scalar
26    parts of aggregates into uses of independent scalar variables.
27 
28    The two passes are nearly identical; the only difference is that early SRA
29    does not scalarize unions which are used as the result in a GIMPLE_RETURN
30    statement because together with inlining this can lead to weird type
31    conversions.
32 
33    Both passes operate in four stages:
34 
35    1. The declarations that have properties which make them candidates for
36       scalarization are identified in function find_var_candidates().  The
37       candidates are stored in candidate_bitmap.
38 
39    2. The function body is scanned.  In the process, declarations which are
40       used in a manner that prevents their scalarization are removed from the
41       candidate bitmap.  More importantly, for every access into an aggregate,
42       an access structure (struct access) is created by create_access() and
43       stored in a vector associated with the aggregate.  Among other
44       information, the aggregate declaration, the offset and size of the access
45       and its type are stored in the structure.
46 
47       On a related note, assign_link structures are created for every assign
48       statement between candidate aggregates and attached to the related
49       accesses.
50 
51    3. The vectors of accesses are analyzed.  They are first sorted according to
52       their offset and size and then scanned for partially overlapping accesses
53       (i.e. those which overlap but one is not entirely within another).  Such
54       an access disqualifies the whole aggregate from being scalarized.
55 
56       If there is no such inhibiting overlap, a representative access structure
57       is chosen for every unique combination of offset and size.  Afterwards,
58       the pass builds a set of trees from these structures, in which children
59       of an access are within their parent (in terms of offset and size).
60 
61       Then accesses are propagated whenever possible (i.e. in cases when it
62       does not create a partially overlapping access) across assign_links from
63       the right hand side to the left hand side.
64 
65       Then the set of trees for each declaration is traversed again and those
66       accesses which should be replaced by a scalar are identified.
67 
68    4. The function is traversed again, and for every reference into an
69       aggregate that has some component which is about to be scalarized,
70       statements are amended and new statements are created as necessary.
71       Finally, if a parameter got scalarized, the scalar replacements are
72       initialized with values from respective parameter aggregates.  */
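
/* As a purely illustrative example (not itself part of the pass), given a
   function such as

     struct S { int a; float b; };

     float
     foo (struct S s)
     {
       s.a += 1;
       return s.b;
     }

   intraprocedural SRA can replace the uses of s.a and s.b with independent
   scalar replacements (named along the lines of s$a and s$b, see
   make_fancy_name below), which the later scalar optimizers can then work
   with directly.  Whether a particular component is actually replaced depends
   on the analysis described above.  */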
73 
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "backend.h"
78 #include "target.h"
79 #include "rtl.h"
80 #include "tree.h"
81 #include "gimple.h"
82 #include "predict.h"
83 #include "alloc-pool.h"
84 #include "tree-pass.h"
85 #include "ssa.h"
86 #include "cgraph.h"
87 #include "gimple-pretty-print.h"
88 #include "alias.h"
89 #include "fold-const.h"
90 #include "tree-eh.h"
91 #include "stor-layout.h"
92 #include "gimplify.h"
93 #include "gimple-iterator.h"
94 #include "gimplify-me.h"
95 #include "gimple-walk.h"
96 #include "tree-cfg.h"
97 #include "tree-dfa.h"
98 #include "tree-ssa.h"
99 #include "symbol-summary.h"
100 #include "ipa-prop.h"
101 #include "params.h"
102 #include "dbgcnt.h"
103 #include "tree-inline.h"
104 #include "ipa-inline.h"
105 #include "ipa-utils.h"
106 #include "builtins.h"
107 
108 /* Enumeration of all aggregate reductions we can do.  */
109 enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
110 		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
111 		SRA_MODE_INTRA };     /* late intraprocedural SRA */
112 
113 /* Global variable describing which aggregate reduction we are performing at
114    the moment.  */
115 static enum sra_mode sra_mode;
116 
117 struct assign_link;
118 
119 /* ACCESS represents each access to an aggregate variable (as a whole or a
120    part).  It can also represent a group of accesses that refer to exactly the
121    same fragment of an aggregate (i.e. those that have exactly the same offset
122    and size).  Such representatives for a single aggregate, once determined,
123    are linked in a linked list and have the group fields set.
124 
125    Moreover, when doing intraprocedural SRA, a tree is built from those
126    representatives (by the means of first_child and next_sibling pointers), in
127    which all items in a subtree are "within" the root, i.e. their offset is
128    greater or equal to offset of the root and offset+size is smaller or equal
129    to offset+size of the root.  Children of an access are sorted by offset.
130 
131    Note that accesses to parts of vector and complex number types are always
132    represented by an access to the whole complex number or a vector.  It is a
133    duty of the modifying functions to replace them appropriately.  */
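
/* A purely illustrative sketch of such an access tree (assuming 32-bit ints
   and no padding): for

     struct inner { int i; int j; };
     struct outer { struct inner in; int k; };

   the group representatives for a variable of type struct outer could form
   roughly this tree:

     offset 0,  size 96    (the whole variable)
       offset 0,  size 64    (member in)
         offset 0,  size 32    (member in.i)
         offset 32, size 32    (member in.j)
       offset 64, size 32    (member k)

   Children are sorted by offset and lie entirely within their parent.  */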
134 
135 struct access
136 {
137   /* Values returned by `get_ref_base_and_extent' for each component reference.
138      If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
139      `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
140   HOST_WIDE_INT offset;
141   HOST_WIDE_INT size;
142   tree base;
143 
144   /* Expression.  It is context dependent so do not use it to create new
145      expressions to access the original aggregate.  See PR 42154 for a
146      testcase.  */
147   tree expr;
148   /* Type.  */
149   tree type;
150 
151   /* The statement this access belongs to.  */
152   gimple *stmt;
153 
154   /* Next group representative for this aggregate. */
155   struct access *next_grp;
156 
157   /* Pointer to the group representative.  Pointer to itself if the struct is
158      the representative.  */
159   struct access *group_representative;
160 
161   /* If this access has any children (in terms of the definition above), this
162      points to the first one.  */
163   struct access *first_child;
164 
165   /* In intraprocedural SRA, pointer to the next sibling in the access tree as
166      described above.  In IPA-SRA this is a pointer to the next access
167      belonging to the same group (having the same representative).  */
168   struct access *next_sibling;
169 
170   /* Pointers to the first and last element in the linked list of assign
171      links.  */
172   struct assign_link *first_link, *last_link;
173 
174   /* Pointer to the next access in the work queue.  */
175   struct access *next_queued;
176 
177   /* Replacement variable for this access "region."  Never to be accessed
178      directly, always only by the means of get_access_replacement() and only
179      when grp_to_be_replaced flag is set.  */
180   tree replacement_decl;
181 
182   /* Is this access an access to a non-addressable field? */
183   unsigned non_addressable : 1;
184 
185   /* Is this access made in reverse storage order? */
186   unsigned reverse : 1;
187 
188   /* Is this particular access write access? */
189   unsigned write : 1;
190 
191   /* Is this access currently in the work queue?  */
192   unsigned grp_queued : 1;
193 
194   /* Does this group contain a write access?  This flag is propagated down the
195      access tree.  */
196   unsigned grp_write : 1;
197 
198   /* Does this group contain a read access?  This flag is propagated down the
199      access tree.  */
200   unsigned grp_read : 1;
201 
202   /* Does this group contain a read access that comes from an assignment
203      statement?  This flag is propagated down the access tree.  */
204   unsigned grp_assignment_read : 1;
205 
206   /* Does this group contain a write access that comes from an assignment
207      statement?  This flag is propagated down the access tree.  */
208   unsigned grp_assignment_write : 1;
209 
210   /* Does this group contain a read access through a scalar type?  This flag is
211      not propagated in the access tree in any direction.  */
212   unsigned grp_scalar_read : 1;
213 
214   /* Does this group contain a write access through a scalar type?  This flag
215      is not propagated in the access tree in any direction.  */
216   unsigned grp_scalar_write : 1;
217 
218   /* Is this access an artificial one created to scalarize some record
219      entirely? */
220   unsigned grp_total_scalarization : 1;
221 
222   /* Other passes of the analysis use this bit to make function
223      analyze_access_subtree create scalar replacements for this group if
224      possible.  */
225   unsigned grp_hint : 1;
226 
227   /* Is the subtree rooted in this access fully covered by scalar
228      replacements?  */
229   unsigned grp_covered : 1;
230 
231   /* If set to true, this access and all below it in an access tree must not be
232      scalarized.  */
233   unsigned grp_unscalarizable_region : 1;
234 
235   /* Whether data have been written to parts of the aggregate covered by this
236      access which is not to be scalarized.  This flag is propagated up in the
237      access tree.  */
238   unsigned grp_unscalarized_data : 1;
239 
240   /* Does this access and/or group contain a write access through a
241      BIT_FIELD_REF?  */
242   unsigned grp_partial_lhs : 1;
243 
244   /* Set when a scalar replacement should be created for this variable.  */
245   unsigned grp_to_be_replaced : 1;
246 
247   /* Set when we want a replacement for the sole purpose of having it in
248      generated debug statements.  */
249   unsigned grp_to_be_debug_replaced : 1;
250 
251   /* Should TREE_NO_WARNING of a replacement be set?  */
252   unsigned grp_no_warning : 1;
253 
254   /* Is it possible that the group refers to data which might be (directly or
255      otherwise) modified?  */
256   unsigned grp_maybe_modified : 1;
257 
258   /* Set when this is a representative of a pointer to scalar (i.e. by
259      reference) parameter which we consider for turning into a plain scalar
260      (i.e. a by value parameter).  */
261   unsigned grp_scalar_ptr : 1;
262 
263   /* Set when we discover that this pointer is not safe to dereference in the
264      caller.  */
265   unsigned grp_not_necessarilly_dereferenced : 1;
266 };
267 
268 typedef struct access *access_p;
269 
270 
271 /* Alloc pool for allocating access structures.  */
272 static object_allocator<struct access> access_pool ("SRA accesses");
273 
274 /* A structure linking lhs and rhs accesses from an aggregate assignment.  They
275    are used to propagate subaccesses from rhs to lhs as long as they don't
276    conflict with what is already there.  */
277 struct assign_link
278 {
279   struct access *lacc, *racc;
280   struct assign_link *next;
281 };
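
/* For example (with purely hypothetical variable names), for an assignment

     dst = src;

   where both DST and SRC are candidate aggregates, build_accesses_from_assign
   below creates one assign_link whose LACC is the access created for DST and
   whose RACC is the access created for SRC, and attaches it to RACC's list of
   links.  Propagation across such links later replicates subaccesses found on
   the right hand side onto the left hand side.  */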
282 
283 /* Alloc pool for allocating assign link structures.  */
284 static object_allocator<assign_link> assign_link_pool ("SRA links");
285 
286 /* Base (tree) -> Vector (vec<access_p> *) map.  */
287 static hash_map<tree, auto_vec<access_p> > *base_access_vec;
288 
289 /* Candidate hash table helpers.  */
290 
291 struct uid_decl_hasher : nofree_ptr_hash <tree_node>
292 {
293   static inline hashval_t hash (const tree_node *);
294   static inline bool equal (const tree_node *, const tree_node *);
295 };
296 
297 /* Hash a tree in a uid_decl_map.  */
298 
299 inline hashval_t
300 uid_decl_hasher::hash (const tree_node *item)
301 {
302   return item->decl_minimal.uid;
303 }
304 
305 /* Return true if the DECL_UIDs of both trees are equal.  */
306 
307 inline bool
308 uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
309 {
310   return (a->decl_minimal.uid == b->decl_minimal.uid);
311 }
312 
313 /* Set of candidates.  */
314 static bitmap candidate_bitmap;
315 static hash_table<uid_decl_hasher> *candidates;
316 
317 /* For a candidate UID return the candidate's decl.  */
318 
319 static inline tree
320 candidate (unsigned uid)
321 {
322  tree_node t;
323  t.decl_minimal.uid = uid;
324  return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
325 }
326 
327 /* Bitmap of candidates which we should try to entirely scalarize away and
328    those which cannot be (because they are used and need to be used as a whole).  */
329 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
330 
331 /* Bitmap of candidates in the constant pool, which cannot be scalarized
332    because this would produce non-constant expressions (e.g. Ada).  */
333 static bitmap disqualified_constants;
334 
335 /* Obstack for creation of fancy names.  */
336 static struct obstack name_obstack;
337 
338 /* Head of a linked list of accesses that need to have their subaccesses
339    propagated to their assignment counterparts. */
340 static struct access *work_queue_head;
341 
342 /* Number of parameters of the analyzed function when doing early ipa SRA.  */
343 static int func_param_count;
344 
345 /* scan_function sets the following to true if it encounters a call to
346    __builtin_apply_args.  */
347 static bool encountered_apply_args;
348 
349 /* Set by scan_function when it finds a recursive call.  */
350 static bool encountered_recursive_call;
351 
352 /* Set by scan_function when it finds a recursive call with fewer actual
353    arguments than formal parameters.  */
354 static bool encountered_unchangable_recursive_call;
355 
356 /* This is a table in which for each basic block and parameter there is the
357    maximum distance (offset + size) in that parameter which is dereferenced
358    and accessed in that BB.  */
359 static HOST_WIDE_INT *bb_dereferences;
360 /* Bitmap of BBs that can cause the function to "stop" progressing by
361    returning, throwing externally, looping infinitely or calling a function
362    which might abort, etc.  */
363 static bitmap final_bbs;
364 
365 /* Representative of no accesses at all. */
366 static struct access  no_accesses_representant;
367 
368 /* Predicate to test the special value.  */
369 
370 static inline bool
371 no_accesses_p (struct access *access)
372 {
373   return access == &no_accesses_representant;
374 }
375 
376 /* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
377    representative fields are dumped, otherwise those which only describe the
378    individual access are.  */
379 
380 static struct
381 {
382   /* Number of processed aggregates is readily available in
383      analyze_all_variable_accesses and so is not stored here.  */
384 
385   /* Number of created scalar replacements.  */
386   int replacements;
387 
388   /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
389      expression.  */
390   int exprs;
391 
392   /* Number of statements created by generate_subtree_copies.  */
393   int subtree_copies;
394 
395   /* Number of statements created by load_assign_lhs_subreplacements.  */
396   int subreplacements;
397 
398   /* Number of times sra_modify_assign has deleted a statement.  */
399   int deleted;
400 
401   /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
402      RHS separately due to type conversions or nonexistent matching
403      references.  */
404   int separate_lhs_rhs_handling;
405 
406   /* Number of parameters that were removed because they were unused.  */
407   int deleted_unused_parameters;
408 
409   /* Number of scalars passed as parameters by reference that have been
410      converted to be passed by value.  */
411   int scalar_by_ref_to_by_val;
412 
413   /* Number of aggregate parameters that were replaced by one or more of their
414      components.  */
415   int aggregate_params_reduced;
416 
417   /* Number of components created when splitting aggregate parameters.  */
418   int param_reductions_created;
419 } sra_stats;
420 
421 static void
422 dump_access (FILE *f, struct access *access, bool grp)
423 {
424   fprintf (f, "access { ");
425   fprintf (f, "base = (%d)'", DECL_UID (access->base));
426   print_generic_expr (f, access->base, 0);
427   fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
428   fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
429   fprintf (f, ", expr = ");
430   print_generic_expr (f, access->expr, 0);
431   fprintf (f, ", type = ");
432   print_generic_expr (f, access->type, 0);
433   fprintf (f, ", non_addressable = %d, reverse = %d",
434 	   access->non_addressable, access->reverse);
435   if (grp)
436     fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
437 	     "grp_assignment_write = %d, grp_scalar_read = %d, "
438 	     "grp_scalar_write = %d, grp_total_scalarization = %d, "
439 	     "grp_hint = %d, grp_covered = %d, "
440 	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
441 	     "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
442 	     "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
443 	     "grp_not_necessarilly_dereferenced = %d\n",
444 	     access->grp_read, access->grp_write, access->grp_assignment_read,
445 	     access->grp_assignment_write, access->grp_scalar_read,
446 	     access->grp_scalar_write, access->grp_total_scalarization,
447 	     access->grp_hint, access->grp_covered,
448 	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
449 	     access->grp_partial_lhs, access->grp_to_be_replaced,
450 	     access->grp_to_be_debug_replaced, access->grp_maybe_modified,
451 	     access->grp_not_necessarilly_dereferenced);
452   else
453     fprintf (f, ", write = %d, grp_total_scalarization = %d, "
454 	     "grp_partial_lhs = %d\n",
455 	     access->write, access->grp_total_scalarization,
456 	     access->grp_partial_lhs);
457 }
458 
459 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */
460 
461 static void
462 dump_access_tree_1 (FILE *f, struct access *access, int level)
463 {
464   do
465     {
466       int i;
467 
468       for (i = 0; i < level; i++)
469 	fputs ("* ", f);
470 
471       dump_access (f, access, true);
472 
473       if (access->first_child)
474 	dump_access_tree_1 (f, access->first_child, level + 1);
475 
476       access = access->next_sibling;
477     }
478   while (access);
479 }
480 
481 /* Dump all access trees for a variable, given the pointer to the first root in
482    ACCESS.  */
483 
484 static void
485 dump_access_tree (FILE *f, struct access *access)
486 {
487   for (; access; access = access->next_grp)
488     dump_access_tree_1 (f, access, 0);
489 }
490 
491 /* Return true iff ACC is non-NULL and has subaccesses.  */
492 
493 static inline bool
494 access_has_children_p (struct access *acc)
495 {
496   return acc && acc->first_child;
497 }
498 
499 /* Return true iff ACC is (partly) covered by at least one replacement.  */
500 
501 static bool
502 access_has_replacements_p (struct access *acc)
503 {
504   struct access *child;
505   if (acc->grp_to_be_replaced)
506     return true;
507   for (child = acc->first_child; child; child = child->next_sibling)
508     if (access_has_replacements_p (child))
509       return true;
510   return false;
511 }
512 
513 /* Return a vector of pointers to accesses for the variable given in BASE or
514    NULL if there is none.  */
515 
516 static vec<access_p> *
517 get_base_access_vector (tree base)
518 {
519   return base_access_vec->get (base);
520 }
521 
522 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
523    in ACCESS.  Return NULL if it cannot be found.  */
524 
525 static struct access *
526 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
527 			HOST_WIDE_INT size)
528 {
529   while (access && (access->offset != offset || access->size != size))
530     {
531       struct access *child = access->first_child;
532 
533       while (child && (child->offset + child->size <= offset))
534 	child = child->next_sibling;
535       access = child;
536     }
537 
538   return access;
539 }
540 
541 /* Return the first group representative for BASE or NULL if none exists.  */
542 
543 static struct access *
544 get_first_repr_for_decl (tree base)
545 {
546   vec<access_p> *access_vec;
547 
548   access_vec = get_base_access_vector (base);
549   if (!access_vec)
550     return NULL;
551 
552   return (*access_vec)[0];
553 }
554 
555 /* Find an access representative for the variable BASE and given OFFSET and
556    SIZE.  Requires that access trees have already been built.  Return NULL if
557    it cannot be found.  */
558 
559 static struct access *
560 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
561 				 HOST_WIDE_INT size)
562 {
563   struct access *access;
564 
565   access = get_first_repr_for_decl (base);
566   while (access && (access->offset + access->size <= offset))
567     access = access->next_grp;
568   if (!access)
569     return NULL;
570 
571   return find_access_in_subtree (access, offset, size);
572 }
573 
574 /* Add LINK to the linked list of assign links of RACC.  */
575 static void
576 add_link_to_rhs (struct access *racc, struct assign_link *link)
577 {
578   gcc_assert (link->racc == racc);
579 
580   if (!racc->first_link)
581     {
582       gcc_assert (!racc->last_link);
583       racc->first_link = link;
584     }
585   else
586     racc->last_link->next = link;
587 
588   racc->last_link = link;
589   link->next = NULL;
590 }
591 
592 /* Move all link structures in their linked list in OLD_RACC to the linked list
593    in NEW_RACC.  */
594 static void
595 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
596 {
597   if (!old_racc->first_link)
598     {
599       gcc_assert (!old_racc->last_link);
600       return;
601     }
602 
603   if (new_racc->first_link)
604     {
605       gcc_assert (!new_racc->last_link->next);
606       gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
607 
608       new_racc->last_link->next = old_racc->first_link;
609       new_racc->last_link = old_racc->last_link;
610     }
611   else
612     {
613       gcc_assert (!new_racc->last_link);
614 
615       new_racc->first_link = old_racc->first_link;
616       new_racc->last_link = old_racc->last_link;
617     }
618   old_racc->first_link = old_racc->last_link = NULL;
619 }
620 
621 /* Add ACCESS to the work queue (which is actually a stack).  */
622 
623 static void
624 add_access_to_work_queue (struct access *access)
625 {
626   if (!access->grp_queued)
627     {
628       gcc_assert (!access->next_queued);
629       access->next_queued = work_queue_head;
630       access->grp_queued = 1;
631       work_queue_head = access;
632     }
633 }
634 
635 /* Pop an access from the work queue, and return it, assuming there is one.  */
636 
637 static struct access *
638 pop_access_from_work_queue (void)
639 {
640   struct access *access = work_queue_head;
641 
642   work_queue_head = access->next_queued;
643   access->next_queued = NULL;
644   access->grp_queued = 0;
645   return access;
646 }
647 
648 
649 /* Allocate necessary structures.  */
650 
651 static void
652 sra_initialize (void)
653 {
654   candidate_bitmap = BITMAP_ALLOC (NULL);
655   candidates = new hash_table<uid_decl_hasher>
656     (vec_safe_length (cfun->local_decls) / 2);
657   should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
658   cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
659   disqualified_constants = BITMAP_ALLOC (NULL);
660   gcc_obstack_init (&name_obstack);
661   base_access_vec = new hash_map<tree, auto_vec<access_p> >;
662   memset (&sra_stats, 0, sizeof (sra_stats));
663   encountered_apply_args = false;
664   encountered_recursive_call = false;
665   encountered_unchangable_recursive_call = false;
666 }
667 
668 /* Deallocate all general structures.  */
669 
670 static void
671 sra_deinitialize (void)
672 {
673   BITMAP_FREE (candidate_bitmap);
674   delete candidates;
675   candidates = NULL;
676   BITMAP_FREE (should_scalarize_away_bitmap);
677   BITMAP_FREE (cannot_scalarize_away_bitmap);
678   BITMAP_FREE (disqualified_constants);
679   access_pool.release ();
680   assign_link_pool.release ();
681   obstack_free (&name_obstack, NULL);
682 
683   delete base_access_vec;
684 }
685 
686 /* Return true if DECL is a VAR_DECL in the constant pool, false otherwise.  */
687 
688 static bool constant_decl_p (tree decl)
689 {
690   return VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl);
691 }
692 
693 /* Remove DECL from candidates for SRA and write REASON to the dump file if
694    there is one.  */
695 static void
696 disqualify_candidate (tree decl, const char *reason)
697 {
698   if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
699     candidates->remove_elt_with_hash (decl, DECL_UID (decl));
700   if (constant_decl_p (decl))
701     bitmap_set_bit (disqualified_constants, DECL_UID (decl));
702 
703   if (dump_file && (dump_flags & TDF_DETAILS))
704     {
705       fprintf (dump_file, "! Disqualifying ");
706       print_generic_expr (dump_file, decl, 0);
707       fprintf (dump_file, " - %s\n", reason);
708     }
709 }
710 
711 /* Return true iff the type contains a field or an element which does not allow
712    scalarization.  */
713 
714 static bool
715 type_internals_preclude_sra_p (tree type, const char **msg)
716 {
717   tree fld;
718   tree et;
719 
720   switch (TREE_CODE (type))
721     {
722     case RECORD_TYPE:
723     case UNION_TYPE:
724     case QUAL_UNION_TYPE:
725       for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
726 	if (TREE_CODE (fld) == FIELD_DECL)
727 	  {
728 	    tree ft = TREE_TYPE (fld);
729 
730 	    if (TREE_THIS_VOLATILE (fld))
731 	      {
732 		*msg = "volatile structure field";
733 		return true;
734 	      }
735 	    if (!DECL_FIELD_OFFSET (fld))
736 	      {
737 		*msg = "no structure field offset";
738 		return true;
739 	      }
740 	    if (!DECL_SIZE (fld))
741 	      {
742 		*msg = "zero structure field size";
743 	        return true;
744 	      }
745 	    if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
746 	      {
747 		*msg = "structure field offset not fixed";
748 		return true;
749 	      }
750 	    if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
751 	      {
752 	        *msg = "structure field size not fixed";
753 		return true;
754 	      }
755 	    if (!tree_fits_shwi_p (bit_position (fld)))
756 	      {
757 	        *msg = "structure field size too big";
758 		return true;
759 	      }
760 	    if (AGGREGATE_TYPE_P (ft)
761 		    && int_bit_position (fld) % BITS_PER_UNIT != 0)
762 	      {
763 		*msg = "structure field is bit field";
764 	        return true;
765 	      }
766 
767 	    if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
768 	      return true;
769 	  }
770 
771       return false;
772 
773     case ARRAY_TYPE:
774       et = TREE_TYPE (type);
775 
776       if (TYPE_VOLATILE (et))
777 	{
778 	  *msg = "element type is volatile";
779 	  return true;
780 	}
781 
782       if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
783 	return true;
784 
785       return false;
786 
787     default:
788       return false;
789     }
790 }
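
/* For instance, a type such as struct { volatile int x; int y; } is rejected
   by the function above because of the volatile field; likewise (to give a
   hypothetical further example) a record containing a field without a fixed
   size or offset.  */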
791 
792 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
793    base variable if it is.  Return T if it is not an SSA_NAME.  */
794 
795 static tree
796 get_ssa_base_param (tree t)
797 {
798   if (TREE_CODE (t) == SSA_NAME)
799     {
800       if (SSA_NAME_IS_DEFAULT_DEF (t))
801 	return SSA_NAME_VAR (t);
802       else
803 	return NULL_TREE;
804     }
805   return t;
806 }
807 
808 /* Mark a dereference of BASE of distance DIST in the basic block that STMT
809    belongs to, unless the BB has already been marked as potentially
810    final.  */
811 
812 static void
813 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple *stmt)
814 {
815   basic_block bb = gimple_bb (stmt);
816   int idx, parm_index = 0;
817   tree parm;
818 
819   if (bitmap_bit_p (final_bbs, bb->index))
820     return;
821 
822   for (parm = DECL_ARGUMENTS (current_function_decl);
823        parm && parm != base;
824        parm = DECL_CHAIN (parm))
825     parm_index++;
826 
827   gcc_assert (parm_index < func_param_count);
828 
829   idx = bb->index * func_param_count + parm_index;
830   if (bb_dereferences[idx] < dist)
831     bb_dereferences[idx] = dist;
832 }
833 
834 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
835    the three fields.  Also add it to the vector of accesses corresponding to
836    the base.  Finally, return the new access.  */
837 
838 static struct access *
839 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
840 {
841   struct access *access = access_pool.allocate ();
842 
843   memset (access, 0, sizeof (struct access));
844   access->base = base;
845   access->offset = offset;
846   access->size = size;
847 
848   base_access_vec->get_or_insert (base).safe_push (access);
849 
850   return access;
851 }
852 
853 static bool maybe_add_sra_candidate (tree);
854 
855 /* Create and insert an access for EXPR.  Return the created access, or NULL
856    if it is not possible.  Also scan for uses of constant pool entries as we
857    go along and add them to the candidates.  */
858 
859 static struct access *
860 create_access (tree expr, gimple *stmt, bool write)
861 {
862   struct access *access;
863   HOST_WIDE_INT offset, size, max_size;
864   tree base = expr;
865   bool reverse, ptr, unscalarizable_region = false;
866 
867   base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
868 
869   if (sra_mode == SRA_MODE_EARLY_IPA
870       && TREE_CODE (base) == MEM_REF)
871     {
872       base = get_ssa_base_param (TREE_OPERAND (base, 0));
873       if (!base)
874 	return NULL;
875       ptr = true;
876     }
877   else
878     ptr = false;
879 
880   /* For constant-pool entries, check we can substitute the constant value.  */
881   if (constant_decl_p (base)
882       && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA))
883     {
884       gcc_assert (!bitmap_bit_p (disqualified_constants, DECL_UID (base)));
885       if (expr != base
886 	  && !is_gimple_reg_type (TREE_TYPE (expr))
887 	  && dump_file && (dump_flags & TDF_DETAILS))
888 	{
889 	  /* This occurs in Ada with accesses to ARRAY_RANGE_REFs,
890 	     and elements of multidimensional arrays (which are
891 	     multi-element arrays in their own right).  */
892 	  fprintf (dump_file, "Allowing non-reg-type load of part"
893 			      " of constant-pool entry: ");
894 	  print_generic_expr (dump_file, expr, 0);
895 	}
896       maybe_add_sra_candidate (base);
897     }
898 
899   if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
900     return NULL;
901 
902   if (sra_mode == SRA_MODE_EARLY_IPA)
903     {
904       if (size < 0 || size != max_size)
905 	{
906 	  disqualify_candidate (base, "Encountered a variable sized access.");
907 	  return NULL;
908 	}
909       if (TREE_CODE (expr) == COMPONENT_REF
910 	  && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
911 	{
912 	  disqualify_candidate (base, "Encountered a bit-field access.");
913 	  return NULL;
914 	}
915       gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
916 
917       if (ptr)
918 	mark_parm_dereference (base, offset + size, stmt);
919     }
920   else
921     {
922       if (size != max_size)
923 	{
924 	  size = max_size;
925 	  unscalarizable_region = true;
926 	}
927       if (size < 0)
928 	{
929 	  disqualify_candidate (base, "Encountered an unconstrained access.");
930 	  return NULL;
931 	}
932     }
933 
934   access = create_access_1 (base, offset, size);
935   access->expr = expr;
936   access->type = TREE_TYPE (expr);
937   access->write = write;
938   access->grp_unscalarizable_region = unscalarizable_region;
939   access->stmt = stmt;
940   access->reverse = reverse;
941 
942   if (TREE_CODE (expr) == COMPONENT_REF
943       && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
944     access->non_addressable = 1;
945 
946   return access;
947 }
948 
949 
950 /* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
951    ARRAY_TYPE with fields that are either of gimple register types (excluding
952    bit-fields) or (recursively) scalarizable types.  CONST_DECL must be true if
953    we are considering a decl from the constant pool.  If it is false, char arrays
954    will be refused.  */
955 
956 static bool
957 scalarizable_type_p (tree type, bool const_decl)
958 {
959   gcc_assert (!is_gimple_reg_type (type));
960   if (type_contains_placeholder_p (type))
961     return false;
962 
963   switch (TREE_CODE (type))
964   {
965   case RECORD_TYPE:
966     for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
967       if (TREE_CODE (fld) == FIELD_DECL)
968 	{
969 	  tree ft = TREE_TYPE (fld);
970 
971 	  if (DECL_BIT_FIELD (fld))
972 	    return false;
973 
974 	  if (!is_gimple_reg_type (ft)
975 	      && !scalarizable_type_p (ft, const_decl))
976 	    return false;
977 	}
978 
979     return true;
980 
981   case ARRAY_TYPE:
982     {
983       HOST_WIDE_INT min_elem_size;
984       if (const_decl)
985 	min_elem_size = 0;
986       else
987 	min_elem_size = BITS_PER_UNIT;
988 
989       if (TYPE_DOMAIN (type) == NULL_TREE
990 	  || !tree_fits_shwi_p (TYPE_SIZE (type))
991 	  || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
992 	  || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= min_elem_size)
993 	  || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
994 	return false;
995       if (tree_to_shwi (TYPE_SIZE (type)) == 0
996 	  && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
997 	/* Zero-element array, should not prevent scalarization.  */
998 	;
999       else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
1000 	       || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
1001 	/* Variable-length array, do not allow scalarization.  */
1002 	return false;
1003 
1004       tree elem = TREE_TYPE (type);
1005       if (!is_gimple_reg_type (elem)
1006 	  && !scalarizable_type_p (elem, const_decl))
1007 	return false;
1008       return true;
1009     }
1010   default:
1011     return false;
1012   }
1013 }
1014 
1015 static void scalarize_elem (tree, HOST_WIDE_INT, HOST_WIDE_INT, bool, tree, tree);
1016 
1017 /* Create total_scalarization accesses for all scalar fields of a member
1018    of type DECL_TYPE conforming to scalarizable_type_p.  BASE
1019    must be the top-most VAR_DECL representing the variable; within that,
1020    OFFSET locates the member and REF must be the memory reference expression for
1021    the member.  */
1022 
1023 static void
1024 completely_scalarize (tree base, tree decl_type, HOST_WIDE_INT offset, tree ref)
1025 {
1026   switch (TREE_CODE (decl_type))
1027     {
1028     case RECORD_TYPE:
1029       for (tree fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
1030 	if (TREE_CODE (fld) == FIELD_DECL)
1031 	  {
1032 	    HOST_WIDE_INT pos = offset + int_bit_position (fld);
1033 	    tree ft = TREE_TYPE (fld);
1034 	    tree nref = build3 (COMPONENT_REF, ft, ref, fld, NULL_TREE);
1035 
1036 	    scalarize_elem (base, pos, tree_to_uhwi (DECL_SIZE (fld)),
1037 			    TYPE_REVERSE_STORAGE_ORDER (decl_type),
1038 			    nref, ft);
1039 	  }
1040       break;
1041     case ARRAY_TYPE:
1042       {
1043 	tree elemtype = TREE_TYPE (decl_type);
1044 	tree elem_size = TYPE_SIZE (elemtype);
1045 	gcc_assert (elem_size && tree_fits_shwi_p (elem_size));
1046 	HOST_WIDE_INT el_size = tree_to_shwi (elem_size);
1047 	gcc_assert (el_size > 0);
1048 
1049 	tree minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (decl_type));
1050 	gcc_assert (TREE_CODE (minidx) == INTEGER_CST);
1051 	tree maxidx = TYPE_MAX_VALUE (TYPE_DOMAIN (decl_type));
1052 	/* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1.  */
1053 	if (maxidx)
1054 	  {
1055 	    gcc_assert (TREE_CODE (maxidx) == INTEGER_CST);
1056 	    tree domain = TYPE_DOMAIN (decl_type);
1057 	    /* MINIDX and MAXIDX are inclusive, and must be interpreted in
1058 	       DOMAIN (e.g. signed int, whereas min/max may be size_int).  */
1059 	    offset_int idx = wi::to_offset (minidx);
1060 	    offset_int max = wi::to_offset (maxidx);
1061 	    if (!TYPE_UNSIGNED (domain))
1062 	      {
1063 		idx = wi::sext (idx, TYPE_PRECISION (domain));
1064 		max = wi::sext (max, TYPE_PRECISION (domain));
1065 	      }
1066 	    for (int el_off = offset; idx <= max; ++idx)
1067 	      {
1068 		tree nref = build4 (ARRAY_REF, elemtype,
1069 				    ref,
1070 				    wide_int_to_tree (domain, idx),
1071 				    NULL_TREE, NULL_TREE);
1072 		scalarize_elem (base, el_off, el_size,
1073 				TYPE_REVERSE_STORAGE_ORDER (decl_type),
1074 				nref, elemtype);
1075 		el_off += el_size;
1076 	      }
1077 	  }
1078       }
1079       break;
1080     default:
1081       gcc_unreachable ();
1082     }
1083 }
1084 
1085 /* Create total_scalarization accesses for a member of type TYPE, which must
1086    satisfy either is_gimple_reg_type or scalarizable_type_p.  BASE must be the
1087    top-most VAR_DECL representing the variable; within that, POS and SIZE locate
1088    the member, REVERSE gives its storage order, and REF must be the reference
1089    expression for it.  */
1090 
1091 static void
1092 scalarize_elem (tree base, HOST_WIDE_INT pos, HOST_WIDE_INT size, bool reverse,
1093 		tree ref, tree type)
1094 {
1095   if (is_gimple_reg_type (type))
1096   {
1097     struct access *access = create_access_1 (base, pos, size);
1098     access->expr = ref;
1099     access->type = type;
1100     access->grp_total_scalarization = 1;
1101     access->reverse = reverse;
1102     /* Accesses for intraprocedural SRA can have their stmt NULL.  */
1103   }
1104   else
1105     completely_scalarize (base, type, pos, ref);
1106 }
1107 
1108 /* Create a total_scalarization access for VAR as a whole.  VAR must be of a
1109    RECORD_TYPE or ARRAY_TYPE conforming to scalarizable_type_p.  */
1110 
1111 static void
1112 create_total_scalarization_access (tree var)
1113 {
1114   HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
1115   struct access *access;
1116 
1117   access = create_access_1 (var, 0, size);
1118   access->expr = var;
1119   access->type = TREE_TYPE (var);
1120   access->grp_total_scalarization = 1;
1121 }
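
/* A rough illustration (assuming a 32-bit int and a 16-bit short with no
   padding): for

     struct { int a; short b[2]; } v;

   total scalarization creates an access for v as a whole and, via
   completely_scalarize and scalarize_elem above, accesses for v.a
   (offset 0, size 32), v.b[0] (offset 32, size 16) and v.b[1]
   (offset 48, size 16), all with grp_total_scalarization set.  */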
1122 
1123 /* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */
1124 
1125 static inline bool
1126 contains_view_convert_expr_p (const_tree ref)
1127 {
1128   while (handled_component_p (ref))
1129     {
1130       if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
1131 	return true;
1132       ref = TREE_OPERAND (ref, 0);
1133     }
1134 
1135   return false;
1136 }
1137 
1138 /* Search the given tree for a declaration by skipping handled components and
1139    exclude it from the candidates.  */
1140 
1141 static void
1142 disqualify_base_of_expr (tree t, const char *reason)
1143 {
1144   t = get_base_address (t);
1145   if (sra_mode == SRA_MODE_EARLY_IPA
1146       && TREE_CODE (t) == MEM_REF)
1147     t = get_ssa_base_param (TREE_OPERAND (t, 0));
1148 
1149   if (t && DECL_P (t))
1150     disqualify_candidate (t, reason);
1151 }
1152 
1153 /* Scan expression EXPR and create access structures for all accesses to
1154    candidates for scalarization.  Return the created access or NULL if none is
1155    created.  */
1156 
1157 static struct access *
1158 build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
1159 {
1160   struct access *ret = NULL;
1161   bool partial_ref;
1162 
1163   if (TREE_CODE (expr) == BIT_FIELD_REF
1164       || TREE_CODE (expr) == IMAGPART_EXPR
1165       || TREE_CODE (expr) == REALPART_EXPR)
1166     {
1167       expr = TREE_OPERAND (expr, 0);
1168       partial_ref = true;
1169     }
1170   else
1171     partial_ref = false;
1172 
1173   /* We need to dive through V_C_Es in order to get the size of the operand
1174      and not the result type.  Ada produces such statements.  We are also
1175      capable of handling the topmost V_C_E but not any of those buried in other
1176      handled components.  */
1177   if (TREE_CODE (expr) == VIEW_CONVERT_EXPR && !storage_order_barrier_p (expr))
1178     expr = TREE_OPERAND (expr, 0);
1179 
1180   if (contains_view_convert_expr_p (expr))
1181     {
1182       disqualify_base_of_expr (expr, "V_C_E under a different handled "
1183 			       "component.");
1184       return NULL;
1185     }
1186   if (TREE_THIS_VOLATILE (expr))
1187     {
1188       disqualify_base_of_expr (expr, "part of a volatile reference.");
1189       return NULL;
1190     }
1191 
1192   switch (TREE_CODE (expr))
1193     {
1194     case MEM_REF:
1195       if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
1196 	  && sra_mode != SRA_MODE_EARLY_IPA)
1197 	return NULL;
1198       /* fall through */
1199     case VAR_DECL:
1200     case PARM_DECL:
1201     case RESULT_DECL:
1202     case COMPONENT_REF:
1203     case ARRAY_REF:
1204     case ARRAY_RANGE_REF:
1205       ret = create_access (expr, stmt, write);
1206       break;
1207 
1208     default:
1209       break;
1210     }
1211 
1212   if (write && partial_ref && ret)
1213     ret->grp_partial_lhs = 1;
1214 
1215   return ret;
1216 }
1217 
1218 /* Scan expression EXPR and create access structures for all accesses to
1219    candidates for scalarization.  Return true if any access has been inserted.
1220    STMT must be the statement from which the expression is taken, WRITE must be
1221    true if the expression is a store and false otherwise. */
1222 
1223 static bool
1224 build_access_from_expr (tree expr, gimple *stmt, bool write)
1225 {
1226   struct access *access;
1227 
1228   access = build_access_from_expr_1 (expr, stmt, write);
1229   if (access)
1230     {
1231       /* This means the aggregate is accessed as a whole in a way other than an
1232 	 assign statement and thus cannot be removed even if we had a scalar
1233 	 replacement for everything.  */
1234       if (cannot_scalarize_away_bitmap)
1235 	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1236       return true;
1237     }
1238   return false;
1239 }
1240 
1241 /* Return the single non-EH successor edge of BB or NULL if there is none or
1242    more than one.  */
1243 
1244 static edge
1245 single_non_eh_succ (basic_block bb)
1246 {
1247   edge e, res = NULL;
1248   edge_iterator ei;
1249 
1250   FOR_EACH_EDGE (e, ei, bb->succs)
1251     if (!(e->flags & EDGE_EH))
1252       {
1253 	if (res)
1254 	  return NULL;
1255 	res = e;
1256       }
1257 
1258   return res;
1259 }
1260 
1261 /* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
1262    there is no alternative spot in which to put statements SRA might need to
1263    generate after it.  The spot we are looking for is an edge leading to a
1264    single non-EH successor, if it exists and is indeed single.  RHS may be
1265    NULL, in that case ignore it.  */
1266 
1267 static bool
1268 disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
1269 {
1270   if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1271       && stmt_ends_bb_p (stmt))
1272     {
1273       if (single_non_eh_succ (gimple_bb (stmt)))
1274 	return false;
1275 
1276       disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1277       if (rhs)
1278 	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1279       return true;
1280     }
1281   return false;
1282 }
1283 
1284 /* Scan expressions occurring in STMT, create access structures for all accesses
1285    to candidates for scalarization and remove those candidates which occur in
1286    statements or expressions that prevent them from being split apart.  Return
1287    true if any access has been inserted.  */
1288 
1289 static bool
1290 build_accesses_from_assign (gimple *stmt)
1291 {
1292   tree lhs, rhs;
1293   struct access *lacc, *racc;
1294 
1295   if (!gimple_assign_single_p (stmt)
1296       /* Scope clobbers don't influence scalarization.  */
1297       || gimple_clobber_p (stmt))
1298     return false;
1299 
1300   lhs = gimple_assign_lhs (stmt);
1301   rhs = gimple_assign_rhs1 (stmt);
1302 
1303   if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
1304     return false;
1305 
1306   racc = build_access_from_expr_1 (rhs, stmt, false);
1307   lacc = build_access_from_expr_1 (lhs, stmt, true);
1308 
1309   if (lacc)
1310     {
1311       lacc->grp_assignment_write = 1;
1312       if (storage_order_barrier_p (rhs))
1313 	lacc->grp_unscalarizable_region = 1;
1314     }
1315 
1316   if (racc)
1317     {
1318       racc->grp_assignment_read = 1;
1319       if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1320 	  && !is_gimple_reg_type (racc->type))
1321 	bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1322       if (storage_order_barrier_p (lhs))
1323 	racc->grp_unscalarizable_region = 1;
1324     }
1325 
1326   if (lacc && racc
1327       && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1328       && !lacc->grp_unscalarizable_region
1329       && !racc->grp_unscalarizable_region
1330       && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1331       && lacc->size == racc->size
1332       && useless_type_conversion_p (lacc->type, racc->type))
1333     {
1334       struct assign_link *link;
1335 
1336       link = assign_link_pool.allocate ();
1337       memset (link, 0, sizeof (struct assign_link));
1338 
1339       link->lacc = lacc;
1340       link->racc = racc;
1341 
1342       add_link_to_rhs (racc, link);
1343     }
1344 
1345   return lacc || racc;
1346 }
1347 
1348 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1349    GIMPLE_ASM operands with memory constrains which cannot be scalarized.  */
1350    GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */
1351 static bool
1352 asm_visit_addr (gimple *, tree op, tree, void *)
1353 {
1354   op = get_base_address (op);
1355   if (op
1356       && DECL_P (op))
1357     disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1358 
1359   return false;
1360 }
1361 
1362 /* Return true iff callsite CALL has at least as many actual arguments as there
1363    are formal parameters of the function currently processed by IPA-SRA and
1364    their types match.  */
1365 
1366 static inline bool
1367 callsite_arguments_match_p (gimple *call)
1368 {
1369   if (gimple_call_num_args (call) < (unsigned) func_param_count)
1370     return false;
1371 
1372   tree parm;
1373   int i;
1374   for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
1375        parm;
1376        parm = DECL_CHAIN (parm), i++)
1377     {
1378       tree arg = gimple_call_arg (call, i);
1379       if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
1380 	return false;
1381     }
1382   return true;
1383 }
1384 
1385 /* Scan function and look for interesting expressions and create access
1386    structures for them.  Return true iff any access is created.  */
1387 
1388 static bool
1389 scan_function (void)
1390 {
1391   basic_block bb;
1392   bool ret = false;
1393 
1394   FOR_EACH_BB_FN (bb, cfun)
1395     {
1396       gimple_stmt_iterator gsi;
1397       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1398 	{
1399 	  gimple *stmt = gsi_stmt (gsi);
1400 	  tree t;
1401 	  unsigned i;
1402 
1403 	  if (final_bbs && stmt_can_throw_external (stmt))
1404 	    bitmap_set_bit (final_bbs, bb->index);
1405 	  switch (gimple_code (stmt))
1406 	    {
1407 	    case GIMPLE_RETURN:
1408 	      t = gimple_return_retval (as_a <greturn *> (stmt));
1409 	      if (t != NULL_TREE)
1410 		ret |= build_access_from_expr (t, stmt, false);
1411 	      if (final_bbs)
1412 		bitmap_set_bit (final_bbs, bb->index);
1413 	      break;
1414 
1415 	    case GIMPLE_ASSIGN:
1416 	      ret |= build_accesses_from_assign (stmt);
1417 	      break;
1418 
1419 	    case GIMPLE_CALL:
1420 	      for (i = 0; i < gimple_call_num_args (stmt); i++)
1421 		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1422 					       stmt, false);
1423 
1424 	      if (sra_mode == SRA_MODE_EARLY_IPA)
1425 		{
1426 		  tree dest = gimple_call_fndecl (stmt);
1427 		  int flags = gimple_call_flags (stmt);
1428 
1429 		  if (dest)
1430 		    {
1431 		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1432 			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1433 			encountered_apply_args = true;
1434 		      if (recursive_call_p (current_function_decl, dest))
1435 			{
1436 			  encountered_recursive_call = true;
1437 			  if (!callsite_arguments_match_p (stmt))
1438 			    encountered_unchangable_recursive_call = true;
1439 			}
1440 		    }
1441 
1442 		  if (final_bbs
1443 		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
1444 		    bitmap_set_bit (final_bbs, bb->index);
1445 		}
1446 
1447 	      t = gimple_call_lhs (stmt);
1448 	      if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
1449 		ret |= build_access_from_expr (t, stmt, true);
1450 	      break;
1451 
1452 	    case GIMPLE_ASM:
1453 	      {
1454 		gasm *asm_stmt = as_a <gasm *> (stmt);
1455 		walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
1456 					       asm_visit_addr);
1457 		if (final_bbs)
1458 		  bitmap_set_bit (final_bbs, bb->index);
1459 
1460 		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
1461 		  {
1462 		    t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
1463 		    ret |= build_access_from_expr (t, asm_stmt, false);
1464 		  }
1465 		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
1466 		  {
1467 		    t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
1468 		    ret |= build_access_from_expr (t, asm_stmt, true);
1469 		  }
1470 	      }
1471 	      break;
1472 
1473 	    default:
1474 	      break;
1475 	    }
1476 	}
1477     }
1478 
1479   return ret;
1480 }
1481 
1482 /* Comparison function for qsort.  The array contains pointers to accesses.  An
1483    access is considered smaller than another if it has a smaller offset or if
1484    the offsets are the same but its size is bigger.  */
1485 
1486 static int
1487 compare_access_positions (const void *a, const void *b)
1488 {
1489   const access_p *fp1 = (const access_p *) a;
1490   const access_p *fp2 = (const access_p *) b;
1491   const access_p f1 = *fp1;
1492   const access_p f2 = *fp2;
1493 
1494   if (f1->offset != f2->offset)
1495     return f1->offset < f2->offset ? -1 : 1;
1496 
1497   if (f1->size == f2->size)
1498     {
1499       if (f1->type == f2->type)
1500 	return 0;
1501       /* Put any non-aggregate type before any aggregate type.  */
1502       else if (!is_gimple_reg_type (f1->type)
1503 	  && is_gimple_reg_type (f2->type))
1504 	return 1;
1505       else if (is_gimple_reg_type (f1->type)
1506 	       && !is_gimple_reg_type (f2->type))
1507 	return -1;
1508       /* Put any complex or vector type before any other scalar type.  */
1509       else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1510 	       && TREE_CODE (f1->type) != VECTOR_TYPE
1511 	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
1512 		   || TREE_CODE (f2->type) == VECTOR_TYPE))
1513 	return 1;
1514       else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1515 		|| TREE_CODE (f1->type) == VECTOR_TYPE)
1516 	       && TREE_CODE (f2->type) != COMPLEX_TYPE
1517 	       && TREE_CODE (f2->type) != VECTOR_TYPE)
1518 	return -1;
1519       /* Put any integral type before any non-integral type.  When splicing, we
1520 	 make sure that those with insufficient precision and occupying the
1521 	 same space are not scalarized.  */
1522       else if (INTEGRAL_TYPE_P (f1->type)
1523 	       && !INTEGRAL_TYPE_P (f2->type))
1524 	return -1;
1525       else if (!INTEGRAL_TYPE_P (f1->type)
1526 	       && INTEGRAL_TYPE_P (f2->type))
1527 	return 1;
1528       /* Put the integral type with the bigger precision first.  */
1529       else if (INTEGRAL_TYPE_P (f1->type)
1530 	       && INTEGRAL_TYPE_P (f2->type)
1531 	       && (TYPE_PRECISION (f2->type) != TYPE_PRECISION (f1->type)))
1532 	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1533       /* Stabilize the sort.  */
1534       return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1535     }
1536 
1537   /* We want the bigger accesses first, thus the opposite operator in the next
1538      line: */
1539   return f1->size > f2->size ? -1 : 1;
1540 }
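
/* As an illustrative sketch (assuming 32-bit int and float), representatives
   with the following offset, size and type combinations would be ordered by
   the comparison above as

     offset 0,  size 64, RECORD_TYPE    (bigger sizes first)
     offset 0,  size 32, int            (integral before non-integral)
     offset 0,  size 32, float
     offset 32, size 32, int            (bigger offsets last)

   so that accesses referring to the same position end up next to each
   other.  */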
1541 
1542 
1543 /* Append the name of declaration DECL to the name obstack.  A helper function
1544    for make_fancy_name.  */
1545 
1546 static void
1547 make_fancy_decl_name (tree decl)
1548 {
1549   char buffer[32];
1550 
1551   tree name = DECL_NAME (decl);
1552   if (name)
1553     obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1554 		  IDENTIFIER_LENGTH (name));
1555   else
1556     {
1557       sprintf (buffer, "D%u", DECL_UID (decl));
1558       obstack_grow (&name_obstack, buffer, strlen (buffer));
1559     }
1560 }
1561 
1562 /* Helper for make_fancy_name.  */
1563 
1564 static void
1565 make_fancy_name_1 (tree expr)
1566 {
1567   char buffer[32];
1568   tree index;
1569 
1570   if (DECL_P (expr))
1571     {
1572       make_fancy_decl_name (expr);
1573       return;
1574     }
1575 
1576   switch (TREE_CODE (expr))
1577     {
1578     case COMPONENT_REF:
1579       make_fancy_name_1 (TREE_OPERAND (expr, 0));
1580       obstack_1grow (&name_obstack, '$');
1581       make_fancy_decl_name (TREE_OPERAND (expr, 1));
1582       break;
1583 
1584     case ARRAY_REF:
1585       make_fancy_name_1 (TREE_OPERAND (expr, 0));
1586       obstack_1grow (&name_obstack, '$');
1587       /* Arrays with only one element may not have a constant as their
1588 	 index. */
1589       index = TREE_OPERAND (expr, 1);
1590       if (TREE_CODE (index) != INTEGER_CST)
1591 	break;
1592       sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1593       obstack_grow (&name_obstack, buffer, strlen (buffer));
1594       break;
1595 
1596     case ADDR_EXPR:
1597       make_fancy_name_1 (TREE_OPERAND (expr, 0));
1598       break;
1599 
1600     case MEM_REF:
1601       make_fancy_name_1 (TREE_OPERAND (expr, 0));
1602       if (!integer_zerop (TREE_OPERAND (expr, 1)))
1603 	{
1604 	  obstack_1grow (&name_obstack, '$');
1605 	  sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1606 		   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1607 	  obstack_grow (&name_obstack, buffer, strlen (buffer));
1608 	}
1609       break;
1610 
1611     case BIT_FIELD_REF:
1612     case REALPART_EXPR:
1613     case IMAGPART_EXPR:
1614       gcc_unreachable (); 	/* we treat these as scalars.  */
1615       break;
1616     default:
1617       break;
1618     }
1619 }
1620 
1621 /* Create a human readable name for a replacement variable based on EXPR.  */
1622 
1623 static char *
1624 make_fancy_name (tree expr)
1625 {
1626   make_fancy_name_1 (expr);
1627   obstack_1grow (&name_obstack, '\0');
1628   return XOBFINISH (&name_obstack, char *);
1629 }
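
/* For example, an expression such as some_var.inner.fld would typically yield
   the fancy name "some_var$inner$fld" and arr[3].fld the name "arr$3$fld"
   (the declarations are hypothetical; non-constant array indices are simply
   skipped as shown in make_fancy_name_1 above).  */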
1630 
1631 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1632    EXP_TYPE at the given OFFSET and with storage order REVERSE.  If BASE is
1633    something for which get_addr_base_and_unit_offset returns NULL, gsi must
1634    be non-NULL and is used to insert new statements either before or below
1635    the current one as specified by INSERT_AFTER.  This function is not capable
1636    of handling bitfields.  */
1637 
1638 tree
1639 build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
1640 		      bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
1641 		      bool insert_after)
1642 {
1643   tree prev_base = base;
1644   tree off;
1645   tree mem_ref;
1646   HOST_WIDE_INT base_offset;
1647   unsigned HOST_WIDE_INT misalign;
1648   unsigned int align;
1649 
1650   /* Preserve address-space information.  */
1651   addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1652   if (as != TYPE_ADDR_SPACE (exp_type))
1653     exp_type = build_qualified_type (exp_type,
1654 				     TYPE_QUALS (exp_type)
1655 				     | ENCODE_QUAL_ADDR_SPACE (as));
1656 
1657   gcc_checking_assert (offset % BITS_PER_UNIT == 0);
1658   get_object_alignment_1 (base, &align, &misalign);
1659   base = get_addr_base_and_unit_offset (base, &base_offset);
1660 
1661   /* get_addr_base_and_unit_offset returns NULL for references with a variable
1662      offset such as array[var_index].  */
1663   if (!base)
1664     {
1665       gassign *stmt;
1666       tree tmp, addr;
1667 
1668       gcc_checking_assert (gsi);
1669       tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
1670       addr = build_fold_addr_expr (unshare_expr (prev_base));
1671       STRIP_USELESS_TYPE_CONVERSION (addr);
1672       stmt = gimple_build_assign (tmp, addr);
1673       gimple_set_location (stmt, loc);
1674       if (insert_after)
1675 	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1676       else
1677 	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1678 
1679       off = build_int_cst (reference_alias_ptr_type (prev_base),
1680 			   offset / BITS_PER_UNIT);
1681       base = tmp;
1682     }
1683   else if (TREE_CODE (base) == MEM_REF)
1684     {
1685       off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1686 			   base_offset + offset / BITS_PER_UNIT);
1687       off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1688       base = unshare_expr (TREE_OPERAND (base, 0));
1689     }
1690   else
1691     {
1692       off = build_int_cst (reference_alias_ptr_type (prev_base),
1693 			   base_offset + offset / BITS_PER_UNIT);
1694       base = build_fold_addr_expr (unshare_expr (base));
1695     }
1696 
1697   misalign = (misalign + offset) & (align - 1);
1698   if (misalign != 0)
1699     align = least_bit_hwi (misalign);
1700   if (align != TYPE_ALIGN (exp_type))
1701     exp_type = build_aligned_type (exp_type, align);
1702 
1703   mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1704   REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
1705   if (TREE_THIS_VOLATILE (prev_base))
1706     TREE_THIS_VOLATILE (mem_ref) = 1;
1707   if (TREE_SIDE_EFFECTS (prev_base))
1708     TREE_SIDE_EFFECTS (mem_ref) = 1;
1709   return mem_ref;
1710 }
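
/* A brief illustration (hypothetical, assuming a plain declaration "s" whose
   address get_addr_base_and_unit_offset can compute): calling
   build_ref_for_offset with an OFFSET of 32 bits and an EXP_TYPE of int
   conceptually yields the reference

     MEM[(int *)&s + 4B]

   whereas a base containing a variable array index first has its address
   stored into a new SSA name via GSI and the MEM_REF is built on top of
   that.  */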
1711 
1712 /* Construct a memory reference to a part of an aggregate BASE at the given
1713    OFFSET and of the same type as MODEL.  In case this is a reference to a
1714    bit-field, the function will replicate the last component_ref of model's
1715    expr to access it.  GSI and INSERT_AFTER have the same meaning as in
1716    build_ref_for_offset.  */
1717 
1718 static tree
1719 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1720 		     struct access *model, gimple_stmt_iterator *gsi,
1721 		     bool insert_after)
1722 {
1723   if (TREE_CODE (model->expr) == COMPONENT_REF
1724       && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1725     {
1726       /* This access represents a bit-field.  */
1727       tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1728 
1729       offset -= int_bit_position (fld);
1730       exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1731       t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
1732 				gsi, insert_after);
1733       /* The flag will be set on the record type.  */
1734       REF_REVERSE_STORAGE_ORDER (t) = 0;
1735       return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1736 			      NULL_TREE);
1737     }
1738   else
1739     return
1740       build_ref_for_offset (loc, base, offset, model->reverse, model->type,
1741 			    gsi, insert_after);
1742 }
1743 
1744 /* Attempt to build a memory reference that we could put into a gimple
1745    debug_bind statement.  Similar to build_ref_for_model but punts if it has to
1746    create statements and returns NULL instead.  This function also ignores
1747    alignment issues and so its results should never end up in non-debug
1748    statements.  */
1749 
1750 static tree
1751 build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1752 			   struct access *model)
1753 {
1754   HOST_WIDE_INT base_offset;
1755   tree off;
1756 
1757   if (TREE_CODE (model->expr) == COMPONENT_REF
1758       && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1759     return NULL_TREE;
1760 
1761   base = get_addr_base_and_unit_offset (base, &base_offset);
1762   if (!base)
1763     return NULL_TREE;
1764   if (TREE_CODE (base) == MEM_REF)
1765     {
1766       off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1767 			   base_offset + offset / BITS_PER_UNIT);
1768       off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1769       base = unshare_expr (TREE_OPERAND (base, 0));
1770     }
1771   else
1772     {
1773       off = build_int_cst (reference_alias_ptr_type (base),
1774 			   base_offset + offset / BITS_PER_UNIT);
1775       base = build_fold_addr_expr (unshare_expr (base));
1776     }
1777 
1778   return fold_build2_loc (loc, MEM_REF, model->type, base, off);
1779 }
1780 
1781 /* Construct a memory reference consisting of component_refs and array_refs to
1782    a part of an aggregate *RES (which is of type TYPE).  The requested part
1783    should have type EXP_TYPE and be at the given OFFSET.  This function might not
1784    succeed, it returns true when it does and only then *RES points to something
1785    meaningful.  This function should be used only to build expressions that we
1786    might need to present to the user (e.g. in warnings).  In all other situations,
1787    build_ref_for_model or build_ref_for_offset should be used instead.  */
1788 
1789 static bool
1790 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1791 				    tree exp_type)
1792 {
1793   while (1)
1794     {
1795       tree fld;
1796       tree tr_size, index, minidx;
1797       HOST_WIDE_INT el_size;
1798 
1799       if (offset == 0 && exp_type
1800 	  && types_compatible_p (exp_type, type))
1801 	return true;
1802 
1803       switch (TREE_CODE (type))
1804 	{
1805 	case UNION_TYPE:
1806 	case QUAL_UNION_TYPE:
1807 	case RECORD_TYPE:
1808 	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1809 	    {
1810 	      HOST_WIDE_INT pos, size;
1811 	      tree tr_pos, expr, *expr_ptr;
1812 
1813 	      if (TREE_CODE (fld) != FIELD_DECL)
1814 		continue;
1815 
1816 	      tr_pos = bit_position (fld);
1817 	      if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
1818 		continue;
1819 	      pos = tree_to_uhwi (tr_pos);
1820 	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1821 	      tr_size = DECL_SIZE (fld);
1822 	      if (!tr_size || !tree_fits_uhwi_p (tr_size))
1823 		continue;
1824 	      size = tree_to_uhwi (tr_size);
1825 	      if (size == 0)
1826 		{
1827 		  if (pos != offset)
1828 		    continue;
1829 		}
1830 	      else if (pos > offset || (pos + size) <= offset)
1831 		continue;
1832 
1833 	      expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1834 			     NULL_TREE);
1835 	      expr_ptr = &expr;
1836 	      if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1837 						      offset - pos, exp_type))
1838 		{
1839 		  *res = expr;
1840 		  return true;
1841 		}
1842 	    }
1843 	  return false;
1844 
1845 	case ARRAY_TYPE:
1846 	  tr_size = TYPE_SIZE (TREE_TYPE (type));
1847 	  if (!tr_size || !tree_fits_uhwi_p (tr_size))
1848 	    return false;
1849 	  el_size = tree_to_uhwi (tr_size);
1850 
1851 	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1852 	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1853 	    return false;
1854 	  index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1855 	  if (!integer_zerop (minidx))
1856 	    index = int_const_binop (PLUS_EXPR, index, minidx);
1857 	  *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1858 			 NULL_TREE, NULL_TREE);
1859 	  offset = offset % el_size;
1860 	  type = TREE_TYPE (type);
1861 	  break;
1862 
1863 	default:
1864 	  if (offset != 0)
1865 	    return false;
1866 
1867 	  if (exp_type)
1868 	    return false;
1869 	  else
1870 	    return true;
1871 	}
1872     }
1873 }
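
/* For example (a hypothetical sketch, assuming 32-bit int): with TYPE being
   struct { int a; int b; }, *RES being a declaration "s", OFFSET 32 and
   EXP_TYPE int, the loop above descends into field "b" and sets *RES to the
   expression

     s.b

   and returns true; if no field or array element matches the offset and
   type, false is returned.  */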
1874 
1875 /* Return true iff TYPE is a stdarg va_list type.  */
1876 
1877 static inline bool
1878 is_va_list_type (tree type)
1879 {
1880   return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1881 }
1882 
1883 /* Print a message to the dump file explaining why a variable was rejected.  */
1884 
1885 static void
1886 reject (tree var, const char *msg)
1887 {
1888   if (dump_file && (dump_flags & TDF_DETAILS))
1889     {
1890       fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1891       print_generic_expr (dump_file, var, 0);
1892       fprintf (dump_file, "\n");
1893     }
1894 }
1895 
1896 /* If VAR is a suitable SRA candidate, add it to the candidate set and return true.  */
1897 
1898 static bool
1899 maybe_add_sra_candidate (tree var)
1900 {
1901   tree type = TREE_TYPE (var);
1902   const char *msg;
1903   tree_node **slot;
1904 
1905   if (!AGGREGATE_TYPE_P (type))
1906     {
1907       reject (var, "not aggregate");
1908       return false;
1909     }
1910   /* Allow constant-pool entries (that "need to live in memory")
1911      unless we are doing IPA SRA.  */
1912   if (needs_to_live_in_memory (var)
1913       && (sra_mode == SRA_MODE_EARLY_IPA || !constant_decl_p (var)))
1914     {
1915       reject (var, "needs to live in memory");
1916       return false;
1917     }
1918   if (TREE_THIS_VOLATILE (var))
1919     {
1920       reject (var, "is volatile");
1921       return false;
1922     }
1923   if (!COMPLETE_TYPE_P (type))
1924     {
1925       reject (var, "has incomplete type");
1926       return false;
1927     }
1928   if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
1929     {
1930       reject (var, "type size not fixed");
1931       return false;
1932     }
1933   if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
1934     {
1935       reject (var, "type size is zero");
1936       return false;
1937     }
1938   if (type_internals_preclude_sra_p (type, &msg))
1939     {
1940       reject (var, msg);
1941       return false;
1942     }
1943   if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
1944 	 we also want to schedule it rather late.  Thus we ignore it in
1945 	 the early pass. */
1946       (sra_mode == SRA_MODE_EARLY_INTRA
1947        && is_va_list_type (type)))
1948     {
1949       reject (var, "is va_list");
1950       return false;
1951     }
1952 
1953   bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1954   slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
1955   *slot = var;
1956 
1957   if (dump_file && (dump_flags & TDF_DETAILS))
1958     {
1959       fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1960       print_generic_expr (dump_file, var, 0);
1961       fprintf (dump_file, "\n");
1962     }
1963 
1964   return true;
1965 }
1966 
1967 /* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
1968    those declarations whose type is suitable for scalarization.  */
1969 
1970 static bool
1971 find_var_candidates (void)
1972 {
1973   tree var, parm;
1974   unsigned int i;
1975   bool ret = false;
1976 
1977   for (parm = DECL_ARGUMENTS (current_function_decl);
1978        parm;
1979        parm = DECL_CHAIN (parm))
1980     ret |= maybe_add_sra_candidate (parm);
1981 
1982   FOR_EACH_LOCAL_DECL (cfun, i, var)
1983     {
1984       if (!VAR_P (var))
1985         continue;
1986 
1987       ret |= maybe_add_sra_candidate (var);
1988     }
1989 
1990   return ret;
1991 }
1992 
1993 /* Sort all accesses for the given variable, check for partial overlaps and
1994    return NULL if there are any.  If there are none, pick a representative for
1995    each combination of offset and size and create a linked list out of them.
1996    Return the pointer to the first representative and make sure it is the first
1997    one in the vector of accesses.  */
1998 
1999 static struct access *
2000 sort_and_splice_var_accesses (tree var)
2001 {
2002   int i, j, access_count;
2003   struct access *res, **prev_acc_ptr = &res;
2004   vec<access_p> *access_vec;
2005   bool first = true;
2006   HOST_WIDE_INT low = -1, high = 0;
2007 
2008   access_vec = get_base_access_vector (var);
2009   if (!access_vec)
2010     return NULL;
2011   access_count = access_vec->length ();
2012 
2013   /* Sort by <OFFSET, SIZE>.  */
2014   access_vec->qsort (compare_access_positions);
2015 
2016   i = 0;
2017   while (i < access_count)
2018     {
2019       struct access *access = (*access_vec)[i];
2020       bool grp_write = access->write;
2021       bool grp_read = !access->write;
2022       bool grp_scalar_write = access->write
2023 	&& is_gimple_reg_type (access->type);
2024       bool grp_scalar_read = !access->write
2025 	&& is_gimple_reg_type (access->type);
2026       bool grp_assignment_read = access->grp_assignment_read;
2027       bool grp_assignment_write = access->grp_assignment_write;
2028       bool multiple_scalar_reads = false;
2029       bool total_scalarization = access->grp_total_scalarization;
2030       bool grp_partial_lhs = access->grp_partial_lhs;
2031       bool first_scalar = is_gimple_reg_type (access->type);
2032       bool unscalarizable_region = access->grp_unscalarizable_region;
2033       bool bf_non_full_precision
2034 	= (INTEGRAL_TYPE_P (access->type)
2035 	   && TYPE_PRECISION (access->type) != access->size
2036 	   && TREE_CODE (access->expr) == COMPONENT_REF
2037 	   && DECL_BIT_FIELD (TREE_OPERAND (access->expr, 1)));
2038 
2039       if (first || access->offset >= high)
2040 	{
2041 	  first = false;
2042 	  low = access->offset;
2043 	  high = access->offset + access->size;
2044 	}
2045       else if (access->offset > low && access->offset + access->size > high)
2046 	return NULL;
2047       else
2048 	gcc_assert (access->offset >= low
2049 		    && access->offset + access->size <= high);
2050 
2051       j = i + 1;
2052       while (j < access_count)
2053 	{
2054 	  struct access *ac2 = (*access_vec)[j];
2055 	  if (ac2->offset != access->offset || ac2->size != access->size)
2056 	    break;
2057 	  if (ac2->write)
2058 	    {
2059 	      grp_write = true;
2060 	      grp_scalar_write = (grp_scalar_write
2061 				  || is_gimple_reg_type (ac2->type));
2062 	    }
2063 	  else
2064 	    {
2065 	      grp_read = true;
2066 	      if (is_gimple_reg_type (ac2->type))
2067 		{
2068 		  if (grp_scalar_read)
2069 		    multiple_scalar_reads = true;
2070 		  else
2071 		    grp_scalar_read = true;
2072 		}
2073 	    }
2074 	  grp_assignment_read |= ac2->grp_assignment_read;
2075 	  grp_assignment_write |= ac2->grp_assignment_write;
2076 	  grp_partial_lhs |= ac2->grp_partial_lhs;
2077 	  unscalarizable_region |= ac2->grp_unscalarizable_region;
2078 	  total_scalarization |= ac2->grp_total_scalarization;
2079 	  relink_to_new_repr (access, ac2);
2080 
2081 	  /* If there are both aggregate-type and scalar-type accesses with
2082 	     this combination of size and offset, the comparison function
2083 	     should have put the scalars first.  */
2084 	  gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
2085 	  /* It also prefers integral types to non-integral.  However, when the
2086 	     precision of the selected type does not span the entire area and
2087 	     should also be used for a non-integer (i.e. float), we must not
2088 	     let that happen.  Normally analyze_access_subtree expands the type
2089 	     to cover the entire area but for bit-fields it doesn't.  */
2090 	  if (bf_non_full_precision && !INTEGRAL_TYPE_P (ac2->type))
2091 	    {
2092 	      if (dump_file && (dump_flags & TDF_DETAILS))
2093 		{
2094 		  fprintf (dump_file, "Cannot scalarize the following access "
2095 			   "because insufficient precision integer type was "
2096 			   "selected.\n  ");
2097 		  dump_access (dump_file, access, false);
2098 		}
2099 	      unscalarizable_region = true;
2100 	    }
2101 	  ac2->group_representative = access;
2102 	  j++;
2103 	}
2104 
2105       i = j;
2106 
2107       access->group_representative = access;
2108       access->grp_write = grp_write;
2109       access->grp_read = grp_read;
2110       access->grp_scalar_read = grp_scalar_read;
2111       access->grp_scalar_write = grp_scalar_write;
2112       access->grp_assignment_read = grp_assignment_read;
2113       access->grp_assignment_write = grp_assignment_write;
2114       access->grp_hint = total_scalarization
2115 	|| (multiple_scalar_reads && !constant_decl_p (var));
2116       access->grp_total_scalarization = total_scalarization;
2117       access->grp_partial_lhs = grp_partial_lhs;
2118       access->grp_unscalarizable_region = unscalarizable_region;
2119       if (access->first_link)
2120 	add_access_to_work_queue (access);
2121 
2122       *prev_acc_ptr = access;
2123       prev_acc_ptr = &access->next_grp;
2124     }
2125 
2126   gcc_assert (res == (*access_vec)[0]);
2127   return res;
2128 }
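
/* Two hypothetical situations illustrating the overlap rules above: accesses
   <offset 0, size 64> and <offset 32, size 64> overlap only partially, so the
   function returns NULL and the whole variable is disqualified; accesses
   <offset 0, size 64> and <offset 0, size 64> have the same offset and size,
   so the second one is merely linked to the first, which becomes the group
   representative.  */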
2129 
2130 /* Create a variable suitable for replacing the given ACCESS, deriving its
2131    type, name and a few other properties from it.  Return the new variable
2132    declaration; callers store it in ACCESS->replacement_decl.  */
2133 
2134 static tree
2135 create_access_replacement (struct access *access)
2136 {
2137   tree repl;
2138 
2139   if (access->grp_to_be_debug_replaced)
2140     {
2141       repl = create_tmp_var_raw (access->type);
2142       DECL_CONTEXT (repl) = current_function_decl;
2143     }
2144   else
2145     /* Drop any special alignment on the type if it's not on the main
2146        variant.  This avoids issues with weirdo ABIs like AAPCS.  */
2147     repl = create_tmp_var (build_qualified_type
2148 			     (TYPE_MAIN_VARIANT (access->type),
2149 			      TYPE_QUALS (access->type)), "SR");
2150   if (TREE_CODE (access->type) == COMPLEX_TYPE
2151       || TREE_CODE (access->type) == VECTOR_TYPE)
2152     {
2153       if (!access->grp_partial_lhs)
2154 	DECL_GIMPLE_REG_P (repl) = 1;
2155     }
2156   else if (access->grp_partial_lhs
2157 	   && is_gimple_reg_type (access->type))
2158     TREE_ADDRESSABLE (repl) = 1;
2159 
2160   DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
2161   DECL_ARTIFICIAL (repl) = 1;
2162   DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
2163 
2164   if (DECL_NAME (access->base)
2165       && !DECL_IGNORED_P (access->base)
2166       && !DECL_ARTIFICIAL (access->base))
2167     {
2168       char *pretty_name = make_fancy_name (access->expr);
2169       tree debug_expr = unshare_expr_without_location (access->expr), d;
2170       bool fail = false;
2171 
2172       DECL_NAME (repl) = get_identifier (pretty_name);
2173       DECL_NAMELESS (repl) = 1;
2174       obstack_free (&name_obstack, pretty_name);
2175 
2176       /* Get rid of any SSA_NAMEs embedded in debug_expr,
2177 	 as DECL_DEBUG_EXPR isn't considered when looking for still
2178 	 used SSA_NAMEs and thus they could be freed.  All debug info
2179 	 generation cares is whether something is constant or variable
2180 	 and that get_ref_base_and_extent works properly on the
2181 	 expression.  It cannot handle accesses at a non-constant offset
2182 	 though, so just give up in those cases.  */
2183       for (d = debug_expr;
2184 	   !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
2185 	   d = TREE_OPERAND (d, 0))
2186 	switch (TREE_CODE (d))
2187 	  {
2188 	  case ARRAY_REF:
2189 	  case ARRAY_RANGE_REF:
2190 	    if (TREE_OPERAND (d, 1)
2191 		&& TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
2192 	      fail = true;
2193 	    if (TREE_OPERAND (d, 3)
2194 		&& TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
2195 	      fail = true;
2196 	    /* FALLTHRU */
2197 	  case COMPONENT_REF:
2198 	    if (TREE_OPERAND (d, 2)
2199 		&& TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
2200 	      fail = true;
2201 	    break;
2202 	  case MEM_REF:
2203 	    if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
2204 	      fail = true;
2205 	    else
2206 	      d = TREE_OPERAND (d, 0);
2207 	    break;
2208 	  default:
2209 	    break;
2210 	  }
2211       if (!fail)
2212 	{
2213 	  SET_DECL_DEBUG_EXPR (repl, debug_expr);
2214 	  DECL_HAS_DEBUG_EXPR_P (repl) = 1;
2215 	}
2216       if (access->grp_no_warning)
2217 	TREE_NO_WARNING (repl) = 1;
2218       else
2219 	TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
2220     }
2221   else
2222     TREE_NO_WARNING (repl) = 1;
2223 
2224   if (dump_file)
2225     {
2226       if (access->grp_to_be_debug_replaced)
2227 	{
2228 	  fprintf (dump_file, "Created a debug-only replacement for ");
2229 	  print_generic_expr (dump_file, access->base, 0);
2230 	  fprintf (dump_file, " offset: %u, size: %u\n",
2231 		   (unsigned) access->offset, (unsigned) access->size);
2232 	}
2233       else
2234 	{
2235 	  fprintf (dump_file, "Created a replacement for ");
2236 	  print_generic_expr (dump_file, access->base, 0);
2237 	  fprintf (dump_file, " offset: %u, size: %u: ",
2238 		   (unsigned) access->offset, (unsigned) access->size);
2239 	  print_generic_expr (dump_file, repl, 0);
2240 	  fprintf (dump_file, "\n");
2241 	}
2242     }
2243   sra_stats.replacements++;
2244 
2245   return repl;
2246 }
2247 
2248 /* Return the scalar replacement of ACCESS, which must exist.  */
2249 
2250 static inline tree
2251 get_access_replacement (struct access *access)
2252 {
2253   gcc_checking_assert (access->replacement_decl);
2254   return access->replacement_decl;
2255 }
2256 
2257 
2258 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2259    linked list along the way.  Stop when *ACCESS is NULL or the access pointed
2260    to by it is not "within" the root.  Return false iff some accesses partially
2261    overlap.  */
2262 
2263 static bool
2264 build_access_subtree (struct access **access)
2265 {
2266   struct access *root = *access, *last_child = NULL;
2267   HOST_WIDE_INT limit = root->offset + root->size;
2268 
2269   *access = (*access)->next_grp;
2270   while  (*access && (*access)->offset + (*access)->size <= limit)
2271     {
2272       if (!last_child)
2273 	root->first_child = *access;
2274       else
2275 	last_child->next_sibling = *access;
2276       last_child = *access;
2277 
2278       if (!build_access_subtree (access))
2279 	return false;
2280     }
2281 
2282   if (*access && (*access)->offset < limit)
2283     return false;
2284 
2285   return true;
2286 }
2287 
2288 /* Build a tree of access representatives, ACCESS is the pointer to the first
2289    one, others are linked in a list by the next_grp field.  Return false iff
2290    some accesses partially overlap.  */
2291 
2292 static bool
2293 build_access_trees (struct access *access)
2294 {
2295   while (access)
2296     {
2297       struct access *root = access;
2298 
2299       if (!build_access_subtree (&access))
2300 	return false;
2301       root->next_grp = access;
2302     }
2303   return true;
2304 }
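
/* A small hypothetical example of the resulting tree shape: for a variable of
   type struct { struct { int x; int y; } in; int z; } with representative
   accesses for the whole variable, for "in", for "in.x" and for "z", the
   whole-variable access becomes the root, "in" and "z" become its children
   and "in.x" becomes a child of "in", because each child lies entirely within
   its parent's offset and size.  */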
2305 
2306 /* Return true if EXPR contains some ARRAY_REFs into a variable bounded
2307    array.  */
2308 
2309 static bool
2310 expr_with_var_bounded_array_refs_p (tree expr)
2311 {
2312   while (handled_component_p (expr))
2313     {
2314       if (TREE_CODE (expr) == ARRAY_REF
2315 	  && !tree_fits_shwi_p (array_ref_low_bound (expr)))
2316 	return true;
2317       expr = TREE_OPERAND (expr, 0);
2318     }
2319   return false;
2320 }
2321 
2322 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2323    doing so both seems beneficial and ALLOW_REPLACEMENTS allows it.  Also set
2324    all sorts of access flags appropriately along the way, notably propagate
2325    grp_read, grp_assignment_read, grp_write, grp_assignment_write and
2326    grp_total_scalarization from PARENT down to ROOT.
2327 
2328    Creating a replacement for a scalar access is considered beneficial if its
2329    grp_hint is set (this means we are either attempting total scalarization or
2330    there is more than one direct read access) or according to the following
2331    table:
2332 
2333    Access written to through a scalar type (once or more times)
2334    |
2335    |	Written to in an assignment statement
2336    |	|
2337    |	|	Access read as scalar _once_
2338    |	|	|
2339    |   	|	|	Read in an assignment statement
2340    |	|	|	|
2341    |   	|	|	|	Scalarize	Comment
2342 -----------------------------------------------------------------------------
2343    0	0	0	0			No access for the scalar
2344    0	0	0	1			No access for the scalar
2345    0	0	1	0	No		Single read - won't help
2346    0	0	1	1	No		The same case
2347    0	1	0	0			No access for the scalar
2348    0	1	0	1			No access for the scalar
2349    0	1	1	0	Yes		s = *g; return s.i;
2350    0	1	1	1       Yes		The same case as above
2351    1	0	0	0	No		Won't help
2352    1	0	0	1	Yes		s.i = 1; *g = s;
2353    1	0	1	0	Yes		s.i = 5; g = s.i;
2354    1	0	1	1	Yes		The same case as above
2355    1	1	0	0	No		Won't help.
2356    1	1	0	1	Yes		s.i = 1; *g = s;
2357    1	1	1	0	Yes		s = *g; return s.i;
2358    1	1	1	1	Yes		Any of the above yeses  */
2359 
2360 static bool
2361 analyze_access_subtree (struct access *root, struct access *parent,
2362 			bool allow_replacements)
2363 {
2364   struct access *child;
2365   HOST_WIDE_INT limit = root->offset + root->size;
2366   HOST_WIDE_INT covered_to = root->offset;
2367   bool scalar = is_gimple_reg_type (root->type);
2368   bool hole = false, sth_created = false;
2369 
2370   if (parent)
2371     {
2372       if (parent->grp_read)
2373 	root->grp_read = 1;
2374       if (parent->grp_assignment_read)
2375 	root->grp_assignment_read = 1;
2376       if (parent->grp_write)
2377 	root->grp_write = 1;
2378       if (parent->grp_assignment_write)
2379 	root->grp_assignment_write = 1;
2380       if (parent->grp_total_scalarization)
2381 	root->grp_total_scalarization = 1;
2382     }
2383 
2384   if (root->grp_unscalarizable_region)
2385     allow_replacements = false;
2386 
2387   if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2388     allow_replacements = false;
2389 
2390   for (child = root->first_child; child; child = child->next_sibling)
2391     {
2392       hole |= covered_to < child->offset;
2393       sth_created |= analyze_access_subtree (child, root,
2394 					     allow_replacements && !scalar);
2395 
2396       root->grp_unscalarized_data |= child->grp_unscalarized_data;
2397       root->grp_total_scalarization &= child->grp_total_scalarization;
2398       if (child->grp_covered)
2399 	covered_to += child->size;
2400       else
2401 	hole = true;
2402     }
2403 
2404   if (allow_replacements && scalar && !root->first_child
2405       && (root->grp_hint
2406 	  || ((root->grp_scalar_read || root->grp_assignment_read)
2407 	      && (root->grp_scalar_write || root->grp_assignment_write))))
2408     {
2409       /* Always create access replacements that cover the whole access.
2410          For integral types this means the precision has to match.
2411 	 Avoid assumptions based on the integral type kind, too.  */
2412       if (INTEGRAL_TYPE_P (root->type)
2413 	  && (TREE_CODE (root->type) != INTEGER_TYPE
2414 	      || TYPE_PRECISION (root->type) != root->size)
2415 	  /* But leave bitfield accesses alone.  */
2416 	  && (TREE_CODE (root->expr) != COMPONENT_REF
2417 	      || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2418 	{
2419 	  tree rt = root->type;
2420 	  gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2421 		      && (root->size % BITS_PER_UNIT) == 0);
2422 	  root->type = build_nonstandard_integer_type (root->size,
2423 						       TYPE_UNSIGNED (rt));
2424 	  root->expr = build_ref_for_offset (UNKNOWN_LOCATION, root->base,
2425 					     root->offset, root->reverse,
2426 					     root->type, NULL, false);
2427 
2428 	  if (dump_file && (dump_flags & TDF_DETAILS))
2429 	    {
2430 	      fprintf (dump_file, "Changing the type of a replacement for ");
2431 	      print_generic_expr (dump_file, root->base, 0);
2432 	      fprintf (dump_file, " offset: %u, size: %u ",
2433 		       (unsigned) root->offset, (unsigned) root->size);
2434 	      fprintf (dump_file, " to an integer.\n");
2435 	    }
2436 	}
2437 
2438       root->grp_to_be_replaced = 1;
2439       root->replacement_decl = create_access_replacement (root);
2440       sth_created = true;
2441       hole = false;
2442     }
2443   else
2444     {
2445       if (allow_replacements
2446 	  && scalar && !root->first_child
2447 	  && (root->grp_scalar_write || root->grp_assignment_write)
2448 	  && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2449 			    DECL_UID (root->base)))
2450 	{
2451 	  gcc_checking_assert (!root->grp_scalar_read
2452 			       && !root->grp_assignment_read);
2453 	  sth_created = true;
2454 	  if (MAY_HAVE_DEBUG_STMTS)
2455 	    {
2456 	      root->grp_to_be_debug_replaced = 1;
2457 	      root->replacement_decl = create_access_replacement (root);
2458 	    }
2459 	}
2460 
2461       if (covered_to < limit)
2462 	hole = true;
2463       if (scalar || !allow_replacements)
2464 	root->grp_total_scalarization = 0;
2465     }
2466 
2467   if (!hole || root->grp_total_scalarization)
2468     root->grp_covered = 1;
2469   else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL
2470 	   || constant_decl_p (root->base))
2471     root->grp_unscalarized_data = 1; /* not covered and written to */
2472   return sth_created;
2473 }
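
/* One consequence of the precision check above, sketched on a hypothetical
   case: a scalar access whose type is _Bool occupies 8 bits, and because its
   type is not an INTEGER_TYPE (and the access is not a bit-field), its
   replacement type is switched to an 8-bit nonstandard integer type so that
   the replacement covers the whole accessed area.  */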
2474 
2475 /* Analyze all access trees linked by next_grp by the means of
2476    analyze_access_subtree.  */
2477 static bool
2478 analyze_access_trees (struct access *access)
2479 {
2480   bool ret = false;
2481 
2482   while (access)
2483     {
2484       if (analyze_access_subtree (access, NULL, true))
2485 	ret = true;
2486       access = access->next_grp;
2487     }
2488 
2489   return ret;
2490 }
2491 
2492 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with
2493    size SIZE would conflict with an already existing one.  If exactly such a child
2494    already exists in LACC, store a pointer to it in EXACT_MATCH.  */
2495 
2496 static bool
2497 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2498 			      HOST_WIDE_INT size, struct access **exact_match)
2499 {
2500   struct access *child;
2501 
2502   for (child = lacc->first_child; child; child = child->next_sibling)
2503     {
2504       if (child->offset == norm_offset && child->size == size)
2505 	{
2506 	  *exact_match = child;
2507 	  return true;
2508 	}
2509 
2510       if (child->offset < norm_offset + size
2511 	  && child->offset + child->size > norm_offset)
2512 	return true;
2513     }
2514 
2515   return false;
2516 }
2517 
2518 /* Create a new child access of PARENT, with all properties just like MODEL
2519    except for its offset and with its grp_write true and grp_read false.
2520    Return the new access or NULL if it cannot be created.  Note that this access
2521    is created long after all splicing and sorting, it's not located in any
2522    access vector and is automatically a representative of its group.  */
2523 
2524 static struct access *
2525 create_artificial_child_access (struct access *parent, struct access *model,
2526 				HOST_WIDE_INT new_offset)
2527 {
2528   struct access **child;
2529   tree expr = parent->base;
2530 
2531   gcc_assert (!model->grp_unscalarizable_region);
2532 
2533   struct access *access = access_pool.allocate ();
2534   memset (access, 0, sizeof (struct access));
2535   if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2536 					   model->type))
2537     {
2538       access->grp_no_warning = true;
2539       expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2540 				  new_offset, model, NULL, false);
2541     }
2542 
2543   access->base = parent->base;
2544   access->expr = expr;
2545   access->offset = new_offset;
2546   access->size = model->size;
2547   access->type = model->type;
2548   access->grp_write = true;
2549   access->grp_read = false;
2550   access->reverse = model->reverse;
2551 
2552   child = &parent->first_child;
2553   while (*child && (*child)->offset < new_offset)
2554     child = &(*child)->next_sibling;
2555 
2556   access->next_sibling = *child;
2557   *child = access;
2558 
2559   return access;
2560 }
2561 
2562 
2563 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2564    true if any new subaccess was created.  Additionally, if RACC is a scalar
2565    access but LACC is not, change the type of the latter, if possible.  */
2566 
2567 static bool
2568 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2569 {
2570   struct access *rchild;
2571   HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2572   bool ret = false;
2573 
2574   if (is_gimple_reg_type (lacc->type)
2575       || lacc->grp_unscalarizable_region
2576       || racc->grp_unscalarizable_region)
2577     return false;
2578 
2579   if (is_gimple_reg_type (racc->type))
2580     {
2581       if (!lacc->first_child && !racc->first_child)
2582 	{
2583 	  tree t = lacc->base;
2584 
2585 	  lacc->type = racc->type;
2586 	  if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2587 						  lacc->offset, racc->type))
2588 	    lacc->expr = t;
2589 	  else
2590 	    {
2591 	      lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2592 						lacc->base, lacc->offset,
2593 						racc, NULL, false);
2594 	      lacc->grp_no_warning = true;
2595 	    }
2596 	}
2597       return false;
2598     }
2599 
2600   for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2601     {
2602       struct access *new_acc = NULL;
2603       HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2604 
2605       if (rchild->grp_unscalarizable_region)
2606 	continue;
2607 
2608       if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2609 					&new_acc))
2610 	{
2611 	  if (new_acc)
2612 	    {
2613 	      rchild->grp_hint = 1;
2614 	      new_acc->grp_hint |= new_acc->grp_read;
2615 	      if (rchild->first_child)
2616 		ret |= propagate_subaccesses_across_link (new_acc, rchild);
2617 	    }
2618 	  continue;
2619 	}
2620 
2621       rchild->grp_hint = 1;
2622       new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2623       if (new_acc)
2624 	{
2625 	  ret = true;
2626 	  if (racc->first_child)
2627 	    propagate_subaccesses_across_link (new_acc, rchild);
2628 	}
2629     }
2630 
2631   return ret;
2632 }
2633 
2634 /* Propagate all subaccesses across assignment links.  */
2635 
2636 static void
2637 propagate_all_subaccesses (void)
2638 {
2639   while (work_queue_head)
2640     {
2641       struct access *racc = pop_access_from_work_queue ();
2642       struct assign_link *link;
2643 
2644       gcc_assert (racc->first_link);
2645 
2646       for (link = racc->first_link; link; link = link->next)
2647 	{
2648 	  struct access *lacc = link->lacc;
2649 
2650 	  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2651 	    continue;
2652 	  lacc = lacc->group_representative;
2653 	  if (propagate_subaccesses_across_link (lacc, racc)
2654 	      && lacc->first_link)
2655 	    add_access_to_work_queue (lacc);
2656 	}
2657     }
2658 }
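
/* A hypothetical illustration of propagation: given an aggregate assignment
   "dst = src;" recorded as an assign_link, if "src" has a child access for a
   field at some offset while "dst" does not, an artificial child access is
   created at the corresponding offset in "dst" (see
   create_artificial_child_access), so that the copy can later be performed
   through scalar replacements field by field.  */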
2659 
2660 /* Go through all accesses collected throughout the (intraprocedural) analysis
2661    stage, exclude overlapping ones, identify representatives and build trees
2662    out of them, making decisions about scalarization on the way.  Return true
2663    iff there are any to-be-scalarized variables after this stage. */
2664 
2665 static bool
2666 analyze_all_variable_accesses (void)
2667 {
2668   int res = 0;
2669   bitmap tmp = BITMAP_ALLOC (NULL);
2670   bitmap_iterator bi;
2671   unsigned i;
2672   bool optimize_speed_p = !optimize_function_for_size_p (cfun);
2673 
2674   enum compiler_param param = optimize_speed_p
2675 			? PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED
2676 			: PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE;
2677 
2678   /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
2679      fall back to a target default.  */
2680   unsigned HOST_WIDE_INT max_scalarization_size
2681     = global_options_set.x_param_values[param]
2682       ? PARAM_VALUE (param)
2683       : get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
2684 
2685   max_scalarization_size *= BITS_PER_UNIT;
2686 
2687   EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2688     if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2689 	&& !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2690       {
2691 	tree var = candidate (i);
2692 
2693 	if (VAR_P (var) && scalarizable_type_p (TREE_TYPE (var),
2694 						constant_decl_p (var)))
2695 	  {
2696 	    if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
2697 		<= max_scalarization_size)
2698 	      {
2699 		create_total_scalarization_access (var);
2700 		completely_scalarize (var, TREE_TYPE (var), 0, var);
2701 		statistics_counter_event (cfun,
2702 					  "Totally-scalarized aggregates", 1);
2703 		if (dump_file && (dump_flags & TDF_DETAILS))
2704 		  {
2705 		    fprintf (dump_file, "Will attempt to totally scalarize ");
2706 		    print_generic_expr (dump_file, var, 0);
2707 		    fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2708 		  }
2709 	      }
2710 	    else if (dump_file && (dump_flags & TDF_DETAILS))
2711 	      {
2712 		fprintf (dump_file, "Too big to totally scalarize: ");
2713 		print_generic_expr (dump_file, var, 0);
2714 		fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2715 	      }
2716 	  }
2717       }
2718 
2719   bitmap_copy (tmp, candidate_bitmap);
2720   EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2721     {
2722       tree var = candidate (i);
2723       struct access *access;
2724 
2725       access = sort_and_splice_var_accesses (var);
2726       if (!access || !build_access_trees (access))
2727 	disqualify_candidate (var,
2728 			      "No or inhibitingly overlapping accesses.");
2729     }
2730 
2731   propagate_all_subaccesses ();
2732 
2733   bitmap_copy (tmp, candidate_bitmap);
2734   EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2735     {
2736       tree var = candidate (i);
2737       struct access *access = get_first_repr_for_decl (var);
2738 
2739       if (analyze_access_trees (access))
2740 	{
2741 	  res++;
2742 	  if (dump_file && (dump_flags & TDF_DETAILS))
2743 	    {
2744 	      fprintf (dump_file, "\nAccess trees for ");
2745 	      print_generic_expr (dump_file, var, 0);
2746 	      fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2747 	      dump_access_tree (dump_file, access);
2748 	      fprintf (dump_file, "\n");
2749 	    }
2750 	}
2751       else
2752 	disqualify_candidate (var, "No scalar replacements to be created.");
2753     }
2754 
2755   BITMAP_FREE (tmp);
2756 
2757   if (res)
2758     {
2759       statistics_counter_event (cfun, "Scalarized aggregates", res);
2760       return true;
2761     }
2762   else
2763     return false;
2764 }
2765 
2766 /* Generate statements copying scalar replacements of accesses within a subtree
2767    into or out of AGG.  ACCESS, all its children, siblings and their children
2768    are to be processed.  AGG is an aggregate type expression (can be a
2769    declaration but does not have to be, it can for example also be a mem_ref or
2770    a series of handled components).  TOP_OFFSET is the offset of the processed
2771    subtree which has to be subtracted from offsets of individual accesses to
2772    get corresponding offsets for AGG.  If CHUNK_SIZE is non-zero, copy only
2773    replacements in the interval <start_offset, start_offset + chunk_size>,
2774    otherwise copy all.  GSI is a statement iterator used to place the new
2775    statements.  WRITE should be true when the statements should write from AGG
2776    to the replacement and false if vice versa.  If INSERT_AFTER is true, new
2777    statements will be added after the current statement in GSI, otherwise they
2778    will be added before it.  */
2779 
2780 static void
2781 generate_subtree_copies (struct access *access, tree agg,
2782 			 HOST_WIDE_INT top_offset,
2783 			 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2784 			 gimple_stmt_iterator *gsi, bool write,
2785 			 bool insert_after, location_t loc)
2786 {
2787   /* Never write anything into constant pool decls.  See PR70602.  */
2788   if (!write && constant_decl_p (agg))
2789     return;
2790   do
2791     {
2792       if (chunk_size && access->offset >= start_offset + chunk_size)
2793 	return;
2794 
2795       if (access->grp_to_be_replaced
2796 	  && (chunk_size == 0
2797 	      || access->offset + access->size > start_offset))
2798 	{
2799 	  tree expr, repl = get_access_replacement (access);
2800 	  gassign *stmt;
2801 
2802 	  expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2803 				      access, gsi, insert_after);
2804 
2805 	  if (write)
2806 	    {
2807 	      if (access->grp_partial_lhs)
2808 		expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2809 						 !insert_after,
2810 						 insert_after ? GSI_NEW_STMT
2811 						 : GSI_SAME_STMT);
2812 	      stmt = gimple_build_assign (repl, expr);
2813 	    }
2814 	  else
2815 	    {
2816 	      TREE_NO_WARNING (repl) = 1;
2817 	      if (access->grp_partial_lhs)
2818 		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2819 						 !insert_after,
2820 						 insert_after ? GSI_NEW_STMT
2821 						 : GSI_SAME_STMT);
2822 	      stmt = gimple_build_assign (expr, repl);
2823 	    }
2824 	  gimple_set_location (stmt, loc);
2825 
2826 	  if (insert_after)
2827 	    gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2828 	  else
2829 	    gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2830 	  update_stmt (stmt);
2831 	  sra_stats.subtree_copies++;
2832 	}
2833       else if (write
2834 	       && access->grp_to_be_debug_replaced
2835 	       && (chunk_size == 0
2836 		   || access->offset + access->size > start_offset))
2837 	{
2838 	  gdebug *ds;
2839 	  tree drhs = build_debug_ref_for_model (loc, agg,
2840 						 access->offset - top_offset,
2841 						 access);
2842 	  ds = gimple_build_debug_bind (get_access_replacement (access),
2843 					drhs, gsi_stmt (*gsi));
2844 	  if (insert_after)
2845 	    gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2846 	  else
2847 	    gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2848 	}
2849 
2850       if (access->first_child)
2851 	generate_subtree_copies (access->first_child, agg, top_offset,
2852 				 start_offset, chunk_size, gsi,
2853 				 write, insert_after, loc);
2854 
2855       access = access->next_sibling;
2856     }
2857   while (access);
2858 }
2859 
2860 /* Assign zero to all scalar replacements in an access subtree.  ACCESS is the
2861    root of the subtree to be processed.  GSI is the statement iterator used
2862    for inserting statements which are added after the current statement if
2863    INSERT_AFTER is true or before it otherwise.  */
2864 
2865 static void
2866 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2867 			bool insert_after, location_t loc)
2868 
2869 {
2870   struct access *child;
2871 
2872   if (access->grp_to_be_replaced)
2873     {
2874       gassign *stmt;
2875 
2876       stmt = gimple_build_assign (get_access_replacement (access),
2877 				  build_zero_cst (access->type));
2878       if (insert_after)
2879 	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2880       else
2881 	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2882       update_stmt (stmt);
2883       gimple_set_location (stmt, loc);
2884     }
2885   else if (access->grp_to_be_debug_replaced)
2886     {
2887       gdebug *ds
2888 	= gimple_build_debug_bind (get_access_replacement (access),
2889 				   build_zero_cst (access->type),
2890 				   gsi_stmt (*gsi));
2891       if (insert_after)
2892 	gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2893       else
2894 	gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2895     }
2896 
2897   for (child = access->first_child; child; child = child->next_sibling)
2898     init_subtree_with_zero (child, gsi, insert_after, loc);
2899 }
2900 
2901 /* Clobber all scalar replacements in an access subtree.  ACCESS is the
2902    root of the subtree to be processed.  GSI is the statement iterator used
2903    for inserting statements which are added after the current statement if
2904    INSERT_AFTER is true or before it otherwise.  */
2905 
2906 static void
2907 clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
2908 		bool insert_after, location_t loc)
2909 
2910 {
2911   struct access *child;
2912 
2913   if (access->grp_to_be_replaced)
2914     {
2915       tree rep = get_access_replacement (access);
2916       tree clobber = build_constructor (access->type, NULL);
2917       TREE_THIS_VOLATILE (clobber) = 1;
2918       gimple *stmt = gimple_build_assign (rep, clobber);
2919 
2920       if (insert_after)
2921 	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2922       else
2923 	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2924       update_stmt (stmt);
2925       gimple_set_location (stmt, loc);
2926     }
2927 
2928   for (child = access->first_child; child; child = child->next_sibling)
2929     clobber_subtree (child, gsi, insert_after, loc);
2930 }
2931 
2932 /* Search for an access representative for the given expression EXPR and
2933    return it or NULL if it cannot be found.  */
2934 
2935 static struct access *
2936 get_access_for_expr (tree expr)
2937 {
2938   HOST_WIDE_INT offset, size, max_size;
2939   tree base;
2940   bool reverse;
2941 
2942   /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2943      a different size than the size of its argument and we need the latter
2944      one.  */
2945   if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2946     expr = TREE_OPERAND (expr, 0);
2947 
2948   base = get_ref_base_and_extent (expr, &offset, &size, &max_size, &reverse);
2949   if (max_size == -1 || !DECL_P (base))
2950     return NULL;
2951 
2952   if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2953     return NULL;
2954 
2955   return get_var_base_offset_size_access (base, offset, max_size);
2956 }
2957 
2958 /* Replace the expression EXPR with a scalar replacement if there is one and
2959    generate other statements to do type conversion or subtree copying if
2960    necessary.  GSI is used to place newly created statements, WRITE is true if
2961    the expression is being written to (it is on a LHS of a statement or output
2962    in an assembly statement).  */
2963 
2964 static bool
2965 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2966 {
2967   location_t loc;
2968   struct access *access;
2969   tree type, bfr, orig_expr;
2970 
2971   if (TREE_CODE (*expr) == BIT_FIELD_REF)
2972     {
2973       bfr = *expr;
2974       expr = &TREE_OPERAND (*expr, 0);
2975     }
2976   else
2977     bfr = NULL_TREE;
2978 
2979   if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2980     expr = &TREE_OPERAND (*expr, 0);
2981   access = get_access_for_expr (*expr);
2982   if (!access)
2983     return false;
2984   type = TREE_TYPE (*expr);
2985   orig_expr = *expr;
2986 
2987   loc = gimple_location (gsi_stmt (*gsi));
2988   gimple_stmt_iterator alt_gsi = gsi_none ();
2989   if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
2990     {
2991       alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
2992       gsi = &alt_gsi;
2993     }
2994 
2995   if (access->grp_to_be_replaced)
2996     {
2997       tree repl = get_access_replacement (access);
2998       /* If we replace a non-register typed access simply use the original
2999          access expression to extract the scalar component afterwards.
3000 	 This happens if scalarizing a function return value or parameter
3001 	 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
3002 	 gcc.c-torture/compile/20011217-1.c.
3003 
3004          We also want to use this when accessing a complex or vector which can
3005          be accessed as a different type too, potentially creating a need for
3006          type conversion (see PR42196) and when scalarized unions are involved
3007          in assembler statements (see PR42398).  */
3008       if (!useless_type_conversion_p (type, access->type))
3009 	{
3010 	  tree ref;
3011 
3012 	  ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
3013 
3014 	  if (write)
3015 	    {
3016 	      gassign *stmt;
3017 
3018 	      if (access->grp_partial_lhs)
3019 		ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
3020 						 false, GSI_NEW_STMT);
3021 	      stmt = gimple_build_assign (repl, ref);
3022 	      gimple_set_location (stmt, loc);
3023 	      gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
3024 	    }
3025 	  else
3026 	    {
3027 	      gassign *stmt;
3028 
3029 	      if (access->grp_partial_lhs)
3030 		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
3031 						 true, GSI_SAME_STMT);
3032 	      stmt = gimple_build_assign (ref, repl);
3033 	      gimple_set_location (stmt, loc);
3034 	      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
3035 	    }
3036 	}
3037       else
3038 	*expr = repl;
3039       sra_stats.exprs++;
3040     }
3041   else if (write && access->grp_to_be_debug_replaced)
3042     {
3043       gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
3044 					    NULL_TREE,
3045 					    gsi_stmt (*gsi));
3046       gsi_insert_after (gsi, ds, GSI_NEW_STMT);
3047     }
3048 
3049   if (access->first_child)
3050     {
3051       HOST_WIDE_INT start_offset, chunk_size;
3052       if (bfr
3053 	  && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
3054 	  && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
3055 	{
3056 	  chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
3057 	  start_offset = access->offset
3058 	    + tree_to_uhwi (TREE_OPERAND (bfr, 2));
3059 	}
3060       else
3061 	start_offset = chunk_size = 0;
3062 
3063       generate_subtree_copies (access->first_child, orig_expr, access->offset,
3064 			       start_offset, chunk_size, gsi, write, write,
3065 			       loc);
3066     }
3067   return true;
3068 }
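
/* Sketch of the common outcomes of the function above (hypothetical names):
   if "s.f" has a scalar replacement SR.5 and the types match, a use such as
   "x_1 = s.f" simply becomes "x_1 = SR.5"; if the types differ (e.g. the
   scalarized part of a union is accessed through another type), the original
   reference is kept in the statement and an extra assignment between it and
   SR.5 is emitted before the statement for reads or after it for writes.  */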
3069 
3070 /* Where the scalar replacements of the RHS have been written to when a
3071    replacement of the LHS of an assignment cannot be directly loaded from a
3072    replacement of the RHS.  */
3073 enum unscalarized_data_handling { SRA_UDH_NONE,  /* Nothing done so far. */
3074 				  SRA_UDH_RIGHT, /* Data flushed to the RHS. */
3075 				  SRA_UDH_LEFT }; /* Data flushed to the LHS. */
3076 
3077 struct subreplacement_assignment_data
3078 {
3079   /* Offset of the access representing the lhs of the assignment.  */
3080   HOST_WIDE_INT left_offset;
3081 
3082   /* LHS and RHS of the original assignment.  */
3083   tree assignment_lhs, assignment_rhs;
3084 
3085   /* Access representing the rhs of the whole assignment.  */
3086   struct access *top_racc;
3087 
3088   /* Stmt iterator used for statement insertions after the original assignment.
3089    It points to the main GSI used to traverse a BB during function body
3090    modification.  */
3091   gimple_stmt_iterator *new_gsi;
3092 
3093   /* Stmt iterator used for statement insertions before the original
3094    assignment.  Keeps on pointing to the original statement.  */
3095   gimple_stmt_iterator old_gsi;
3096 
3097   /* Location of the assignment.   */
3098   location_t loc;
3099 
3100   /* Keeps the information whether we have needed to refresh replacements of
3101    the LHS and from which side of the assignments this takes place.  */
3102   enum unscalarized_data_handling refreshed;
3103 };
3104 
3105 /* Store all replacements in the access tree rooted in SAD->top_racc either back
3106    into the RHS aggregate if it contains unscalarized data, or directly into the
3107    LHS of the original assignment otherwise.  */
3108 
3109 static void
3110 handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
3111 {
3112   tree src;
3113   if (sad->top_racc->grp_unscalarized_data)
3114     {
3115       src = sad->assignment_rhs;
3116       sad->refreshed = SRA_UDH_RIGHT;
3117     }
3118   else
3119     {
3120       src = sad->assignment_lhs;
3121       sad->refreshed = SRA_UDH_LEFT;
3122     }
3123   generate_subtree_copies (sad->top_racc->first_child, src,
3124 			   sad->top_racc->offset, 0, 0,
3125 			   &sad->old_gsi, false, false, sad->loc);
3126 }
3127 
3128 /* Try to generate statements to load all sub-replacements in an access subtree
3129    formed by children of LACC from scalar replacements in the SAD->top_racc
3130    subtree.  If that is not possible, refresh the SAD->top_racc base aggregate
3131    and load the accesses from it.  */
3132 
3133 static void
3134 load_assign_lhs_subreplacements (struct access *lacc,
3135 				 struct subreplacement_assignment_data *sad)
3136 {
3137   for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
3138     {
3139       HOST_WIDE_INT offset;
3140       offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
3141 
3142       if (lacc->grp_to_be_replaced)
3143 	{
3144 	  struct access *racc;
3145 	  gassign *stmt;
3146 	  tree rhs;
3147 
3148 	  racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
3149 	  if (racc && racc->grp_to_be_replaced)
3150 	    {
3151 	      rhs = get_access_replacement (racc);
3152 	      if (!useless_type_conversion_p (lacc->type, racc->type))
3153 		rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3154 				       lacc->type, rhs);
3155 
3156 	      if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
3157 		rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
3158 						NULL_TREE, true, GSI_SAME_STMT);
3159 	    }
3160 	  else
3161 	    {
3162 	      /* No suitable access on the right hand side, need to load from
3163 		 the aggregate.  See if we have to update it first... */
3164 	      if (sad->refreshed == SRA_UDH_NONE)
3165 		handle_unscalarized_data_in_subtree (sad);
3166 
3167 	      if (sad->refreshed == SRA_UDH_LEFT)
3168 		rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
3169 					   lacc->offset - sad->left_offset,
3170 					   lacc, sad->new_gsi, true);
3171 	      else
3172 		rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
3173 					   lacc->offset - sad->left_offset,
3174 					   lacc, sad->new_gsi, true);
3175 	      if (lacc->grp_partial_lhs)
3176 		rhs = force_gimple_operand_gsi (sad->new_gsi,
3177 						rhs, true, NULL_TREE,
3178 						false, GSI_NEW_STMT);
3179 	    }
3180 
3181 	  stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
3182 	  gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
3183 	  gimple_set_location (stmt, sad->loc);
3184 	  update_stmt (stmt);
3185 	  sra_stats.subreplacements++;
3186 	}
3187       else
3188 	{
3189 	  if (sad->refreshed == SRA_UDH_NONE
3190 	      && lacc->grp_read && !lacc->grp_covered)
3191 	    handle_unscalarized_data_in_subtree (sad);
3192 
3193 	  if (lacc && lacc->grp_to_be_debug_replaced)
3194 	    {
3195 	      gdebug *ds;
3196 	      tree drhs;
3197 	      struct access *racc = find_access_in_subtree (sad->top_racc,
3198 							    offset,
3199 							    lacc->size);
3200 
3201 	      if (racc && racc->grp_to_be_replaced)
3202 		{
3203 		  if (racc->grp_write || constant_decl_p (racc->base))
3204 		    drhs = get_access_replacement (racc);
3205 		  else
3206 		    drhs = NULL;
3207 		}
3208 	      else if (sad->refreshed == SRA_UDH_LEFT)
3209 		drhs = build_debug_ref_for_model (sad->loc, lacc->base,
3210 						  lacc->offset, lacc);
3211 	      else if (sad->refreshed == SRA_UDH_RIGHT)
3212 		drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
3213 						  offset, lacc);
3214 	      else
3215 		drhs = NULL_TREE;
3216 	      if (drhs
3217 		  && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
3218 		drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
3219 					lacc->type, drhs);
3220 	      ds = gimple_build_debug_bind (get_access_replacement (lacc),
3221 					    drhs, gsi_stmt (sad->old_gsi));
3222 	      gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
3223 	    }
3224 	}
3225 
3226       if (lacc->first_child)
3227 	load_assign_lhs_subreplacements (lacc, sad);
3228     }
3229 }
3230 
3231 /* Result code for SRA assignment modification.  */
3232 enum assignment_mod_result { SRA_AM_NONE,       /* nothing done for the stmt */
3233 			     SRA_AM_MODIFIED,  /* stmt changed but not
3234 						  removed */
3235 			     SRA_AM_REMOVED };  /* stmt eliminated */
3236 
3237 /* Modify assignments with a CONSTRUCTOR on their RHS.  STMT is the assignment
3238    statement and GSI is the statement iterator pointing at it.  Returns the same
3239    values as sra_modify_assign.  */
3240 
3241 static enum assignment_mod_result
3242 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3243 {
3244   tree lhs = gimple_assign_lhs (stmt);
3245   struct access *acc = get_access_for_expr (lhs);
3246   if (!acc)
3247     return SRA_AM_NONE;
3248   location_t loc = gimple_location (stmt);
3249 
3250   if (gimple_clobber_p (stmt))
3251     {
3252       /* Clobber the replacement variable.  */
3253       clobber_subtree (acc, gsi, !acc->grp_covered, loc);
3254       /* Remove clobbers of fully scalarized variables, they are dead.  */
3255       if (acc->grp_covered)
3256 	{
3257 	  unlink_stmt_vdef (stmt);
3258 	  gsi_remove (gsi, true);
3259 	  release_defs (stmt);
3260 	  return SRA_AM_REMOVED;
3261 	}
3262       else
3263 	return SRA_AM_MODIFIED;
3264     }
3265 
3266   if (CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)) > 0)
3267     {
3268       /* I have never seen this code path trigger but if it can happen the
3269 	 following should handle it gracefully.  */
3270       if (access_has_children_p (acc))
3271 	generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
3272 				 true, true, loc);
3273       return SRA_AM_MODIFIED;
3274     }
3275 
3276   if (acc->grp_covered)
3277     {
3278       init_subtree_with_zero (acc, gsi, false, loc);
3279       unlink_stmt_vdef (stmt);
3280       gsi_remove (gsi, true);
3281       release_defs (stmt);
3282       return SRA_AM_REMOVED;
3283     }
3284   else
3285     {
3286       init_subtree_with_zero (acc, gsi, true, loc);
3287       return SRA_AM_MODIFIED;
3288     }
3289 }
3290 
3291 /* Create and return a new suitable default definition SSA_NAME for RACC which
3292    is an access describing an uninitialized part of an aggregate that is being
3293    loaded.  */
3294 
3295 static tree
3296 get_repl_default_def_ssa_name (struct access *racc)
3297 {
3298   gcc_checking_assert (!racc->grp_to_be_replaced
3299 		       && !racc->grp_to_be_debug_replaced);
3300   if (!racc->replacement_decl)
3301     racc->replacement_decl = create_access_replacement (racc);
3302   return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
3303 }
3304 
3305 /* Return true if REF contains a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
3306    bit-field field declaration somewhere in it.  */
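/* For illustration (hypothetical declarations): given
   "struct S { int i : 3; } x;" and "float y;", references such as "x.i" or
   "VIEW_CONVERT_EXPR<int>(y)" make this predicate return true, whereas a
   plain "x" or an ordinary non-bit-field component reference does not.  */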
3307 
3308 static inline bool
3309 contains_vce_or_bfcref_p (const_tree ref)
3310 {
3311   while (handled_component_p (ref))
3312     {
3313       if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
3314 	  || (TREE_CODE (ref) == COMPONENT_REF
3315 	      && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3316 	return true;
3317       ref = TREE_OPERAND (ref, 0);
3318     }
3319 
3320   return false;
3321 }
3322 
3323 /* Examine both sides of the assignment statement pointed to by STMT, replace
3324    them with a scalar replacement if there is one, and generate copying of
3325    replacements if scalarized aggregates have been used in the assignment.  GSI
3326    is used to hold generated statements for type conversions and subtree
3327    copying.  */
3328 
3329 static enum assignment_mod_result
3330 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3331 {
3332   struct access *lacc, *racc;
3333   tree lhs, rhs;
3334   bool modify_this_stmt = false;
3335   bool force_gimple_rhs = false;
3336   location_t loc;
3337   gimple_stmt_iterator orig_gsi = *gsi;
3338 
3339   if (!gimple_assign_single_p (stmt))
3340     return SRA_AM_NONE;
3341   lhs = gimple_assign_lhs (stmt);
3342   rhs = gimple_assign_rhs1 (stmt);
3343 
3344   if (TREE_CODE (rhs) == CONSTRUCTOR)
3345     return sra_modify_constructor_assign (stmt, gsi);
3346 
3347   if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3348       || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3349       || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3350     {
3351       modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
3352 					  gsi, false);
3353       modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
3354 					   gsi, true);
3355       return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3356     }
3357 
3358   lacc = get_access_for_expr (lhs);
3359   racc = get_access_for_expr (rhs);
3360   if (!lacc && !racc)
3361     return SRA_AM_NONE;
3362   /* Avoid modifying initializations of constant-pool replacements.  */
3363   if (racc && (racc->replacement_decl == lhs))
3364     return SRA_AM_NONE;
3365 
3366   loc = gimple_location (stmt);
3367   if (lacc && lacc->grp_to_be_replaced)
3368     {
3369       lhs = get_access_replacement (lacc);
3370       gimple_assign_set_lhs (stmt, lhs);
3371       modify_this_stmt = true;
3372       if (lacc->grp_partial_lhs)
3373 	force_gimple_rhs = true;
3374       sra_stats.exprs++;
3375     }
3376 
3377   if (racc && racc->grp_to_be_replaced)
3378     {
3379       rhs = get_access_replacement (racc);
3380       modify_this_stmt = true;
3381       if (racc->grp_partial_lhs)
3382 	force_gimple_rhs = true;
3383       sra_stats.exprs++;
3384     }
3385   else if (racc
3386 	   && !racc->grp_unscalarized_data
3387 	   && !racc->grp_unscalarizable_region
3388 	   && TREE_CODE (lhs) == SSA_NAME
3389 	   && !access_has_replacements_p (racc))
3390     {
3391       rhs = get_repl_default_def_ssa_name (racc);
3392       modify_this_stmt = true;
3393       sra_stats.exprs++;
3394     }
3395 
3396   if (modify_this_stmt)
3397     {
3398       if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3399 	{
3400 	  /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3401 	     ???  This should move to fold_stmt which we simply should
3402 	     call after building a VIEW_CONVERT_EXPR here.  */
3403 	  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3404 	      && !contains_bitfld_component_ref_p (lhs))
3405 	    {
3406 	      lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3407 	      gimple_assign_set_lhs (stmt, lhs);
3408 	    }
3409 	  else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3410 		   && !contains_vce_or_bfcref_p (rhs))
3411 	    rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3412 
3413 	  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3414 	    {
3415 	      rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3416 				     rhs);
3417 	      if (is_gimple_reg_type (TREE_TYPE (lhs))
3418 		  && TREE_CODE (lhs) != SSA_NAME)
3419 		force_gimple_rhs = true;
3420 	    }
3421 	}
3422     }
3423 
3424   if (lacc && lacc->grp_to_be_debug_replaced)
3425     {
3426       tree dlhs = get_access_replacement (lacc);
3427       tree drhs = unshare_expr (rhs);
3428       if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3429 	{
3430 	  if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3431 	      && !contains_vce_or_bfcref_p (drhs))
3432 	    drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3433 	  if (drhs
3434 	      && !useless_type_conversion_p (TREE_TYPE (dlhs),
3435 					     TREE_TYPE (drhs)))
3436 	    drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3437 				    TREE_TYPE (dlhs), drhs);
3438 	}
3439       gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
3440       gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3441     }
3442 
3443   /* From this point on, the function deals with assignments between
3444      aggregates when at least one has scalar reductions of some of its
3445      components.  There are three possible scenarios: 1) both the LHS and RHS
3446      have to-be-scalarized components, 2) only the RHS does, 3) only the LHS.
3447 
3448      In the first case, we would like to load the LHS components from RHS
3449      components whenever possible.  If that is not possible, we would like to
3450      read it directly from the RHS (after updating it by storing in it its own
3451      read them directly from the RHS (after updating it by storing in it its own
3452      those will be loaded by the original assignment too.  If neither of these
3453      cases happen, the original statement can be removed.  Most of this is done
3454      by load_assign_lhs_subreplacements.
3455 
3456      In the second case, we would like to store all RHS scalarized components
3457      directly into LHS and if they cover the aggregate completely, remove the
3458      statement too.  In the third case, we want the LHS components to be loaded
3459      directly from the RHS (DSE will remove the original statement if it
3460      becomes redundant).
3461 
3462      This is a bit complex but manageable when types match and when unions do
3463      not cause confusion in a way that we cannot really load a component of LHS
3464      from the RHS or vice versa (the access representing this level can have
3465      subaccesses that are accessible only through a different union field at a
3466      higher level - different from the one used in the examined expression).
3467      Unions are fun.
3468 
3469      Therefore, I specially handle a fourth case, happening when there is a
3470      specific type cast or it is impossible to locate a scalarized subaccess on
3471      the other side of the expression.  If that happens, I simply "refresh" the
3472      RHS by storing in it is scalarized components leave the original statement
3473      RHS by storing its scalarized components back into it, leave the original
3474      statement there to do the copying, and then load the scalar replacements of
3475      the LHS.  This is what the first branch does.  */
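  /* A sketch of scenario 2), using hypothetical names: if only "src" is
     scalarized, the copy "dst = src;" is preceded by stores such as
     "dst.a = SR_src_a; dst.b = SR_src_b;" produced by generate_subtree_copies,
     and when the replacements cover all of "src" and "dst" is not an SSA name,
     the original aggregate copy is removed.  */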
3476   if (modify_this_stmt
3477       || gimple_has_volatile_ops (stmt)
3478       || contains_vce_or_bfcref_p (rhs)
3479       || contains_vce_or_bfcref_p (lhs)
3480       || stmt_ends_bb_p (stmt))
3481     {
3482       /* No need to copy into a constant-pool, it comes pre-initialized.  */
3483       if (access_has_children_p (racc) && !constant_decl_p (racc->base))
3484 	generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3485 				 gsi, false, false, loc);
3486       if (access_has_children_p (lacc))
3487 	{
3488 	  gimple_stmt_iterator alt_gsi = gsi_none ();
3489 	  if (stmt_ends_bb_p (stmt))
3490 	    {
3491 	      alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
3492 	      gsi = &alt_gsi;
3493 	    }
3494 	  generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
3495 				   gsi, true, true, loc);
3496 	}
3497       sra_stats.separate_lhs_rhs_handling++;
3498 
3499       /* This gimplification must be done after generate_subtree_copies,
3500 	 lest we insert the subtree copies in the middle of the gimplified
3501 	 sequence.  */
3502       if (force_gimple_rhs)
3503 	rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3504 					true, GSI_SAME_STMT);
3505       if (gimple_assign_rhs1 (stmt) != rhs)
3506 	{
3507 	  modify_this_stmt = true;
3508 	  gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3509 	  gcc_assert (stmt == gsi_stmt (orig_gsi));
3510 	}
3511 
3512       return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3513     }
3514   else
3515     {
3516       if (access_has_children_p (lacc)
3517 	  && access_has_children_p (racc)
3518 	  /* When an access represents an unscalarizable region, it usually
3519 	     represents accesses with variable offset and thus must not be used
3520 	     to generate new memory accesses.  */
3521 	  && !lacc->grp_unscalarizable_region
3522 	  && !racc->grp_unscalarizable_region)
3523 	{
3524 	  struct subreplacement_assignment_data sad;
3525 
3526 	  sad.left_offset = lacc->offset;
3527 	  sad.assignment_lhs = lhs;
3528 	  sad.assignment_rhs = rhs;
3529 	  sad.top_racc = racc;
3530 	  sad.old_gsi = *gsi;
3531 	  sad.new_gsi = gsi;
3532 	  sad.loc = gimple_location (stmt);
3533 	  sad.refreshed = SRA_UDH_NONE;
3534 
3535 	  if (lacc->grp_read && !lacc->grp_covered)
3536 	    handle_unscalarized_data_in_subtree (&sad);
3537 
3538 	  load_assign_lhs_subreplacements (lacc, &sad);
3539 	  if (sad.refreshed != SRA_UDH_RIGHT)
3540 	    {
3541 	      gsi_next (gsi);
3542 	      unlink_stmt_vdef (stmt);
3543 	      gsi_remove (&sad.old_gsi, true);
3544 	      release_defs (stmt);
3545 	      sra_stats.deleted++;
3546 	      return SRA_AM_REMOVED;
3547 	    }
3548 	}
3549       else
3550 	{
3551 	  if (access_has_children_p (racc)
3552 	      && !racc->grp_unscalarized_data
3553 	      && TREE_CODE (lhs) != SSA_NAME)
3554 	    {
3555 	      if (dump_file)
3556 		{
3557 		  fprintf (dump_file, "Removing load: ");
3558 		  print_gimple_stmt (dump_file, stmt, 0, 0);
3559 		}
3560 	      generate_subtree_copies (racc->first_child, lhs,
3561 				       racc->offset, 0, 0, gsi,
3562 				       false, false, loc);
3563 	      gcc_assert (stmt == gsi_stmt (*gsi));
3564 	      unlink_stmt_vdef (stmt);
3565 	      gsi_remove (gsi, true);
3566 	      release_defs (stmt);
3567 	      sra_stats.deleted++;
3568 	      return SRA_AM_REMOVED;
3569 	    }
3570 	  /* Restore the aggregate RHS from its components so the
3571 	     prevailing aggregate copy does the right thing.  */
3572 	  if (access_has_children_p (racc))
3573 	    generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
3574 				     gsi, false, false, loc);
3575 	  /* Re-load the components of the aggregate copy destination.
3576 	     But use the RHS aggregate to load from to expose more
3577 	     optimization opportunities.  */
3578 	  if (access_has_children_p (lacc))
3579 	    generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3580 				     0, 0, gsi, true, true, loc);
3581 	}
3582 
3583       return SRA_AM_NONE;
3584     }
3585 }
3586 
3587 /* Set any scalar replacements of values in the constant pool to the initial
3588    value of the constant.  (Constant-pool decls like *.LC0 have effectively
3589    been initialized before the program starts; we must do the same for their
3590    replacements.)  Thus, we output statements like 'SR.1 = *.LC0[0];' into
3591    the function's entry block.  */
3592 
3593 static void
3594 initialize_constant_pool_replacements (void)
3595 {
3596   gimple_seq seq = NULL;
3597   gimple_stmt_iterator gsi = gsi_start (seq);
3598   bitmap_iterator bi;
3599   unsigned i;
3600 
3601   EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
3602     {
3603       tree var = candidate (i);
3604       if (!constant_decl_p (var))
3605 	continue;
3606       vec<access_p> *access_vec = get_base_access_vector (var);
3607       if (!access_vec)
3608 	continue;
3609       for (unsigned i = 0; i < access_vec->length (); i++)
3610 	{
3611 	  struct access *access = (*access_vec)[i];
3612 	  if (!access->replacement_decl)
3613 	    continue;
3614 	  gassign *stmt
3615 	    = gimple_build_assign (get_access_replacement (access),
3616 				   unshare_expr (access->expr));
3617 	  if (dump_file && (dump_flags & TDF_DETAILS))
3618 	    {
3619 	      fprintf (dump_file, "Generating constant initializer: ");
3620 	      print_gimple_stmt (dump_file, stmt, 0, 1);
3621 	      fprintf (dump_file, "\n");
3622 	    }
3623 	  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
3624 	  update_stmt (stmt);
3625 	}
3626     }
3627 
3628   seq = gsi_seq (gsi);
3629   if (seq)
3630     gsi_insert_seq_on_edge_immediate (
3631       single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3632 }
3633 
3634 /* Traverse the function body and perform all modifications as decided in
3635    analyze_all_variable_accesses.  Return true iff the CFG has been
3636    changed.  */
3637 
3638 static bool
3639 sra_modify_function_body (void)
3640 {
3641   bool cfg_changed = false;
3642   basic_block bb;
3643 
3644   initialize_constant_pool_replacements ();
3645 
3646   FOR_EACH_BB_FN (bb, cfun)
3647     {
3648       gimple_stmt_iterator gsi = gsi_start_bb (bb);
3649       while (!gsi_end_p (gsi))
3650 	{
3651 	  gimple *stmt = gsi_stmt (gsi);
3652 	  enum assignment_mod_result assign_result;
3653 	  bool modified = false, deleted = false;
3654 	  tree *t;
3655 	  unsigned i;
3656 
3657 	  switch (gimple_code (stmt))
3658 	    {
3659 	    case GIMPLE_RETURN:
3660 	      t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
3661 	      if (*t != NULL_TREE)
3662 		modified |= sra_modify_expr (t, &gsi, false);
3663 	      break;
3664 
3665 	    case GIMPLE_ASSIGN:
3666 	      assign_result = sra_modify_assign (stmt, &gsi);
3667 	      modified |= assign_result == SRA_AM_MODIFIED;
3668 	      deleted = assign_result == SRA_AM_REMOVED;
3669 	      break;
3670 
3671 	    case GIMPLE_CALL:
3672 	      /* Operands must be processed before the lhs.  */
3673 	      for (i = 0; i < gimple_call_num_args (stmt); i++)
3674 		{
3675 		  t = gimple_call_arg_ptr (stmt, i);
3676 		  modified |= sra_modify_expr (t, &gsi, false);
3677 		}
3678 
3679 	      if (gimple_call_lhs (stmt))
3680 		{
3681 		  t = gimple_call_lhs_ptr (stmt);
3682 		  modified |= sra_modify_expr (t, &gsi, true);
3683 		}
3684 	      break;
3685 
3686 	    case GIMPLE_ASM:
3687 	      {
3688 		gasm *asm_stmt = as_a <gasm *> (stmt);
3689 		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
3690 		  {
3691 		    t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
3692 		    modified |= sra_modify_expr (t, &gsi, false);
3693 		  }
3694 		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
3695 		  {
3696 		    t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
3697 		    modified |= sra_modify_expr (t, &gsi, true);
3698 		  }
3699 	      }
3700 	      break;
3701 
3702 	    default:
3703 	      break;
3704 	    }
3705 
3706 	  if (modified)
3707 	    {
3708 	      update_stmt (stmt);
3709 	      if (maybe_clean_eh_stmt (stmt)
3710 		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3711 		cfg_changed = true;
3712 	    }
3713 	  if (!deleted)
3714 	    gsi_next (&gsi);
3715 	}
3716     }
3717 
3718   gsi_commit_edge_inserts ();
3719   return cfg_changed;
3720 }
3721 
3722 /* Generate statements initializing scalar replacements of parts of function
3723    parameters.  */
3724 
3725 static void
3726 initialize_parameter_reductions (void)
3727 {
3728   gimple_stmt_iterator gsi;
3729   gimple_seq seq = NULL;
3730   tree parm;
3731 
3732   gsi = gsi_start (seq);
3733   for (parm = DECL_ARGUMENTS (current_function_decl);
3734        parm;
3735        parm = DECL_CHAIN (parm))
3736     {
3737       vec<access_p> *access_vec;
3738       struct access *access;
3739 
3740       if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3741 	continue;
3742       access_vec = get_base_access_vector (parm);
3743       if (!access_vec)
3744 	continue;
3745 
3746       for (access = (*access_vec)[0];
3747 	   access;
3748 	   access = access->next_grp)
3749 	generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3750 				 EXPR_LOCATION (parm));
3751     }
3752 
3753   seq = gsi_seq (gsi);
3754   if (seq)
3755     gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
3756 }
3757 
3758 /* The "main" function of intraprocedural SRA passes.  Runs the analysis and if
3759    it reveals there are components of some aggregates to be scalarized, it runs
3760    the required transformations.  */
3761 static unsigned int
3762 perform_intra_sra (void)
3763 {
3764   int ret = 0;
3765   sra_initialize ();
3766 
3767   if (!find_var_candidates ())
3768     goto out;
3769 
3770   if (!scan_function ())
3771     goto out;
3772 
3773   if (!analyze_all_variable_accesses ())
3774     goto out;
3775 
3776   if (sra_modify_function_body ())
3777     ret = TODO_update_ssa | TODO_cleanup_cfg;
3778   else
3779     ret = TODO_update_ssa;
3780   initialize_parameter_reductions ();
3781 
3782   statistics_counter_event (cfun, "Scalar replacements created",
3783 			    sra_stats.replacements);
3784   statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3785   statistics_counter_event (cfun, "Subtree copy stmts",
3786 			    sra_stats.subtree_copies);
3787   statistics_counter_event (cfun, "Subreplacement stmts",
3788 			    sra_stats.subreplacements);
3789   statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3790   statistics_counter_event (cfun, "Separate LHS and RHS handling",
3791 			    sra_stats.separate_lhs_rhs_handling);
3792 
3793  out:
3794   sra_deinitialize ();
3795   return ret;
3796 }
3797 
3798 /* Perform early intraprocedural SRA.  */
3799 static unsigned int
3800 early_intra_sra (void)
3801 {
3802   sra_mode = SRA_MODE_EARLY_INTRA;
3803   return perform_intra_sra ();
3804 }
3805 
3806 /* Perform "late" intraprocedural SRA.  */
3807 static unsigned int
3808 late_intra_sra (void)
3809 {
3810   sra_mode = SRA_MODE_INTRA;
3811   return perform_intra_sra ();
3812 }
3813 
3814 
3815 static bool
3816 gate_intra_sra (void)
3817 {
3818   return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3819 }
3820 
3821 
3822 namespace {
3823 
3824 const pass_data pass_data_sra_early =
3825 {
3826   GIMPLE_PASS, /* type */
3827   "esra", /* name */
3828   OPTGROUP_NONE, /* optinfo_flags */
3829   TV_TREE_SRA, /* tv_id */
3830   ( PROP_cfg | PROP_ssa ), /* properties_required */
3831   0, /* properties_provided */
3832   0, /* properties_destroyed */
3833   0, /* todo_flags_start */
3834   TODO_update_ssa, /* todo_flags_finish */
3835 };
3836 
3837 class pass_sra_early : public gimple_opt_pass
3838 {
3839 public:
3840   pass_sra_early (gcc::context *ctxt)
3841     : gimple_opt_pass (pass_data_sra_early, ctxt)
3842   {}
3843 
3844   /* opt_pass methods: */
3845   virtual bool gate (function *) { return gate_intra_sra (); }
3846   virtual unsigned int execute (function *) { return early_intra_sra (); }
3847 
3848 }; // class pass_sra_early
3849 
3850 } // anon namespace
3851 
3852 gimple_opt_pass *
3853 make_pass_sra_early (gcc::context *ctxt)
3854 {
3855   return new pass_sra_early (ctxt);
3856 }
3857 
3858 namespace {
3859 
3860 const pass_data pass_data_sra =
3861 {
3862   GIMPLE_PASS, /* type */
3863   "sra", /* name */
3864   OPTGROUP_NONE, /* optinfo_flags */
3865   TV_TREE_SRA, /* tv_id */
3866   ( PROP_cfg | PROP_ssa ), /* properties_required */
3867   0, /* properties_provided */
3868   0, /* properties_destroyed */
3869   TODO_update_address_taken, /* todo_flags_start */
3870   TODO_update_ssa, /* todo_flags_finish */
3871 };
3872 
3873 class pass_sra : public gimple_opt_pass
3874 {
3875 public:
3876   pass_sra (gcc::context *ctxt)
3877     : gimple_opt_pass (pass_data_sra, ctxt)
3878   {}
3879 
3880   /* opt_pass methods: */
3881   virtual bool gate (function *) { return gate_intra_sra (); }
3882   virtual unsigned int execute (function *) { return late_intra_sra (); }
3883 
3884 }; // class pass_sra
3885 
3886 } // anon namespace
3887 
3888 gimple_opt_pass *
3889 make_pass_sra (gcc::context *ctxt)
3890 {
3891   return new pass_sra (ctxt);
3892 }
3893 
3894 
3895 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
3896    parameter.  */
3897 
3898 static bool
3899 is_unused_scalar_param (tree parm)
3900 {
3901   tree name;
3902   return (is_gimple_reg (parm)
3903 	  && (!(name = ssa_default_def (cfun, parm))
3904 	      || has_zero_uses (name)));
3905 }
3906 
3907 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3908    examine whether there are any direct or otherwise infeasible ones.  If so,
3909    return true, otherwise return false.  PARM must be a gimple register with a
3910    non-NULL default definition.  */
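/* For illustration (hypothetical source): in "int f (int *p)", uses such as
   "*p" on either side of an assignment or "*p" passed as a call argument are
   acceptable, while "q = p", pointer arithmetic on "p", or passing "p" itself
   to another function count as direct uses and make this function return
   true.  */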
3911 
3912 static bool
3913 ptr_parm_has_direct_uses (tree parm)
3914 {
3915   imm_use_iterator ui;
3916   gimple *stmt;
3917   tree name = ssa_default_def (cfun, parm);
3918   bool ret = false;
3919 
3920   FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3921     {
3922       int uses_ok = 0;
3923       use_operand_p use_p;
3924 
3925       if (is_gimple_debug (stmt))
3926 	continue;
3927 
3928       /* Valid uses include dereferences on the lhs and the rhs.  */
3929       if (gimple_has_lhs (stmt))
3930 	{
3931 	  tree lhs = gimple_get_lhs (stmt);
3932 	  while (handled_component_p (lhs))
3933 	    lhs = TREE_OPERAND (lhs, 0);
3934 	  if (TREE_CODE (lhs) == MEM_REF
3935 	      && TREE_OPERAND (lhs, 0) == name
3936 	      && integer_zerop (TREE_OPERAND (lhs, 1))
3937 	      && types_compatible_p (TREE_TYPE (lhs),
3938 				     TREE_TYPE (TREE_TYPE (name)))
3939 	      && !TREE_THIS_VOLATILE (lhs))
3940 	    uses_ok++;
3941 	}
3942       if (gimple_assign_single_p (stmt))
3943 	{
3944 	  tree rhs = gimple_assign_rhs1 (stmt);
3945 	  while (handled_component_p (rhs))
3946 	    rhs = TREE_OPERAND (rhs, 0);
3947 	  if (TREE_CODE (rhs) == MEM_REF
3948 	      && TREE_OPERAND (rhs, 0) == name
3949 	      && integer_zerop (TREE_OPERAND (rhs, 1))
3950 	      && types_compatible_p (TREE_TYPE (rhs),
3951 				     TREE_TYPE (TREE_TYPE (name)))
3952 	      && !TREE_THIS_VOLATILE (rhs))
3953 	    uses_ok++;
3954 	}
3955       else if (is_gimple_call (stmt))
3956 	{
3957 	  unsigned i;
3958 	  for (i = 0; i < gimple_call_num_args (stmt); ++i)
3959 	    {
3960 	      tree arg = gimple_call_arg (stmt, i);
3961 	      while (handled_component_p (arg))
3962 		arg = TREE_OPERAND (arg, 0);
3963 	      if (TREE_CODE (arg) == MEM_REF
3964 		  && TREE_OPERAND (arg, 0) == name
3965 		  && integer_zerop (TREE_OPERAND (arg, 1))
3966 		  && types_compatible_p (TREE_TYPE (arg),
3967 					 TREE_TYPE (TREE_TYPE (name)))
3968 		  && !TREE_THIS_VOLATILE (arg))
3969 		uses_ok++;
3970 	    }
3971 	}
3972 
3973       /* If the number of valid uses does not match the number of
3974          uses in this stmt there is an unhandled use.  */
3975       FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3976 	--uses_ok;
3977 
3978       if (uses_ok != 0)
3979 	ret = true;
3980 
3981       if (ret)
3982 	BREAK_FROM_IMM_USE_STMT (ui);
3983     }
3984 
3985   return ret;
3986 }
3987 
3988 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3989    them in candidate_bitmap.  Note that these do not necessarily include
3990    parameters which are unused and thus can be removed.  Return true iff any
3991    such candidate has been found.  */
3992 
3993 static bool
3994 find_param_candidates (void)
3995 {
3996   tree parm;
3997   int count = 0;
3998   bool ret = false;
3999   const char *msg;
4000 
4001   for (parm = DECL_ARGUMENTS (current_function_decl);
4002        parm;
4003        parm = DECL_CHAIN (parm))
4004     {
4005       tree type = TREE_TYPE (parm);
4006       tree_node **slot;
4007 
4008       count++;
4009 
4010       if (TREE_THIS_VOLATILE (parm)
4011 	  || TREE_ADDRESSABLE (parm)
4012 	  || (!is_gimple_reg_type (type) && is_va_list_type (type)))
4013 	continue;
4014 
4015       if (is_unused_scalar_param (parm))
4016 	{
4017 	  ret = true;
4018 	  continue;
4019 	}
4020 
4021       if (POINTER_TYPE_P (type))
4022 	{
4023 	  type = TREE_TYPE (type);
4024 
4025 	  if (TREE_CODE (type) == FUNCTION_TYPE
4026 	      || TYPE_VOLATILE (type)
4027 	      || (TREE_CODE (type) == ARRAY_TYPE
4028 		  && TYPE_NONALIASED_COMPONENT (type))
4029 	      || !is_gimple_reg (parm)
4030 	      || is_va_list_type (type)
4031 	      || ptr_parm_has_direct_uses (parm))
4032 	    continue;
4033 	}
4034       else if (!AGGREGATE_TYPE_P (type))
4035 	continue;
4036 
4037       if (!COMPLETE_TYPE_P (type)
4038 	  || !tree_fits_uhwi_p (TYPE_SIZE (type))
4039           || tree_to_uhwi (TYPE_SIZE (type)) == 0
4040 	  || (AGGREGATE_TYPE_P (type)
4041 	      && type_internals_preclude_sra_p (type, &msg)))
4042 	continue;
4043 
4044       bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
4045       slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
4046       *slot = parm;
4047 
4048       ret = true;
4049       if (dump_file && (dump_flags & TDF_DETAILS))
4050 	{
4051 	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
4052 	  print_generic_expr (dump_file, parm, 0);
4053 	  fprintf (dump_file, "\n");
4054 	}
4055     }
4056 
4057   func_param_count = count;
4058   return ret;
4059 }
4060 
4061 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
4062    maybe_modified. */
4063 
4064 static bool
4065 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
4066 		     void *data)
4067 {
4068   struct access *repr = (struct access *) data;
4069 
4070   repr->grp_maybe_modified = 1;
4071   return true;
4072 }
4073 
4074 /* Analyze which representatives (in linked lists accessible from
4075    REPRESENTATIVES) can be modified by side effects of statements in the
4076    current function.  */
4077 
4078 static void
4079 analyze_modified_params (vec<access_p> representatives)
4080 {
4081   int i;
4082 
4083   for (i = 0; i < func_param_count; i++)
4084     {
4085       struct access *repr;
4086 
4087       for (repr = representatives[i];
4088 	   repr;
4089 	   repr = repr->next_grp)
4090 	{
4091 	  struct access *access;
4092 	  bitmap visited;
4093 	  ao_ref ar;
4094 
4095 	  if (no_accesses_p (repr))
4096 	    continue;
4097 	  if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
4098 	      || repr->grp_maybe_modified)
4099 	    continue;
4100 
4101 	  ao_ref_init (&ar, repr->expr);
4102 	  visited = BITMAP_ALLOC (NULL);
4103 	  for (access = repr; access; access = access->next_sibling)
4104 	    {
4105 	      /* All accesses are read ones, otherwise grp_maybe_modified would
4106 		 be trivially set.  */
4107 	      walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
4108 				  mark_maybe_modified, repr, &visited);
4109 	      if (repr->grp_maybe_modified)
4110 		break;
4111 	    }
4112 	  BITMAP_FREE (visited);
4113 	}
4114     }
4115 }
4116 
4117 /* Propagate distances in bb_dereferences in the direction opposite to the
4118    control flow edges, in each step storing the maximum of the current value
4119    and the minimum of all successors.  These steps are repeated until the table
4120    stabilizes.  Note that BBs which might terminate the function (according to
4121    the final_bbs bitmap) are never updated in this way.  */
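/* For example (illustrative numbers): if every successor of a basic block
   certainly dereferences at least the first 64 bits of pointer parameter 0,
   that block inherits the value 64 for the parameter unless its own entry in
   bb_dereferences is already larger; blocks listed in final_bbs keep their
   original values.  */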
4122 
4123 static void
4124 propagate_dereference_distances (void)
4125 {
4126   basic_block bb;
4127 
4128   auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
4129   queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
4130   FOR_EACH_BB_FN (bb, cfun)
4131     {
4132       queue.quick_push (bb);
4133       bb->aux = bb;
4134     }
4135 
4136   while (!queue.is_empty ())
4137     {
4138       edge_iterator ei;
4139       edge e;
4140       bool change = false;
4141       int i;
4142 
4143       bb = queue.pop ();
4144       bb->aux = NULL;
4145 
4146       if (bitmap_bit_p (final_bbs, bb->index))
4147 	continue;
4148 
4149       for (i = 0; i < func_param_count; i++)
4150 	{
4151 	  int idx = bb->index * func_param_count + i;
4152 	  bool first = true;
4153 	  HOST_WIDE_INT inh = 0;
4154 
4155 	  FOR_EACH_EDGE (e, ei, bb->succs)
4156 	  {
4157 	    int succ_idx = e->dest->index * func_param_count + i;
4158 
4159 	    if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
4160 	      continue;
4161 
4162 	    if (first)
4163 	      {
4164 		first = false;
4165 		inh = bb_dereferences [succ_idx];
4166 	      }
4167 	    else if (bb_dereferences [succ_idx] < inh)
4168 	      inh = bb_dereferences [succ_idx];
4169 	  }
4170 
4171 	  if (!first && bb_dereferences[idx] < inh)
4172 	    {
4173 	      bb_dereferences[idx] = inh;
4174 	      change = true;
4175 	    }
4176 	}
4177 
4178       if (change && !bitmap_bit_p (final_bbs, bb->index))
4179 	FOR_EACH_EDGE (e, ei, bb->preds)
4180 	  {
4181 	    if (e->src->aux)
4182 	      continue;
4183 
4184 	    e->src->aux = e->src;
4185 	    queue.quick_push (e->src);
4186 	  }
4187     }
4188 }
4189 
4190 /* Dump a dereferences TABLE with heading STR to file F.  */
4191 
4192 static void
4193 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
4194 {
4195   basic_block bb;
4196 
4197   fprintf (f, "%s", str);
4198   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
4199 		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
4200     {
4201       fprintf (f, "%4i  %i   ", bb->index, bitmap_bit_p (final_bbs, bb->index));
4202       if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
4203 	{
4204 	  int i;
4205 	  for (i = 0; i < func_param_count; i++)
4206 	    {
4207 	      int idx = bb->index * func_param_count + i;
4208 	      fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
4209 	    }
4210 	}
4211       fprintf (f, "\n");
4212     }
4213   fprintf (f, "\n");
4214 }
4215 
4216 /* Determine which (parts of) parameters passed by reference and not assigned
4217    to are not certainly dereferenced in this function, so that the
4218    dereferencing cannot be safely moved to the caller without potentially
4219    introducing a segfault.  Mark such REPRESENTATIVES as
4220    grp_not_necessarilly_dereferenced.
4221 
4222    The maximum dereferenced "distance," i.e. the offset + size of the accessed
4223    part, is calculated for each pointer parameter rather than a simple boolean,
4224    in order to handle cases when only a fraction of the whole
4225    aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
4226    an example).
4227 
4228    The maximum dereference distances for each pointer parameter and BB are
4229    already stored in bb_dereferences.  This routine simply propagates these
4230    values upwards by propagate_dereference_distances and then compares the
4231    distances of individual parameters in the ENTRY BB to the equivalent
4232    distances of each representative of a (fraction of a) parameter.  */
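/* For instance (illustrative): if the ENTRY block row of bb_dereferences
   shows that parameter 1 is certainly dereferenced only up to bit 64 on every
   path, a representative describing bits 64..127 of that parameter is marked
   grp_not_necessarilly_dereferenced and keeps being passed by reference.  */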
4233 
4234 static void
4235 analyze_caller_dereference_legality (vec<access_p> representatives)
4236 {
4237   int i;
4238 
4239   if (dump_file && (dump_flags & TDF_DETAILS))
4240     dump_dereferences_table (dump_file,
4241 			     "Dereference table before propagation:\n",
4242 			     bb_dereferences);
4243 
4244   propagate_dereference_distances ();
4245 
4246   if (dump_file && (dump_flags & TDF_DETAILS))
4247     dump_dereferences_table (dump_file,
4248 			     "Dereference table after propagation:\n",
4249 			     bb_dereferences);
4250 
4251   for (i = 0; i < func_param_count; i++)
4252     {
4253       struct access *repr = representatives[i];
4254       int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;
4255 
4256       if (!repr || no_accesses_p (repr))
4257 	continue;
4258 
4259       do
4260 	{
4261 	  if ((repr->offset + repr->size) > bb_dereferences[idx])
4262 	    repr->grp_not_necessarilly_dereferenced = 1;
4263 	  repr = repr->next_grp;
4264 	}
4265       while (repr);
4266     }
4267 }
4268 
4269 /* Return the representative access for the parameter declaration PARM if it is
4270    a scalar passed by reference which is not written to and the pointer value
4271    is not used directly.  Thus, if it is legal to dereference it in the caller
4272    and we can rule out modifications through aliases, such a parameter should be
4273    turned into one passed by value.  Return NULL otherwise.  */
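/* A typical case (hypothetical source): in
   "int f (const int *p) { return *p + *p; }" every access to *p is a read,
   so the representative is marked grp_scalar_ptr and the parameter may later
   be turned into a plain "int" passed by value, provided dereferencing in the
   caller is legal and no modification through aliases is possible.  */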
4274 
4275 static struct access *
4276 unmodified_by_ref_scalar_representative (tree parm)
4277 {
4278   int i, access_count;
4279   struct access *repr;
4280   vec<access_p> *access_vec;
4281 
4282   access_vec = get_base_access_vector (parm);
4283   gcc_assert (access_vec);
4284   repr = (*access_vec)[0];
4285   if (repr->write)
4286     return NULL;
4287   repr->group_representative = repr;
4288 
4289   access_count = access_vec->length ();
4290   for (i = 1; i < access_count; i++)
4291     {
4292       struct access *access = (*access_vec)[i];
4293       if (access->write)
4294 	return NULL;
4295       access->group_representative = repr;
4296       access->next_sibling = repr->next_sibling;
4297       repr->next_sibling = access;
4298     }
4299 
4300   repr->grp_read = 1;
4301   repr->grp_scalar_ptr = 1;
4302   return repr;
4303 }
4304 
4305 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
4306    associated with.  REQ_ALIGN is the minimum required alignment.  */
4307 
4308 static bool
4309 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
4310 {
4311   unsigned int exp_align;
4312   /* Avoid issues such as the second simple testcase in PR 42025.  The problem
4313      is an incompatible assignment in a call statement (and possibly even in asm
4314      statements).  This can be relaxed by using a new temporary but only for
4315      non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
4316      intraprocedural SRA we deal with this by keeping the old aggregate around,
4317      something we cannot do in IPA-SRA.)  */
4318   if (access->write
4319       && (is_gimple_call (access->stmt)
4320 	  || gimple_code (access->stmt) == GIMPLE_ASM))
4321     return true;
4322 
4323   exp_align = get_object_alignment (access->expr);
4324   if (exp_align < req_align)
4325     return true;
4326 
4327   return false;
4328 }
4329 
4330 
4331 /* Sort collected accesses for parameter PARM, identify representatives for
4332    each accessed region and link them together.  Return NULL if there are
4333    different but overlapping accesses, return the special pointer value
4334    no_accesses_representant if there are no accesses for this parameter at
4335    all, and return the first representative otherwise.  Set *RO_GRP if there
4336    is a group of accesses with only read (i.e. no write) accesses.  */
4337 
4338 static struct access *
4339 splice_param_accesses (tree parm, bool *ro_grp)
4340 {
4341   int i, j, access_count, group_count;
4342   int agg_size, total_size = 0;
4343   struct access *access, *res, **prev_acc_ptr = &res;
4344   vec<access_p> *access_vec;
4345 
4346   access_vec = get_base_access_vector (parm);
4347   if (!access_vec)
4348     return &no_accesses_representant;
4349   access_count = access_vec->length ();
4350 
4351   access_vec->qsort (compare_access_positions);
4352 
4353   i = 0;
4354   total_size = 0;
4355   group_count = 0;
4356   while (i < access_count)
4357     {
4358       bool modification;
4359       tree a1_alias_type;
4360       access = (*access_vec)[i];
4361       modification = access->write;
4362       if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
4363 	return NULL;
4364       a1_alias_type = reference_alias_ptr_type (access->expr);
4365 
4366       /* Access is about to become group representative unless we find some
4367 	 nasty overlap which would preclude us from breaking this parameter
4368 	 apart. */
4369 
4370       j = i + 1;
4371       while (j < access_count)
4372 	{
4373 	  struct access *ac2 = (*access_vec)[j];
4374 	  if (ac2->offset != access->offset)
4375 	    {
4376 	      /* All or nothing law for parameters. */
4377 	      if (access->offset + access->size > ac2->offset)
4378 		return NULL;
4379 	      else
4380 		break;
4381 	    }
4382 	  else if (ac2->size != access->size)
4383 	    return NULL;
4384 
4385 	  if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
4386 	      || (ac2->type != access->type
4387 		  && (TREE_ADDRESSABLE (ac2->type)
4388 		      || TREE_ADDRESSABLE (access->type)))
4389 	      || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
4390 	    return NULL;
4391 
4392 	  modification |= ac2->write;
4393 	  ac2->group_representative = access;
4394 	  ac2->next_sibling = access->next_sibling;
4395 	  access->next_sibling = ac2;
4396 	  j++;
4397 	}
4398 
4399       group_count++;
4400       access->grp_maybe_modified = modification;
4401       if (!modification)
4402 	*ro_grp = true;
4403       *prev_acc_ptr = access;
4404       prev_acc_ptr = &access->next_grp;
4405       total_size += access->size;
4406       i = j;
4407     }
4408 
4409   if (POINTER_TYPE_P (TREE_TYPE (parm)))
4410     agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4411   else
4412     agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4413   if (total_size >= agg_size)
4414     return NULL;
4415 
4416   gcc_assert (group_count > 0);
4417   return res;
4418 }
4419 
4420 /* Decide whether parameters with representative accesses given by REPR should
4421    be reduced into components.  */
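/* A rough worked example (default parameters assumed): for a by-reference
   parameter whose pointer is 64 bits wide, parm_size_limit is
   PARAM_IPA_SRA_PTR_GROWTH_FACTOR (2 by default) * 64 = 128 bits when not
   optimizing for size, so the reduction is accepted only if the replacement
   components together stay within this limit and below the size of the
   pointed-to aggregate.  */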
4422 
4423 static int
4424 decide_one_param_reduction (struct access *repr)
4425 {
4426   int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
4427   bool by_ref;
4428   tree parm;
4429 
4430   parm = repr->base;
4431   cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
4432   gcc_assert (cur_parm_size > 0);
4433 
4434   if (POINTER_TYPE_P (TREE_TYPE (parm)))
4435     {
4436       by_ref = true;
4437       agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
4438     }
4439   else
4440     {
4441       by_ref = false;
4442       agg_size = cur_parm_size;
4443     }
4444 
4445   if (dump_file)
4446     {
4447       struct access *acc;
4448       fprintf (dump_file, "Evaluating PARAM group sizes for ");
4449       print_generic_expr (dump_file, parm, 0);
4450       fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
4451       for (acc = repr; acc; acc = acc->next_grp)
4452 	dump_access (dump_file, acc, true);
4453     }
4454 
4455   total_size = 0;
4456   new_param_count = 0;
4457 
4458   for (; repr; repr = repr->next_grp)
4459     {
4460       gcc_assert (parm == repr->base);
4461 
4462       /* Taking the address of a non-addressable field is verboten.  */
4463       if (by_ref && repr->non_addressable)
4464 	return 0;
4465 
4466       /* Do not decompose a non-BLKmode param in a way that would
4467          create BLKmode params.  Especially for by-reference passing
4468 	 (thus, pointer-type param) this is hardly worthwhile.  */
4469       if (DECL_MODE (parm) != BLKmode
4470 	  && TYPE_MODE (repr->type) == BLKmode)
4471 	return 0;
4472 
4473       if (!by_ref || (!repr->grp_maybe_modified
4474 		      && !repr->grp_not_necessarilly_dereferenced))
4475 	total_size += repr->size;
4476       else
4477 	total_size += cur_parm_size;
4478 
4479       new_param_count++;
4480     }
4481 
4482   gcc_assert (new_param_count > 0);
4483 
4484   if (optimize_function_for_size_p (cfun))
4485     parm_size_limit = cur_parm_size;
4486   else
4487     parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4488                        * cur_parm_size);
4489 
4490   if (total_size < agg_size
4491       && total_size <= parm_size_limit)
4492     {
4493       if (dump_file)
4494 	fprintf (dump_file, "    ....will be split into %i components\n",
4495 		 new_param_count);
4496       return new_param_count;
4497     }
4498   else
4499     return 0;
4500 }
4501 
4502 /* The order of the following enums is important; we need to do extra work for
4503    UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
4504 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4505 			  MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4506 
4507 /* Identify representatives of all accesses to all candidate parameters for
4508    IPA-SRA.  Return result based on what representatives have been found. */
4509 
4510 static enum ipa_splicing_result
4511 splice_all_param_accesses (vec<access_p> &representatives)
4512 {
4513   enum ipa_splicing_result result = NO_GOOD_ACCESS;
4514   tree parm;
4515   struct access *repr;
4516 
4517   representatives.create (func_param_count);
4518 
4519   for (parm = DECL_ARGUMENTS (current_function_decl);
4520        parm;
4521        parm = DECL_CHAIN (parm))
4522     {
4523       if (is_unused_scalar_param (parm))
4524 	{
4525 	  representatives.quick_push (&no_accesses_representant);
4526 	  if (result == NO_GOOD_ACCESS)
4527 	    result = UNUSED_PARAMS;
4528 	}
4529       else if (POINTER_TYPE_P (TREE_TYPE (parm))
4530 	       && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4531 	       && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4532 	{
4533 	  repr = unmodified_by_ref_scalar_representative (parm);
4534 	  representatives.quick_push (repr);
4535 	  if (repr)
4536 	    result = UNMODIF_BY_REF_ACCESSES;
4537 	}
4538       else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4539 	{
4540 	  bool ro_grp = false;
4541 	  repr = splice_param_accesses (parm, &ro_grp);
4542 	  representatives.quick_push (repr);
4543 
4544 	  if (repr && !no_accesses_p (repr))
4545 	    {
4546 	      if (POINTER_TYPE_P (TREE_TYPE (parm)))
4547 		{
4548 		  if (ro_grp)
4549 		    result = UNMODIF_BY_REF_ACCESSES;
4550 		  else if (result < MODIF_BY_REF_ACCESSES)
4551 		    result = MODIF_BY_REF_ACCESSES;
4552 		}
4553 	      else if (result < BY_VAL_ACCESSES)
4554 		result = BY_VAL_ACCESSES;
4555 	    }
4556 	  else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4557 	    result = UNUSED_PARAMS;
4558 	}
4559       else
4560 	representatives.quick_push (NULL);
4561     }
4562 
4563   if (result == NO_GOOD_ACCESS)
4564     {
4565       representatives.release ();
4566       return NO_GOOD_ACCESS;
4567     }
4568 
4569   return result;
4570 }
4571 
4572 /* Return the index of BASE in PARMS.  Abort if it is not found.  */
4573 
4574 static inline int
4575 get_param_index (tree base, vec<tree> parms)
4576 {
4577   int i, len;
4578 
4579   len = parms.length ();
4580   for (i = 0; i < len; i++)
4581     if (parms[i] == base)
4582       return i;
4583   gcc_unreachable ();
4584 }
4585 
4586 /* Convert the decisions made at the representative level into compact
4587    parameter adjustments.  REPRESENTATIVES are pointers to the first
4588    representatives of each parameter's accesses, and ADJUSTMENTS_COUNT is the
4589    expected final number of adjustments.  */
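/* Sketch of the outcome (hypothetical function): for
   "void g (int unused, struct S *s)" where only s->a and s->b are read and
   never modified, the vector would contain an IPA_PARM_OP_REMOVE entry for
   "unused" and two entries describing new by-value replacements for the two
   accessed fields of *s.  */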
4590 
4591 static ipa_parm_adjustment_vec
4592 turn_representatives_into_adjustments (vec<access_p> representatives,
4593 				       int adjustments_count)
4594 {
4595   vec<tree> parms;
4596   ipa_parm_adjustment_vec adjustments;
4597   tree parm;
4598   int i;
4599 
4600   gcc_assert (adjustments_count > 0);
4601   parms = ipa_get_vector_of_formal_parms (current_function_decl);
4602   adjustments.create (adjustments_count);
4603   parm = DECL_ARGUMENTS (current_function_decl);
4604   for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4605     {
4606       struct access *repr = representatives[i];
4607 
4608       if (!repr || no_accesses_p (repr))
4609 	{
4610 	  struct ipa_parm_adjustment adj;
4611 
4612 	  memset (&adj, 0, sizeof (adj));
4613 	  adj.base_index = get_param_index (parm, parms);
4614 	  adj.base = parm;
4615 	  if (!repr)
4616 	    adj.op = IPA_PARM_OP_COPY;
4617 	  else
4618 	    adj.op = IPA_PARM_OP_REMOVE;
4619 	  adj.arg_prefix = "ISRA";
4620 	  adjustments.quick_push (adj);
4621 	}
4622       else
4623 	{
4624 	  struct ipa_parm_adjustment adj;
4625 	  int index = get_param_index (parm, parms);
4626 
4627 	  for (; repr; repr = repr->next_grp)
4628 	    {
4629 	      memset (&adj, 0, sizeof (adj));
4630 	      gcc_assert (repr->base == parm);
4631 	      adj.base_index = index;
4632 	      adj.base = repr->base;
4633 	      adj.type = repr->type;
4634 	      adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4635 	      adj.offset = repr->offset;
4636 	      adj.reverse = repr->reverse;
4637 	      adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4638 			    && (repr->grp_maybe_modified
4639 				|| repr->grp_not_necessarilly_dereferenced));
4640 	      adj.arg_prefix = "ISRA";
4641 	      adjustments.quick_push (adj);
4642 	    }
4643 	}
4644     }
4645   parms.release ();
4646   return adjustments;
4647 }
4648 
4649 /* Analyze the collected accesses and produce a plan for what to do with the
4650    parameters in the form of adjustments, NULL meaning nothing is to be done.  */
4651 
4652 static ipa_parm_adjustment_vec
4653 analyze_all_param_acesses (void)
4654 {
4655   enum ipa_splicing_result repr_state;
4656   bool proceed = false;
4657   int i, adjustments_count = 0;
4658   vec<access_p> representatives;
4659   ipa_parm_adjustment_vec adjustments;
4660 
4661   repr_state = splice_all_param_accesses (representatives);
4662   if (repr_state == NO_GOOD_ACCESS)
4663     return ipa_parm_adjustment_vec ();
4664 
4665   /* If there are any parameters passed by reference which are not modified
4666      directly, we need to check whether they can be modified indirectly.  */
4667   if (repr_state == UNMODIF_BY_REF_ACCESSES)
4668     {
4669       analyze_caller_dereference_legality (representatives);
4670       analyze_modified_params (representatives);
4671     }
4672 
4673   for (i = 0; i < func_param_count; i++)
4674     {
4675       struct access *repr = representatives[i];
4676 
4677       if (repr && !no_accesses_p (repr))
4678 	{
4679 	  if (repr->grp_scalar_ptr)
4680 	    {
4681 	      adjustments_count++;
4682 	      if (repr->grp_not_necessarilly_dereferenced
4683 		  || repr->grp_maybe_modified)
4684 		representatives[i] = NULL;
4685 	      else
4686 		{
4687 		  proceed = true;
4688 		  sra_stats.scalar_by_ref_to_by_val++;
4689 		}
4690 	    }
4691 	  else
4692 	    {
4693 	      int new_components = decide_one_param_reduction (repr);
4694 
4695 	      if (new_components == 0)
4696 		{
4697 		  representatives[i] = NULL;
4698 		  adjustments_count++;
4699 		}
4700 	      else
4701 		{
4702 		  adjustments_count += new_components;
4703 		  sra_stats.aggregate_params_reduced++;
4704 		  sra_stats.param_reductions_created += new_components;
4705 		  proceed = true;
4706 		}
4707 	    }
4708 	}
4709       else
4710 	{
4711 	  if (no_accesses_p (repr))
4712 	    {
4713 	      proceed = true;
4714 	      sra_stats.deleted_unused_parameters++;
4715 	    }
4716 	  adjustments_count++;
4717 	}
4718     }
4719 
4720   if (!proceed && dump_file)
4721     fprintf (dump_file, "NOT proceeding to change params.\n");
4722 
4723   if (proceed)
4724     adjustments = turn_representatives_into_adjustments (representatives,
4725 							 adjustments_count);
4726   else
4727     adjustments = ipa_parm_adjustment_vec ();
4728 
4729   representatives.release ();
4730   return adjustments;
4731 }
4732 
4733 /* If a parameter replacement identified by ADJ does not yet exist in the form
4734    of a declaration, create and record it; otherwise return the previously
4735    created one.  */
4736 
4737 static tree
4738 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4739 {
4740   tree repl;
4741   if (!adj->new_ssa_base)
4742     {
4743       char *pretty_name = make_fancy_name (adj->base);
4744 
4745       repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4746       DECL_NAME (repl) = get_identifier (pretty_name);
4747       DECL_NAMELESS (repl) = 1;
4748       obstack_free (&name_obstack, pretty_name);
4749 
4750       adj->new_ssa_base = repl;
4751     }
4752   else
4753     repl = adj->new_ssa_base;
4754   return repl;
4755 }
4756 
4757 /* Find the first adjustment for a particular parameter BASE in a vector of
4758    ADJUSTMENTS which is not an IPA_PARM_OP_COPY.  Return NULL if there is no such
4759    adjustment. */
4760 
4761 static struct ipa_parm_adjustment *
4762 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4763 {
4764   int i, len;
4765 
4766   len = adjustments.length ();
4767   for (i = 0; i < len; i++)
4768     {
4769       struct ipa_parm_adjustment *adj;
4770 
4771       adj = &adjustments[i];
4772       if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
4773 	return adj;
4774     }
4775 
4776   return NULL;
4777 }
4778 
4779 /* If OLD_NAME, which is being defined by statement STMT, is an SSA_NAME of a
4780    parameter which is to be removed because its value is not used, create a new
4781    SSA_NAME relating to a replacement VAR_DECL, replace all uses of the
4782    original with it and return it.  If there is no need to re-map, return NULL.
4783    ADJUSTMENTS is a pointer to a vector of IPA-SRA adjustments.  */
4784 
4785 static tree
4786 replace_removed_params_ssa_names (tree old_name, gimple *stmt,
4787 				  ipa_parm_adjustment_vec adjustments)
4788 {
4789   struct ipa_parm_adjustment *adj;
4790   tree decl, repl, new_name;
4791 
4792   if (TREE_CODE (old_name) != SSA_NAME)
4793     return NULL;
4794 
4795   decl = SSA_NAME_VAR (old_name);
4796   if (decl == NULL_TREE
4797       || TREE_CODE (decl) != PARM_DECL)
4798     return NULL;
4799 
4800   adj = get_adjustment_for_base (adjustments, decl);
4801   if (!adj)
4802     return NULL;
4803 
4804   repl = get_replaced_param_substitute (adj);
4805   new_name = make_ssa_name (repl, stmt);
4806   SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_name)
4807     = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (old_name);
4808 
4809   if (dump_file)
4810     {
4811       fprintf (dump_file, "replacing an SSA name of a removed param ");
4812       print_generic_expr (dump_file, old_name, 0);
4813       fprintf (dump_file, " with ");
4814       print_generic_expr (dump_file, new_name, 0);
4815       fprintf (dump_file, "\n");
4816     }
4817 
4818   replace_uses_by (old_name, new_name);
4819   return new_name;
4820 }
4821 
4822 /* If the statement STMT contains any expressions that need to be replaced with a
4823    different one as noted by ADJUSTMENTS, do so.  Handle any potential type
4824    incompatibilities (GSI is used to accommodate conversion statements and must
4825    point to the statement).  Return true iff the statement was modified.  */
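/* For illustration (hypothetical types): when an adjustment turns a
   "struct S" parameter into an "int" one, an assignment whose sides no longer
   have compatible types gets a VIEW_CONVERT_EXPR folded around its RHS and
   gimplified in place, except for CONSTRUCTOR right-hand sides, which are
   rebuilt with the type of the LHS instead (see PR 42714).  */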
4826 
4827 static bool
4828 sra_ipa_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
4829 		       ipa_parm_adjustment_vec adjustments)
4830 {
4831   tree *lhs_p, *rhs_p;
4832   bool any;
4833 
4834   if (!gimple_assign_single_p (stmt))
4835     return false;
4836 
4837   rhs_p = gimple_assign_rhs1_ptr (stmt);
4838   lhs_p = gimple_assign_lhs_ptr (stmt);
4839 
4840   any = ipa_modify_expr (rhs_p, false, adjustments);
4841   any |= ipa_modify_expr (lhs_p, false, adjustments);
4842   if (any)
4843     {
4844       tree new_rhs = NULL_TREE;
4845 
4846       if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4847 	{
4848 	  if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4849 	    {
4850 	      /* V_C_Es of constructors can cause trouble (PR 42714).  */
4851 	      if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4852 		*rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4853 	      else
4854 		*rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4855 					    NULL);
4856 	    }
4857 	  else
4858 	    new_rhs = fold_build1_loc (gimple_location (stmt),
4859 				       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4860 				       *rhs_p);
4861 	}
4862       else if (REFERENCE_CLASS_P (*rhs_p)
4863 	       && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4864 	       && !is_gimple_reg (*lhs_p))
4865 	/* This can happen when an assignment between two single-field
4866 	   structures is turned into an assignment between two pointers to
4867 	   scalars (PR 42237).  */
4868 	new_rhs = *rhs_p;
4869 
4870       if (new_rhs)
4871 	{
4872 	  tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4873 					       true, GSI_SAME_STMT);
4874 
4875 	  gimple_assign_set_rhs_from_tree (gsi, tmp);
4876 	}
4877 
4878       return true;
4879     }
4880 
4881   return false;
4882 }
4883 
4884 /* Traverse the function body and perform all modifications as described in
4885    ADJUSTMENTS.  Return true iff the CFG has been changed.  */
4886 
4887 bool
4888 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4889 {
4890   bool cfg_changed = false;
4891   basic_block bb;
4892 
4893   FOR_EACH_BB_FN (bb, cfun)
4894     {
4895       gimple_stmt_iterator gsi;
4896 
4897       for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4898 	{
4899 	  gphi *phi = as_a <gphi *> (gsi_stmt (gsi));
4900 	  tree new_lhs, old_lhs = gimple_phi_result (phi);
4901 	  new_lhs = replace_removed_params_ssa_names (old_lhs, phi, adjustments);
4902 	  if (new_lhs)
4903 	    {
4904 	      gimple_phi_set_result (phi, new_lhs);
4905 	      release_ssa_name (old_lhs);
4906 	    }
4907 	}
4908 
4909       gsi = gsi_start_bb (bb);
4910       while (!gsi_end_p (gsi))
4911 	{
4912 	  gimple *stmt = gsi_stmt (gsi);
4913 	  bool modified = false;
4914 	  tree *t;
4915 	  unsigned i;
4916 
4917 	  switch (gimple_code (stmt))
4918 	    {
4919 	    case GIMPLE_RETURN:
4920 	      t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
4921 	      if (*t != NULL_TREE)
4922 		modified |= ipa_modify_expr (t, true, adjustments);
4923 	      break;
4924 
4925 	    case GIMPLE_ASSIGN:
4926 	      modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
4927 	      break;
4928 
4929 	    case GIMPLE_CALL:
4930 	      /* Operands must be processed before the lhs.  */
4931 	      for (i = 0; i < gimple_call_num_args (stmt); i++)
4932 		{
4933 		  t = gimple_call_arg_ptr (stmt, i);
4934 		  modified |= ipa_modify_expr (t, true, adjustments);
4935 		}
4936 
4937 	      if (gimple_call_lhs (stmt))
4938 		{
4939 		  t = gimple_call_lhs_ptr (stmt);
4940 		  modified |= ipa_modify_expr (t, false, adjustments);
4941 		}
4942 	      break;
4943 
4944 	    case GIMPLE_ASM:
4945 	      {
4946 		gasm *asm_stmt = as_a <gasm *> (stmt);
4947 		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
4948 		  {
4949 		    t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
4950 		    modified |= ipa_modify_expr (t, true, adjustments);
4951 		  }
4952 		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
4953 		  {
4954 		    t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
4955 		    modified |= ipa_modify_expr (t, false, adjustments);
4956 		  }
4957 	      }
4958 	      break;
4959 
4960 	    default:
4961 	      break;
4962 	    }
4963 
4964 	  def_operand_p defp;
4965 	  ssa_op_iter iter;
4966 	  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
4967 	    {
4968 	      tree old_def = DEF_FROM_PTR (defp);
4969 	      if (tree new_def = replace_removed_params_ssa_names (old_def, stmt,
4970 								   adjustments))
4971 		{
4972 		  SET_DEF (defp, new_def);
4973 		  release_ssa_name (old_def);
4974 		  modified = true;
4975 		}
4976 	    }
4977 
4978 	  if (modified)
4979 	    {
4980 	      update_stmt (stmt);
4981 	      if (maybe_clean_eh_stmt (stmt)
4982 		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4983 		cfg_changed = true;
4984 	    }
4985 	  gsi_next (&gsi);
4986 	}
4987     }
4988 
4989   return cfg_changed;
4990 }
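
/* A hedged sketch of the rewriting done above (assumed example): if a
   parameter "struct S *p" has been replaced by a scalar parameter
   standing for the dereferenced field, a statement such as

       _2 = p_1(D)->f;

   has its right-hand side rewritten by ipa_modify_expr to use the new
   parameter directly, and SSA names still rooted in removed PARM_DECLs
   are renamed by replace_removed_params_ssa_names.  */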
4991 
4992 /* Call gimple_debug_bind_reset_value on all debug statements describing
4993    gimple register parameters that are being removed or replaced.  */
4994 
4995 static void
4996 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4997 {
4998   int i, len;
4999   gimple_stmt_iterator *gsip = NULL, gsi;
5000 
5001   if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
5002     {
5003       gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
5004       gsip = &gsi;
5005     }
5006   len = adjustments.length ();
5007   for (i = 0; i < len; i++)
5008     {
5009       struct ipa_parm_adjustment *adj;
5010       imm_use_iterator ui;
5011       gimple *stmt;
5012       gdebug *def_temp;
5013       tree name, vexpr, copy = NULL_TREE;
5014       use_operand_p use_p;
5015 
5016       adj = &adjustments[i];
5017       if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
5018 	continue;
5019       name = ssa_default_def (cfun, adj->base);
5020       vexpr = NULL;
5021       if (name)
5022 	FOR_EACH_IMM_USE_STMT (stmt, ui, name)
5023 	  {
5024 	    if (gimple_clobber_p (stmt))
5025 	      {
5026 		gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
5027 		unlink_stmt_vdef (stmt);
5028 		gsi_remove (&cgsi, true);
5029 		release_defs (stmt);
5030 		continue;
5031 	      }
5032 	    /* All other users must have been removed by
5033 	       ipa_sra_modify_function_body.  */
5034 	    gcc_assert (is_gimple_debug (stmt));
5035 	    if (vexpr == NULL && gsip != NULL)
5036 	      {
5037 		gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
5038 		vexpr = make_node (DEBUG_EXPR_DECL);
5039 		def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
5040 							   NULL);
5041 		DECL_ARTIFICIAL (vexpr) = 1;
5042 		TREE_TYPE (vexpr) = TREE_TYPE (name);
5043 		SET_DECL_MODE (vexpr, DECL_MODE (adj->base));
5044 		gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
5045 	      }
5046 	    if (vexpr)
5047 	      {
5048 		FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
5049 		  SET_USE (use_p, vexpr);
5050 	      }
5051 	    else
5052 	      gimple_debug_bind_reset_value (stmt);
5053 	    update_stmt (stmt);
5054 	  }
5055       /* Create a VAR_DECL for debug info purposes.  */
5056       if (!DECL_IGNORED_P (adj->base))
5057 	{
5058 	  copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
5059 			     VAR_DECL, DECL_NAME (adj->base),
5060 			     TREE_TYPE (adj->base));
5061 	  if (DECL_PT_UID_SET_P (adj->base))
5062 	    SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
5063 	  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
5064 	  TREE_READONLY (copy) = TREE_READONLY (adj->base);
5065 	  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
5066 	  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
5067 	  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
5068 	  DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
5069 	  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
5070 	  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
5071 	  SET_DECL_RTL (copy, 0);
5072 	  TREE_USED (copy) = 1;
5073 	  DECL_CONTEXT (copy) = current_function_decl;
5074 	  add_local_decl (cfun, copy);
5075 	  DECL_CHAIN (copy) =
5076 	    BLOCK_VARS (DECL_INITIAL (current_function_decl));
5077 	  BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
5078 	}
5079       if (gsip != NULL && copy && target_for_debug_bind (adj->base))
5080 	{
5081 	  gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
5082 	  if (vexpr)
5083 	    def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
5084 	  else
5085 	    def_temp = gimple_build_debug_source_bind (copy, adj->base,
5086 						       NULL);
5087 	  gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
5088 	}
5089     }
5090 }
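
/* Sketch of the debug statements this produces (an assumption about the
   usual GIMPLE dump form, not a literal excerpt): for a removed register
   parameter "n" one would typically see

       # DEBUG D#1 s=> n
       # DEBUG n => D#1

   i.e. a source bind of a fresh DEBUG_EXPR_DECL to the old PARM_DECL,
   existing debug binds retargeted to that DEBUG_EXPR_DECL, and a bind of
   the VAR_DECL copy created above for debug-info purposes.  */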
5091 
5092 /* Return false if all callers have at least as many actual arguments as there
5093    are formal parameters in the current function and their types match;
5094    return true otherwise.  */
5095 
5096 static bool
5097 some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
5098 					  void *data ATTRIBUTE_UNUSED)
5099 {
5100   struct cgraph_edge *cs;
5101   for (cs = node->callers; cs; cs = cs->next_caller)
5102     if (!cs->call_stmt || !callsite_arguments_match_p (cs->call_stmt))
5103       return true;
5104 
5105   return false;
5106 }
5107 
5108 /* Return false if every caller has a VUSE attached to its call statement.  */
5109 
5110 static bool
5111 some_callers_have_no_vuse_p (struct cgraph_node *node,
5112 			     void *data ATTRIBUTE_UNUSED)
5113 {
5114   struct cgraph_edge *cs;
5115   for (cs = node->callers; cs; cs = cs->next_caller)
5116     if (!cs->call_stmt || !gimple_vuse (cs->call_stmt))
5117       return true;
5118 
5119   return false;
5120 }
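
/* Rationale (inferred, not spelled out in this file): replacing a
   by-reference parameter with a by-value one makes callers load the
   value before the call, which requires the call statement to carry
   virtual operands; a call without a VUSE therefore blocks the
   transformation.  */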
5121 
5122 /* Convert all callers of NODE.  */
5123 
5124 static bool
5125 convert_callers_for_node (struct cgraph_node *node,
5126 		          void *data)
5127 {
5128   ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
5129   bitmap recomputed_callers = BITMAP_ALLOC (NULL);
5130   struct cgraph_edge *cs;
5131 
5132   for (cs = node->callers; cs; cs = cs->next_caller)
5133     {
5134       push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
5135 
5136       if (dump_file)
5137 	fprintf (dump_file, "Adjusting call %s/%i -> %s/%i\n",
5138 		 xstrdup_for_dump (cs->caller->name ()),
5139 		 cs->caller->order,
5140 		 xstrdup_for_dump (cs->callee->name ()),
5141 		 cs->callee->order);
5142 
5143       ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
5144 
5145       pop_cfun ();
5146     }
5147 
5148   for (cs = node->callers; cs; cs = cs->next_caller)
5149     if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
5150 	&& gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
5151       compute_inline_parameters (cs->caller, true);
5152   BITMAP_FREE (recomputed_callers);
5153 
5154   return true;
5155 }
5156 
5157 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */
5158 
5159 static void
5160 convert_callers (struct cgraph_node *node, tree old_decl,
5161 		 ipa_parm_adjustment_vec adjustments)
5162 {
5163   basic_block this_block;
5164 
5165   node->call_for_symbol_and_aliases (convert_callers_for_node,
5166 				     &adjustments, false);
5167 
5168   if (!encountered_recursive_call)
5169     return;
5170 
5171   FOR_EACH_BB_FN (this_block, cfun)
5172     {
5173       gimple_stmt_iterator gsi;
5174 
5175       for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
5176         {
5177 	  gcall *stmt;
5178 	  tree call_fndecl;
5179 	  stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
5180 	  if (!stmt)
5181 	    continue;
5182 	  call_fndecl = gimple_call_fndecl (stmt);
5183 	  if (call_fndecl == old_decl)
5184 	    {
5185 	      if (dump_file)
5186 		fprintf (dump_file, "Adjusting recursive call\n");
5187 	      gimple_call_set_fndecl (stmt, node->decl);
5188 	      ipa_modify_call_arguments (NULL, stmt, adjustments);
5189 	    }
5190 	}
5191     }
5192 
5193   return;
5194 }
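
/* A hedged example of the recursive-call handling above (assumed code,
   not from a testcase): for a self-recursive function such as

       static long sum (struct node *p, long acc)
       { return p ? sum (p->next, acc + p->val) : acc; }

   the recursive call inside the freshly cloned body still refers to the
   old decl, so it is redirected to the new node and its arguments are
   adjusted in place, with no cgraph edge passed to
   ipa_modify_call_arguments.  */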
5195 
5196 /* Perform all the modification required in IPA-SRA for NODE to have parameters
5197    as given in ADJUSTMENTS.  Return true iff the CFG has been changed.  */
5198 
5199 static bool
5200 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
5201 {
5202   struct cgraph_node *new_node;
5203   bool cfg_changed;
5204 
5205   cgraph_edge::rebuild_edges ();
5206   free_dominance_info (CDI_DOMINATORS);
5207   pop_cfun ();
5208 
5209   /* This must be done after rebuilding cgraph edges for node above.
5210      Otherwise any recursive calls to node that are recorded in
5211      redirect_callers will be corrupted.  */
5212   vec<cgraph_edge *> redirect_callers = node->collect_callers ();
5213   new_node = node->create_version_clone_with_body (redirect_callers, NULL,
5214 						   NULL, false, NULL, NULL,
5215 						   "isra");
5216   redirect_callers.release ();
5217 
5218   push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
5219   ipa_modify_formal_parameters (current_function_decl, adjustments);
5220   cfg_changed = ipa_sra_modify_function_body (adjustments);
5221   sra_ipa_reset_debug_stmts (adjustments);
5222   convert_callers (new_node, node->decl, adjustments);
5223   new_node->make_local ();
5224   return cfg_changed;
5225 }
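
/* Note on the flow above, stated as general IPA-SRA behaviour rather
   than something derived from this function alone: the "isra" suffix
   passed to create_version_clone_with_body means the transformed body
   typically ends up in a clone named along the lines of foo.isra.N,
   while the callers collected beforehand are redirected to it.  */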
5226 
5227 /* Means of communication between ipa_sra_check_caller and
5228    ipa_sra_preliminary_function_checks.  */
5229 
5230 struct ipa_sra_check_caller_data
5231 {
5232   bool has_callers;
5233   bool bad_arg_alignment;
5234   bool has_thunk;
5235 };
5236 
5237 /* If NODE has a caller, mark that fact in DATA, which is a pointer to
5238    ipa_sra_check_caller_data.  Also check whether all aggregate arguments in
5239    all known calls are unit aligned and, if not, set the appropriate flag in
5240    DATA too.  */
5241 
5242 static bool
5243 ipa_sra_check_caller (struct cgraph_node *node, void *data)
5244 {
5245   if (!node->callers)
5246     return false;
5247 
5248   struct ipa_sra_check_caller_data *iscc;
5249   iscc = (struct ipa_sra_check_caller_data *) data;
5250   iscc->has_callers = true;
5251 
5252   for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
5253     {
5254       if (cs->caller->thunk.thunk_p)
5255 	{
5256 	  iscc->has_thunk = true;
5257 	  return true;
5258 	}
5259       gimple *call_stmt = cs->call_stmt;
5260       unsigned count = gimple_call_num_args (call_stmt);
5261       for (unsigned i = 0; i < count; i++)
5262 	{
5263 	  tree arg = gimple_call_arg (call_stmt, i);
5264 	  if (is_gimple_reg (arg))
5265 	      continue;
5266 
5267 	  tree offset;
5268 	  HOST_WIDE_INT bitsize, bitpos;
5269 	  machine_mode mode;
5270 	  int unsignedp, reversep, volatilep = 0;
5271 	  get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
5272 			       &unsignedp, &reversep, &volatilep);
5273 	  if (bitpos % BITS_PER_UNIT)
5274 	    {
5275 	      iscc->bad_arg_alignment = true;
5276 	      return true;
5277 	    }
5278 	}
5279     }
5280 
5281   return false;
5282 }
5283 
5284 /* Return false if the function is apparently unsuitable for IPA-SRA based on
5285    its attributes, return true otherwise.  NODE is the cgraph node of the
5286    current function.  */
5287 
5288 static bool
5289 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
5290 {
5291   if (!node->can_be_local_p ())
5292     {
5293       if (dump_file)
5294 	fprintf (dump_file, "Function not local to this compilation unit.\n");
5295       return false;
5296     }
5297 
5298   if (!node->local.can_change_signature)
5299     {
5300       if (dump_file)
5301 	fprintf (dump_file, "Function can not change signature.\n");
5302       return false;
5303     }
5304 
5305   if (!tree_versionable_function_p (node->decl))
5306     {
5307       if (dump_file)
5308 	fprintf (dump_file, "Function is not versionable.\n");
5309       return false;
5310     }
5311 
5312   if (!opt_for_fn (node->decl, optimize)
5313       || !opt_for_fn (node->decl, flag_ipa_sra))
5314     {
5315       if (dump_file)
5316 	fprintf (dump_file, "Function not optimized.\n");
5317       return false;
5318     }
5319 
5320   if (DECL_VIRTUAL_P (current_function_decl))
5321     {
5322       if (dump_file)
5323 	fprintf (dump_file, "Function is a virtual method.\n");
5324       return false;
5325     }
5326 
5327   if ((DECL_ONE_ONLY (node->decl) || DECL_EXTERNAL (node->decl))
5328       && inline_summaries->get (node)->size >= MAX_INLINE_INSNS_AUTO)
5329     {
5330       if (dump_file)
5331 	fprintf (dump_file, "Function too big to be made truly local.\n");
5332       return false;
5333     }
5334 
5335   if (cfun->stdarg)
5336     {
5337       if (dump_file)
5338 	fprintf (dump_file, "Function uses stdarg.\n");
5339       return false;
5340     }
5341 
5342   if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
5343     return false;
5344 
5345   if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
5346     {
5347       if (dump_file)
5348 	fprintf (dump_file, "Always inline function will be inlined "
5349 		 "anyway.\n");
5350       return false;
5351     }
5352 
5353   struct ipa_sra_check_caller_data iscc;
5354   memset (&iscc, 0, sizeof(iscc));
5355   node->call_for_symbol_and_aliases (ipa_sra_check_caller, &iscc, true);
5356   if (!iscc.has_callers)
5357     {
5358       if (dump_file)
5359 	fprintf (dump_file,
5360 		 "Function has no callers in this compilation unit.\n");
5361       return false;
5362     }
5363 
5364   if (iscc.bad_arg_alignment)
5365     {
5366       if (dump_file)
5367 	fprintf (dump_file,
5368 		 "A function call has an argument with non-unit alignment.\n");
5369       return false;
5370     }
5371 
5372   if (iscc.has_thunk)
5373     {
5374       if (dump_file)
5375 	fprintf (dump_file,
5376 		 "A caller is a thunk.\n");
5377       return false;
5378     }
5379 
5380   return true;
5381 }
5382 
5383 /* Perform early interprocedural SRA.  */
5384 
5385 static unsigned int
5386 ipa_early_sra (void)
5387 {
5388   struct cgraph_node *node = cgraph_node::get (current_function_decl);
5389   ipa_parm_adjustment_vec adjustments;
5390   int ret = 0;
5391 
5392   if (!ipa_sra_preliminary_function_checks (node))
5393     return 0;
5394 
5395   sra_initialize ();
5396   sra_mode = SRA_MODE_EARLY_IPA;
5397 
5398   if (!find_param_candidates ())
5399     {
5400       if (dump_file)
5401 	fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
5402       goto simple_out;
5403     }
5404 
5405   if (node->call_for_symbol_and_aliases
5406        (some_callers_have_mismatched_arguments_p, NULL, true))
5407     {
5408       if (dump_file)
5409 	fprintf (dump_file, "There are callers with insufficient number of "
5410 		 "arguments or arguments with type mismatches.\n");
5411       goto simple_out;
5412     }
5413 
5414   if (node->call_for_symbol_and_aliases
5415        (some_callers_have_no_vuse_p, NULL, true))
5416     {
5417       if (dump_file)
5418 	fprintf (dump_file, "There are callers with no VUSE attached "
5419 		 "to a call stmt.\n");
5420       goto simple_out;
5421     }
5422 
5423   bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
5424 				 func_param_count
5425 				 * last_basic_block_for_fn (cfun));
5426   final_bbs = BITMAP_ALLOC (NULL);
5427 
5428   scan_function ();
5429   if (encountered_apply_args)
5430     {
5431       if (dump_file)
5432 	fprintf (dump_file, "Function calls __builtin_apply_args().\n");
5433       goto out;
5434     }
5435 
5436   if (encountered_unchangable_recursive_call)
5437     {
5438       if (dump_file)
5439 	fprintf (dump_file, "Function calls itself with insufficient "
5440 		 "number of arguments.\n");
5441       goto out;
5442     }
5443 
5444   adjustments = analyze_all_param_acesses ();
5445   if (!adjustments.exists ())
5446     goto out;
5447   if (dump_file)
5448     ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
5449 
5450   if (modify_function (node, adjustments))
5451     ret = TODO_update_ssa | TODO_cleanup_cfg;
5452   else
5453     ret = TODO_update_ssa;
5454   adjustments.release ();
5455 
5456   statistics_counter_event (cfun, "Unused parameters deleted",
5457 			    sra_stats.deleted_unused_parameters);
5458   statistics_counter_event (cfun, "Scalar parameters converted to by-value",
5459 			    sra_stats.scalar_by_ref_to_by_val);
5460   statistics_counter_event (cfun, "Aggregate parameters broken up",
5461 			    sra_stats.aggregate_params_reduced);
5462   statistics_counter_event (cfun, "Aggregate parameter components created",
5463 			    sra_stats.param_reductions_created);
5464 
5465  out:
5466   BITMAP_FREE (final_bbs);
5467   free (bb_dereferences);
5468  simple_out:
5469   sra_deinitialize ();
5470   return ret;
5471 }
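
/* Illustrative summary of the whole pass (assumed example): for

       struct S { int a; int b; };
       static int f (struct S *p) { return p->a + p->b; }

   with suitable callers, IPA-SRA may clone f so that it receives the two
   int values directly instead of the pointer, rewrite the body and all
   call sites accordingly, and record the work in the statistics counters
   above.  */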
5472 
5473 namespace {
5474 
5475 const pass_data pass_data_early_ipa_sra =
5476 {
5477   GIMPLE_PASS, /* type */
5478   "eipa_sra", /* name */
5479   OPTGROUP_NONE, /* optinfo_flags */
5480   TV_IPA_SRA, /* tv_id */
5481   0, /* properties_required */
5482   0, /* properties_provided */
5483   0, /* properties_destroyed */
5484   0, /* todo_flags_start */
5485   TODO_dump_symtab, /* todo_flags_finish */
5486 };
5487 
5488 class pass_early_ipa_sra : public gimple_opt_pass
5489 {
5490 public:
5491   pass_early_ipa_sra (gcc::context *ctxt)
5492     : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
5493   {}
5494 
5495   /* opt_pass methods: */
5496   virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
5497   virtual unsigned int execute (function *) { return ipa_early_sra (); }
5498 
5499 }; // class pass_early_ipa_sra
5500 
5501 } // anon namespace
5502 
5503 gimple_opt_pass *
5504 make_pass_early_ipa_sra (gcc::context *ctxt)
5505 {
5506   return new pass_early_ipa_sra (ctxt);
5507 }
5508