xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-sra.c (revision d909946ca08dceb44d7d0f22ec9488679695d976)
1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2    references into scalar references, exposing them to the scalar
3    optimizers.
4    Copyright (C) 2008-2013 Free Software Foundation, Inc.
5    Contributed by Martin Jambor <mjambor@suse.cz>
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 /* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
24    twice, once in the early stages of compilation (early SRA) and once in the
25    late stages (late SRA).  The aim of both is to turn references to scalar
26    parts of aggregates into uses of independent scalar variables.
27 
28    The two passes are nearly identical; the only difference is that early SRA
29    does not scalarize unions which are used as the result in a GIMPLE_RETURN
30    statement because together with inlining this can lead to weird type
31    conversions.
32 
33    Both passes operate in four stages:
34 
35    1. The declarations that have properties which make them candidates for
36       scalarization are identified in function find_var_candidates().  The
37       candidates are stored in candidate_bitmap.
38 
39    2. The function body is scanned.  In the process, declarations which are
40       used in a manner that prevents their scalarization are removed from the
41       candidate bitmap.  More importantly, for every access into an aggregate,
42       an access structure (struct access) is created by create_access() and
43       stored in a vector associated with the aggregate.  Among other
44       information, the aggregate declaration, the offset and size of the access
45       and its type are stored in the structure.
46 
47       On a related note, assign_link structures are created for every assign
48       statement between candidate aggregates and attached to the related
49       accesses.
50 
51    3. The vectors of accesses are analyzed.  They are first sorted according to
52       their offset and size and then scanned for partially overlapping accesses
53       (i.e. those which overlap but one is not entirely within another).  Such
54       an access disqualifies the whole aggregate from being scalarized.
55 
56       If there is no such inhibiting overlap, a representative access structure
57       is chosen for every unique combination of offset and size.  Afterwards,
58       the pass builds a set of trees from these structures, in which children
59       of an access are within their parent (in terms of offset and size).
60 
61       Then accesses are propagated whenever possible (i.e. in cases when it
62       does not create a partially overlapping access) across assign_links from
63       the right hand side to the left hand side.
64 
65       Then the set of trees for each declaration is traversed again and those
66       accesses which should be replaced by a scalar are identified.
67 
68    4. The function is traversed again, and for every reference into an
69       aggregate that has some component which is about to be scalarized,
70       statements are amended and new statements are created as necessary.
71       Finally, if a parameter got scalarized, the scalar replacements are
72       initialized with values from respective parameter aggregates.  */
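/* As a small illustrative sketch (not taken from this file or its testsuite),
   intraprocedural SRA turns a function such as

     struct point { int x; int y; };

     static int
     norm1 (struct point p)
     {
       struct point q;
       q = p;
       return q.x + q.y;
     }

   into code that uses two independent scalar replacements for Q (conceptually
   named q$x and q$y), initializes them from the corresponding parts of P and
   rewrites the uses of q.x and q.y to uses of the replacements, so the
   aggregate Q and the aggregate copy disappear altogether.  */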
73 
74 #include "config.h"
75 #include "system.h"
76 #include "coretypes.h"
77 #include "alloc-pool.h"
78 #include "tm.h"
79 #include "tree.h"
80 #include "gimple.h"
81 #include "cgraph.h"
82 #include "tree-flow.h"
83 #include "tree-pass.h"
84 #include "ipa-prop.h"
85 #include "statistics.h"
86 #include "params.h"
87 #include "target.h"
88 #include "flags.h"
89 #include "dbgcnt.h"
90 #include "tree-inline.h"
91 #include "gimple-pretty-print.h"
92 #include "ipa-inline.h"
93 
94 /* Enumeration of all aggregate reductions we can do.  */
95 enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
96 		SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
97 		SRA_MODE_INTRA };     /* late intraprocedural SRA */
98 
99 /* Global variable describing which aggregate reduction we are performing at
100    the moment.  */
101 static enum sra_mode sra_mode;
102 
103 struct assign_link;
104 
105 /* ACCESS represents each access to an aggregate variable (as a whole or a
106    part).  It can also represent a group of accesses that refer to exactly the
107    same fragment of an aggregate (i.e. those that have exactly the same offset
108    and size).  Such representatives for a single aggregate, once determined,
109    are linked in a linked list and have the group fields set.
110 
111    Moreover, when doing intraprocedural SRA, a tree is built from those
112    representatives (by the means of first_child and next_sibling pointers), in
113    which all items in a subtree are "within" the root, i.e. their offset is
114    greater or equal to offset of the root and offset+size is smaller or equal
115    to offset+size of the root.  Children of an access are sorted by offset.
116 
117    Note that accesses to parts of vector and complex number types are always
118    represented by an access to the whole complex number or vector.  It is the
119    duty of the modifying functions to replace them appropriately.  */
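/* For illustration only (the exact numbers depend on the target's type
   layout): given

     struct inner { float x; float y; };
     struct outer { int i; struct inner in; };

   and accesses to o.i, o.in and o.in.x of a variable O, the group
   representatives would form a tree roughly like

     o.i       (offset 0,  size 32)
     o.in      (offset 32, size 64)
       o.in.x  (offset 32, size 32)

   where o.in.x is a child of o.in because it lies entirely within it, while
   o.i and o.in are siblings at the top level.  */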
120 
121 struct access
122 {
123   /* Values returned by `get_ref_base_and_extent' for each component reference.
124      If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0',
125      `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
126   HOST_WIDE_INT offset;
127   HOST_WIDE_INT size;
128   tree base;
129 
130   /* Expression.  It is context dependent so do not use it to create new
131      expressions to access the original aggregate.  See PR 42154 for a
132      testcase.  */
133   tree expr;
134   /* Type.  */
135   tree type;
136 
137   /* The statement this access belongs to.  */
138   gimple stmt;
139 
140   /* Next group representative for this aggregate. */
141   struct access *next_grp;
142 
143   /* Pointer to the group representative.  Pointer to itself if the struct is
144      the representative.  */
145   struct access *group_representative;
146 
147   /* If this access has any children (in terms of the definition above), this
148      points to the first one.  */
149   struct access *first_child;
150 
151   /* In intraprocedural SRA, pointer to the next sibling in the access tree as
152      described above.  In IPA-SRA this is a pointer to the next access
153      belonging to the same group (having the same representative).  */
154   struct access *next_sibling;
155 
156   /* Pointers to the first and last element in the linked list of assign
157      links.  */
158   struct assign_link *first_link, *last_link;
159 
160   /* Pointer to the next access in the work queue.  */
161   struct access *next_queued;
162 
163   /* Replacement variable for this access "region."  Never to be accessed
164      directly, always only by the means of get_access_replacement() and only
165      when grp_to_be_replaced flag is set.  */
166   tree replacement_decl;
167 
168   /* Is this particular access a write access?  */
169   unsigned write : 1;
170 
171   /* Is this access an access to a non-addressable field? */
172   unsigned non_addressable : 1;
173 
174   /* Is this access currently in the work queue?  */
175   unsigned grp_queued : 1;
176 
177   /* Does this group contain a write access?  This flag is propagated down the
178      access tree.  */
179   unsigned grp_write : 1;
180 
181   /* Does this group contain a read access?  This flag is propagated down the
182      access tree.  */
183   unsigned grp_read : 1;
184 
185   /* Does this group contain a read access that comes from an assignment
186      statement?  This flag is propagated down the access tree.  */
187   unsigned grp_assignment_read : 1;
188 
189   /* Does this group contain a write access that comes from an assignment
190      statement?  This flag is propagated down the access tree.  */
191   unsigned grp_assignment_write : 1;
192 
193   /* Does this group contain a read access through a scalar type?  This flag is
194      not propagated in the access tree in any direction.  */
195   unsigned grp_scalar_read : 1;
196 
197   /* Does this group contain a write access through a scalar type?  This flag
198      is not propagated in the access tree in any direction.  */
199   unsigned grp_scalar_write : 1;
200 
201   /* Is this access an artificial one created to scalarize some record
202      entirely? */
203   unsigned grp_total_scalarization : 1;
204 
205   /* Other passes of the analysis use this bit to make function
206      analyze_access_subtree create scalar replacements for this group if
207      possible.  */
208   unsigned grp_hint : 1;
209 
210   /* Is the subtree rooted in this access fully covered by scalar
211      replacements?  */
212   unsigned grp_covered : 1;
213 
214   /* If set to true, this access and all below it in an access tree must not be
215      scalarized.  */
216   unsigned grp_unscalarizable_region : 1;
217 
218   /* Whether data have been written to parts of the aggregate covered by this
219      access which are not to be scalarized.  This flag is propagated up in the
220      access tree.  */
221   unsigned grp_unscalarized_data : 1;
222 
223   /* Does this access and/or group contain a write access through a
224      BIT_FIELD_REF?  */
225   unsigned grp_partial_lhs : 1;
226 
227   /* Set when a scalar replacement should be created for this variable.  */
228   unsigned grp_to_be_replaced : 1;
229 
230   /* Set when we want a replacement for the sole purpose of having it in
231      generated debug statements.  */
232   unsigned grp_to_be_debug_replaced : 1;
233 
234   /* Should TREE_NO_WARNING of a replacement be set?  */
235   unsigned grp_no_warning : 1;
236 
237   /* Is it possible that the group refers to data which might be (directly or
238      otherwise) modified?  */
239   unsigned grp_maybe_modified : 1;
240 
241   /* Set when this is a representative of a pointer to scalar (i.e. by
242      reference) parameter which we consider for turning into a plain scalar
243      (i.e. a by value parameter).  */
244   unsigned grp_scalar_ptr : 1;
245 
246   /* Set when we discover that this pointer is not safe to dereference in the
247      caller.  */
248   unsigned grp_not_necessarilly_dereferenced : 1;
249 };
250 
251 typedef struct access *access_p;
252 
253 
254 /* Alloc pool for allocating access structures.  */
255 static alloc_pool access_pool;
256 
257 /* A structure linking lhs and rhs accesses from an aggregate assignment.  They
258    are used to propagate subaccesses from rhs to lhs as long as they don't
259    conflict with what is already there.  */
260 struct assign_link
261 {
262   struct access *lacc, *racc;
263   struct assign_link *next;
264 };
265 
266 /* Alloc pool for allocating assign link structures.  */
267 static alloc_pool link_pool;
268 
269 /* Base (tree) -> Vector (vec<access_p> *) map.  */
270 static struct pointer_map_t *base_access_vec;
271 
272 /* Set of candidates.  */
273 static bitmap candidate_bitmap;
274 static htab_t candidates;
275 
276 /* For a candidate UID, return the candidate's decl.  */
277 
278 static inline tree
279 candidate (unsigned uid)
280 {
281   struct tree_decl_minimal t;
282   t.uid = uid;
283   return (tree) htab_find_with_hash (candidates, &t, uid);
284 }
285 
286 /* Bitmap of candidates which we should try to entirely scalarize away and
287    those which cannot be (because they are and need to be used as a whole).  */
288 static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
289 
290 /* Obstack for creation of fancy names.  */
291 static struct obstack name_obstack;
292 
293 /* Head of a linked list of accesses that need to have their subaccesses
294    propagated to their assignment counterparts.  */
295 static struct access *work_queue_head;
296 
297 /* Number of parameters of the analyzed function when doing early ipa SRA.  */
298 static int func_param_count;
299 
300 /* scan_function sets the following to true if it encounters a call to
301    __builtin_apply_args.  */
302 static bool encountered_apply_args;
303 
304 /* Set by scan_function when it finds a recursive call.  */
305 static bool encountered_recursive_call;
306 
307 /* Set by scan_function when it finds a recursive call with fewer actual
308    arguments than formal parameters.  */
309 static bool encountered_unchangable_recursive_call;
310 
311 /* This is a table in which for each basic block and parameter there is a
312    distance (offset + size) in that parameter which is dereferenced and
313    accessed in that BB.  */
314 static HOST_WIDE_INT *bb_dereferences;
315 /* Bitmap of BBs that can cause the function to "stop" progressing by
316    returning, throwing externally, looping infinitely or calling a function
317    which might abort, etc.  */
318 static bitmap final_bbs;
319 
320 /* Representative of no accesses at all. */
321 static struct access  no_accesses_representant;
322 
323 /* Predicate to test the special value.  */
324 
325 static inline bool
326 no_accesses_p (struct access *access)
327 {
328   return access == &no_accesses_representant;
329 }
330 
331 /* Statistics about the transformations performed by the SRA passes.  The
332    structure is cleared by sra_initialize at the start of each pass and the
333    counters are updated as the pass progresses.  */
334 
335 static struct
336 {
337   /* Number of processed aggregates is readily available in
338      analyze_all_variable_accesses and so is not stored here.  */
339 
340   /* Number of created scalar replacements.  */
341   int replacements;
342 
343   /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
344      expression.  */
345   int exprs;
346 
347   /* Number of statements created by generate_subtree_copies.  */
348   int subtree_copies;
349 
350   /* Number of statements created by load_assign_lhs_subreplacements.  */
351   int subreplacements;
352 
353   /* Number of times sra_modify_assign has deleted a statement.  */
354   int deleted;
355 
356   /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
357      RHS separately due to type conversions or nonexistent matching
358      references.  */
359   int separate_lhs_rhs_handling;
360 
361   /* Number of parameters that were removed because they were unused.  */
362   int deleted_unused_parameters;
363 
364   /* Number of scalars passed as parameters by reference that have been
365      converted to be passed by value.  */
366   int scalar_by_ref_to_by_val;
367 
368   /* Number of aggregate parameters that were replaced by one or more of their
369      components.  */
370   int aggregate_params_reduced;
371 
372   /* Number of components created when splitting aggregate parameters.  */
373   int param_reductions_created;
374 } sra_stats;
375 
/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

376 static void
377 dump_access (FILE *f, struct access *access, bool grp)
378 {
379   fprintf (f, "access { ");
380   fprintf (f, "base = (%d)'", DECL_UID (access->base));
381   print_generic_expr (f, access->base, 0);
382   fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
383   fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
384   fprintf (f, ", expr = ");
385   print_generic_expr (f, access->expr, 0);
386   fprintf (f, ", type = ");
387   print_generic_expr (f, access->type, 0);
388   if (grp)
389     fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
390 	     "grp_assignment_write = %d, grp_scalar_read = %d, "
391 	     "grp_scalar_write = %d, grp_total_scalarization = %d, "
392 	     "grp_hint = %d, grp_covered = %d, "
393 	     "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
394 	     "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
395 	     "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
396 	     "grp_not_necessarilly_dereferenced = %d\n",
397 	     access->grp_read, access->grp_write, access->grp_assignment_read,
398 	     access->grp_assignment_write, access->grp_scalar_read,
399 	     access->grp_scalar_write, access->grp_total_scalarization,
400 	     access->grp_hint, access->grp_covered,
401 	     access->grp_unscalarizable_region, access->grp_unscalarized_data,
402 	     access->grp_partial_lhs, access->grp_to_be_replaced,
403 	     access->grp_to_be_debug_replaced, access->grp_maybe_modified,
404 	     access->grp_not_necessarilly_dereferenced);
405   else
406     fprintf (f, ", write = %d, grp_total_scalarization = %d, "
407 	     "grp_partial_lhs = %d\n",
408 	     access->write, access->grp_total_scalarization,
409 	     access->grp_partial_lhs);
410 }
411 
412 /* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */
413 
414 static void
415 dump_access_tree_1 (FILE *f, struct access *access, int level)
416 {
417   do
418     {
419       int i;
420 
421       for (i = 0; i < level; i++)
422 	fputs ("* ", f);
423 
424       dump_access (f, access, true);
425 
426       if (access->first_child)
427 	dump_access_tree_1 (f, access->first_child, level + 1);
428 
429       access = access->next_sibling;
430     }
431   while (access);
432 }
433 
434 /* Dump all access trees for a variable, given the pointer to the first root in
435    ACCESS.  */
436 
437 static void
438 dump_access_tree (FILE *f, struct access *access)
439 {
440   for (; access; access = access->next_grp)
441     dump_access_tree_1 (f, access, 0);
442 }
443 
444 /* Return true iff ACC is non-NULL and has subaccesses.  */
445 
446 static inline bool
447 access_has_children_p (struct access *acc)
448 {
449   return acc && acc->first_child;
450 }
451 
452 /* Return true iff ACC is (partly) covered by at least one replacement.  */
453 
454 static bool
455 access_has_replacements_p (struct access *acc)
456 {
457   struct access *child;
458   if (acc->grp_to_be_replaced)
459     return true;
460   for (child = acc->first_child; child; child = child->next_sibling)
461     if (access_has_replacements_p (child))
462       return true;
463   return false;
464 }
465 
466 /* Return a vector of pointers to accesses for the variable given in BASE or
467    NULL if there is none.  */
468 
469 static vec<access_p> *
470 get_base_access_vector (tree base)
471 {
472   void **slot;
473 
474   slot = pointer_map_contains (base_access_vec, base);
475   if (!slot)
476     return NULL;
477   else
478     return *(vec<access_p> **) slot;
479 }
480 
481 /* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
482    in ACCESS.  Return NULL if it cannot be found.  */
483 
484 static struct access *
485 find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
486 			HOST_WIDE_INT size)
487 {
488   while (access && (access->offset != offset || access->size != size))
489     {
490       struct access *child = access->first_child;
491 
492       while (child && (child->offset + child->size <= offset))
493 	child = child->next_sibling;
494       access = child;
495     }
496 
497   return access;
498 }
499 
500 /* Return the first group representative for DECL or NULL if none exists.  */
501 
502 static struct access *
503 get_first_repr_for_decl (tree base)
504 {
505   vec<access_p> *access_vec;
506 
507   access_vec = get_base_access_vector (base);
508   if (!access_vec)
509     return NULL;
510 
511   return (*access_vec)[0];
512 }
513 
514 /* Find an access representative for the variable BASE and given OFFSET and
515    SIZE.  Requires that access trees have already been built.  Return NULL if
516    it cannot be found.  */
517 
518 static struct access *
519 get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
520 				 HOST_WIDE_INT size)
521 {
522   struct access *access;
523 
524   access = get_first_repr_for_decl (base);
525   while (access && (access->offset + access->size <= offset))
526     access = access->next_grp;
527   if (!access)
528     return NULL;
529 
530   return find_access_in_subtree (access, offset, size);
531 }
532 
533 /* Add LINK to the linked list of assign links of RACC.  */
534 static void
535 add_link_to_rhs (struct access *racc, struct assign_link *link)
536 {
537   gcc_assert (link->racc == racc);
538 
539   if (!racc->first_link)
540     {
541       gcc_assert (!racc->last_link);
542       racc->first_link = link;
543     }
544   else
545     racc->last_link->next = link;
546 
547   racc->last_link = link;
548   link->next = NULL;
549 }
550 
551 /* Move all link structures in their linked list in OLD_RACC to the linked list
552    in NEW_RACC.  */
553 static void
554 relink_to_new_repr (struct access *new_racc, struct access *old_racc)
555 {
556   if (!old_racc->first_link)
557     {
558       gcc_assert (!old_racc->last_link);
559       return;
560     }
561 
562   if (new_racc->first_link)
563     {
564       gcc_assert (!new_racc->last_link->next);
565       gcc_assert (!old_racc->last_link || !old_racc->last_link->next);
566 
567       new_racc->last_link->next = old_racc->first_link;
568       new_racc->last_link = old_racc->last_link;
569     }
570   else
571     {
572       gcc_assert (!new_racc->last_link);
573 
574       new_racc->first_link = old_racc->first_link;
575       new_racc->last_link = old_racc->last_link;
576     }
577   old_racc->first_link = old_racc->last_link = NULL;
578 }
579 
580 /* Add ACCESS to the work queue (which is actually a stack).  */
581 
582 static void
583 add_access_to_work_queue (struct access *access)
584 {
585   if (!access->grp_queued)
586     {
587       gcc_assert (!access->next_queued);
588       access->next_queued = work_queue_head;
589       access->grp_queued = 1;
590       work_queue_head = access;
591     }
592 }
593 
594 /* Pop an access from the work queue, and return it, assuming there is one.  */
595 
596 static struct access *
597 pop_access_from_work_queue (void)
598 {
599   struct access *access = work_queue_head;
600 
601   work_queue_head = access->next_queued;
602   access->next_queued = NULL;
603   access->grp_queued = 0;
604   return access;
605 }
606 
607 
608 /* Allocate necessary structures.  */
609 
610 static void
611 sra_initialize (void)
612 {
613   candidate_bitmap = BITMAP_ALLOC (NULL);
614   candidates = htab_create (vec_safe_length (cfun->local_decls) / 2,
615 			    uid_decl_map_hash, uid_decl_map_eq, NULL);
616   should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
617   cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
618   gcc_obstack_init (&name_obstack);
619   access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
620   link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
621   base_access_vec = pointer_map_create ();
622   memset (&sra_stats, 0, sizeof (sra_stats));
623   encountered_apply_args = false;
624   encountered_recursive_call = false;
625   encountered_unchangable_recursive_call = false;
626 }
627 
628 /* Hook fed to pointer_map_traverse, deallocate stored vectors.  */
629 
630 static bool
631 delete_base_accesses (const void *key ATTRIBUTE_UNUSED, void **value,
632 		     void *data ATTRIBUTE_UNUSED)
633 {
634   vec<access_p> *access_vec = (vec<access_p> *) *value;
635   vec_free (access_vec);
636   return true;
637 }
638 
639 /* Deallocate all general structures.  */
640 
641 static void
642 sra_deinitialize (void)
643 {
644   BITMAP_FREE (candidate_bitmap);
645   htab_delete (candidates);
646   BITMAP_FREE (should_scalarize_away_bitmap);
647   BITMAP_FREE (cannot_scalarize_away_bitmap);
648   free_alloc_pool (access_pool);
649   free_alloc_pool (link_pool);
650   obstack_free (&name_obstack, NULL);
651 
652   pointer_map_traverse (base_access_vec, delete_base_accesses, NULL);
653   pointer_map_destroy (base_access_vec);
654 }
655 
656 /* Remove DECL from candidates for SRA and write REASON to the dump file if
657    there is one.  */
658 static void
659 disqualify_candidate (tree decl, const char *reason)
660 {
661   if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
662     htab_clear_slot (candidates,
663 		     htab_find_slot_with_hash (candidates, decl,
664 					       DECL_UID (decl), NO_INSERT));
665 
666   if (dump_file && (dump_flags & TDF_DETAILS))
667     {
668       fprintf (dump_file, "! Disqualifying ");
669       print_generic_expr (dump_file, decl, 0);
670       fprintf (dump_file, " - %s\n", reason);
671     }
672 }
673 
674 /* Return true iff the type contains a field or an element which does not allow
675    scalarization.  */
676 
677 static bool
678 type_internals_preclude_sra_p (tree type, const char **msg)
679 {
680   tree fld;
681   tree et;
682 
683   switch (TREE_CODE (type))
684     {
685     case RECORD_TYPE:
686     case UNION_TYPE:
687     case QUAL_UNION_TYPE:
688       for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
689 	if (TREE_CODE (fld) == FIELD_DECL)
690 	  {
691 	    tree ft = TREE_TYPE (fld);
692 
693 	    if (TREE_THIS_VOLATILE (fld))
694 	      {
695 		*msg = "volatile structure field";
696 		return true;
697 	      }
698 	    if (!DECL_FIELD_OFFSET (fld))
699 	      {
700 		*msg = "no structure field offset";
701 		return true;
702 	      }
703 	    if (!DECL_SIZE (fld))
704 	      {
705 		*msg = "zero structure field size";
706 	        return true;
707 	      }
708 	    if (!host_integerp (DECL_FIELD_OFFSET (fld), 1))
709 	      {
710 		*msg = "structure field offset not fixed";
711 		return true;
712 	      }
713 	    if (!host_integerp (DECL_SIZE (fld), 1))
714 	      {
715 	        *msg = "structure field size not fixed";
716 		return true;
717 	      }
718 	    if (!host_integerp (bit_position (fld), 0))
719 	      {
720 	        *msg = "structure field size too big";
721 		return true;
722 	      }
723 	    if (AGGREGATE_TYPE_P (ft)
724 		    && int_bit_position (fld) % BITS_PER_UNIT != 0)
725 	      {
726 		*msg = "structure field is bit field";
727 	        return true;
728 	      }
729 
730 	    if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
731 	      return true;
732 	  }
733 
734       return false;
735 
736     case ARRAY_TYPE:
737       et = TREE_TYPE (type);
738 
739       if (TYPE_VOLATILE (et))
740 	{
741 	  *msg = "element type is volatile";
742 	  return true;
743 	}
744 
745       if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
746 	return true;
747 
748       return false;
749 
750     default:
751       return false;
752     }
753 }
754 
755 /* If T is an SSA_NAME, return NULL if it is not a default def or return its
756    base variable if it is.  Return T if it is not an SSA_NAME.  */
757 
758 static tree
759 get_ssa_base_param (tree t)
760 {
761   if (TREE_CODE (t) == SSA_NAME)
762     {
763       if (SSA_NAME_IS_DEFAULT_DEF (t))
764 	return SSA_NAME_VAR (t);
765       else
766 	return NULL_TREE;
767     }
768   return t;
769 }
770 
771 /* Mark a dereference of BASE of distance DIST in the basic block that STMT
772    belongs to, unless the BB has already been marked as potentially
773    final.  */
774 
775 static void
776 mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
777 {
778   basic_block bb = gimple_bb (stmt);
779   int idx, parm_index = 0;
780   tree parm;
781 
782   if (bitmap_bit_p (final_bbs, bb->index))
783     return;
784 
785   for (parm = DECL_ARGUMENTS (current_function_decl);
786        parm && parm != base;
787        parm = DECL_CHAIN (parm))
788     parm_index++;
789 
790   gcc_assert (parm_index < func_param_count);
791 
792   idx = bb->index * func_param_count + parm_index;
793   if (bb_dereferences[idx] < dist)
794     bb_dereferences[idx] = dist;
795 }
796 
797 /* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
798    the three fields.  Also add it to the vector of accesses corresponding to
799    the base.  Finally, return the new access.  */
800 
801 static struct access *
802 create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
803 {
804   vec<access_p> *v;
805   struct access *access;
806   void **slot;
807 
808   access = (struct access *) pool_alloc (access_pool);
809   memset (access, 0, sizeof (struct access));
810   access->base = base;
811   access->offset = offset;
812   access->size = size;
813 
814   slot = pointer_map_contains (base_access_vec, base);
815   if (slot)
816     v = (vec<access_p> *) *slot;
817   else
818     vec_alloc (v, 32);
819 
820   v->safe_push (access);
821 
822   *((vec<access_p> **)
823 	pointer_map_insert (base_access_vec, base)) = v;
824 
825   return access;
826 }
827 
828 /* Create and insert an access for EXPR.  Return the created access, or NULL
829    if it is not possible.  */
830 
831 static struct access *
832 create_access (tree expr, gimple stmt, bool write)
833 {
834   struct access *access;
835   HOST_WIDE_INT offset, size, max_size;
836   tree base = expr;
837   bool ptr, unscalarizable_region = false;
838 
839   base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
840 
841   if (sra_mode == SRA_MODE_EARLY_IPA
842       && TREE_CODE (base) == MEM_REF)
843     {
844       base = get_ssa_base_param (TREE_OPERAND (base, 0));
845       if (!base)
846 	return NULL;
847       ptr = true;
848     }
849   else
850     ptr = false;
851 
852   if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
853     return NULL;
854 
855   if (sra_mode == SRA_MODE_EARLY_IPA)
856     {
857       if (size < 0 || size != max_size)
858 	{
859 	  disqualify_candidate (base, "Encountered a variable sized access.");
860 	  return NULL;
861 	}
862       if (TREE_CODE (expr) == COMPONENT_REF
863 	  && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
864 	{
865 	  disqualify_candidate (base, "Encountered a bit-field access.");
866 	  return NULL;
867 	}
868       gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);
869 
870       if (ptr)
871 	mark_parm_dereference (base, offset + size, stmt);
872     }
873   else
874     {
875       if (size != max_size)
876 	{
877 	  size = max_size;
878 	  unscalarizable_region = true;
879 	}
880       if (size < 0)
881 	{
882 	  disqualify_candidate (base, "Encountered an unconstrained access.");
883 	  return NULL;
884 	}
885     }
886 
887   access = create_access_1 (base, offset, size);
888   access->expr = expr;
889   access->type = TREE_TYPE (expr);
890   access->write = write;
891   access->grp_unscalarizable_region = unscalarizable_region;
892   access->stmt = stmt;
893 
894   if (TREE_CODE (expr) == COMPONENT_REF
895       && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
896     access->non_addressable = 1;
897 
898   return access;
899 }
900 
901 
902 /* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
903    register types or (recursively) records with only these two kinds of fields.
904    It also returns false if any of these records contains a bit-field.  */
905 
906 static bool
907 type_consists_of_records_p (tree type)
908 {
909   tree fld;
910 
911   if (TREE_CODE (type) != RECORD_TYPE)
912     return false;
913 
914   for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
915     if (TREE_CODE (fld) == FIELD_DECL)
916       {
917 	tree ft = TREE_TYPE (fld);
918 
919 	if (DECL_BIT_FIELD (fld))
920 	  return false;
921 
922 	if (!is_gimple_reg_type (ft)
923 	    && !type_consists_of_records_p (ft))
924 	  return false;
925       }
926 
927   return true;
928 }
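/* For example (illustration only, assuming the usual layout rules):

     struct ok  { int i; struct { float x; float y; } p; };
     struct bad { int i : 3; int j; };
     struct arr { int a[4]; };

   only struct ok consists purely of gimple register types and nested records
   of them; struct bad is rejected because of the bit-field and struct arr
   because an array is neither a register type nor a record.  */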
929 
930 /* Create total_scalarization accesses for all scalar type fields in DECL,
931    which must be of a RECORD_TYPE conforming to type_consists_of_records_p.
932    BASE must be the top-most VAR_DECL representing the variable, OFFSET must
933    be the offset of DECL within BASE.  REF must be the memory reference
934    expression for the given decl.  */
935 
936 static void
937 completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
938 			     tree ref)
939 {
940   tree fld, decl_type = TREE_TYPE (decl);
941 
942   for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
943     if (TREE_CODE (fld) == FIELD_DECL)
944       {
945 	HOST_WIDE_INT pos = offset + int_bit_position (fld);
946 	tree ft = TREE_TYPE (fld);
947 	tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
948 			    NULL_TREE);
949 
950 	if (is_gimple_reg_type (ft))
951 	  {
952 	    struct access *access;
953 	    HOST_WIDE_INT size;
954 
955 	    size = tree_low_cst (DECL_SIZE (fld), 1);
956 	    access = create_access_1 (base, pos, size);
957 	    access->expr = nref;
958 	    access->type = ft;
959 	    access->grp_total_scalarization = 1;
960 	    /* Accesses for intraprocedural SRA can have their stmt NULL.  */
961 	  }
962 	else
963 	  completely_scalarize_record (base, fld, pos, nref);
964       }
965 }
966 
967 /* Create total_scalarization accesses for all scalar type fields in VAR and
968    for VAR as a whole.  VAR must be of a RECORD_TYPE conforming to
969    type_consists_of_records_p.  */
970 
971 static void
972 completely_scalarize_var (tree var)
973 {
974   HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (var), 1);
975   struct access *access;
976 
977   access = create_access_1 (var, 0, size);
978   access->expr = var;
979   access->type = TREE_TYPE (var);
980   access->grp_total_scalarization = 1;
981 
982   completely_scalarize_record (var, var, 0, var);
983 }
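/* Illustration only: for a variable V of type

     struct { int i; float f; };

   (32-bit int and float, no padding), the two functions above create a
   total_scalarization access covering the whole variable (offset 0, size 64)
   plus one access per scalar field: (0, 32) for V.i and (32, 32) for V.f.  */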
984 
985 /* Search the given tree for a declaration by skipping handled components and
986    exclude it from the candidates.  */
987 
988 static void
989 disqualify_base_of_expr (tree t, const char *reason)
990 {
991   t = get_base_address (t);
992   if (sra_mode == SRA_MODE_EARLY_IPA
993       && TREE_CODE (t) == MEM_REF)
994     t = get_ssa_base_param (TREE_OPERAND (t, 0));
995 
996   if (t && DECL_P (t))
997     disqualify_candidate (t, reason);
998 }
999 
1000 /* Scan expression EXPR and create access structures for all accesses to
1001    candidates for scalarization.  Return the created access or NULL if none is
1002    created.  */
1003 
1004 static struct access *
1005 build_access_from_expr_1 (tree expr, gimple stmt, bool write)
1006 {
1007   struct access *ret = NULL;
1008   bool partial_ref;
1009 
1010   if (TREE_CODE (expr) == BIT_FIELD_REF
1011       || TREE_CODE (expr) == IMAGPART_EXPR
1012       || TREE_CODE (expr) == REALPART_EXPR)
1013     {
1014       expr = TREE_OPERAND (expr, 0);
1015       partial_ref = true;
1016     }
1017   else
1018     partial_ref = false;
1019 
1020   /* We need to dive through V_C_Es in order to get the size of the operand
1021      and not of the result type.  Ada produces such statements.  We are also
1022      capable of handling the topmost V_C_E but not any of those buried in other
1023      handled components.  */
1024   if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
1025     expr = TREE_OPERAND (expr, 0);
1026 
1027   if (contains_view_convert_expr_p (expr))
1028     {
1029       disqualify_base_of_expr (expr, "V_C_E under a different handled "
1030 			       "component.");
1031       return NULL;
1032     }
1033   if (TREE_THIS_VOLATILE (expr))
1034     {
1035       disqualify_base_of_expr (expr, "part of a volatile reference.");
1036       return NULL;
1037     }
1038 
1039   switch (TREE_CODE (expr))
1040     {
1041     case MEM_REF:
1042       if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
1043 	  && sra_mode != SRA_MODE_EARLY_IPA)
1044 	return NULL;
1045       /* fall through */
1046     case VAR_DECL:
1047     case PARM_DECL:
1048     case RESULT_DECL:
1049     case COMPONENT_REF:
1050     case ARRAY_REF:
1051     case ARRAY_RANGE_REF:
1052       ret = create_access (expr, stmt, write);
1053       break;
1054 
1055     default:
1056       break;
1057     }
1058 
1059   if (write && partial_ref && ret)
1060     ret->grp_partial_lhs = 1;
1061 
1062   return ret;
1063 }
1064 
1065 /* Scan expression EXPR and create access structures for all accesses to
1066    candidates for scalarization.  Return true if any access has been inserted.
1067    STMT must be the statement from which the expression is taken, WRITE must be
1068    true if the expression is a store and false otherwise. */
1069 
1070 static bool
1071 build_access_from_expr (tree expr, gimple stmt, bool write)
1072 {
1073   struct access *access;
1074 
1075   access = build_access_from_expr_1 (expr, stmt, write);
1076   if (access)
1077     {
1078       /* This means the aggregate is accessed as a whole in a way other than an
1079 	 assign statement and thus cannot be removed even if we had a scalar
1080 	 replacement for everything.  */
1081       if (cannot_scalarize_away_bitmap)
1082 	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
1083       return true;
1084     }
1085   return false;
1086 }
1087 
1088 /* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
1089    modes in which it matters; return true iff they have been disqualified.  RHS
1090    may be NULL, in that case ignore it.  If we scalarize an aggregate in
1091    intra-SRA we may need to add statements after each statement.  This is not
1092    possible if a statement unconditionally has to end the basic block.  */
1093 static bool
1094 disqualify_ops_if_throwing_stmt (gimple stmt, tree lhs, tree rhs)
1095 {
1096   if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1097       && (stmt_can_throw_internal (stmt) || stmt_ends_bb_p (stmt)))
1098     {
1099       disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
1100       if (rhs)
1101 	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
1102       return true;
1103     }
1104   return false;
1105 }
1106 
1107 /* Scan expressions occurring in STMT, create access structures for all accesses
1108    to candidates for scalarization and remove those candidates which occur in
1109    statements or expressions that prevent them from being split apart.  Return
1110    true if any access has been inserted.  */
1111 
1112 static bool
1113 build_accesses_from_assign (gimple stmt)
1114 {
1115   tree lhs, rhs;
1116   struct access *lacc, *racc;
1117 
1118   if (!gimple_assign_single_p (stmt)
1119       /* Scope clobbers don't influence scalarization.  */
1120       || gimple_clobber_p (stmt))
1121     return false;
1122 
1123   lhs = gimple_assign_lhs (stmt);
1124   rhs = gimple_assign_rhs1 (stmt);
1125 
1126   if (disqualify_ops_if_throwing_stmt (stmt, lhs, rhs))
1127     return false;
1128 
1129   racc = build_access_from_expr_1 (rhs, stmt, false);
1130   lacc = build_access_from_expr_1 (lhs, stmt, true);
1131 
1132   if (lacc)
1133     lacc->grp_assignment_write = 1;
1134 
1135   if (racc)
1136     {
1137       racc->grp_assignment_read = 1;
1138       if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
1139 	  && !is_gimple_reg_type (racc->type))
1140 	bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
1141     }
1142 
1143   if (lacc && racc
1144       && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
1145       && !lacc->grp_unscalarizable_region
1146       && !racc->grp_unscalarizable_region
1147       && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
1148       && lacc->size == racc->size
1149       && useless_type_conversion_p (lacc->type, racc->type))
1150     {
1151       struct assign_link *link;
1152 
1153       link = (struct assign_link *) pool_alloc (link_pool);
1154       memset (link, 0, sizeof (struct assign_link));
1155 
1156       link->lacc = lacc;
1157       link->racc = racc;
1158 
1159       add_link_to_rhs (racc, link);
1160     }
1161 
1162   return lacc || racc;
1163 }
1164 
1165 /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
1166    GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */
1167 
1168 static bool
1169 asm_visit_addr (gimple, tree op, tree, void *)
1170 {
1171   op = get_base_address (op);
1172   if (op
1173       && DECL_P (op))
1174     disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
1175 
1176   return false;
1177 }
1178 
1179 /* Return true iff callsite CALL has at least as many actual arguments as there
1180    are formal parameters of the function currently processed by IPA-SRA.  */
1181 
1182 static inline bool
1183 callsite_has_enough_arguments_p (gimple call)
1184 {
1185   return gimple_call_num_args (call) >= (unsigned) func_param_count;
1186 }
1187 
1188 /* Scan function and look for interesting expressions and create access
1189    structures for them.  Return true iff any access is created.  */
1190 
1191 static bool
1192 scan_function (void)
1193 {
1194   basic_block bb;
1195   bool ret = false;
1196 
1197   FOR_EACH_BB (bb)
1198     {
1199       gimple_stmt_iterator gsi;
1200       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1201 	{
1202 	  gimple stmt = gsi_stmt (gsi);
1203 	  tree t;
1204 	  unsigned i;
1205 
1206 	  if (final_bbs && stmt_can_throw_external (stmt))
1207 	    bitmap_set_bit (final_bbs, bb->index);
1208 	  switch (gimple_code (stmt))
1209 	    {
1210 	    case GIMPLE_RETURN:
1211 	      t = gimple_return_retval (stmt);
1212 	      if (t != NULL_TREE)
1213 		ret |= build_access_from_expr (t, stmt, false);
1214 	      if (final_bbs)
1215 		bitmap_set_bit (final_bbs, bb->index);
1216 	      break;
1217 
1218 	    case GIMPLE_ASSIGN:
1219 	      ret |= build_accesses_from_assign (stmt);
1220 	      break;
1221 
1222 	    case GIMPLE_CALL:
1223 	      for (i = 0; i < gimple_call_num_args (stmt); i++)
1224 		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
1225 					       stmt, false);
1226 
1227 	      if (sra_mode == SRA_MODE_EARLY_IPA)
1228 		{
1229 		  tree dest = gimple_call_fndecl (stmt);
1230 		  int flags = gimple_call_flags (stmt);
1231 
1232 		  if (dest)
1233 		    {
1234 		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
1235 			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
1236 			encountered_apply_args = true;
1237 		      if (cgraph_get_node (dest)
1238 			  == cgraph_get_node (current_function_decl))
1239 			{
1240 			  encountered_recursive_call = true;
1241 			  if (!callsite_has_enough_arguments_p (stmt))
1242 			    encountered_unchangable_recursive_call = true;
1243 			}
1244 		    }
1245 
1246 		  if (final_bbs
1247 		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
1248 		    bitmap_set_bit (final_bbs, bb->index);
1249 		}
1250 
1251 	      t = gimple_call_lhs (stmt);
1252 	      if (t && !disqualify_ops_if_throwing_stmt (stmt, t, NULL))
1253 		ret |= build_access_from_expr (t, stmt, true);
1254 	      break;
1255 
1256 	    case GIMPLE_ASM:
1257 	      walk_stmt_load_store_addr_ops (stmt, NULL, NULL, NULL,
1258 					     asm_visit_addr);
1259 	      if (final_bbs)
1260 		bitmap_set_bit (final_bbs, bb->index);
1261 
1262 	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
1263 		{
1264 		  t = TREE_VALUE (gimple_asm_input_op (stmt, i));
1265 		  ret |= build_access_from_expr (t, stmt, false);
1266 		}
1267 	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
1268 		{
1269 		  t = TREE_VALUE (gimple_asm_output_op (stmt, i));
1270 		  ret |= build_access_from_expr (t, stmt, true);
1271 		}
1272 	      break;
1273 
1274 	    default:
1275 	      break;
1276 	    }
1277 	}
1278     }
1279 
1280   return ret;
1281 }
1282 
1283 /* Helper function for qsort.  The array contains pointers to accesses.  An
1284    access is considered smaller than another if it has a smaller offset or if
1285    the offsets are the same but its size is bigger.  */
1286 
1287 static int
1288 compare_access_positions (const void *a, const void *b)
1289 {
1290   const access_p *fp1 = (const access_p *) a;
1291   const access_p *fp2 = (const access_p *) b;
1292   const access_p f1 = *fp1;
1293   const access_p f2 = *fp2;
1294 
1295   if (f1->offset != f2->offset)
1296     return f1->offset < f2->offset ? -1 : 1;
1297 
1298   if (f1->size == f2->size)
1299     {
1300       if (f1->type == f2->type)
1301 	return 0;
1302       /* Put any non-aggregate type before any aggregate type.  */
1303       else if (!is_gimple_reg_type (f1->type)
1304 	  && is_gimple_reg_type (f2->type))
1305 	return 1;
1306       else if (is_gimple_reg_type (f1->type)
1307 	       && !is_gimple_reg_type (f2->type))
1308 	return -1;
1309       /* Put any complex or vector type before any other scalar type.  */
1310       else if (TREE_CODE (f1->type) != COMPLEX_TYPE
1311 	       && TREE_CODE (f1->type) != VECTOR_TYPE
1312 	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
1313 		   || TREE_CODE (f2->type) == VECTOR_TYPE))
1314 	return 1;
1315       else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
1316 		|| TREE_CODE (f1->type) == VECTOR_TYPE)
1317 	       && TREE_CODE (f2->type) != COMPLEX_TYPE
1318 	       && TREE_CODE (f2->type) != VECTOR_TYPE)
1319 	return -1;
1320       /* Put the integral type with the bigger precision first.  */
1321       else if (INTEGRAL_TYPE_P (f1->type)
1322 	       && INTEGRAL_TYPE_P (f2->type))
1323 	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
1324       /* Put any integral type with non-full precision last.  */
1325       else if (INTEGRAL_TYPE_P (f1->type)
1326 	       && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
1327 		   != TYPE_PRECISION (f1->type)))
1328 	return 1;
1329       else if (INTEGRAL_TYPE_P (f2->type)
1330 	       && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
1331 		   != TYPE_PRECISION (f2->type)))
1332 	return -1;
1333       /* Stabilize the sort.  */
1334       return TYPE_UID (f1->type) - TYPE_UID (f2->type);
1335     }
1336 
1337   /* We want the bigger accesses first, thus the opposite operator in the next
1338      line: */
1339   return f1->size > f2->size ? -1 : 1;
1340 }
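/* As an illustration of the ordering above, three accesses to the same 64 bits
   of an aggregate through the types double, complex float and a two-member
   struct would be sorted as complex float, double, struct: the offsets and
   sizes are equal, complex and vector types go before other scalar types, and
   any register type goes before any aggregate type.  */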
1341 
1342 
1343 /* Append a name of the declaration to the name obstack.  A helper function for
1344    make_fancy_name.  */
1345 
1346 static void
1347 make_fancy_decl_name (tree decl)
1348 {
1349   char buffer[32];
1350 
1351   tree name = DECL_NAME (decl);
1352   if (name)
1353     obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
1354 		  IDENTIFIER_LENGTH (name));
1355   else
1356     {
1357       sprintf (buffer, "D%u", DECL_UID (decl));
1358       obstack_grow (&name_obstack, buffer, strlen (buffer));
1359     }
1360 }
1361 
1362 /* Helper for make_fancy_name.  */
1363 
1364 static void
1365 make_fancy_name_1 (tree expr)
1366 {
1367   char buffer[32];
1368   tree index;
1369 
1370   if (DECL_P (expr))
1371     {
1372       make_fancy_decl_name (expr);
1373       return;
1374     }
1375 
1376   switch (TREE_CODE (expr))
1377     {
1378     case COMPONENT_REF:
1379       make_fancy_name_1 (TREE_OPERAND (expr, 0));
1380       obstack_1grow (&name_obstack, '$');
1381       make_fancy_decl_name (TREE_OPERAND (expr, 1));
1382       break;
1383 
1384     case ARRAY_REF:
1385       make_fancy_name_1 (TREE_OPERAND (expr, 0));
1386       obstack_1grow (&name_obstack, '$');
1387       /* Arrays with only one element may not have a constant as their
1388 	 index. */
1389       index = TREE_OPERAND (expr, 1);
1390       if (TREE_CODE (index) != INTEGER_CST)
1391 	break;
1392       sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
1393       obstack_grow (&name_obstack, buffer, strlen (buffer));
1394       break;
1395 
1396     case ADDR_EXPR:
1397       make_fancy_name_1 (TREE_OPERAND (expr, 0));
1398       break;
1399 
1400     case MEM_REF:
1401       make_fancy_name_1 (TREE_OPERAND (expr, 0));
1402       if (!integer_zerop (TREE_OPERAND (expr, 1)))
1403 	{
1404 	  obstack_1grow (&name_obstack, '$');
1405 	  sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
1406 		   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
1407 	  obstack_grow (&name_obstack, buffer, strlen (buffer));
1408 	}
1409       break;
1410 
1411     case BIT_FIELD_REF:
1412     case REALPART_EXPR:
1413     case IMAGPART_EXPR:
1414       gcc_unreachable (); 	/* we treat these as scalars.  */
1415       break;
1416     default:
1417       break;
1418     }
1419 }
1420 
1421 /* Create a human readable name for a replacement variable based on EXPR.  */
1422 
1423 static char *
1424 make_fancy_name (tree expr)
1425 {
1426   make_fancy_name_1 (expr);
1427   obstack_1grow (&name_obstack, '\0');
1428   return XOBFINISH (&name_obstack, char *);
1429 }
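/* For example (illustrative), for the expression s.in.arr[2] the functions
   above produce the name "s$in$arr$2", which is later used to name the
   corresponding scalar replacement variable in dumps and debug output.  */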
1430 
1431 /* Construct a MEM_REF that would reference a part of aggregate BASE of type
1432    EXP_TYPE at the given OFFSET.  If BASE is something for which
1433    get_addr_base_and_unit_offset returns NULL, GSI must be non-NULL and is used
1434    to insert new statements either before or below the current one as specified
1435    by INSERT_AFTER.  This function is not capable of handling bitfields.
1436 
1437    BASE must be either a declaration or a memory reference that has correct
1438    alignment information embedded in it (e.g. a pre-existing one in SRA).  */
1439 
1440 tree
1441 build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
1442 		      tree exp_type, gimple_stmt_iterator *gsi,
1443 		      bool insert_after)
1444 {
1445   tree prev_base = base;
1446   tree off;
1447   tree mem_ref;
1448   HOST_WIDE_INT base_offset;
1449   unsigned HOST_WIDE_INT misalign;
1450   unsigned int align;
1451 
1452   gcc_checking_assert (offset % BITS_PER_UNIT == 0);
1453   get_object_alignment_1 (base, &align, &misalign);
1454   base = get_addr_base_and_unit_offset (base, &base_offset);
1455 
1456   /* get_addr_base_and_unit_offset returns NULL for references with a variable
1457      offset such as array[var_index].  */
1458   if (!base)
1459     {
1460       gimple stmt;
1461       tree tmp, addr;
1462 
1463       gcc_checking_assert (gsi);
1464       tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)), NULL);
1465       addr = build_fold_addr_expr (unshare_expr (prev_base));
1466       STRIP_USELESS_TYPE_CONVERSION (addr);
1467       stmt = gimple_build_assign (tmp, addr);
1468       gimple_set_location (stmt, loc);
1469       if (insert_after)
1470 	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
1471       else
1472 	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
1473 
1474       off = build_int_cst (reference_alias_ptr_type (prev_base),
1475 			   offset / BITS_PER_UNIT);
1476       base = tmp;
1477     }
1478   else if (TREE_CODE (base) == MEM_REF)
1479     {
1480       off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1481 			   base_offset + offset / BITS_PER_UNIT);
1482       off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1483       base = unshare_expr (TREE_OPERAND (base, 0));
1484     }
1485   else
1486     {
1487       off = build_int_cst (reference_alias_ptr_type (base),
1488 			   base_offset + offset / BITS_PER_UNIT);
1489       base = build_fold_addr_expr (unshare_expr (base));
1490     }
1491 
1492   misalign = (misalign + offset) & (align - 1);
1493   if (misalign != 0)
1494     align = (misalign & -misalign);
1495   if (align < TYPE_ALIGN (exp_type))
1496     exp_type = build_aligned_type (exp_type, align);
1497 
1498   mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
1499   if (TREE_THIS_VOLATILE (prev_base))
1500     TREE_THIS_VOLATILE (mem_ref) = 1;
1501   if (TREE_SIDE_EFFECTS (prev_base))
1502     TREE_SIDE_EFFECTS (mem_ref) = 1;
1503   return mem_ref;
1504 }
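/* A simple case as a sketch: if BASE is a declaration V that involves no
   variable index, OFFSET is 64 and EXP_TYPE is int, the function folds the
   reference down to roughly MEM[(int *)&V + 8B], possibly with EXP_TYPE
   weakened to a lower-aligned variant of int when the known alignment of the
   access requires it.  */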
1505 
1506 /* Construct a memory reference to a part of an aggregate BASE at the given
1507    OFFSET and of the same type as MODEL.  In case this is a reference to a
1508    bit-field, the function will replicate the last component_ref of model's
1509    expr to access it.  GSI and INSERT_AFTER have the same meaning as in
1510    build_ref_for_offset.  */
1511 
1512 static tree
1513 build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1514 		     struct access *model, gimple_stmt_iterator *gsi,
1515 		     bool insert_after)
1516 {
1517   if (TREE_CODE (model->expr) == COMPONENT_REF
1518       && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1519     {
1520       /* This access represents a bit-field.  */
1521       tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
1522 
1523       offset -= int_bit_position (fld);
1524       exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
1525       t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
1526       return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
1527 			      NULL_TREE);
1528     }
1529   else
1530     return build_ref_for_offset (loc, base, offset, model->type,
1531 				 gsi, insert_after);
1532 }
1533 
1534 /* Attempt to build a memory reference that we could put into a gimple
1535    debug_bind statement.  Similar to build_ref_for_model but punts if it has to
1536    create statements and returns NULL instead.  This function also ignores
1537    alignment issues and so its results should never end up in non-debug
1538    statements.  */
1539 
1540 static tree
1541 build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
1542 			   struct access *model)
1543 {
1544   HOST_WIDE_INT base_offset;
1545   tree off;
1546 
1547   if (TREE_CODE (model->expr) == COMPONENT_REF
1548       && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
1549     return NULL_TREE;
1550 
1551   base = get_addr_base_and_unit_offset (base, &base_offset);
1552   if (!base)
1553     return NULL_TREE;
1554   if (TREE_CODE (base) == MEM_REF)
1555     {
1556       off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
1557 			   base_offset + offset / BITS_PER_UNIT);
1558       off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
1559       base = unshare_expr (TREE_OPERAND (base, 0));
1560     }
1561   else
1562     {
1563       off = build_int_cst (reference_alias_ptr_type (base),
1564 			   base_offset + offset / BITS_PER_UNIT);
1565       base = build_fold_addr_expr (unshare_expr (base));
1566     }
1567 
1568   return fold_build2_loc (loc, MEM_REF, model->type, base, off);
1569 }
1570 
1571 /* Construct a memory reference consisting of component_refs and array_refs to
1572    a part of an aggregate *RES (which is of type TYPE).  The requested part
1573    should have type EXP_TYPE and be at the given OFFSET.  This function might
1574    not succeed; it returns true when it does and only then *RES points to
1575    something meaningful.  It should be used only to build expressions that we
1576    might need to present to the user (e.g. in warnings).  In all other
1577    situations, use build_ref_for_model or build_ref_for_offset instead.  */
1578 
1579 static bool
1580 build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
1581 				    tree exp_type)
1582 {
1583   while (1)
1584     {
1585       tree fld;
1586       tree tr_size, index, minidx;
1587       HOST_WIDE_INT el_size;
1588 
1589       if (offset == 0 && exp_type
1590 	  && types_compatible_p (exp_type, type))
1591 	return true;
1592 
1593       switch (TREE_CODE (type))
1594 	{
1595 	case UNION_TYPE:
1596 	case QUAL_UNION_TYPE:
1597 	case RECORD_TYPE:
1598 	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1599 	    {
1600 	      HOST_WIDE_INT pos, size;
1601 	      tree tr_pos, expr, *expr_ptr;
1602 
1603 	      if (TREE_CODE (fld) != FIELD_DECL)
1604 		continue;
1605 
1606 	      tr_pos = bit_position (fld);
1607 	      if (!tr_pos || !host_integerp (tr_pos, 1))
1608 		continue;
1609 	      pos = TREE_INT_CST_LOW (tr_pos);
1610 	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
1611 	      tr_size = DECL_SIZE (fld);
1612 	      if (!tr_size || !host_integerp (tr_size, 1))
1613 		continue;
1614 	      size = TREE_INT_CST_LOW (tr_size);
1615 	      if (size == 0)
1616 		{
1617 		  if (pos != offset)
1618 		    continue;
1619 		}
1620 	      else if (pos > offset || (pos + size) <= offset)
1621 		continue;
1622 
1623 	      expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
1624 			     NULL_TREE);
1625 	      expr_ptr = &expr;
1626 	      if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
1627 						      offset - pos, exp_type))
1628 		{
1629 		  *res = expr;
1630 		  return true;
1631 		}
1632 	    }
1633 	  return false;
1634 
1635 	case ARRAY_TYPE:
1636 	  tr_size = TYPE_SIZE (TREE_TYPE (type));
1637 	  if (!tr_size || !host_integerp (tr_size, 1))
1638 	    return false;
1639 	  el_size = tree_low_cst (tr_size, 1);
1640 
1641 	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1642 	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
1643 	    return false;
1644 	  index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
1645 	  if (!integer_zerop (minidx))
1646 	    index = int_const_binop (PLUS_EXPR, index, minidx);
1647 	  *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
1648 			 NULL_TREE, NULL_TREE);
1649 	  offset = offset % el_size;
1650 	  type = TREE_TYPE (type);
1651 	  break;
1652 
1653 	default:
1654 	  if (offset != 0)
1655 	    return false;
1656 
1657 	  if (exp_type)
1658 	    return false;
1659 	  else
1660 	    return true;
1661 	}
1662     }
1663 }
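
/* A hypothetical example of the above, assuming the usual 32-bit layout of

     struct S { int i; struct T { float f; float g; } t; } s;

   a call with *RES == s, TYPE == struct S, OFFSET == 64 and EXP_TYPE == float
   would rewrite *RES to s.t.g by descending through COMPONENT_REFs, which is
   the form we would rather present in warnings than a raw MEM_REF.  */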
1664 
1665 /* Return true iff TYPE is the stdarg va_list type.  */
1666 
1667 static inline bool
1668 is_va_list_type (tree type)
1669 {
1670   return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
1671 }
1672 
1673 /* Print a message to the dump file explaining why a variable was rejected.  */
1674 
1675 static void
1676 reject (tree var, const char *msg)
1677 {
1678   if (dump_file && (dump_flags & TDF_DETAILS))
1679     {
1680       fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
1681       print_generic_expr (dump_file, var, 0);
1682       fprintf (dump_file, "\n");
1683     }
1684 }
1685 
1686 /* If VAR is a suitable candidate for SRA, register it and return true.  */
1687 
1688 static bool
1689 maybe_add_sra_candidate (tree var)
1690 {
1691   tree type = TREE_TYPE (var);
1692   const char *msg;
1693   void **slot;
1694 
1695   if (!AGGREGATE_TYPE_P (type))
1696     {
1697       reject (var, "not aggregate");
1698       return false;
1699     }
1700   if (needs_to_live_in_memory (var))
1701     {
1702       reject (var, "needs to live in memory");
1703       return false;
1704     }
1705   if (TREE_THIS_VOLATILE (var))
1706     {
1707       reject (var, "is volatile");
1708       return false;
1709     }
1710   if (!COMPLETE_TYPE_P (type))
1711     {
1712       reject (var, "has incomplete type");
1713       return false;
1714     }
1715   if (!host_integerp (TYPE_SIZE (type), 1))
1716     {
1717       reject (var, "type size not fixed");
1718       return false;
1719     }
1720   if (tree_low_cst (TYPE_SIZE (type), 1) == 0)
1721     {
1722       reject (var, "type size is zero");
1723       return false;
1724     }
1725   if (type_internals_preclude_sra_p (type, &msg))
1726     {
1727       reject (var, msg);
1728       return false;
1729     }
1730   if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
1731 	 we also want to schedule it rather late.  Thus we ignore it in
1732 	 the early pass. */
1733       (sra_mode == SRA_MODE_EARLY_INTRA
1734        && is_va_list_type (type)))
1735     {
1736       reject (var, "is va_list");
1737       return false;
1738     }
1739 
1740   bitmap_set_bit (candidate_bitmap, DECL_UID (var));
1741   slot = htab_find_slot_with_hash (candidates, var, DECL_UID (var), INSERT);
1742   *slot = (void *) var;
1743 
1744   if (dump_file && (dump_flags & TDF_DETAILS))
1745     {
1746       fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
1747       print_generic_expr (dump_file, var, 0);
1748       fprintf (dump_file, "\n");
1749     }
1750 
1751   return true;
1752 }
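
/* A hypothetical illustration of the checks above: given

     struct S { int i; float f; } a, b;
     bar (&a);
     b.i = 1;

   a is rejected because its address escapes ("needs to live in memory"),
   whereas b passes every check and is recorded in candidate_bitmap and the
   candidates hash table.  */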
1753 
1754 /* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
1755    those declarations whose type is suitable for scalarization.  */
1756 
1757 static bool
1758 find_var_candidates (void)
1759 {
1760   tree var, parm;
1761   unsigned int i;
1762   bool ret = false;
1763 
1764   for (parm = DECL_ARGUMENTS (current_function_decl);
1765        parm;
1766        parm = DECL_CHAIN (parm))
1767     ret |= maybe_add_sra_candidate (parm);
1768 
1769   FOR_EACH_LOCAL_DECL (cfun, i, var)
1770     {
1771       if (TREE_CODE (var) != VAR_DECL)
1772         continue;
1773 
1774       ret |= maybe_add_sra_candidate (var);
1775     }
1776 
1777   return ret;
1778 }
1779 
1780 /* Sort all accesses for the given variable, check for partial overlaps and
1781    return NULL if there are any.  If there are none, pick a representative for
1782    each combination of offset and size and create a linked list out of them.
1783    Return the pointer to the first representative and make sure it is the first
1784    one in the vector of accesses.  */
1785 
1786 static struct access *
1787 sort_and_splice_var_accesses (tree var)
1788 {
1789   int i, j, access_count;
1790   struct access *res, **prev_acc_ptr = &res;
1791   vec<access_p> *access_vec;
1792   bool first = true;
1793   HOST_WIDE_INT low = -1, high = 0;
1794 
1795   access_vec = get_base_access_vector (var);
1796   if (!access_vec)
1797     return NULL;
1798   access_count = access_vec->length ();
1799 
1800   /* Sort by <OFFSET, SIZE>.  */
1801   access_vec->qsort (compare_access_positions);
1802 
1803   i = 0;
1804   while (i < access_count)
1805     {
1806       struct access *access = (*access_vec)[i];
1807       bool grp_write = access->write;
1808       bool grp_read = !access->write;
1809       bool grp_scalar_write = access->write
1810 	&& is_gimple_reg_type (access->type);
1811       bool grp_scalar_read = !access->write
1812 	&& is_gimple_reg_type (access->type);
1813       bool grp_assignment_read = access->grp_assignment_read;
1814       bool grp_assignment_write = access->grp_assignment_write;
1815       bool multiple_scalar_reads = false;
1816       bool total_scalarization = access->grp_total_scalarization;
1817       bool grp_partial_lhs = access->grp_partial_lhs;
1818       bool first_scalar = is_gimple_reg_type (access->type);
1819       bool unscalarizable_region = access->grp_unscalarizable_region;
1820 
1821       if (first || access->offset >= high)
1822 	{
1823 	  first = false;
1824 	  low = access->offset;
1825 	  high = access->offset + access->size;
1826 	}
1827       else if (access->offset > low && access->offset + access->size > high)
1828 	return NULL;
1829       else
1830 	gcc_assert (access->offset >= low
1831 		    && access->offset + access->size <= high);
1832 
1833       j = i + 1;
1834       while (j < access_count)
1835 	{
1836 	  struct access *ac2 = (*access_vec)[j];
1837 	  if (ac2->offset != access->offset || ac2->size != access->size)
1838 	    break;
1839 	  if (ac2->write)
1840 	    {
1841 	      grp_write = true;
1842 	      grp_scalar_write = (grp_scalar_write
1843 				  || is_gimple_reg_type (ac2->type));
1844 	    }
1845 	  else
1846 	    {
1847 	      grp_read = true;
1848 	      if (is_gimple_reg_type (ac2->type))
1849 		{
1850 		  if (grp_scalar_read)
1851 		    multiple_scalar_reads = true;
1852 		  else
1853 		    grp_scalar_read = true;
1854 		}
1855 	    }
1856 	  grp_assignment_read |= ac2->grp_assignment_read;
1857 	  grp_assignment_write |= ac2->grp_assignment_write;
1858 	  grp_partial_lhs |= ac2->grp_partial_lhs;
1859 	  unscalarizable_region |= ac2->grp_unscalarizable_region;
1860 	  total_scalarization |= ac2->grp_total_scalarization;
1861 	  relink_to_new_repr (access, ac2);
1862 
1863 	  /* If there are both aggregate-type and scalar-type accesses with
1864 	     this combination of size and offset, the comparison function
1865 	     should have put the scalars first.  */
1866 	  gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
1867 	  ac2->group_representative = access;
1868 	  j++;
1869 	}
1870 
1871       i = j;
1872 
1873       access->group_representative = access;
1874       access->grp_write = grp_write;
1875       access->grp_read = grp_read;
1876       access->grp_scalar_read = grp_scalar_read;
1877       access->grp_scalar_write = grp_scalar_write;
1878       access->grp_assignment_read = grp_assignment_read;
1879       access->grp_assignment_write = grp_assignment_write;
1880       access->grp_hint = multiple_scalar_reads || total_scalarization;
1881       access->grp_total_scalarization = total_scalarization;
1882       access->grp_partial_lhs = grp_partial_lhs;
1883       access->grp_unscalarizable_region = unscalarizable_region;
1884       if (access->first_link)
1885 	add_access_to_work_queue (access);
1886 
1887       *prev_acc_ptr = access;
1888       prev_acc_ptr = &access->next_grp;
1889     }
1890 
1891   gcc_assert (res == (*access_vec)[0]);
1892   return res;
1893 }
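
/* A hypothetical example of the splicing above: accesses of one variable at
   bit ranges <0, 64>, <0, 32> and <0, 32> are sorted so that <0, 64> comes
   first and the two identical <0, 32> accesses are merged under a single
   representative, whereas an additional access at <32, 64> would partially
   overlap <0, 64> and make the function return NULL.  */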
1894 
1895 /* Create a replacement variable for the given ACCESS, which determines its
1896    type, name and a few other properties.  Return the variable declaration; the
1897    caller is expected to store it in ACCESS->replacement_decl.  */
1898 
1899 static tree
1900 create_access_replacement (struct access *access)
1901 {
1902   tree repl;
1903 
1904   if (access->grp_to_be_debug_replaced)
1905     {
1906       repl = create_tmp_var_raw (access->type, NULL);
1907       DECL_CONTEXT (repl) = current_function_decl;
1908     }
1909   else
1910     repl = create_tmp_var (access->type, "SR");
1911   if (TREE_CODE (access->type) == COMPLEX_TYPE
1912       || TREE_CODE (access->type) == VECTOR_TYPE)
1913     {
1914       if (!access->grp_partial_lhs)
1915 	DECL_GIMPLE_REG_P (repl) = 1;
1916     }
1917   else if (access->grp_partial_lhs
1918 	   && is_gimple_reg_type (access->type))
1919     TREE_ADDRESSABLE (repl) = 1;
1920 
1921   DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
1922   DECL_ARTIFICIAL (repl) = 1;
1923   DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
1924 
1925   if (DECL_NAME (access->base)
1926       && !DECL_IGNORED_P (access->base)
1927       && !DECL_ARTIFICIAL (access->base))
1928     {
1929       char *pretty_name = make_fancy_name (access->expr);
1930       tree debug_expr = unshare_expr_without_location (access->expr), d;
1931       bool fail = false;
1932 
1933       DECL_NAME (repl) = get_identifier (pretty_name);
1934       obstack_free (&name_obstack, pretty_name);
1935 
1936       /* Get rid of any SSA_NAMEs embedded in debug_expr,
1937 	 as DECL_DEBUG_EXPR isn't considered when looking for still
1938 	 used SSA_NAMEs and thus they could be freed.  All debug info
1939 	 generation cares is whether something is constant or variable
1940 	 and that get_ref_base_and_extent works properly on the
1941 	 expression.  It cannot handle accesses at a non-constant offset
1942 	 though, so just give up in those cases.  */
1943       for (d = debug_expr;
1944 	   !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
1945 	   d = TREE_OPERAND (d, 0))
1946 	switch (TREE_CODE (d))
1947 	  {
1948 	  case ARRAY_REF:
1949 	  case ARRAY_RANGE_REF:
1950 	    if (TREE_OPERAND (d, 1)
1951 		&& TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
1952 	      fail = true;
1953 	    if (TREE_OPERAND (d, 3)
1954 		&& TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
1955 	      fail = true;
1956 	    /* FALLTHRU */
1957 	  case COMPONENT_REF:
1958 	    if (TREE_OPERAND (d, 2)
1959 		&& TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
1960 	      fail = true;
1961 	    break;
1962 	  case MEM_REF:
1963 	    if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
1964 	      fail = true;
1965 	    else
1966 	      d = TREE_OPERAND (d, 0);
1967 	    break;
1968 	  default:
1969 	    break;
1970 	  }
1971       if (!fail)
1972 	{
1973 	  SET_DECL_DEBUG_EXPR (repl, debug_expr);
1974 	  DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
1975 	}
1976       if (access->grp_no_warning)
1977 	TREE_NO_WARNING (repl) = 1;
1978       else
1979 	TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
1980     }
1981   else
1982     TREE_NO_WARNING (repl) = 1;
1983 
1984   if (dump_file)
1985     {
1986       if (access->grp_to_be_debug_replaced)
1987 	{
1988 	  fprintf (dump_file, "Created a debug-only replacement for ");
1989 	  print_generic_expr (dump_file, access->base, 0);
1990 	  fprintf (dump_file, " offset: %u, size: %u\n",
1991 		   (unsigned) access->offset, (unsigned) access->size);
1992 	}
1993       else
1994 	{
1995 	  fprintf (dump_file, "Created a replacement for ");
1996 	  print_generic_expr (dump_file, access->base, 0);
1997 	  fprintf (dump_file, " offset: %u, size: %u: ",
1998 		   (unsigned) access->offset, (unsigned) access->size);
1999 	  print_generic_expr (dump_file, repl, 0);
2000 	  fprintf (dump_file, "\n");
2001 	}
2002     }
2003   sra_stats.replacements++;
2004 
2005   return repl;
2006 }
2007 
2008 /* Return the scalar replacement of ACCESS; it must already have been created.  */
2009 
2010 static inline tree
2011 get_access_replacement (struct access *access)
2012 {
2013   gcc_checking_assert (access->replacement_decl);
2014   return access->replacement_decl;
2015 }
2016 
2017 
2018 /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
2019    linked list along the way.  Stop when *ACCESS is NULL or the access pointed
2020    to by it is not "within" the root.  Return false iff some accesses partially
2021    overlap.  */
2022 
2023 static bool
2024 build_access_subtree (struct access **access)
2025 {
2026   struct access *root = *access, *last_child = NULL;
2027   HOST_WIDE_INT limit = root->offset + root->size;
2028 
2029   *access = (*access)->next_grp;
2030   while  (*access && (*access)->offset + (*access)->size <= limit)
2031     {
2032       if (!last_child)
2033 	root->first_child = *access;
2034       else
2035 	last_child->next_sibling = *access;
2036       last_child = *access;
2037 
2038       if (!build_access_subtree (access))
2039 	return false;
2040     }
2041 
2042   if (*access && (*access)->offset < limit)
2043     return false;
2044 
2045   return true;
2046 }
2047 
2048 /* Build a tree of access representatives, ACCESS is the pointer to the first
2049    one, others are linked in a list by the next_grp field.  Return false iff
2050    some accesses partially overlap.  */
2051 
2052 static bool
2053 build_access_trees (struct access *access)
2054 {
2055   while (access)
2056     {
2057       struct access *root = access;
2058 
2059       if (!build_access_subtree (&access))
2060 	return false;
2061       root->next_grp = access;
2062     }
2063   return true;
2064 }
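
/* A hypothetical example of the trees built above: representatives <0, 64>,
   <0, 32> and <32, 32> become a tree in which <0, 64> is the root and the two
   smaller accesses are its children, because both lie entirely within it.  */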
2065 
2066 /* Return true if EXPR contains some ARRAY_REFs into a variable-bounded
2067    array.  */
2068 
2069 static bool
2070 expr_with_var_bounded_array_refs_p (tree expr)
2071 {
2072   while (handled_component_p (expr))
2073     {
2074       if (TREE_CODE (expr) == ARRAY_REF
2075 	  && !host_integerp (array_ref_low_bound (expr), 0))
2076 	return true;
2077       expr = TREE_OPERAND (expr, 0);
2078     }
2079   return false;
2080 }
2081 
2082 /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
2083    doing so both seems beneficial and ALLOW_REPLACEMENTS allows it.  Also set
2084    all sorts of access flags appropriately along the way, notably propagate
2085    grp_read, grp_assignment_read, grp_write and grp_assignment_write from
2086    PARENT down to ROOT.
2087 
2088    Creating a replacement for a scalar access is considered beneficial if its
2089    grp_hint is set (this means we are either attempting total scalarization or
2090    there is more than one direct read access) or according to the following
2091    table:
2092 
2093    Access written to through a scalar type (once or more times)
2094    |
2095    |	Written to in an assignment statement
2096    |	|
2097    |	|	Access read as scalar _once_
2098    |	|	|
2099    |   	|	|	Read in an assignment statement
2100    |	|	|	|
2101    |   	|	|	|	Scalarize	Comment
2102 -----------------------------------------------------------------------------
2103    0	0	0	0			No access for the scalar
2104    0	0	0	1			No access for the scalar
2105    0	0	1	0	No		Single read - won't help
2106    0	0	1	1	No		The same case
2107    0	1	0	0			No access for the scalar
2108    0	1	0	1			No access for the scalar
2109    0	1	1	0	Yes		s = *g; return s.i;
2110    0	1	1	1       Yes		The same case as above
2111    1	0	0	0	No		Won't help
2112    1	0	0	1	Yes		s.i = 1; *g = s;
2113    1	0	1	0	Yes		s.i = 5; g = s.i;
2114    1	0	1	1	Yes		The same case as above
2115    1	1	0	0	No		Won't help.
2116    1	1	0	1	Yes		s.i = 1; *g = s;
2117    1	1	1	0	Yes		s = *g; return s.i;
2118    1	1	1	1	Yes		Any of the above yeses  */
2119 
2120 static bool
2121 analyze_access_subtree (struct access *root, struct access *parent,
2122 			bool allow_replacements)
2123 {
2124   struct access *child;
2125   HOST_WIDE_INT limit = root->offset + root->size;
2126   HOST_WIDE_INT covered_to = root->offset;
2127   bool scalar = is_gimple_reg_type (root->type);
2128   bool hole = false, sth_created = false;
2129 
2130   if (parent)
2131     {
2132       if (parent->grp_read)
2133 	root->grp_read = 1;
2134       if (parent->grp_assignment_read)
2135 	root->grp_assignment_read = 1;
2136       if (parent->grp_write)
2137 	root->grp_write = 1;
2138       if (parent->grp_assignment_write)
2139 	root->grp_assignment_write = 1;
2140       if (parent->grp_total_scalarization)
2141 	root->grp_total_scalarization = 1;
2142     }
2143 
2144   if (root->grp_unscalarizable_region)
2145     allow_replacements = false;
2146 
2147   if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
2148     allow_replacements = false;
2149 
2150   for (child = root->first_child; child; child = child->next_sibling)
2151     {
2152       hole |= covered_to < child->offset;
2153       sth_created |= analyze_access_subtree (child, root,
2154 					     allow_replacements && !scalar);
2155 
2156       root->grp_unscalarized_data |= child->grp_unscalarized_data;
2157       root->grp_total_scalarization &= child->grp_total_scalarization;
2158       if (child->grp_covered)
2159 	covered_to += child->size;
2160       else
2161 	hole = true;
2162     }
2163 
2164   if (allow_replacements && scalar && !root->first_child
2165       && (root->grp_hint
2166 	  || ((root->grp_scalar_read || root->grp_assignment_read)
2167 	      && (root->grp_scalar_write || root->grp_assignment_write))))
2168     {
2169       /* Always create access replacements that cover the whole access.
2170          For integral types this means the precision has to match.
2171 	 Avoid assumptions based on the integral type kind, too.  */
2172       if (INTEGRAL_TYPE_P (root->type)
2173 	  && (TREE_CODE (root->type) != INTEGER_TYPE
2174 	      || TYPE_PRECISION (root->type) != root->size)
2175 	  /* But leave bitfield accesses alone.  */
2176 	  && (TREE_CODE (root->expr) != COMPONENT_REF
2177 	      || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
2178 	{
2179 	  tree rt = root->type;
2180 	  gcc_assert ((root->offset % BITS_PER_UNIT) == 0
2181 		      && (root->size % BITS_PER_UNIT) == 0);
2182 	  root->type = build_nonstandard_integer_type (root->size,
2183 						       TYPE_UNSIGNED (rt));
2184 	  root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
2185 					     root->base, root->offset,
2186 					     root->type, NULL, false);
2187 
2188 	  if (dump_file && (dump_flags & TDF_DETAILS))
2189 	    {
2190 	      fprintf (dump_file, "Changing the type of a replacement for ");
2191 	      print_generic_expr (dump_file, root->base, 0);
2192 	      fprintf (dump_file, " offset: %u, size: %u ",
2193 		       (unsigned) root->offset, (unsigned) root->size);
2194 	      fprintf (dump_file, " to an integer.\n");
2195 	    }
2196 	}
2197 
2198       root->grp_to_be_replaced = 1;
2199       root->replacement_decl = create_access_replacement (root);
2200       sth_created = true;
2201       hole = false;
2202     }
2203   else
2204     {
2205       if (allow_replacements
2206 	  && scalar && !root->first_child
2207 	  && (root->grp_scalar_write || root->grp_assignment_write)
2208 	  && !bitmap_bit_p (cannot_scalarize_away_bitmap,
2209 			    DECL_UID (root->base)))
2210 	{
2211 	  gcc_checking_assert (!root->grp_scalar_read
2212 			       && !root->grp_assignment_read);
2213 	  sth_created = true;
2214 	  if (MAY_HAVE_DEBUG_STMTS)
2215 	    {
2216 	      root->grp_to_be_debug_replaced = 1;
2217 	      root->replacement_decl = create_access_replacement (root);
2218 	    }
2219 	}
2220 
2221       if (covered_to < limit)
2222 	hole = true;
2223       if (scalar)
2224 	root->grp_total_scalarization = 0;
2225     }
2226 
2227   if (!hole || root->grp_total_scalarization)
2228     root->grp_covered = 1;
2229   else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
2230     root->grp_unscalarized_data = 1; /* not covered and written to */
2231   return sth_created;
2232 }
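
/* A hypothetical consequence of the "cover the whole access" rule above: a
   replacement for a _Bool member occupying 8 bits would not keep type _Bool
   (precision 1) but would be given an unsigned 8-bit type created by
   build_nonstandard_integer_type, unless the access is a bit-field.  */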
2233 
2234 /* Analyze all access trees linked by next_grp by means of
2235    analyze_access_subtree.  */
2236 static bool
2237 analyze_access_trees (struct access *access)
2238 {
2239   bool ret = false;
2240 
2241   while (access)
2242     {
2243       if (analyze_access_subtree (access, NULL, true))
2244 	ret = true;
2245       access = access->next_grp;
2246     }
2247 
2248   return ret;
2249 }
2250 
2251 /* Return true iff a potential new child of LACC at offset NORM_OFFSET and with
2252    size SIZE would conflict with an already existing one.  If exactly such a child
2253    already exists in LACC, store a pointer to it in EXACT_MATCH.  */
2254 
2255 static bool
2256 child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
2257 			      HOST_WIDE_INT size, struct access **exact_match)
2258 {
2259   struct access *child;
2260 
2261   for (child = lacc->first_child; child; child = child->next_sibling)
2262     {
2263       if (child->offset == norm_offset && child->size == size)
2264 	{
2265 	  *exact_match = child;
2266 	  return true;
2267 	}
2268 
2269       if (child->offset < norm_offset + size
2270 	  && child->offset + child->size > norm_offset)
2271 	return true;
2272     }
2273 
2274   return false;
2275 }
2276 
2277 /* Create a new child access of PARENT, with all properties just like MODEL
2278    except for its offset and with its grp_write true and grp_read false.
2279    Return the new access or NULL if it cannot be created.  Note that this access
2280    is created long after all splicing and sorting; it's not located in any
2281    access vector and is automatically a representative of its group.  */
2282 
2283 static struct access *
2284 create_artificial_child_access (struct access *parent, struct access *model,
2285 				HOST_WIDE_INT new_offset)
2286 {
2287   struct access *access;
2288   struct access **child;
2289   tree expr = parent->base;
2290 
2291   gcc_assert (!model->grp_unscalarizable_region);
2292 
2293   access = (struct access *) pool_alloc (access_pool);
2294   memset (access, 0, sizeof (struct access));
2295   if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
2296 					   model->type))
2297     {
2298       access->grp_no_warning = true;
2299       expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
2300 				  new_offset, model, NULL, false);
2301     }
2302 
2303   access->base = parent->base;
2304   access->expr = expr;
2305   access->offset = new_offset;
2306   access->size = model->size;
2307   access->type = model->type;
2308   access->grp_write = true;
2309   access->grp_read = false;
2310 
2311   child = &parent->first_child;
2312   while (*child && (*child)->offset < new_offset)
2313     child = &(*child)->next_sibling;
2314 
2315   access->next_sibling = *child;
2316   *child = access;
2317 
2318   return access;
2319 }
2320 
2321 
2322 /* Propagate all subaccesses of RACC across an assignment link to LACC. Return
2323    true if any new subaccess was created.  Additionally, if RACC is a scalar
2324    access but LACC is not, change the type of the latter, if possible.  */
2325 
2326 static bool
2327 propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
2328 {
2329   struct access *rchild;
2330   HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
2331   bool ret = false;
2332 
2333   if (is_gimple_reg_type (lacc->type)
2334       || lacc->grp_unscalarizable_region
2335       || racc->grp_unscalarizable_region)
2336     return false;
2337 
2338   if (is_gimple_reg_type (racc->type))
2339     {
2340       if (!lacc->first_child && !racc->first_child)
2341 	{
2342 	  tree t = lacc->base;
2343 
2344 	  lacc->type = racc->type;
2345 	  if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
2346 						  lacc->offset, racc->type))
2347 	    lacc->expr = t;
2348 	  else
2349 	    {
2350 	      lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
2351 						lacc->base, lacc->offset,
2352 						racc, NULL, false);
2353 	      lacc->grp_no_warning = true;
2354 	    }
2355 	}
2356       return false;
2357     }
2358 
2359   for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
2360     {
2361       struct access *new_acc = NULL;
2362       HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
2363 
2364       if (rchild->grp_unscalarizable_region)
2365 	continue;
2366 
2367       if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
2368 					&new_acc))
2369 	{
2370 	  if (new_acc)
2371 	    {
2372 	      rchild->grp_hint = 1;
2373 	      new_acc->grp_hint |= new_acc->grp_read;
2374 	      if (rchild->first_child)
2375 		ret |= propagate_subaccesses_across_link (new_acc, rchild);
2376 	    }
2377 	  continue;
2378 	}
2379 
2380       rchild->grp_hint = 1;
2381       new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
2382       if (new_acc)
2383 	{
2384 	  ret = true;
2385 	  if (racc->first_child)
2386 	    propagate_subaccesses_across_link (new_acc, rchild);
2387 	}
2388     }
2389 
2390   return ret;
2391 }
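
/* A hypothetical example of this propagation: given

     struct S { int i; int j; } a, b;
     a = b;
     use (b.i);

   the subaccess of b at <0, 32> created for the read of b.i is mirrored into
   a as an artificial child access at the same offset, so that the aggregate
   assignment can later be rewritten as copies of individual replacements.  */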
2392 
2393 /* Propagate all subaccesses across assignment links.  */
2394 
2395 static void
2396 propagate_all_subaccesses (void)
2397 {
2398   while (work_queue_head)
2399     {
2400       struct access *racc = pop_access_from_work_queue ();
2401       struct assign_link *link;
2402 
2403       gcc_assert (racc->first_link);
2404 
2405       for (link = racc->first_link; link; link = link->next)
2406 	{
2407 	  struct access *lacc = link->lacc;
2408 
2409 	  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
2410 	    continue;
2411 	  lacc = lacc->group_representative;
2412 	  if (propagate_subaccesses_across_link (lacc, racc)
2413 	      && lacc->first_link)
2414 	    add_access_to_work_queue (lacc);
2415 	}
2416     }
2417 }
2418 
2419 /* Go through all accesses collected throughout the (intraprocedural) analysis
2420    stage, exclude overlapping ones, identify representatives and build trees
2421    out of them, making decisions about scalarization on the way.  Return true
2422    iff there are any to-be-scalarized variables after this stage. */
2423 
2424 static bool
2425 analyze_all_variable_accesses (void)
2426 {
2427   int res = 0;
2428   bitmap tmp = BITMAP_ALLOC (NULL);
2429   bitmap_iterator bi;
2430   unsigned i, max_total_scalarization_size;
2431 
2432   max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
2433     * MOVE_RATIO (optimize_function_for_speed_p (cfun));
2434 
2435   EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
2436     if (bitmap_bit_p (should_scalarize_away_bitmap, i)
2437 	&& !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
2438       {
2439 	tree var = candidate (i);
2440 
2441 	if (TREE_CODE (var) == VAR_DECL
2442 	    && type_consists_of_records_p (TREE_TYPE (var)))
2443 	  {
2444 	    if ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
2445 		<= max_total_scalarization_size)
2446 	      {
2447 		completely_scalarize_var (var);
2448 		if (dump_file && (dump_flags & TDF_DETAILS))
2449 		  {
2450 		    fprintf (dump_file, "Will attempt to totally scalarize ");
2451 		    print_generic_expr (dump_file, var, 0);
2452 		    fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2453 		  }
2454 	      }
2455 	    else if (dump_file && (dump_flags & TDF_DETAILS))
2456 	      {
2457 		fprintf (dump_file, "Too big to totally scalarize: ");
2458 		print_generic_expr (dump_file, var, 0);
2459 		fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
2460 	      }
2461 	  }
2462       }
2463 
2464   bitmap_copy (tmp, candidate_bitmap);
2465   EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2466     {
2467       tree var = candidate (i);
2468       struct access *access;
2469 
2470       access = sort_and_splice_var_accesses (var);
2471       if (!access || !build_access_trees (access))
2472 	disqualify_candidate (var,
2473 			      "No or inhibitingly overlapping accesses.");
2474     }
2475 
2476   propagate_all_subaccesses ();
2477 
2478   bitmap_copy (tmp, candidate_bitmap);
2479   EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
2480     {
2481       tree var = candidate (i);
2482       struct access *access = get_first_repr_for_decl (var);
2483 
2484       if (analyze_access_trees (access))
2485 	{
2486 	  res++;
2487 	  if (dump_file && (dump_flags & TDF_DETAILS))
2488 	    {
2489 	      fprintf (dump_file, "\nAccess trees for ");
2490 	      print_generic_expr (dump_file, var, 0);
2491 	      fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
2492 	      dump_access_tree (dump_file, access);
2493 	      fprintf (dump_file, "\n");
2494 	    }
2495 	}
2496       else
2497 	disqualify_candidate (var, "No scalar replacements to be created.");
2498     }
2499 
2500   BITMAP_FREE (tmp);
2501 
2502   if (res)
2503     {
2504       statistics_counter_event (cfun, "Scalarized aggregates", res);
2505       return true;
2506     }
2507   else
2508     return false;
2509 }
2510 
2511 /* Generate statements copying scalar replacements of accesses within a subtree
2512    into or out of AGG.  ACCESS, all its children, siblings and their children
2513    are to be processed.  AGG is an aggregate type expression (can be a
2514    declaration but does not have to be, it can for example also be a mem_ref or
2515    a series of handled components).  TOP_OFFSET is the offset of the processed
2516    subtree which has to be subtracted from offsets of individual accesses to
2517    get corresponding offsets for AGG.  If CHUNK_SIZE is non-zero, copy only
2518    replacements in the interval <start_offset, start_offset + chunk_size>,
2519    otherwise copy all.  GSI is a statement iterator used to place the new
2520    statements.  WRITE should be true when the statements should write from AGG
2521    to the replacement and false if vice versa.  If INSERT_AFTER is true, new
2522    statements will be added after the current statement in GSI; otherwise they
2523    will be added before the statement.  */
2524 
2525 static void
2526 generate_subtree_copies (struct access *access, tree agg,
2527 			 HOST_WIDE_INT top_offset,
2528 			 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
2529 			 gimple_stmt_iterator *gsi, bool write,
2530 			 bool insert_after, location_t loc)
2531 {
2532   do
2533     {
2534       if (chunk_size && access->offset >= start_offset + chunk_size)
2535 	return;
2536 
2537       if (access->grp_to_be_replaced
2538 	  && (chunk_size == 0
2539 	      || access->offset + access->size > start_offset))
2540 	{
2541 	  tree expr, repl = get_access_replacement (access);
2542 	  gimple stmt;
2543 
2544 	  expr = build_ref_for_model (loc, agg, access->offset - top_offset,
2545 				      access, gsi, insert_after);
2546 
2547 	  if (write)
2548 	    {
2549 	      if (access->grp_partial_lhs)
2550 		expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
2551 						 !insert_after,
2552 						 insert_after ? GSI_NEW_STMT
2553 						 : GSI_SAME_STMT);
2554 	      stmt = gimple_build_assign (repl, expr);
2555 	    }
2556 	  else
2557 	    {
2558 	      TREE_NO_WARNING (repl) = 1;
2559 	      if (access->grp_partial_lhs)
2560 		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2561 						 !insert_after,
2562 						 insert_after ? GSI_NEW_STMT
2563 						 : GSI_SAME_STMT);
2564 	      stmt = gimple_build_assign (expr, repl);
2565 	    }
2566 	  gimple_set_location (stmt, loc);
2567 
2568 	  if (insert_after)
2569 	    gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2570 	  else
2571 	    gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2572 	  update_stmt (stmt);
2573 	  sra_stats.subtree_copies++;
2574 	}
2575       else if (write
2576 	       && access->grp_to_be_debug_replaced
2577 	       && (chunk_size == 0
2578 		   || access->offset + access->size > start_offset))
2579 	{
2580 	  gimple ds;
2581 	  tree drhs = build_debug_ref_for_model (loc, agg,
2582 						 access->offset - top_offset,
2583 						 access);
2584 	  ds = gimple_build_debug_bind (get_access_replacement (access),
2585 					drhs, gsi_stmt (*gsi));
2586 	  if (insert_after)
2587 	    gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2588 	  else
2589 	    gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2590 	}
2591 
2592       if (access->first_child)
2593 	generate_subtree_copies (access->first_child, agg, top_offset,
2594 				 start_offset, chunk_size, gsi,
2595 				 write, insert_after, loc);
2596 
2597       access = access->next_sibling;
2598     }
2599   while (access);
2600 }
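
/* A hypothetical example of the statements produced: for
   struct S { int i; float f; } s with both fields replaced, WRITE being true
   yields roughly

     SR.1 = s.i;
     SR.2 = s.f;

   while WRITE being false reverses the assignments, flushing the replacements
   back into the aggregate.  */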
2601 
2602 /* Assign zero to all scalar replacements in an access subtree.  ACCESS is the
2603    root of the subtree to be processed.  GSI is the statement iterator used
2604    for inserting statements which are added after the current statement if
2605    INSERT_AFTER is true or before it otherwise.  */
2606 
2607 static void
2608 init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
2609 			bool insert_after, location_t loc)
2610 
2611 {
2612   struct access *child;
2613 
2614   if (access->grp_to_be_replaced)
2615     {
2616       gimple stmt;
2617 
2618       stmt = gimple_build_assign (get_access_replacement (access),
2619 				  build_zero_cst (access->type));
2620       if (insert_after)
2621 	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2622       else
2623 	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2624       update_stmt (stmt);
2625       gimple_set_location (stmt, loc);
2626     }
2627   else if (access->grp_to_be_debug_replaced)
2628     {
2629       gimple ds = gimple_build_debug_bind (get_access_replacement (access),
2630 					   build_zero_cst (access->type),
2631 					   gsi_stmt (*gsi));
2632       if (insert_after)
2633 	gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2634       else
2635 	gsi_insert_before (gsi, ds, GSI_SAME_STMT);
2636     }
2637 
2638   for (child = access->first_child; child; child = child->next_sibling)
2639     init_subtree_with_zero (child, gsi, insert_after, loc);
2640 }
2641 
2642 /* Search for an access representative for the given expression EXPR and
2643    return it or NULL if it cannot be found.  */
2644 
2645 static struct access *
2646 get_access_for_expr (tree expr)
2647 {
2648   HOST_WIDE_INT offset, size, max_size;
2649   tree base;
2650 
2651   /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
2652      a different size than the size of its argument and we need the latter
2653      one.  */
2654   if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
2655     expr = TREE_OPERAND (expr, 0);
2656 
2657   base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
2658   if (max_size == -1 || !DECL_P (base))
2659     return NULL;
2660 
2661   if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
2662     return NULL;
2663 
2664   return get_var_base_offset_size_access (base, offset, max_size);
2665 }
2666 
2667 /* Replace the expression EXPR with a scalar replacement if there is one and
2668    generate other statements to do type conversion or subtree copying if
2669    necessary.  GSI is used to place newly created statements, WRITE is true if
2670    the expression is being written to (it is on a LHS of a statement or output
2671    in an assembly statement).  */
2672 
2673 static bool
2674 sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
2675 {
2676   location_t loc;
2677   struct access *access;
2678   tree type, bfr;
2679 
2680   if (TREE_CODE (*expr) == BIT_FIELD_REF)
2681     {
2682       bfr = *expr;
2683       expr = &TREE_OPERAND (*expr, 0);
2684     }
2685   else
2686     bfr = NULL_TREE;
2687 
2688   if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
2689     expr = &TREE_OPERAND (*expr, 0);
2690   access = get_access_for_expr (*expr);
2691   if (!access)
2692     return false;
2693   type = TREE_TYPE (*expr);
2694 
2695   loc = gimple_location (gsi_stmt (*gsi));
2696   if (access->grp_to_be_replaced)
2697     {
2698       tree repl = get_access_replacement (access);
2699       /* If we replace a non-register typed access simply use the original
2700          access expression to extract the scalar component afterwards.
2701 	 This happens if scalarizing a function return value or parameter
2702 	 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
2703 	 gcc.c-torture/compile/20011217-1.c.
2704 
2705          We also want to use this when accessing a complex or vector which can
2706          be accessed as a different type too, potentially creating a need for
2707          type conversion (see PR42196) and when scalarized unions are involved
2708          in assembler statements (see PR42398).  */
2709       if (!useless_type_conversion_p (type, access->type))
2710 	{
2711 	  tree ref;
2712 
2713 	  ref = build_ref_for_model (loc, access->base, access->offset, access,
2714 				     NULL, false);
2715 
2716 	  if (write)
2717 	    {
2718 	      gimple stmt;
2719 
2720 	      if (access->grp_partial_lhs)
2721 		ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
2722 						 false, GSI_NEW_STMT);
2723 	      stmt = gimple_build_assign (repl, ref);
2724 	      gimple_set_location (stmt, loc);
2725 	      gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
2726 	    }
2727 	  else
2728 	    {
2729 	      gimple stmt;
2730 
2731 	      if (access->grp_partial_lhs)
2732 		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
2733 						 true, GSI_SAME_STMT);
2734 	      stmt = gimple_build_assign (ref, repl);
2735 	      gimple_set_location (stmt, loc);
2736 	      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
2737 	    }
2738 	}
2739       else
2740 	*expr = repl;
2741       sra_stats.exprs++;
2742     }
2743   else if (write && access->grp_to_be_debug_replaced)
2744     {
2745       gimple ds = gimple_build_debug_bind (get_access_replacement (access),
2746 					   NULL_TREE,
2747 					   gsi_stmt (*gsi));
2748       gsi_insert_after (gsi, ds, GSI_NEW_STMT);
2749     }
2750 
2751   if (access->first_child)
2752     {
2753       HOST_WIDE_INT start_offset, chunk_size;
2754       if (bfr
2755 	  && host_integerp (TREE_OPERAND (bfr, 1), 1)
2756 	  && host_integerp (TREE_OPERAND (bfr, 2), 1))
2757 	{
2758 	  chunk_size = tree_low_cst (TREE_OPERAND (bfr, 1), 1);
2759 	  start_offset = access->offset
2760 	    + tree_low_cst (TREE_OPERAND (bfr, 2), 1);
2761 	}
2762       else
2763 	start_offset = chunk_size = 0;
2764 
2765       generate_subtree_copies (access->first_child, access->base, 0,
2766 			       start_offset, chunk_size, gsi, write, write,
2767 			       loc);
2768     }
2769   return true;
2770 }
2771 
2772 /* Where scalar replacements of the RHS have been written to when a replacement
2773    of the LHS of an assignment cannot be directly loaded from a replacement of
2774    the RHS. */
2775 enum unscalarized_data_handling { SRA_UDH_NONE,  /* Nothing done so far. */
2776 				  SRA_UDH_RIGHT, /* Data flushed to the RHS. */
2777 				  SRA_UDH_LEFT }; /* Data flushed to the LHS. */
2778 
2779 /* Store all replacements in the access tree rooted in TOP_RACC either to their
2780    base aggregate if there are unscalarized data or directly to the LHS of the
2781    statement that is pointed to by GSI otherwise.  */
2782 
2783 static enum unscalarized_data_handling
2784 handle_unscalarized_data_in_subtree (struct access *top_racc,
2785 				     gimple_stmt_iterator *gsi)
2786 {
2787   if (top_racc->grp_unscalarized_data)
2788     {
2789       generate_subtree_copies (top_racc->first_child, top_racc->base, 0, 0, 0,
2790 			       gsi, false, false,
2791 			       gimple_location (gsi_stmt (*gsi)));
2792       return SRA_UDH_RIGHT;
2793     }
2794   else
2795     {
2796       tree lhs = gimple_assign_lhs (gsi_stmt (*gsi));
2797       generate_subtree_copies (top_racc->first_child, lhs, top_racc->offset,
2798 			       0, 0, gsi, false, false,
2799 			       gimple_location (gsi_stmt (*gsi)));
2800       return SRA_UDH_LEFT;
2801     }
2802 }
2803 
2804 
2805 /* Try to generate statements to load all sub-replacements in an access subtree
2806    formed by children of LACC from scalar replacements in the TOP_RACC subtree.
2807    If that is not possible, refresh the TOP_RACC base aggregate and load the
2808    accesses from it.  LEFT_OFFSET is the offset of the left whole subtree being
2809    copied.  NEW_GSI is the statement iterator used for insertions after the
2810    original assignment, OLD_GSI is used to insert statements before the
2811    assignment.  *REFRESHED keeps the information whether we have needed to
2812    refresh replacements of the LHS and from which side of the assignment this
2813    takes place.  */
2814 
2815 static void
2816 load_assign_lhs_subreplacements (struct access *lacc, struct access *top_racc,
2817 				 HOST_WIDE_INT left_offset,
2818 				 gimple_stmt_iterator *old_gsi,
2819 				 gimple_stmt_iterator *new_gsi,
2820 				 enum unscalarized_data_handling *refreshed)
2821 {
2822   location_t loc = gimple_location (gsi_stmt (*old_gsi));
2823   for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
2824     {
2825       HOST_WIDE_INT offset = lacc->offset - left_offset + top_racc->offset;
2826 
2827       if (lacc->grp_to_be_replaced)
2828 	{
2829 	  struct access *racc;
2830 	  gimple stmt;
2831 	  tree rhs;
2832 
2833 	  racc = find_access_in_subtree (top_racc, offset, lacc->size);
2834 	  if (racc && racc->grp_to_be_replaced)
2835 	    {
2836 	      rhs = get_access_replacement (racc);
2837 	      if (!useless_type_conversion_p (lacc->type, racc->type))
2838 		rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, lacc->type, rhs);
2839 
2840 	      if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
2841 		rhs = force_gimple_operand_gsi (old_gsi, rhs, true, NULL_TREE,
2842 						true, GSI_SAME_STMT);
2843 	    }
2844 	  else
2845 	    {
2846 	      /* No suitable access on the right hand side, need to load from
2847 		 the aggregate.  See if we have to update it first... */
2848 	      if (*refreshed == SRA_UDH_NONE)
2849 		*refreshed = handle_unscalarized_data_in_subtree (top_racc,
2850 								  old_gsi);
2851 
2852 	      if (*refreshed == SRA_UDH_LEFT)
2853 		rhs = build_ref_for_model (loc, lacc->base, lacc->offset, lacc,
2854 					    new_gsi, true);
2855 	      else
2856 		rhs = build_ref_for_model (loc, top_racc->base, offset, lacc,
2857 					    new_gsi, true);
2858 	      if (lacc->grp_partial_lhs)
2859 		rhs = force_gimple_operand_gsi (new_gsi, rhs, true, NULL_TREE,
2860 						false, GSI_NEW_STMT);
2861 	    }
2862 
2863 	  stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
2864 	  gsi_insert_after (new_gsi, stmt, GSI_NEW_STMT);
2865 	  gimple_set_location (stmt, loc);
2866 	  update_stmt (stmt);
2867 	  sra_stats.subreplacements++;
2868 	}
2869       else
2870 	{
2871 	  if (*refreshed == SRA_UDH_NONE
2872 	      && lacc->grp_read && !lacc->grp_covered)
2873 	    *refreshed = handle_unscalarized_data_in_subtree (top_racc,
2874 							      old_gsi);
2875 	  if (lacc && lacc->grp_to_be_debug_replaced)
2876 	    {
2877 	      gimple ds;
2878 	      tree drhs;
2879 	      struct access *racc = find_access_in_subtree (top_racc, offset,
2880 							    lacc->size);
2881 
2882 	      if (racc && racc->grp_to_be_replaced)
2883 		{
2884 		  if (racc->grp_write)
2885 		    drhs = get_access_replacement (racc);
2886 		  else
2887 		    drhs = NULL;
2888 		}
2889 	      else if (*refreshed == SRA_UDH_LEFT)
2890 		drhs = build_debug_ref_for_model (loc, lacc->base, lacc->offset,
2891 						  lacc);
2892 	      else if (*refreshed == SRA_UDH_RIGHT)
2893 		drhs = build_debug_ref_for_model (loc, top_racc->base, offset,
2894 						  lacc);
2895 	      else
2896 		drhs = NULL_TREE;
2897 	      if (drhs
2898 		  && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
2899 		drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
2900 					lacc->type, drhs);
2901 	      ds = gimple_build_debug_bind (get_access_replacement (lacc),
2902 					    drhs, gsi_stmt (*old_gsi));
2903 	      gsi_insert_after (new_gsi, ds, GSI_NEW_STMT);
2904 	    }
2905 	}
2906 
2907       if (lacc->first_child)
2908 	load_assign_lhs_subreplacements (lacc, top_racc, left_offset,
2909 					 old_gsi, new_gsi, refreshed);
2910     }
2911 }
2912 
2913 /* Result code for SRA assignment modification.  */
2914 enum assignment_mod_result { SRA_AM_NONE,       /* nothing done for the stmt */
2915 			     SRA_AM_MODIFIED,  /* stmt changed but not
2916 						  removed */
2917 			     SRA_AM_REMOVED };  /* stmt eliminated */
2918 
2919 /* Modify assignments with a CONSTRUCTOR on their RHS.  STMT contains a pointer
2920    to the assignment and GSI is the statement iterator pointing at it.  Returns
2921    the same values as sra_modify_assign.  */
2922 
2923 static enum assignment_mod_result
2924 sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
2925 {
2926   tree lhs = gimple_assign_lhs (*stmt);
2927   struct access *acc;
2928   location_t loc;
2929 
2930   acc = get_access_for_expr (lhs);
2931   if (!acc)
2932     return SRA_AM_NONE;
2933 
2934   if (gimple_clobber_p (*stmt))
2935     {
2936       /* Remove clobbers of fully scalarized variables, otherwise
2937 	 do nothing.  */
2938       if (acc->grp_covered)
2939 	{
2940 	  unlink_stmt_vdef (*stmt);
2941 	  gsi_remove (gsi, true);
2942 	  release_defs (*stmt);
2943 	  return SRA_AM_REMOVED;
2944 	}
2945       else
2946 	return SRA_AM_NONE;
2947     }
2948 
2949   loc = gimple_location (*stmt);
2950   if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (*stmt))) > 0)
2951     {
2952       /* I have never seen this code path trigger but if it ever does, the
2953 	 following should handle it gracefully.  */
2954       if (access_has_children_p (acc))
2955 	generate_subtree_copies (acc->first_child, acc->base, 0, 0, 0, gsi,
2956 				 true, true, loc);
2957       return SRA_AM_MODIFIED;
2958     }
2959 
2960   if (acc->grp_covered)
2961     {
2962       init_subtree_with_zero (acc, gsi, false, loc);
2963       unlink_stmt_vdef (*stmt);
2964       gsi_remove (gsi, true);
2965       release_defs (*stmt);
2966       return SRA_AM_REMOVED;
2967     }
2968   else
2969     {
2970       init_subtree_with_zero (acc, gsi, true, loc);
2971       return SRA_AM_MODIFIED;
2972     }
2973 }
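
/* A hypothetical example: for a fully covered candidate
   struct S { int i; float f; } s, the GIMPLE assignment of an empty
   constructor

     s = {};

   is replaced by zero-initialization of the replacements, roughly SR.1 = 0
   and SR.2 = 0.0, and the original statement is removed (SRA_AM_REMOVED).  */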
2974 
2975 /* Create and return a new suitable default definition SSA_NAME for RACC which
2976    is an access describing an uninitialized part of an aggregate that is being
2977    loaded.  */
2978 
2979 static tree
2980 get_repl_default_def_ssa_name (struct access *racc)
2981 {
2982   gcc_checking_assert (!racc->grp_to_be_replaced &&
2983 		       !racc->grp_to_be_debug_replaced);
2984   if (!racc->replacement_decl)
2985     racc->replacement_decl = create_access_replacement (racc);
2986   return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
2987 }
2988 
2989 /* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
2990    bit-field field declaration somewhere in it.  */
2991 
2992 static inline bool
2993 contains_vce_or_bfcref_p (const_tree ref)
2994 {
2995   while (handled_component_p (ref))
2996     {
2997       if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
2998 	  || (TREE_CODE (ref) == COMPONENT_REF
2999 	      && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
3000 	return true;
3001       ref = TREE_OPERAND (ref, 0);
3002     }
3003 
3004   return false;
3005 }
3006 
3007 /* Examine both sides of the assignment statement pointed to by STMT, replace
3008    them with a scalar replacement if there is one and generate copying of
3009    replacements if scalarized aggregates have been used in the assignment.  GSI
3010    is used to hold generated statements for type conversions and subtree
3011    copying.  */
3012 
3013 static enum assignment_mod_result
3014 sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
3015 {
3016   struct access *lacc, *racc;
3017   tree lhs, rhs;
3018   bool modify_this_stmt = false;
3019   bool force_gimple_rhs = false;
3020   location_t loc;
3021   gimple_stmt_iterator orig_gsi = *gsi;
3022 
3023   if (!gimple_assign_single_p (*stmt))
3024     return SRA_AM_NONE;
3025   lhs = gimple_assign_lhs (*stmt);
3026   rhs = gimple_assign_rhs1 (*stmt);
3027 
3028   if (TREE_CODE (rhs) == CONSTRUCTOR)
3029     return sra_modify_constructor_assign (stmt, gsi);
3030 
3031   if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
3032       || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
3033       || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
3034     {
3035       modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (*stmt),
3036 					  gsi, false);
3037       modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (*stmt),
3038 					   gsi, true);
3039       return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3040     }
3041 
3042   lacc = get_access_for_expr (lhs);
3043   racc = get_access_for_expr (rhs);
3044   if (!lacc && !racc)
3045     return SRA_AM_NONE;
3046 
3047   loc = gimple_location (*stmt);
3048   if (lacc && lacc->grp_to_be_replaced)
3049     {
3050       lhs = get_access_replacement (lacc);
3051       gimple_assign_set_lhs (*stmt, lhs);
3052       modify_this_stmt = true;
3053       if (lacc->grp_partial_lhs)
3054 	force_gimple_rhs = true;
3055       sra_stats.exprs++;
3056     }
3057 
3058   if (racc && racc->grp_to_be_replaced)
3059     {
3060       rhs = get_access_replacement (racc);
3061       modify_this_stmt = true;
3062       if (racc->grp_partial_lhs)
3063 	force_gimple_rhs = true;
3064       sra_stats.exprs++;
3065     }
3066   else if (racc
3067 	   && !racc->grp_unscalarized_data
3068 	   && TREE_CODE (lhs) == SSA_NAME
3069 	   && !access_has_replacements_p (racc))
3070     {
3071       rhs = get_repl_default_def_ssa_name (racc);
3072       modify_this_stmt = true;
3073       sra_stats.exprs++;
3074     }
3075 
3076   if (modify_this_stmt)
3077     {
3078       if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3079 	{
3080 	  /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
3081 	     ???  This should move to fold_stmt which we simply should
3082 	     call after building a VIEW_CONVERT_EXPR here.  */
3083 	  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
3084 	      && !contains_bitfld_component_ref_p (lhs))
3085 	    {
3086 	      lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
3087 	      gimple_assign_set_lhs (*stmt, lhs);
3088 	    }
3089 	  else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
3090 		   && !contains_vce_or_bfcref_p (rhs))
3091 	    rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
3092 
3093 	  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
3094 	    {
3095 	      rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
3096 				     rhs);
3097 	      if (is_gimple_reg_type (TREE_TYPE (lhs))
3098 		  && TREE_CODE (lhs) != SSA_NAME)
3099 		force_gimple_rhs = true;
3100 	    }
3101 	}
3102     }
3103 
3104   if (lacc && lacc->grp_to_be_debug_replaced)
3105     {
3106       tree dlhs = get_access_replacement (lacc);
3107       tree drhs = unshare_expr (rhs);
3108       if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
3109 	{
3110 	  if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
3111 	      && !contains_vce_or_bfcref_p (drhs))
3112 	    drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
3113 	  if (drhs
3114 	      && !useless_type_conversion_p (TREE_TYPE (dlhs),
3115 					     TREE_TYPE (drhs)))
3116 	    drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
3117 				    TREE_TYPE (dlhs), drhs);
3118 	}
3119       gimple ds = gimple_build_debug_bind (dlhs, drhs, *stmt);
3120       gsi_insert_before (gsi, ds, GSI_SAME_STMT);
3121     }
3122 
3123   /* From this point on, the function deals with assignments in between
3124      aggregates when at least one has scalar reductions of some of its
3125      components.  There are three possible scenarios: 1) both the LHS and RHS
3126      have to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
3127 
3128      In the first case, we would like to load the LHS components from RHS
3129      components whenever possible.  If that is not possible, we would like to
3130      read it directly from the RHS (after updating it by storing in it its own
3131      components).  If there are some necessary unscalarized data in the LHS,
3132      those will be loaded by the original assignment too.  If neither of these
3133      cases happen, the original statement can be removed.  Most of this is done
3134      by load_assign_lhs_subreplacements.
3135 
3136      In the second case, we would like to store all RHS scalarized components
3137      directly into LHS and if they cover the aggregate completely, remove the
3138      statement too.  In the third case, we want the LHS components to be loaded
3139      directly from the RHS (DSE will remove the original statement if it
3140      becomes redundant).
3141 
3142      This is a bit complex but manageable when types match and when unions do
3143      not cause confusion in a way that we cannot really load a component of LHS
3144      from the RHS or vice versa (the access representing this level can have
3145      subaccesses that are accessible only through a different union field at a
3146      higher level - different from the one used in the examined expression).
3147      Unions are fun.
3148 
3149      Therefore, I specially handle a fourth case, happening when there is a
3150      specific type cast or it is impossible to locate a scalarized subaccess on
3151      the other side of the expression.  If that happens, I simply "refresh" the
3152      RHS by storing in it is scalarized components leave the original statement
3153      there to do the copying and then load the scalar replacements of the LHS.
3154      This is what the first branch does.  */
3155 
3156   if (modify_this_stmt
3157       || gimple_has_volatile_ops (*stmt)
3158       || contains_vce_or_bfcref_p (rhs)
3159       || contains_vce_or_bfcref_p (lhs))
3160     {
3161       if (access_has_children_p (racc))
3162 	generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3163 				 gsi, false, false, loc);
3164       if (access_has_children_p (lacc))
3165 	generate_subtree_copies (lacc->first_child, lacc->base, 0, 0, 0,
3166 				 gsi, true, true, loc);
3167       sra_stats.separate_lhs_rhs_handling++;
3168 
3169       /* This gimplification must be done after generate_subtree_copies,
3170 	 lest we insert the subtree copies in the middle of the gimplified
3171 	 sequence.  */
3172       if (force_gimple_rhs)
3173 	rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
3174 					true, GSI_SAME_STMT);
3175       if (gimple_assign_rhs1 (*stmt) != rhs)
3176 	{
3177 	  modify_this_stmt = true;
3178 	  gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
3179 	  gcc_assert (*stmt == gsi_stmt (orig_gsi));
3180 	}
3181 
3182       return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
3183     }
3184   else
3185     {
3186       if (access_has_children_p (lacc)
3187 	  && access_has_children_p (racc)
3188 	  /* When an access represents an unscalarizable region, it usually
3189 	     represents accesses with variable offset and thus must not be used
3190 	     to generate new memory accesses.  */
3191 	  && !lacc->grp_unscalarizable_region
3192 	  && !racc->grp_unscalarizable_region)
3193 	{
3194 	  gimple_stmt_iterator orig_gsi = *gsi;
3195 	  enum unscalarized_data_handling refreshed;
3196 
3197 	  if (lacc->grp_read && !lacc->grp_covered)
3198 	    refreshed = handle_unscalarized_data_in_subtree (racc, gsi);
3199 	  else
3200 	    refreshed = SRA_UDH_NONE;
3201 
3202 	  load_assign_lhs_subreplacements (lacc, racc, lacc->offset,
3203 					   &orig_gsi, gsi, &refreshed);
3204 	  if (refreshed != SRA_UDH_RIGHT)
3205 	    {
3206 	      gsi_next (gsi);
3207 	      unlink_stmt_vdef (*stmt);
3208 	      gsi_remove (&orig_gsi, true);
3209 	      release_defs (*stmt);
3210 	      sra_stats.deleted++;
3211 	      return SRA_AM_REMOVED;
3212 	    }
3213 	}
3214       else
3215 	{
3216 	  if (access_has_children_p (racc)
3217 	      && !racc->grp_unscalarized_data)
3218 	    {
3219 	      if (dump_file)
3220 		{
3221 		  fprintf (dump_file, "Removing load: ");
3222 		  print_gimple_stmt (dump_file, *stmt, 0, 0);
3223 		}
3224 	      generate_subtree_copies (racc->first_child, lhs,
3225 				       racc->offset, 0, 0, gsi,
3226 				       false, false, loc);
3227 	      gcc_assert (*stmt == gsi_stmt (*gsi));
3228 	      unlink_stmt_vdef (*stmt);
3229 	      gsi_remove (gsi, true);
3230 	      release_defs (*stmt);
3231 	      sra_stats.deleted++;
3232 	      return SRA_AM_REMOVED;
3233 	    }
3234 	  /* Restore the aggregate RHS from its components so the
3235 	     prevailing aggregate copy does the right thing.  */
3236 	  if (access_has_children_p (racc))
3237 	    generate_subtree_copies (racc->first_child, racc->base, 0, 0, 0,
3238 				     gsi, false, false, loc);
3239 	  /* Re-load the components of the aggregate copy destination.
3240 	     But use the RHS aggregate to load from to expose more
3241 	     optimization opportunities.  */
3242 	  if (access_has_children_p (lacc))
3243 	    generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
3244 				     0, 0, gsi, true, true, loc);
3245 	}
3246 
3247       return SRA_AM_NONE;
3248     }
3249 }
3250 
3251 /* Traverse the function body and perform all modifications as decided in
3252    analyze_all_variable_accesses.  Return true iff the CFG has been
3253    changed.  */
3254 
3255 static bool
3256 sra_modify_function_body (void)
3257 {
3258   bool cfg_changed = false;
3259   basic_block bb;
3260 
3261   FOR_EACH_BB (bb)
3262     {
3263       gimple_stmt_iterator gsi = gsi_start_bb (bb);
3264       while (!gsi_end_p (gsi))
3265 	{
3266 	  gimple stmt = gsi_stmt (gsi);
3267 	  enum assignment_mod_result assign_result;
3268 	  bool modified = false, deleted = false;
3269 	  tree *t;
3270 	  unsigned i;
3271 
3272 	  switch (gimple_code (stmt))
3273 	    {
3274 	    case GIMPLE_RETURN:
3275 	      t = gimple_return_retval_ptr (stmt);
3276 	      if (*t != NULL_TREE)
3277 		modified |= sra_modify_expr (t, &gsi, false);
3278 	      break;
3279 
3280 	    case GIMPLE_ASSIGN:
3281 	      assign_result = sra_modify_assign (&stmt, &gsi);
3282 	      modified |= assign_result == SRA_AM_MODIFIED;
3283 	      deleted = assign_result == SRA_AM_REMOVED;
3284 	      break;
3285 
3286 	    case GIMPLE_CALL:
3287 	      /* Operands must be processed before the lhs.  */
3288 	      for (i = 0; i < gimple_call_num_args (stmt); i++)
3289 		{
3290 		  t = gimple_call_arg_ptr (stmt, i);
3291 		  modified |= sra_modify_expr (t, &gsi, false);
3292 		}
3293 
3294 	      if (gimple_call_lhs (stmt))
3295 		{
3296 		  t = gimple_call_lhs_ptr (stmt);
3297 		  modified |= sra_modify_expr (t, &gsi, true);
3298 		}
3299 	      break;
3300 
3301 	    case GIMPLE_ASM:
3302 	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
3303 		{
3304 		  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
3305 		  modified |= sra_modify_expr (t, &gsi, false);
3306 		}
3307 	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
3308 		{
3309 		  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
3310 		  modified |= sra_modify_expr (t, &gsi, true);
3311 		}
3312 	      break;
3313 
3314 	    default:
3315 	      break;
3316 	    }
3317 
3318 	  if (modified)
3319 	    {
3320 	      update_stmt (stmt);
3321 	      if (maybe_clean_eh_stmt (stmt)
3322 		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
3323 		cfg_changed = true;
3324 	    }
3325 	  if (!deleted)
3326 	    gsi_next (&gsi);
3327 	}
3328     }
3329 
3330   return cfg_changed;
3331 }
3332 
3333 /* Generate statements initializing scalar replacements of parts of function
3334    parameters.  */
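/* As an illustrative sketch only (names invented): for a scalarized parameter
   "struct S p" with a replacement p$x for field x, a statement of the form

       p$x = p.x;

   is emitted on the single edge leaving the entry block, so the replacement
   starts out holding the value the caller passed in.  */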
3335 
3336 static void
3337 initialize_parameter_reductions (void)
3338 {
3339   gimple_stmt_iterator gsi;
3340   gimple_seq seq = NULL;
3341   tree parm;
3342 
3343   gsi = gsi_start (seq);
3344   for (parm = DECL_ARGUMENTS (current_function_decl);
3345        parm;
3346        parm = DECL_CHAIN (parm))
3347     {
3348       vec<access_p> *access_vec;
3349       struct access *access;
3350 
3351       if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
3352 	continue;
3353       access_vec = get_base_access_vector (parm);
3354       if (!access_vec)
3355 	continue;
3356 
3357       for (access = (*access_vec)[0];
3358 	   access;
3359 	   access = access->next_grp)
3360 	generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
3361 				 EXPR_LOCATION (parm));
3362     }
3363 
3364   seq = gsi_seq (gsi);
3365   if (seq)
3366     gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR), seq);
3367 }
3368 
3369 /* The "main" function of intraprocedural SRA passes.  Runs the analysis and if
3370    it reveals there are components of some aggregates to be scalarized, it runs
3371    the required transformations.  */
3372 static unsigned int
3373 perform_intra_sra (void)
3374 {
3375   int ret = 0;
3376   sra_initialize ();
3377 
3378   if (!find_var_candidates ())
3379     goto out;
3380 
3381   if (!scan_function ())
3382     goto out;
3383 
3384   if (!analyze_all_variable_accesses ())
3385     goto out;
3386 
3387   if (sra_modify_function_body ())
3388     ret = TODO_update_ssa | TODO_cleanup_cfg;
3389   else
3390     ret = TODO_update_ssa;
3391   initialize_parameter_reductions ();
3392 
3393   statistics_counter_event (cfun, "Scalar replacements created",
3394 			    sra_stats.replacements);
3395   statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
3396   statistics_counter_event (cfun, "Subtree copy stmts",
3397 			    sra_stats.subtree_copies);
3398   statistics_counter_event (cfun, "Subreplacement stmts",
3399 			    sra_stats.subreplacements);
3400   statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
3401   statistics_counter_event (cfun, "Separate LHS and RHS handling",
3402 			    sra_stats.separate_lhs_rhs_handling);
3403 
3404  out:
3405   sra_deinitialize ();
3406   return ret;
3407 }
3408 
3409 /* Perform early intraprocedural SRA.  */
3410 static unsigned int
3411 early_intra_sra (void)
3412 {
3413   sra_mode = SRA_MODE_EARLY_INTRA;
3414   return perform_intra_sra ();
3415 }
3416 
3417 /* Perform "late" intraprocedural SRA.  */
3418 static unsigned int
3419 late_intra_sra (void)
3420 {
3421   sra_mode = SRA_MODE_INTRA;
3422   return perform_intra_sra ();
3423 }
3424 
3425 
3426 static bool
3427 gate_intra_sra (void)
3428 {
3429   return flag_tree_sra != 0 && dbg_cnt (tree_sra);
3430 }
3431 
3432 
3433 struct gimple_opt_pass pass_sra_early =
3434 {
3435  {
3436   GIMPLE_PASS,
3437   "esra",	 			/* name */
3438   OPTGROUP_NONE,                        /* optinfo_flags */
3439   gate_intra_sra,			/* gate */
3440   early_intra_sra,			/* execute */
3441   NULL,					/* sub */
3442   NULL,					/* next */
3443   0,					/* static_pass_number */
3444   TV_TREE_SRA,				/* tv_id */
3445   PROP_cfg | PROP_ssa,                  /* properties_required */
3446   0,					/* properties_provided */
3447   0,					/* properties_destroyed */
3448   0,					/* todo_flags_start */
3449   TODO_update_ssa
3450   | TODO_ggc_collect
3451   | TODO_verify_ssa			/* todo_flags_finish */
3452  }
3453 };
3454 
3455 struct gimple_opt_pass pass_sra =
3456 {
3457  {
3458   GIMPLE_PASS,
3459   "sra",	 			/* name */
3460   OPTGROUP_NONE,                        /* optinfo_flags */
3461   gate_intra_sra,			/* gate */
3462   late_intra_sra,			/* execute */
3463   NULL,					/* sub */
3464   NULL,					/* next */
3465   0,					/* static_pass_number */
3466   TV_TREE_SRA,				/* tv_id */
3467   PROP_cfg | PROP_ssa,                  /* properties_required */
3468   0,					/* properties_provided */
3469   0,					/* properties_destroyed */
3470   TODO_update_address_taken,		/* todo_flags_start */
3471   TODO_update_ssa
3472   | TODO_ggc_collect
3473   | TODO_verify_ssa			/* todo_flags_finish */
3474  }
3475 };
3476 
3477 
3478 /* Return true iff PARM (which must be a parm_decl) is an unused scalar
3479    parameter.  */
3480 
3481 static bool
3482 is_unused_scalar_param (tree parm)
3483 {
3484   tree name;
3485   return (is_gimple_reg (parm)
3486 	  && (!(name = ssa_default_def (cfun, parm))
3487 	      || has_zero_uses (name)));
3488 }
3489 
3490 /* Scan immediate uses of a default definition SSA name of a parameter PARM and
3491    examine whether there are any direct or otherwise infeasible ones.  If so,
3492    return true, otherwise return false.  PARM must be a gimple register with a
3493    non-NULL default definition.  */
3494 
3495 static bool
3496 ptr_parm_has_direct_uses (tree parm)
3497 {
3498   imm_use_iterator ui;
3499   gimple stmt;
3500   tree name = ssa_default_def (cfun, parm);
3501   bool ret = false;
3502 
3503   FOR_EACH_IMM_USE_STMT (stmt, ui, name)
3504     {
3505       int uses_ok = 0;
3506       use_operand_p use_p;
3507 
3508       if (is_gimple_debug (stmt))
3509 	continue;
3510 
3511       /* Valid uses include dereferences on the lhs and the rhs.  */
3512       if (gimple_has_lhs (stmt))
3513 	{
3514 	  tree lhs = gimple_get_lhs (stmt);
3515 	  while (handled_component_p (lhs))
3516 	    lhs = TREE_OPERAND (lhs, 0);
3517 	  if (TREE_CODE (lhs) == MEM_REF
3518 	      && TREE_OPERAND (lhs, 0) == name
3519 	      && integer_zerop (TREE_OPERAND (lhs, 1))
3520 	      && types_compatible_p (TREE_TYPE (lhs),
3521 				     TREE_TYPE (TREE_TYPE (name)))
3522 	      && !TREE_THIS_VOLATILE (lhs))
3523 	    uses_ok++;
3524 	}
3525       if (gimple_assign_single_p (stmt))
3526 	{
3527 	  tree rhs = gimple_assign_rhs1 (stmt);
3528 	  while (handled_component_p (rhs))
3529 	    rhs = TREE_OPERAND (rhs, 0);
3530 	  if (TREE_CODE (rhs) == MEM_REF
3531 	      && TREE_OPERAND (rhs, 0) == name
3532 	      && integer_zerop (TREE_OPERAND (rhs, 1))
3533 	      && types_compatible_p (TREE_TYPE (rhs),
3534 				     TREE_TYPE (TREE_TYPE (name)))
3535 	      && !TREE_THIS_VOLATILE (rhs))
3536 	    uses_ok++;
3537 	}
3538       else if (is_gimple_call (stmt))
3539 	{
3540 	  unsigned i;
3541 	  for (i = 0; i < gimple_call_num_args (stmt); ++i)
3542 	    {
3543 	      tree arg = gimple_call_arg (stmt, i);
3544 	      while (handled_component_p (arg))
3545 		arg = TREE_OPERAND (arg, 0);
3546 	      if (TREE_CODE (arg) == MEM_REF
3547 		  && TREE_OPERAND (arg, 0) == name
3548 		  && integer_zerop (TREE_OPERAND (arg, 1))
3549 		  && types_compatible_p (TREE_TYPE (arg),
3550 					 TREE_TYPE (TREE_TYPE (name)))
3551 		  && !TREE_THIS_VOLATILE (arg))
3552 		uses_ok++;
3553 	    }
3554 	}
3555 
3556       /* If the number of valid uses does not match the number of
3557          uses in this stmt there is an unhandled use.  */
3558       FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
3559 	--uses_ok;
3560 
3561       if (uses_ok != 0)
3562 	ret = true;
3563 
3564       if (ret)
3565 	BREAK_FROM_IMM_USE_STMT (ui);
3566     }
3567 
3568   return ret;
3569 }
3570 
3571 /* Identify candidates for reduction for IPA-SRA based on their type and mark
3572    them in candidate_bitmap.  Note that these do not necessarily include
3573    parameter which are unused and thus can be removed.  Return true iff any
3574    parameters which are unused and thus can be removed.  Return true iff any
3575 
3576 static bool
3577 find_param_candidates (void)
3578 {
3579   tree parm;
3580   int count = 0;
3581   bool ret = false;
3582   const char *msg;
3583 
3584   for (parm = DECL_ARGUMENTS (current_function_decl);
3585        parm;
3586        parm = DECL_CHAIN (parm))
3587     {
3588       tree type = TREE_TYPE (parm);
3589       void **slot;
3590 
3591       count++;
3592 
3593       if (TREE_THIS_VOLATILE (parm)
3594 	  || TREE_ADDRESSABLE (parm)
3595 	  || (!is_gimple_reg_type (type) && is_va_list_type (type)))
3596 	continue;
3597 
3598       if (is_unused_scalar_param (parm))
3599 	{
3600 	  ret = true;
3601 	  continue;
3602 	}
3603 
3604       if (POINTER_TYPE_P (type))
3605 	{
3606 	  type = TREE_TYPE (type);
3607 
3608 	  if (TREE_CODE (type) == FUNCTION_TYPE
3609 	      || TYPE_VOLATILE (type)
3610 	      || (TREE_CODE (type) == ARRAY_TYPE
3611 		  && TYPE_NONALIASED_COMPONENT (type))
3612 	      || !is_gimple_reg (parm)
3613 	      || is_va_list_type (type)
3614 	      || ptr_parm_has_direct_uses (parm))
3615 	    continue;
3616 	}
3617       else if (!AGGREGATE_TYPE_P (type))
3618 	continue;
3619 
3620       if (!COMPLETE_TYPE_P (type)
3621 	  || !host_integerp (TYPE_SIZE (type), 1)
3622           || tree_low_cst (TYPE_SIZE (type), 1) == 0
3623 	  || (AGGREGATE_TYPE_P (type)
3624 	      && type_internals_preclude_sra_p (type, &msg)))
3625 	continue;
3626 
3627       bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
3628       slot = htab_find_slot_with_hash (candidates, parm,
3629 				       DECL_UID (parm), INSERT);
3630       *slot = (void *) parm;
3631 
3632       ret = true;
3633       if (dump_file && (dump_flags & TDF_DETAILS))
3634 	{
3635 	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
3636 	  print_generic_expr (dump_file, parm, 0);
3637 	  fprintf (dump_file, "\n");
3638 	}
3639     }
3640 
3641   func_param_count = count;
3642   return ret;
3643 }
3644 
3645 /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
3646    maybe_modified. */
3647 
3648 static bool
3649 mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
3650 		     void *data)
3651 {
3652   struct access *repr = (struct access *) data;
3653 
3654   repr->grp_maybe_modified = 1;
3655   return true;
3656 }
3657 
3658 /* Analyze what representatives (in linked lists accessible from
3659    REPRESENTATIVES) can be modified by side effects of statements in the
3660    current function.  */
3661 
3662 static void
3663 analyze_modified_params (vec<access_p> representatives)
3664 {
3665   int i;
3666 
3667   for (i = 0; i < func_param_count; i++)
3668     {
3669       struct access *repr;
3670 
3671       for (repr = representatives[i];
3672 	   repr;
3673 	   repr = repr->next_grp)
3674 	{
3675 	  struct access *access;
3676 	  bitmap visited;
3677 	  ao_ref ar;
3678 
3679 	  if (no_accesses_p (repr))
3680 	    continue;
3681 	  if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
3682 	      || repr->grp_maybe_modified)
3683 	    continue;
3684 
3685 	  ao_ref_init (&ar, repr->expr);
3686 	  visited = BITMAP_ALLOC (NULL);
3687 	  for (access = repr; access; access = access->next_sibling)
3688 	    {
3689 	      /* All accesses are read ones, otherwise grp_maybe_modified would
3690 		 be trivially set.  */
3691 	      walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
3692 				  mark_maybe_modified, repr, &visited);
3693 	      if (repr->grp_maybe_modified)
3694 		break;
3695 	    }
3696 	  BITMAP_FREE (visited);
3697 	}
3698     }
3699 }
3700 
3701 /* Propagate distances in bb_dereferences in the direction opposite to the
3702    control flow edges, in each step storing the maximum of the current value
3703    and the minimum of all successors.  These steps are repeated until the table
3704    stabilizes.  Note that BBs which might terminate the function (according to
3705    the final_bbs bitmap) are never updated in this way.  */
3706 
3707 static void
3708 propagate_dereference_distances (void)
3709 {
3710   vec<basic_block> queue;
3711   basic_block bb;
3712 
3713   queue.create (last_basic_block_for_function (cfun));
3714   queue.quick_push (ENTRY_BLOCK_PTR);
3715   FOR_EACH_BB (bb)
3716     {
3717       queue.quick_push (bb);
3718       bb->aux = bb;
3719     }
3720 
3721   while (!queue.is_empty ())
3722     {
3723       edge_iterator ei;
3724       edge e;
3725       bool change = false;
3726       int i;
3727 
3728       bb = queue.pop ();
3729       bb->aux = NULL;
3730 
3731       if (bitmap_bit_p (final_bbs, bb->index))
3732 	continue;
3733 
3734       for (i = 0; i < func_param_count; i++)
3735 	{
3736 	  int idx = bb->index * func_param_count + i;
3737 	  bool first = true;
3738 	  HOST_WIDE_INT inh = 0;
3739 
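	  /* A dereference distance can only be inherited by BB if it is
	     guaranteed on every path leaving it, so take the minimum over
	     all successors.  */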
3740 	  FOR_EACH_EDGE (e, ei, bb->succs)
3741 	  {
3742 	    int succ_idx = e->dest->index * func_param_count + i;
3743 
3744 	    if (e->src == EXIT_BLOCK_PTR)
3745 	      continue;
3746 
3747 	    if (first)
3748 	      {
3749 		first = false;
3750 		inh = bb_dereferences [succ_idx];
3751 	      }
3752 	    else if (bb_dereferences [succ_idx] < inh)
3753 	      inh = bb_dereferences [succ_idx];
3754 	  }
3755 
3756 	  if (!first && bb_dereferences[idx] < inh)
3757 	    {
3758 	      bb_dereferences[idx] = inh;
3759 	      change = true;
3760 	    }
3761 	}
3762 
3763       if (change && !bitmap_bit_p (final_bbs, bb->index))
3764 	FOR_EACH_EDGE (e, ei, bb->preds)
3765 	  {
3766 	    if (e->src->aux)
3767 	      continue;
3768 
3769 	    e->src->aux = e->src;
3770 	    queue.quick_push (e->src);
3771 	  }
3772     }
3773 
3774   queue.release ();
3775 }
3776 
3777 /* Dump a dereferences TABLE with heading STR to file F.  */
3778 
3779 static void
3780 dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
3781 {
3782   basic_block bb;
3783 
3784   fprintf (f, "%s", str);
3785   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
3786     {
3787       fprintf (f, "%4i  %i   ", bb->index, bitmap_bit_p (final_bbs, bb->index));
3788       if (bb != EXIT_BLOCK_PTR)
3789 	{
3790 	  int i;
3791 	  for (i = 0; i < func_param_count; i++)
3792 	    {
3793 	      int idx = bb->index * func_param_count + i;
3794 	      fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
3795 	    }
3796 	}
3797       fprintf (f, "\n");
3798     }
3799   fprintf (f, "\n");
3800 }
3801 
3802 /* Determine which (parts of) parameters passed by reference and not assigned
3803    to are not certainly dereferenced in this function, meaning that the
3804    dereferencing cannot be safely moved to the caller without potentially
3805    introducing a segfault.  Mark such REPRESENTATIVES as
3806    grp_not_necessarilly_dereferenced.
3807 
3808    The maximum dereferenced "distance," i.e. the offset + size of the accessed
3809    part, is calculated for each pointer parameter rather than a simple boolean,
3810    in order to handle cases when only a fraction of the whole aggregate is
3811    allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
3812    an example).
3813 
3814    The maximum dereference distances for each pointer parameter and BB are
3815    already stored in bb_dereference.  This routine simply propagates these
3816    values upwards by propagate_dereference_distances and then compares the
3817    distances of individual parameters in the ENTRY BB to the equivalent
3818    distances of each representative of a (fraction of a) parameter.  */
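/* An illustrative sketch only: if a pointer parameter is dereferenced solely
   inside one arm of a conditional, there is a path from the entry block along
   which it is never dereferenced, so the distance propagated into the entry
   block stays below offset + size and the corresponding representative is
   marked grp_not_necessarilly_dereferenced below.  */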
3819 
3820 static void
3821 analyze_caller_dereference_legality (vec<access_p> representatives)
3822 {
3823   int i;
3824 
3825   if (dump_file && (dump_flags & TDF_DETAILS))
3826     dump_dereferences_table (dump_file,
3827 			     "Dereference table before propagation:\n",
3828 			     bb_dereferences);
3829 
3830   propagate_dereference_distances ();
3831 
3832   if (dump_file && (dump_flags & TDF_DETAILS))
3833     dump_dereferences_table (dump_file,
3834 			     "Dereference table after propagation:\n",
3835 			     bb_dereferences);
3836 
3837   for (i = 0; i < func_param_count; i++)
3838     {
3839       struct access *repr = representatives[i];
3840       int idx = ENTRY_BLOCK_PTR->index * func_param_count + i;
3841 
3842       if (!repr || no_accesses_p (repr))
3843 	continue;
3844 
3845       do
3846 	{
3847 	  if ((repr->offset + repr->size) > bb_dereferences[idx])
3848 	    repr->grp_not_necessarilly_dereferenced = 1;
3849 	  repr = repr->next_grp;
3850 	}
3851       while (repr);
3852     }
3853 }
3854 
3855 /* Return the representative access for the parameter declaration PARM if it is
3856    a scalar passed by reference which is not written to and the pointer value
3857    is not used directly.  Thus, if it is legal to dereference it in the caller
3858    and we can rule out modifications through aliases, such parameter should be
3859    and we can rule out modifications through aliases, such a parameter should be
3860 
3861 static struct access *
3862 unmodified_by_ref_scalar_representative (tree parm)
3863 {
3864   int i, access_count;
3865   struct access *repr;
3866   vec<access_p> *access_vec;
3867 
3868   access_vec = get_base_access_vector (parm);
3869   gcc_assert (access_vec);
3870   repr = (*access_vec)[0];
3871   if (repr->write)
3872     return NULL;
3873   repr->group_representative = repr;
3874 
3875   access_count = access_vec->length ();
3876   for (i = 1; i < access_count; i++)
3877     {
3878       struct access *access = (*access_vec)[i];
3879       if (access->write)
3880 	return NULL;
3881       access->group_representative = repr;
3882       access->next_sibling = repr->next_sibling;
3883       repr->next_sibling = access;
3884     }
3885 
3886   repr->grp_read = 1;
3887   repr->grp_scalar_ptr = 1;
3888   return repr;
3889 }
3890 
3891 /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
3892    associated with.  REQ_ALIGN is the minimum required alignment.  */
3893 
3894 static bool
3895 access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
3896 {
3897   unsigned int exp_align;
3898   /* Avoid issues such as the second simple testcase in PR 42025.  The problem
3899      is an incompatible assignment in a call statement (and possibly even in asm
3900      statements).  This can be relaxed by using a new temporary but only for
3901      non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
3902      intraprocedural SRA we deal with this by keeping the old aggregate around,
3903      something we cannot do in IPA-SRA.)  */
3904   if (access->write
3905       && (is_gimple_call (access->stmt)
3906 	  || gimple_code (access->stmt) == GIMPLE_ASM))
3907     return true;
3908 
3909   exp_align = get_object_alignment (access->expr);
3910   if (exp_align < req_align)
3911     return true;
3912 
3913   return false;
3914 }
3915 
3916 
3917 /* Sort collected accesses for parameter PARM, identify representatives for
3918    each accessed region and link them together.  Return NULL if there are
3919    different but overlapping accesses, return the special pointer value
3920    (no_accesses_representant) if there are no accesses for this parameter at
3921    all, and return the first representative otherwise.  Set *RO_GRP if there
3922    is a group with only read (i.e. no write) accesses.  */
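/* A hypothetical example: accesses at (offset, size) pairs (0, 32), (0, 32)
   and (32, 32) yield two groups with representatives at offsets 0 and 32,
   whereas (0, 64) combined with (32, 32) overlap without coinciding, which
   disqualifies the whole parameter and makes this function return NULL.  */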
3923 
3924 static struct access *
3925 splice_param_accesses (tree parm, bool *ro_grp)
3926 {
3927   int i, j, access_count, group_count;
3928   int agg_size, total_size = 0;
3929   struct access *access, *res, **prev_acc_ptr = &res;
3930   vec<access_p> *access_vec;
3931 
3932   access_vec = get_base_access_vector (parm);
3933   if (!access_vec)
3934     return &no_accesses_representant;
3935   access_count = access_vec->length ();
3936 
3937   access_vec->qsort (compare_access_positions);
3938 
3939   i = 0;
3940   total_size = 0;
3941   group_count = 0;
3942   while (i < access_count)
3943     {
3944       bool modification;
3945       tree a1_alias_type;
3946       access = (*access_vec)[i];
3947       modification = access->write;
3948       if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
3949 	return NULL;
3950       a1_alias_type = reference_alias_ptr_type (access->expr);
3951 
3952       /* Access is about to become group representative unless we find some
3953 	 nasty overlap which would preclude us from breaking this parameter
3954 	 apart. */
3955 
3956       j = i + 1;
3957       while (j < access_count)
3958 	{
3959 	  struct access *ac2 = (*access_vec)[j];
3960 	  if (ac2->offset != access->offset)
3961 	    {
3962 	      /* All or nothing law for parameters. */
3963 	      if (access->offset + access->size > ac2->offset)
3964 		return NULL;
3965 	      else
3966 		break;
3967 	    }
3968 	  else if (ac2->size != access->size)
3969 	    return NULL;
3970 
3971 	  if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
3972 	      || (ac2->type != access->type
3973 		  && (TREE_ADDRESSABLE (ac2->type)
3974 		      || TREE_ADDRESSABLE (access->type)))
3975 	      || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
3976 	    return NULL;
3977 
3978 	  modification |= ac2->write;
3979 	  ac2->group_representative = access;
3980 	  ac2->next_sibling = access->next_sibling;
3981 	  access->next_sibling = ac2;
3982 	  j++;
3983 	}
3984 
3985       group_count++;
3986       access->grp_maybe_modified = modification;
3987       if (!modification)
3988 	*ro_grp = true;
3989       *prev_acc_ptr = access;
3990       prev_acc_ptr = &access->next_grp;
3991       total_size += access->size;
3992       i = j;
3993     }
3994 
3995   if (POINTER_TYPE_P (TREE_TYPE (parm)))
3996     agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
3997   else
3998     agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
3999   if (total_size >= agg_size)
4000     return NULL;
4001 
4002   gcc_assert (group_count > 0);
4003   return res;
4004 }
4005 
4006 /* Decide whether parameters with representative accesses given by REPR should
4007    be reduced into components.  */
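/* A hypothetical example (assuming 32-bit int): for "struct S { int a; int b;
   int c; }" passed by value and accessed only through its members a and b,
   total_size is 64 bits, which is below both the 96-bit aggregate size and
   the size limit, so the parameter is split into two scalar components.  */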
4008 
4009 static int
4010 decide_one_param_reduction (struct access *repr)
4011 {
4012   int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
4013   bool by_ref;
4014   tree parm;
4015 
4016   parm = repr->base;
4017   cur_parm_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (parm)), 1);
4018   gcc_assert (cur_parm_size > 0);
4019 
4020   if (POINTER_TYPE_P (TREE_TYPE (parm)))
4021     {
4022       by_ref = true;
4023       agg_size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))), 1);
4024     }
4025   else
4026     {
4027       by_ref = false;
4028       agg_size = cur_parm_size;
4029     }
4030 
4031   if (dump_file)
4032     {
4033       struct access *acc;
4034       fprintf (dump_file, "Evaluating PARAM group sizes for ");
4035       print_generic_expr (dump_file, parm, 0);
4036       fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
4037       for (acc = repr; acc; acc = acc->next_grp)
4038 	dump_access (dump_file, acc, true);
4039     }
4040 
4041   total_size = 0;
4042   new_param_count = 0;
4043 
4044   for (; repr; repr = repr->next_grp)
4045     {
4046       gcc_assert (parm == repr->base);
4047 
4048       /* Taking the address of a non-addressable field is verboten.  */
4049       if (by_ref && repr->non_addressable)
4050 	return 0;
4051 
4052       /* Do not decompose a non-BLKmode param in a way that would
4053          create BLKmode params.  Especially for by-reference passing
4054 	 (thus, pointer-type param) this is hardly worthwhile.  */
4055       if (DECL_MODE (parm) != BLKmode
4056 	  && TYPE_MODE (repr->type) == BLKmode)
4057 	return 0;
4058 
4059       if (!by_ref || (!repr->grp_maybe_modified
4060 		      && !repr->grp_not_necessarilly_dereferenced))
4061 	total_size += repr->size;
4062       else
4063 	total_size += cur_parm_size;
4064 
4065       new_param_count++;
4066     }
4067 
4068   gcc_assert (new_param_count > 0);
4069 
4070   if (optimize_function_for_size_p (cfun))
4071     parm_size_limit = cur_parm_size;
4072   else
4073     parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
4074                        * cur_parm_size);
4075 
4076   if (total_size < agg_size
4077       && total_size <= parm_size_limit)
4078     {
4079       if (dump_file)
4080 	fprintf (dump_file, "    ....will be split into %i components\n",
4081 		 new_param_count);
4082       return new_param_count;
4083     }
4084   else
4085     return 0;
4086 }
4087 
4088 /* The order of the following enums is important; we need to do extra work for
4089    UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
4090 enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
4091 			  MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
4092 
4093 /* Identify representatives of all accesses to all candidate parameters for
4094    IPA-SRA.  Return result based on what representatives have been found. */
4095 
4096 static enum ipa_splicing_result
4097 splice_all_param_accesses (vec<access_p> &representatives)
4098 {
4099   enum ipa_splicing_result result = NO_GOOD_ACCESS;
4100   tree parm;
4101   struct access *repr;
4102 
4103   representatives.create (func_param_count);
4104 
4105   for (parm = DECL_ARGUMENTS (current_function_decl);
4106        parm;
4107        parm = DECL_CHAIN (parm))
4108     {
4109       if (is_unused_scalar_param (parm))
4110 	{
4111 	  representatives.quick_push (&no_accesses_representant);
4112 	  if (result == NO_GOOD_ACCESS)
4113 	    result = UNUSED_PARAMS;
4114 	}
4115       else if (POINTER_TYPE_P (TREE_TYPE (parm))
4116 	       && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
4117 	       && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4118 	{
4119 	  repr = unmodified_by_ref_scalar_representative (parm);
4120 	  representatives.quick_push (repr);
4121 	  if (repr)
4122 	    result = UNMODIF_BY_REF_ACCESSES;
4123 	}
4124       else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
4125 	{
4126 	  bool ro_grp = false;
4127 	  repr = splice_param_accesses (parm, &ro_grp);
4128 	  representatives.quick_push (repr);
4129 
4130 	  if (repr && !no_accesses_p (repr))
4131 	    {
4132 	      if (POINTER_TYPE_P (TREE_TYPE (parm)))
4133 		{
4134 		  if (ro_grp)
4135 		    result = UNMODIF_BY_REF_ACCESSES;
4136 		  else if (result < MODIF_BY_REF_ACCESSES)
4137 		    result = MODIF_BY_REF_ACCESSES;
4138 		}
4139 	      else if (result < BY_VAL_ACCESSES)
4140 		result = BY_VAL_ACCESSES;
4141 	    }
4142 	  else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
4143 	    result = UNUSED_PARAMS;
4144 	}
4145       else
4146 	representatives.quick_push (NULL);
4147     }
4148 
4149   if (result == NO_GOOD_ACCESS)
4150     {
4151       representatives.release ();
4152       return NO_GOOD_ACCESS;
4153     }
4154 
4155   return result;
4156 }
4157 
4158 /* Return the index of BASE in PARMS.  Abort if it is not found.  */
4159 
4160 static inline int
4161 get_param_index (tree base, vec<tree> parms)
4162 {
4163   int i, len;
4164 
4165   len = parms.length ();
4166   for (i = 0; i < len; i++)
4167     if (parms[i] == base)
4168       return i;
4169   gcc_unreachable ();
4170 }
4171 
4172 /* Convert the decisions made at the representative level into compact
4173    parameter adjustments.  REPRESENTATIVES are pointers to first
4174    representatives of each param accesses, ADJUSTMENTS_COUNT is the expected
4175    final number of adjustments.  */
4176 
4177 static ipa_parm_adjustment_vec
4178 turn_representatives_into_adjustments (vec<access_p> representatives,
4179 				       int adjustments_count)
4180 {
4181   vec<tree> parms;
4182   ipa_parm_adjustment_vec adjustments;
4183   tree parm;
4184   int i;
4185 
4186   gcc_assert (adjustments_count > 0);
4187   parms = ipa_get_vector_of_formal_parms (current_function_decl);
4188   adjustments.create (adjustments_count);
4189   parm = DECL_ARGUMENTS (current_function_decl);
4190   for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
4191     {
4192       struct access *repr = representatives[i];
4193 
4194       if (!repr || no_accesses_p (repr))
4195 	{
4196 	  struct ipa_parm_adjustment adj;
4197 
4198 	  memset (&adj, 0, sizeof (adj));
4199 	  adj.base_index = get_param_index (parm, parms);
4200 	  adj.base = parm;
4201 	  if (!repr)
4202 	    adj.copy_param = 1;
4203 	  else
4204 	    adj.remove_param = 1;
4205 	  adjustments.quick_push (adj);
4206 	}
4207       else
4208 	{
4209 	  struct ipa_parm_adjustment adj;
4210 	  int index = get_param_index (parm, parms);
4211 
4212 	  for (; repr; repr = repr->next_grp)
4213 	    {
4214 	      memset (&adj, 0, sizeof (adj));
4215 	      gcc_assert (repr->base == parm);
4216 	      adj.base_index = index;
4217 	      adj.base = repr->base;
4218 	      adj.type = repr->type;
4219 	      adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
4220 	      adj.offset = repr->offset;
4221 	      adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
4222 			    && (repr->grp_maybe_modified
4223 				|| repr->grp_not_necessarilly_dereferenced));
4224 	      adjustments.quick_push (adj);
4225 	    }
4226 	}
4227     }
4228   parms.release ();
4229   return adjustments;
4230 }
4231 
4232 /* Analyze the collected accesses and produce a plan of what to do with the
4233    parameters in the form of adjustments, NULL meaning nothing.  */
4234 
4235 static ipa_parm_adjustment_vec
4236 analyze_all_param_acesses (void)
4237 {
4238   enum ipa_splicing_result repr_state;
4239   bool proceed = false;
4240   int i, adjustments_count = 0;
4241   vec<access_p> representatives;
4242   ipa_parm_adjustment_vec adjustments;
4243 
4244   repr_state = splice_all_param_accesses (representatives);
4245   if (repr_state == NO_GOOD_ACCESS)
4246     return ipa_parm_adjustment_vec();
4247 
4248   /* If there are any parameters passed by reference which are not modified
4249      directly, we need to check whether they can be modified indirectly.  */
4250   if (repr_state == UNMODIF_BY_REF_ACCESSES)
4251     {
4252       analyze_caller_dereference_legality (representatives);
4253       analyze_modified_params (representatives);
4254     }
4255 
4256   for (i = 0; i < func_param_count; i++)
4257     {
4258       struct access *repr = representatives[i];
4259 
4260       if (repr && !no_accesses_p (repr))
4261 	{
4262 	  if (repr->grp_scalar_ptr)
4263 	    {
4264 	      adjustments_count++;
4265 	      if (repr->grp_not_necessarilly_dereferenced
4266 		  || repr->grp_maybe_modified)
4267 		representatives[i] = NULL;
4268 	      else
4269 		{
4270 		  proceed = true;
4271 		  sra_stats.scalar_by_ref_to_by_val++;
4272 		}
4273 	    }
4274 	  else
4275 	    {
4276 	      int new_components = decide_one_param_reduction (repr);
4277 
4278 	      if (new_components == 0)
4279 		{
4280 		  representatives[i] = NULL;
4281 		  adjustments_count++;
4282 		}
4283 	      else
4284 		{
4285 		  adjustments_count += new_components;
4286 		  sra_stats.aggregate_params_reduced++;
4287 		  sra_stats.param_reductions_created += new_components;
4288 		  proceed = true;
4289 		}
4290 	    }
4291 	}
4292       else
4293 	{
4294 	  if (no_accesses_p (repr))
4295 	    {
4296 	      proceed = true;
4297 	      sra_stats.deleted_unused_parameters++;
4298 	    }
4299 	  adjustments_count++;
4300 	}
4301     }
4302 
4303   if (!proceed && dump_file)
4304     fprintf (dump_file, "NOT proceeding to change params.\n");
4305 
4306   if (proceed)
4307     adjustments = turn_representatives_into_adjustments (representatives,
4308 							 adjustments_count);
4309   else
4310     adjustments = ipa_parm_adjustment_vec();
4311 
4312   representatives.release ();
4313   return adjustments;
4314 }
4315 
4316 /* If a parameter replacement identified by ADJ does not yet exist in the form
4317    of a declaration, create it and record it; otherwise return the previously
4318    created one.  */
4319 
4320 static tree
4321 get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
4322 {
4323   tree repl;
4324   if (!adj->new_ssa_base)
4325     {
4326       char *pretty_name = make_fancy_name (adj->base);
4327 
4328       repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
4329       DECL_NAME (repl) = get_identifier (pretty_name);
4330       obstack_free (&name_obstack, pretty_name);
4331 
4332       adj->new_ssa_base = repl;
4333     }
4334   else
4335     repl = adj->new_ssa_base;
4336   return repl;
4337 }
4338 
4339 /* Find the first adjustment for a particular parameter BASE in a vector of
4340    ADJUSTMENTS which is not a copy_param.  Return NULL if there is no such
4341    adjustment. */
4342 
4343 static struct ipa_parm_adjustment *
4344 get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
4345 {
4346   int i, len;
4347 
4348   len = adjustments.length ();
4349   for (i = 0; i < len; i++)
4350     {
4351       struct ipa_parm_adjustment *adj;
4352 
4353       adj = &adjustments[i];
4354       if (!adj->copy_param && adj->base == base)
4355 	return adj;
4356     }
4357 
4358   return NULL;
4359 }
4360 
4361 /* If the statement STMT defines an SSA_NAME of a parameter which is to be
4362    removed because its value is not used, replace the SSA_NAME with one
4363    relating to a created VAR_DECL, replace all of its uses and return true.
4364    ADJUSTMENTS is a pointer to an adjustments vector.  */
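/* An illustrative sketch only: if parameter "i" is being removed and some PHI,
   assignment or call still defines an SSA name whose underlying variable is
   that PARM_DECL, the definition is redirected to a new SSA name based on a
   fresh "ISR" temporary and all uses of the old name follow suit.  */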
4365 
4366 static bool
4367 replace_removed_params_ssa_names (gimple stmt,
4368 				  ipa_parm_adjustment_vec adjustments)
4369 {
4370   struct ipa_parm_adjustment *adj;
4371   tree lhs, decl, repl, name;
4372 
4373   if (gimple_code (stmt) == GIMPLE_PHI)
4374     lhs = gimple_phi_result (stmt);
4375   else if (is_gimple_assign (stmt))
4376     lhs = gimple_assign_lhs (stmt);
4377   else if (is_gimple_call (stmt))
4378     lhs = gimple_call_lhs (stmt);
4379   else
4380     gcc_unreachable ();
4381 
4382   if (TREE_CODE (lhs) != SSA_NAME)
4383     return false;
4384 
4385   decl = SSA_NAME_VAR (lhs);
4386   if (decl == NULL_TREE
4387       || TREE_CODE (decl) != PARM_DECL)
4388     return false;
4389 
4390   adj = get_adjustment_for_base (adjustments, decl);
4391   if (!adj)
4392     return false;
4393 
4394   repl = get_replaced_param_substitute (adj);
4395   name = make_ssa_name (repl, stmt);
4396 
4397   if (dump_file)
4398     {
4399       fprintf (dump_file, "replacing an SSA name of a removed param ");
4400       print_generic_expr (dump_file, lhs, 0);
4401       fprintf (dump_file, " with ");
4402       print_generic_expr (dump_file, name, 0);
4403       fprintf (dump_file, "\n");
4404     }
4405 
4406   if (is_gimple_assign (stmt))
4407     gimple_assign_set_lhs (stmt, name);
4408   else if (is_gimple_call (stmt))
4409     gimple_call_set_lhs (stmt, name);
4410   else
4411     gimple_phi_set_result (stmt, name);
4412 
4413   replace_uses_by (lhs, name);
4414   release_ssa_name (lhs);
4415   return true;
4416 }
4417 
4418 /* If the expression *EXPR should be replaced by a reduction of a parameter, do
4419    so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
4420    specifies whether the function should care about type incompatibility between
4421    the current and new expressions.  If it is false, the function will leave
4422    incompatibility issues to the caller.  Return true iff the expression
4423    was modified. */
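/* An illustrative sketch only (names invented): a use such as p_1(D)->x whose
   base and offset match a non-copy, non-remove adjustment is replaced by the
   new formal parameter recorded in the adjustment, or by a memory reference
   through it when the component is still passed by reference; a
   VIEW_CONVERT_EXPR is wrapped around it when CONVERT is true and the types
   are not compatible enough.  */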
4424 
4425 static bool
4426 sra_ipa_modify_expr (tree *expr, bool convert,
4427 		     ipa_parm_adjustment_vec adjustments)
4428 {
4429   int i, len;
4430   struct ipa_parm_adjustment *adj, *cand = NULL;
4431   HOST_WIDE_INT offset, size, max_size;
4432   tree base, src;
4433 
4434   len = adjustments.length ();
4435 
4436   if (TREE_CODE (*expr) == BIT_FIELD_REF
4437       || TREE_CODE (*expr) == IMAGPART_EXPR
4438       || TREE_CODE (*expr) == REALPART_EXPR)
4439     {
4440       expr = &TREE_OPERAND (*expr, 0);
4441       convert = true;
4442     }
4443 
4444   base = get_ref_base_and_extent (*expr, &offset, &size, &max_size);
4445   if (!base || size == -1 || max_size == -1)
4446     return false;
4447 
4448   if (TREE_CODE (base) == MEM_REF)
4449     {
4450       offset += mem_ref_offset (base).low * BITS_PER_UNIT;
4451       base = TREE_OPERAND (base, 0);
4452     }
4453 
4454   base = get_ssa_base_param (base);
4455   if (!base || TREE_CODE (base) != PARM_DECL)
4456     return false;
4457 
4458   for (i = 0; i < len; i++)
4459     {
4460       adj = &adjustments[i];
4461 
4462       if (adj->base == base &&
4463 	  (adj->offset == offset || adj->remove_param))
4464 	{
4465 	  cand = adj;
4466 	  break;
4467 	}
4468     }
4469   if (!cand || cand->copy_param || cand->remove_param)
4470     return false;
4471 
4472   if (cand->by_ref)
4473     src = build_simple_mem_ref (cand->reduction);
4474   else
4475     src = cand->reduction;
4476 
4477   if (dump_file && (dump_flags & TDF_DETAILS))
4478     {
4479       fprintf (dump_file, "About to replace expr ");
4480       print_generic_expr (dump_file, *expr, 0);
4481       fprintf (dump_file, " with ");
4482       print_generic_expr (dump_file, src, 0);
4483       fprintf (dump_file, "\n");
4484     }
4485 
4486   if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4487     {
4488       tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4489       *expr = vce;
4490     }
4491   else
4492     *expr = src;
4493   return true;
4494 }
4495 
4496 /* If the statement pointed to by STMT_PTR contains any expressions that need
4497    to be replaced with a different one as noted by ADJUSTMENTS, do so.  Handle any
4498    potential type incompatibilities (GSI is used to accommodate conversion
4499    statements and must point to the statement).  Return true iff the statement
4500    was modified.  */
4501 
4502 static bool
4503 sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi,
4504 		       ipa_parm_adjustment_vec adjustments)
4505 {
4506   gimple stmt = *stmt_ptr;
4507   tree *lhs_p, *rhs_p;
4508   bool any;
4509 
4510   if (!gimple_assign_single_p (stmt))
4511     return false;
4512 
4513   rhs_p = gimple_assign_rhs1_ptr (stmt);
4514   lhs_p = gimple_assign_lhs_ptr (stmt);
4515 
4516   any = sra_ipa_modify_expr (rhs_p, false, adjustments);
4517   any |= sra_ipa_modify_expr (lhs_p, false, adjustments);
4518   if (any)
4519     {
4520       tree new_rhs = NULL_TREE;
4521 
4522       if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
4523 	{
4524 	  if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
4525 	    {
4526 	      /* V_C_Es of constructors can cause trouble (PR 42714).  */
4527 	      if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
4528 		*rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
4529 	      else
4530 		*rhs_p = build_constructor (TREE_TYPE (*lhs_p),
4531 					    NULL);
4532 	    }
4533 	  else
4534 	    new_rhs = fold_build1_loc (gimple_location (stmt),
4535 				       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
4536 				       *rhs_p);
4537 	}
4538       else if (REFERENCE_CLASS_P (*rhs_p)
4539 	       && is_gimple_reg_type (TREE_TYPE (*lhs_p))
4540 	       && !is_gimple_reg (*lhs_p))
4541 	/* This can happen when an assignment in between two single field
4542 	   structures is turned into an assignment in between two pointers to
4543 	   scalars (PR 42237).  */
4544 	new_rhs = *rhs_p;
4545 
4546       if (new_rhs)
4547 	{
4548 	  tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
4549 					       true, GSI_SAME_STMT);
4550 
4551 	  gimple_assign_set_rhs_from_tree (gsi, tmp);
4552 	}
4553 
4554       return true;
4555     }
4556 
4557   return false;
4558 }
4559 
4560 /* Traverse the function body and perform all modifications as described in
4561    ADJUSTMENTS.  Return true iff the CFG has been changed.  */
4562 
4563 static bool
4564 ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
4565 {
4566   bool cfg_changed = false;
4567   basic_block bb;
4568 
4569   FOR_EACH_BB (bb)
4570     {
4571       gimple_stmt_iterator gsi;
4572 
4573       for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4574 	replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);
4575 
4576       gsi = gsi_start_bb (bb);
4577       while (!gsi_end_p (gsi))
4578 	{
4579 	  gimple stmt = gsi_stmt (gsi);
4580 	  bool modified = false;
4581 	  tree *t;
4582 	  unsigned i;
4583 
4584 	  switch (gimple_code (stmt))
4585 	    {
4586 	    case GIMPLE_RETURN:
4587 	      t = gimple_return_retval_ptr (stmt);
4588 	      if (*t != NULL_TREE)
4589 		modified |= sra_ipa_modify_expr (t, true, adjustments);
4590 	      break;
4591 
4592 	    case GIMPLE_ASSIGN:
4593 	      modified |= sra_ipa_modify_assign (&stmt, &gsi, adjustments);
4594 	      modified |= replace_removed_params_ssa_names (stmt, adjustments);
4595 	      break;
4596 
4597 	    case GIMPLE_CALL:
4598 	      /* Operands must be processed before the lhs.  */
4599 	      for (i = 0; i < gimple_call_num_args (stmt); i++)
4600 		{
4601 		  t = gimple_call_arg_ptr (stmt, i);
4602 		  modified |= sra_ipa_modify_expr (t, true, adjustments);
4603 		}
4604 
4605 	      if (gimple_call_lhs (stmt))
4606 		{
4607 		  t = gimple_call_lhs_ptr (stmt);
4608 		  modified |= sra_ipa_modify_expr (t, false, adjustments);
4609 		  modified |= replace_removed_params_ssa_names (stmt,
4610 								adjustments);
4611 		}
4612 	      break;
4613 
4614 	    case GIMPLE_ASM:
4615 	      for (i = 0; i < gimple_asm_ninputs (stmt); i++)
4616 		{
4617 		  t = &TREE_VALUE (gimple_asm_input_op (stmt, i));
4618 		  modified |= sra_ipa_modify_expr (t, true, adjustments);
4619 		}
4620 	      for (i = 0; i < gimple_asm_noutputs (stmt); i++)
4621 		{
4622 		  t = &TREE_VALUE (gimple_asm_output_op (stmt, i));
4623 		  modified |= sra_ipa_modify_expr (t, false, adjustments);
4624 		}
4625 	      break;
4626 
4627 	    default:
4628 	      break;
4629 	    }
4630 
4631 	  if (modified)
4632 	    {
4633 	      update_stmt (stmt);
4634 	      if (maybe_clean_eh_stmt (stmt)
4635 		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4636 		cfg_changed = true;
4637 	    }
4638 	  gsi_next (&gsi);
4639 	}
4640     }
4641 
4642   return cfg_changed;
4643 }
4644 
4645 /* Call gimple_debug_bind_reset_value on all debug statements describing
4646    gimple register parameters that are being removed or replaced.  */
4647 
4648 static void
4649 sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
4650 {
4651   int i, len;
4652   gimple_stmt_iterator *gsip = NULL, gsi;
4653 
4654   if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR))
4655     {
4656       gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR));
4657       gsip = &gsi;
4658     }
4659   len = adjustments.length ();
4660   for (i = 0; i < len; i++)
4661     {
4662       struct ipa_parm_adjustment *adj;
4663       imm_use_iterator ui;
4664       gimple stmt, def_temp;
4665       tree name, vexpr, copy = NULL_TREE;
4666       use_operand_p use_p;
4667 
4668       adj = &adjustments[i];
4669       if (adj->copy_param || !is_gimple_reg (adj->base))
4670 	continue;
4671       name = ssa_default_def (cfun, adj->base);
4672       vexpr = NULL;
4673       if (name)
4674 	FOR_EACH_IMM_USE_STMT (stmt, ui, name)
4675 	  {
4676 	    /* All other users must have been removed by
4677 	       ipa_sra_modify_function_body.  */
4678 	    gcc_assert (is_gimple_debug (stmt));
4679 	    if (vexpr == NULL && gsip != NULL)
4680 	      {
4681 		gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4682 		vexpr = make_node (DEBUG_EXPR_DECL);
4683 		def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
4684 							   NULL);
4685 		DECL_ARTIFICIAL (vexpr) = 1;
4686 		TREE_TYPE (vexpr) = TREE_TYPE (name);
4687 		DECL_MODE (vexpr) = DECL_MODE (adj->base);
4688 		gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4689 	      }
4690 	    if (vexpr)
4691 	      {
4692 		FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
4693 		  SET_USE (use_p, vexpr);
4694 	      }
4695 	    else
4696 	      gimple_debug_bind_reset_value (stmt);
4697 	    update_stmt (stmt);
4698 	  }
4699       /* Create a VAR_DECL for debug info purposes.  */
4700       if (!DECL_IGNORED_P (adj->base))
4701 	{
4702 	  copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
4703 			     VAR_DECL, DECL_NAME (adj->base),
4704 			     TREE_TYPE (adj->base));
4705 	  if (DECL_PT_UID_SET_P (adj->base))
4706 	    SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
4707 	  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
4708 	  TREE_READONLY (copy) = TREE_READONLY (adj->base);
4709 	  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
4710 	  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
4711 	  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
4712 	  DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
4713 	  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
4714 	  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
4715 	  SET_DECL_RTL (copy, 0);
4716 	  TREE_USED (copy) = 1;
4717 	  DECL_CONTEXT (copy) = current_function_decl;
4718 	  add_local_decl (cfun, copy);
4719 	  DECL_CHAIN (copy) =
4720 	    BLOCK_VARS (DECL_INITIAL (current_function_decl));
4721 	  BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
4722 	}
4723       if (gsip != NULL && copy && target_for_debug_bind (adj->base))
4724 	{
4725 	  gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
4726 	  if (vexpr)
4727 	    def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
4728 	  else
4729 	    def_temp = gimple_build_debug_source_bind (copy, adj->base,
4730 						       NULL);
4731 	  gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
4732 	}
4733     }
4734 }
4735 
4736 /* Return false iff all callers have at least as many actual arguments as there
4737    are formal parameters in the current function.  */
4738 
4739 static bool
4740 not_all_callers_have_enough_arguments_p (struct cgraph_node *node,
4741 					 void *data ATTRIBUTE_UNUSED)
4742 {
4743   struct cgraph_edge *cs;
4744   for (cs = node->callers; cs; cs = cs->next_caller)
4745     if (!callsite_has_enough_arguments_p (cs->call_stmt))
4746       return true;
4747 
4748   return false;
4749 }
4750 
4751 /* Convert all callers of NODE.  */
4752 
4753 static bool
4754 convert_callers_for_node (struct cgraph_node *node,
4755 		          void *data)
4756 {
4757   ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
4758   bitmap recomputed_callers = BITMAP_ALLOC (NULL);
4759   struct cgraph_edge *cs;
4760 
4761   for (cs = node->callers; cs; cs = cs->next_caller)
4762     {
4763       push_cfun (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl));
4764 
4765       if (dump_file)
4766 	fprintf (dump_file, "Adjusting call (%i -> %i) %s -> %s\n",
4767 		 cs->caller->uid, cs->callee->uid,
4768 		 xstrdup (cgraph_node_name (cs->caller)),
4769 		 xstrdup (cgraph_node_name (cs->callee)));
4770 
4771       ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
4772 
4773       pop_cfun ();
4774     }
4775 
4776   for (cs = node->callers; cs; cs = cs->next_caller)
4777     if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
4778 	&& gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->symbol.decl)))
4779       compute_inline_parameters (cs->caller, true);
4780   BITMAP_FREE (recomputed_callers);
4781 
4782   return true;
4783 }
4784 
4785 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */
4786 
4787 static void
4788 convert_callers (struct cgraph_node *node, tree old_decl,
4789 		 ipa_parm_adjustment_vec adjustments)
4790 {
4791   basic_block this_block;
4792 
4793   cgraph_for_node_and_aliases (node, convert_callers_for_node,
4794 			       &adjustments, false);
4795 
4796   if (!encountered_recursive_call)
4797     return;
4798 
4799   FOR_EACH_BB (this_block)
4800     {
4801       gimple_stmt_iterator gsi;
4802 
4803       for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
4804         {
4805 	  gimple stmt = gsi_stmt (gsi);
4806 	  tree call_fndecl;
4807 	  if (gimple_code (stmt) != GIMPLE_CALL)
4808 	    continue;
4809 	  call_fndecl = gimple_call_fndecl (stmt);
4810 	  if (call_fndecl == old_decl)
4811 	    {
4812 	      if (dump_file)
4813 		fprintf (dump_file, "Adjusting recursive call");
4814 	      gimple_call_set_fndecl (stmt, node->symbol.decl);
4815 	      ipa_modify_call_arguments (NULL, stmt, adjustments);
4816 	    }
4817 	}
4818     }
4819 
4820   return;
4821 }
4822 
4823 /* Perform all the modifications required in IPA-SRA for NODE to have parameters
4824    as given in ADJUSTMENTS.  Return true iff the CFG has been changed.  */
4825 
4826 static bool
4827 modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
4828 {
4829   struct cgraph_node *new_node;
4830   bool cfg_changed;
4831   vec<cgraph_edge_p> redirect_callers = collect_callers_of_node (node);
4832 
4833   rebuild_cgraph_edges ();
4834   free_dominance_info (CDI_DOMINATORS);
4835   pop_cfun ();
4836 
4837   new_node = cgraph_function_versioning (node, redirect_callers,
4838 					 NULL,
4839 					 NULL, false, NULL, NULL, "isra");
4840   redirect_callers.release ();
4841 
4842   push_cfun (DECL_STRUCT_FUNCTION (new_node->symbol.decl));
4843   ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
4844   cfg_changed = ipa_sra_modify_function_body (adjustments);
4845   sra_ipa_reset_debug_stmts (adjustments);
4846   convert_callers (new_node, node->symbol.decl, adjustments);
4847   cgraph_make_node_local (new_node);
4848   return cfg_changed;
4849 }
4850 
4851 /* Return false if the function is apparently unsuitable for IPA-SRA based on
4852    its attributes, return true otherwise.  NODE is the cgraph node of the current
4853    function.  */
4854 
4855 static bool
4856 ipa_sra_preliminary_function_checks (struct cgraph_node *node)
4857 {
4858   if (!cgraph_node_can_be_local_p (node))
4859     {
4860       if (dump_file)
4861 	fprintf (dump_file, "Function not local to this compilation unit.\n");
4862       return false;
4863     }
4864 
4865   if (!node->local.can_change_signature)
4866     {
4867       if (dump_file)
4868 	fprintf (dump_file, "Function can not change signature.\n");
4869       return false;
4870     }
4871 
4872   if (!tree_versionable_function_p (node->symbol.decl))
4873     {
4874       if (dump_file)
4875 	fprintf (dump_file, "Function is not versionable.\n");
4876       return false;
4877     }
4878 
4879   if (!opt_for_fn (node->symbol.decl, optimize)
4880       || !opt_for_fn (node->symbol.decl, flag_ipa_sra))
4881     {
4882       if (dump_file)
4883 	fprintf (dump_file, "Function not optimized.\n");
4884       return false;
4885     }
4886 
4887   if (DECL_VIRTUAL_P (current_function_decl))
4888     {
4889       if (dump_file)
4890 	fprintf (dump_file, "Function is a virtual method.\n");
4891       return false;
4892     }
4893 
4894   if ((DECL_COMDAT (node->symbol.decl) || DECL_EXTERNAL (node->symbol.decl))
4895       && inline_summary(node)->size >= MAX_INLINE_INSNS_AUTO)
4896     {
4897       if (dump_file)
4898 	fprintf (dump_file, "Function too big to be made truly local.\n");
4899       return false;
4900     }
4901 
4902   if (!node->callers)
4903     {
4904       if (dump_file)
4905 	fprintf (dump_file,
4906 		 "Function has no callers in this compilation unit.\n");
4907       return false;
4908     }
4909 
4910   if (cfun->stdarg)
4911     {
4912       if (dump_file)
4913 	fprintf (dump_file, "Function uses stdarg.\n");
4914       return false;
4915     }
4916 
4917   if (TYPE_ATTRIBUTES (TREE_TYPE (node->symbol.decl)))
4918     return false;
4919 
4920   return true;
4921 }
4922 
4923 /* Perform early interprocedural SRA.  */
4924 
4925 static unsigned int
4926 ipa_early_sra (void)
4927 {
4928   struct cgraph_node *node = cgraph_get_node (current_function_decl);
4929   ipa_parm_adjustment_vec adjustments;
4930   int ret = 0;
4931 
4932   if (!ipa_sra_preliminary_function_checks (node))
4933     return 0;
4934 
4935   sra_initialize ();
4936   sra_mode = SRA_MODE_EARLY_IPA;
4937 
4938   if (!find_param_candidates ())
4939     {
4940       if (dump_file)
4941 	fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
4942       goto simple_out;
4943     }
4944 
4945   if (cgraph_for_node_and_aliases (node, not_all_callers_have_enough_arguments_p,
4946 				   NULL, true))
4947     {
4948       if (dump_file)
4949 	fprintf (dump_file, "There are callers with insufficient number of "
4950 		 "arguments.\n");
4951       goto simple_out;
4952     }
4953 
4954   bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
4955 				 func_param_count
4956 				 * last_basic_block_for_function (cfun));
4957   final_bbs = BITMAP_ALLOC (NULL);
4958 
4959   scan_function ();
4960   if (encountered_apply_args)
4961     {
4962       if (dump_file)
4963 	fprintf (dump_file, "Function calls __builtin_apply_args().\n");
4964       goto out;
4965     }
4966 
4967   if (encountered_unchangable_recursive_call)
4968     {
4969       if (dump_file)
4970 	fprintf (dump_file, "Function calls itself with insufficient "
4971 		 "number of arguments.\n");
4972       goto out;
4973     }
4974 
4975   adjustments = analyze_all_param_acesses ();
4976   if (!adjustments.exists ())
4977     goto out;
4978   if (dump_file)
4979     ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
4980 
4981   if (modify_function (node, adjustments))
4982     ret = TODO_update_ssa | TODO_cleanup_cfg;
4983   else
4984     ret = TODO_update_ssa;
4985   adjustments.release ();
4986 
4987   statistics_counter_event (cfun, "Unused parameters deleted",
4988 			    sra_stats.deleted_unused_parameters);
4989   statistics_counter_event (cfun, "Scalar parameters converted to by-value",
4990 			    sra_stats.scalar_by_ref_to_by_val);
4991   statistics_counter_event (cfun, "Aggregate parameters broken up",
4992 			    sra_stats.aggregate_params_reduced);
4993   statistics_counter_event (cfun, "Aggregate parameter components created",
4994 			    sra_stats.param_reductions_created);
4995 
4996  out:
4997   BITMAP_FREE (final_bbs);
4998   free (bb_dereferences);
4999  simple_out:
5000   sra_deinitialize ();
5001   return ret;
5002 }
5003 
5004 /* Return true iff early IPA-SRA shall be performed.  */
5005 static bool
5006 ipa_early_sra_gate (void)
5007 {
5008   return flag_ipa_sra && dbg_cnt (eipa_sra);
5009 }
5010 
5011 struct gimple_opt_pass pass_early_ipa_sra =
5012 {
5013  {
5014   GIMPLE_PASS,
5015   "eipa_sra",	 			/* name */
5016   OPTGROUP_NONE,                        /* optinfo_flags */
5017   ipa_early_sra_gate,			/* gate */
5018   ipa_early_sra,			/* execute */
5019   NULL,					/* sub */
5020   NULL,					/* next */
5021   0,					/* static_pass_number */
5022   TV_IPA_SRA,				/* tv_id */
5023   0,	                                /* properties_required */
5024   0,					/* properties_provided */
5025   0,					/* properties_destroyed */
5026   0,					/* todo_flags_start */
5027   TODO_dump_symtab              	/* todo_flags_finish */
5028  }
5029 };
5030