/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008-2018 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical; the only difference is that early SRA
   does not scalarize unions that are used as the result of a GIMPLE_RETURN
   statement, because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from the respective parameter aggregates.  */
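
/* As a purely illustrative, hand-written sketch (not output of either pass),
   given source code such as

     struct point { int x; int y; };

     int
     norm1 (struct point p)
     {
       struct point q;
       q.x = p.x < 0 ? -p.x : p.x;
       q.y = p.y < 0 ? -p.y : p.y;
       return q.x + q.y;
     }

   intraprocedural SRA replaces the field accesses of the local aggregate Q
   with independent scalar variables, conceptually

     int q$x, q$y;
     q$x = p.x < 0 ? -p.x : p.x;
     q$y = p.y < 0 ? -p.y : p.y;
     return q$x + q$y;

   after which Q itself can go away and the scalars are exposed to the usual
   SSA optimizers.  The GIMPLE actually produced differs in detail; see the
   dump files of the esra and sra passes for real examples.  */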

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "tree-eh.h"
#include "stor-layout.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "symbol-summary.h"
#include "ipa-param-manipulation.h"
#include "ipa-prop.h"
#include "params.h"
#include "dbgcnt.h"
#include "tree-inline.h"
#include "ipa-fnsummary.h"
#include "ipa-utils.h"
#include "builtins.h"

/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
                SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
                SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;

struct assign_link;

/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by means of the first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to the offset of the root and their offset+size is smaller
   or equal to the offset+size of the root.  Children of an access are sorted
   by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or vector.  It is the
   duty of the modifying functions to replace them appropriately.  */
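
/* For example (a hypothetical illustration, not produced by the pass), for

     struct inner { int b; int c; };
     struct outer { int a; struct inner in; };
     struct outer o;

   accesses to o.a, o.in, o.in.b and o.in.c would, on a 32-bit-int target
   with no extra padding, yield group representatives roughly like

     offset  0, size 32   o.a
     offset 32, size 64   o.in
     offset 32, size 32   o.in.b
     offset 64, size 32   o.in.c

   and the resulting access tree would make the o.in.b and o.in.c accesses
   children of the o.in access, because their extents lie within it.  */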

struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference.
     If EXPR isn't a component reference, just set `BASE = EXPR', `OFFSET = 0'
     and `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple *stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* After the access tree has been constructed, this points to the parent of
     the current access, if there is one.  NULL for roots.  */
  struct access *parent;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region".  Never to be accessed
     directly, always only by means of get_access_replacement() and only
     when the grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access made in reverse storage order?  */
  unsigned reverse : 1;

  /* Is this particular access a write access?  */
  unsigned write : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  */
  unsigned grp_to_be_replaced : 1;

  /* Set when we want a replacement for the sole purpose of having it in
     generated debug statements.  */
  unsigned grp_to_be_debug_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to a scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;
};

typedef struct access *access_p;


/* Alloc pool for allocating access structures.  */
static object_allocator<struct access> access_pool ("SRA accesses");

/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;
};
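
/* For instance (again only a sketch), for an aggregate assignment

     dst = src;

   where both DST and SRC are candidates, an assign_link connecting the
   access created for the left hand side with the access created for the
   right hand side is recorded.  The propagation step later walks these
   links so that any subaccesses known for SRC (say src.f) are mirrored as
   subaccesses of DST, provided doing so does not introduce a partial
   overlap.  */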

/* Alloc pool for allocating assign link structures.  */
static object_allocator<assign_link> assign_link_pool ("SRA links");

/* Base (tree) -> Vector (vec<access_p> *) map.  */
static hash_map<tree, auto_vec<access_p> > *base_access_vec;

/* Candidate hash table helpers.  */

struct uid_decl_hasher : nofree_ptr_hash <tree_node>
{
  static inline hashval_t hash (const tree_node *);
  static inline bool equal (const tree_node *, const tree_node *);
};

/* Hash a tree in a uid_decl_map.  */

inline hashval_t
uid_decl_hasher::hash (const tree_node *item)
{
  return item->decl_minimal.uid;
}

/* Return true if the DECL_UIDs in both trees are equal.  */

inline bool
uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
{
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}

/* Set of candidates.  */
static bitmap candidate_bitmap;
static hash_table<uid_decl_hasher> *candidates;

/* For a candidate UID return the candidate's decl.  */

static inline tree
candidate (unsigned uid)
{
  tree_node t;
  t.decl_minimal.uid = uid;
  return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
}

/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are and need to be used as a
   whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Bitmap of candidates in the constant pool, which cannot be scalarized
   because this would produce non-constant expressions (e.g. Ada).  */
static bitmap disqualified_constants;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early IPA SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with fewer actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;

/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}

/* Various statistics gathered during a run of the pass.  */

static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;

/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */

static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type);
  fprintf (f, ", non_addressable = %d, reverse = %d",
           access->non_addressable, access->reverse);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
             "grp_assignment_write = %d, grp_scalar_read = %d, "
             "grp_scalar_write = %d, grp_total_scalarization = %d, "
             "grp_hint = %d, grp_covered = %d, "
             "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
             "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
             "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
             "grp_not_necessarilly_dereferenced = %d\n",
             access->grp_read, access->grp_write, access->grp_assignment_read,
             access->grp_assignment_write, access->grp_scalar_read,
             access->grp_scalar_write, access->grp_total_scalarization,
             access->grp_hint, access->grp_covered,
             access->grp_unscalarizable_region, access->grp_unscalarized_data,
             access->grp_partial_lhs, access->grp_to_be_replaced,
             access->grp_to_be_debug_replaced, access->grp_maybe_modified,
             access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
             "grp_partial_lhs = %d\n",
             access->write, access->grp_total_scalarization,
             access->grp_partial_lhs);
}

/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
        fputs ("* ", f);

      dump_access (f, access, true);

      if (access->first_child)
        dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}

/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}

/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}

/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static vec<access_p> *
get_base_access_vector (tree base)
{
  return base_access_vec->get (base);
}

/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
                        HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
        child = child->next_sibling;
      access = child;
    }

  return access;
}

/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return (*access_vec)[0];
}

/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
                                 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}

/* Add LINK to the linked list of assign links of RACC.  */
static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}

/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}

/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (access->first_link && !access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}

/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}


/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  candidates = new hash_table<uid_decl_hasher>
    (vec_safe_length (cfun->local_decls) / 2);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  disqualified_constants = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  base_access_vec = new hash_map<tree, auto_vec<access_p> >;
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}

/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  delete candidates;
  candidates = NULL;
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  BITMAP_FREE (disqualified_constants);
  access_pool.release ();
  assign_link_pool.release ();
  obstack_free (&name_obstack, NULL);

  delete base_access_vec;
}

/* Return true if DECL is a VAR_DECL in the constant pool, false otherwise.  */

static bool constant_decl_p (tree decl)
{
  return VAR_P (decl) && DECL_IN_CONSTANT_POOL (decl);
}

/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */

static void
disqualify_candidate (tree decl, const char *reason)
{
  if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
    candidates->remove_elt_with_hash (decl, DECL_UID (decl));
  if (constant_decl_p (decl))
    bitmap_set_bit (disqualified_constants, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl);
      fprintf (dump_file, " - %s\n", reason);
    }
}

/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
        if (TREE_CODE (fld) == FIELD_DECL)
          {
            tree ft = TREE_TYPE (fld);

            if (TREE_THIS_VOLATILE (fld))
              {
                *msg = "volatile structure field";
                return true;
              }
            if (!DECL_FIELD_OFFSET (fld))
              {
                *msg = "no structure field offset";
                return true;
              }
            if (!DECL_SIZE (fld))
              {
                *msg = "zero structure field size";
                return true;
              }
            if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
              {
                *msg = "structure field offset not fixed";
                return true;
              }
            if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
              {
                *msg = "structure field size not fixed";
                return true;
              }
            if (!tree_fits_shwi_p (bit_position (fld)))
              {
                *msg = "structure field size too big";
                return true;
              }
            if (AGGREGATE_TYPE_P (ft)
                && int_bit_position (fld) % BITS_PER_UNIT != 0)
              {
                *msg = "structure field is bit field";
                return true;
              }

            if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
              return true;
          }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
        {
          *msg = "element type is volatile";
          return true;
        }

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
        return true;

      return false;

    default:
      return false;
    }
}
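
/* As a hand-written illustration, a type like

     struct no_sra_for_me
     {
       volatile int counter;
       int payload;
     };

   is rejected by the checks above because of the volatile field, so no
   variable of this type will be scalarized; types whose fields have a size
   or offset that is not a compile-time constant are refused for similar
   reasons, with the particular cause recorded in *MSG.  */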

/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
        return SSA_NAME_VAR (t);
      else
        return NULL_TREE;
    }
  return t;
}

/* Mark a dereference of BASE of distance DIST in the basic block that STMT
   belongs to, unless the BB has already been marked as potentially
   final.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple *stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}

/* Allocate an access structure for BASE, OFFSET and SIZE, clear it and fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  struct access *access = access_pool.allocate ();

  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  base_access_vec->get_or_insert (base).safe_push (access);

  return access;
}

static bool maybe_add_sra_candidate (tree);

/* Create and insert an access for EXPR.  Return the created access, or NULL
   if it is not possible.  Also scan for uses of constant pool as we go along
   and add to candidates.  */

static struct access *
create_access (tree expr, gimple *stmt, bool write)
{
  struct access *access;
  poly_int64 poffset, psize, pmax_size;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool reverse, ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
                                  &reverse);
  if (!poffset.is_constant (&offset)
      || !psize.is_constant (&size)
      || !pmax_size.is_constant (&max_size))
    {
      disqualify_candidate (base, "Encountered a polynomial-sized access.");
      return NULL;
    }

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
        return NULL;
      ptr = true;
    }
  else
    ptr = false;

  /* For constant-pool entries, check we can substitute the constant value.  */
  if (constant_decl_p (base)
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA))
    {
      gcc_assert (!bitmap_bit_p (disqualified_constants, DECL_UID (base)));
      if (expr != base
          && !is_gimple_reg_type (TREE_TYPE (expr))
          && dump_file && (dump_flags & TDF_DETAILS))
        {
          /* This occurs in Ada with accesses to ARRAY_RANGE_REFs,
             and elements of multidimensional arrays (which are
             multi-element arrays in their own right).  */
          fprintf (dump_file, "Allowing non-reg-type load of part"
                              " of constant-pool entry: ");
          print_generic_expr (dump_file, expr);
        }
      maybe_add_sra_candidate (base);
    }

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
        {
          disqualify_candidate (base, "Encountered a variable sized access.");
          return NULL;
        }
      if (TREE_CODE (expr) == COMPONENT_REF
          && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
        {
          disqualify_candidate (base, "Encountered a bit-field access.");
          return NULL;
        }
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
        mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
        {
          size = max_size;
          unscalarizable_region = true;
        }
      if (size < 0)
        {
          disqualify_candidate (base, "Encountered an unconstrained access.");
          return NULL;
        }
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;
  access->reverse = reverse;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}


/* Return true iff TYPE is scalarizable - i.e. a RECORD_TYPE or fixed-length
   ARRAY_TYPE with fields that are either of gimple register types (excluding
   bit-fields) or (recursively) scalarizable types.  CONST_DECL must be true if
   we are considering a decl from the constant pool.  If it is false, char
   arrays will be refused.  */

static bool
scalarizable_type_p (tree type, bool const_decl)
{
  gcc_assert (!is_gimple_reg_type (type));
  if (type_contains_placeholder_p (type))
    return false;

  switch (TREE_CODE (type))
  {
  case RECORD_TYPE:
    for (tree fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
      if (TREE_CODE (fld) == FIELD_DECL)
        {
          tree ft = TREE_TYPE (fld);

          if (DECL_BIT_FIELD (fld))
            return false;

          if (!is_gimple_reg_type (ft)
              && !scalarizable_type_p (ft, const_decl))
            return false;
        }

    return true;

  case ARRAY_TYPE:
    {
      HOST_WIDE_INT min_elem_size;
      if (const_decl)
        min_elem_size = 0;
      else
        min_elem_size = BITS_PER_UNIT;

      if (TYPE_DOMAIN (type) == NULL_TREE
          || !tree_fits_shwi_p (TYPE_SIZE (type))
          || !tree_fits_shwi_p (TYPE_SIZE (TREE_TYPE (type)))
          || (tree_to_shwi (TYPE_SIZE (TREE_TYPE (type))) <= min_elem_size)
          || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
        return false;
      if (tree_to_shwi (TYPE_SIZE (type)) == 0
          && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) == NULL_TREE)
        /* Zero-element array, should not prevent scalarization.  */
        ;
      else if ((tree_to_shwi (TYPE_SIZE (type)) <= 0)
               || !tree_fits_shwi_p (TYPE_MAX_VALUE (TYPE_DOMAIN (type))))
        /* Variable-length array, do not allow scalarization.  */
        return false;

      tree elem = TREE_TYPE (type);
      if (!is_gimple_reg_type (elem)
          && !scalarizable_type_p (elem, const_decl))
        return false;
      return true;
    }
  default:
    return false;
  }
}
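
/* A rough, hand-written example of what the checks above accept and refuse:

     struct ok  { int i; float f; int a[4]; };		(conforms)
     struct bad { int i; char tail[]; };		(refused)

   The flexible trailing array in the second type has no usable size, so the
   ARRAY_TYPE case returns false.  Plain char arrays are also refused unless
   the declaration comes from the constant pool, because only then is
   MIN_ELEM_SIZE allowed to drop to zero.  */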

static void scalarize_elem (tree, HOST_WIDE_INT, HOST_WIDE_INT, bool, tree, tree);

/* Create total_scalarization accesses for all scalar fields of a member
   of type DECL_TYPE conforming to scalarizable_type_p.  BASE must be the
   top-most VAR_DECL representing the variable; within that, OFFSET locates
   the member and REF must be the memory reference expression for the
   member.  */

static void
completely_scalarize (tree base, tree decl_type, HOST_WIDE_INT offset, tree ref)
{
  switch (TREE_CODE (decl_type))
    {
    case RECORD_TYPE:
      for (tree fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
        if (TREE_CODE (fld) == FIELD_DECL)
          {
            HOST_WIDE_INT pos = offset + int_bit_position (fld);
            tree ft = TREE_TYPE (fld);
            tree nref = build3 (COMPONENT_REF, ft, ref, fld, NULL_TREE);

            scalarize_elem (base, pos, tree_to_uhwi (DECL_SIZE (fld)),
                            TYPE_REVERSE_STORAGE_ORDER (decl_type),
                            nref, ft);
          }
      break;
    case ARRAY_TYPE:
      {
        tree elemtype = TREE_TYPE (decl_type);
        tree elem_size = TYPE_SIZE (elemtype);
        gcc_assert (elem_size && tree_fits_shwi_p (elem_size));
        HOST_WIDE_INT el_size = tree_to_shwi (elem_size);
        gcc_assert (el_size > 0);

        tree minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (decl_type));
        gcc_assert (TREE_CODE (minidx) == INTEGER_CST);
        tree maxidx = TYPE_MAX_VALUE (TYPE_DOMAIN (decl_type));
        /* Skip (some) zero-length arrays; others have MAXIDX == MINIDX - 1.  */
        if (maxidx)
          {
            gcc_assert (TREE_CODE (maxidx) == INTEGER_CST);
            tree domain = TYPE_DOMAIN (decl_type);
            /* MINIDX and MAXIDX are inclusive, and must be interpreted in
               DOMAIN (e.g. signed int, whereas min/max may be size_int).  */
            offset_int idx = wi::to_offset (minidx);
            offset_int max = wi::to_offset (maxidx);
            if (!TYPE_UNSIGNED (domain))
              {
                idx = wi::sext (idx, TYPE_PRECISION (domain));
                max = wi::sext (max, TYPE_PRECISION (domain));
              }
            for (int el_off = offset; idx <= max; ++idx)
              {
                tree nref = build4 (ARRAY_REF, elemtype,
                                    ref,
                                    wide_int_to_tree (domain, idx),
                                    NULL_TREE, NULL_TREE);
                scalarize_elem (base, el_off, el_size,
                                TYPE_REVERSE_STORAGE_ORDER (decl_type),
                                nref, elemtype);
                el_off += el_size;
              }
          }
      }
      break;
    default:
      gcc_unreachable ();
    }
}
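
/* Continuing the illustration (hand-written, not pass output): for

     struct s { int i; short a[2]; } v;

   completely_scalarize on V would create accesses roughly at
   (offset 0, size 32) for v.i, (offset 32, size 16) for v.a[0] and
   (offset 48, size 16) for v.a[1], each marked grp_total_scalarization,
   assuming 32-bit int, 16-bit short and no padding between the fields.  */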
109938fd1498Szrj 
110038fd1498Szrj /* Create total_scalarization accesses for a member of type TYPE, which must
110138fd1498Szrj    satisfy either is_gimple_reg_type or scalarizable_type_p.  BASE must be the
110238fd1498Szrj    top-most VAR_DECL representing the variable; within that, POS and SIZE locate
110338fd1498Szrj    the member, REVERSE gives its torage order. and REF must be the reference
110438fd1498Szrj    expression for it.  */
110538fd1498Szrj 
110638fd1498Szrj static void
scalarize_elem(tree base,HOST_WIDE_INT pos,HOST_WIDE_INT size,bool reverse,tree ref,tree type)110738fd1498Szrj scalarize_elem (tree base, HOST_WIDE_INT pos, HOST_WIDE_INT size, bool reverse,
110838fd1498Szrj 		tree ref, tree type)
110938fd1498Szrj {
111038fd1498Szrj   if (is_gimple_reg_type (type))
111138fd1498Szrj   {
111238fd1498Szrj     struct access *access = create_access_1 (base, pos, size);
111338fd1498Szrj     access->expr = ref;
111438fd1498Szrj     access->type = type;
111538fd1498Szrj     access->grp_total_scalarization = 1;
111638fd1498Szrj     access->reverse = reverse;
111738fd1498Szrj     /* Accesses for intraprocedural SRA can have their stmt NULL.  */
111838fd1498Szrj   }
111938fd1498Szrj   else
112038fd1498Szrj     completely_scalarize (base, type, pos, ref);
112138fd1498Szrj }
112238fd1498Szrj 
112338fd1498Szrj /* Create a total_scalarization access for VAR as a whole.  VAR must be of a
112438fd1498Szrj    RECORD_TYPE or ARRAY_TYPE conforming to scalarizable_type_p.  */
112538fd1498Szrj 
112638fd1498Szrj static void
create_total_scalarization_access(tree var)112738fd1498Szrj create_total_scalarization_access (tree var)
112838fd1498Szrj {
112938fd1498Szrj   HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
113038fd1498Szrj   struct access *access;
113138fd1498Szrj 
113238fd1498Szrj   access = create_access_1 (var, 0, size);
113338fd1498Szrj   access->expr = var;
113438fd1498Szrj   access->type = TREE_TYPE (var);
113538fd1498Szrj   access->grp_total_scalarization = 1;
113638fd1498Szrj }
113738fd1498Szrj 
113838fd1498Szrj /* Return true if REF has an VIEW_CONVERT_EXPR somewhere in it.  */
113938fd1498Szrj 
114038fd1498Szrj static inline bool
contains_view_convert_expr_p(const_tree ref)114138fd1498Szrj contains_view_convert_expr_p (const_tree ref)
114238fd1498Szrj {
114338fd1498Szrj   while (handled_component_p (ref))
114438fd1498Szrj     {
114538fd1498Szrj       if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
114638fd1498Szrj 	return true;
114738fd1498Szrj       ref = TREE_OPERAND (ref, 0);
114838fd1498Szrj     }
114938fd1498Szrj 
115038fd1498Szrj   return false;
115138fd1498Szrj }
115238fd1498Szrj 
115338fd1498Szrj /* Return true if REF contains a VIEW_CONVERT_EXPR or a MEM_REF that performs
115438fd1498Szrj    type conversion or a COMPONENT_REF with a bit-field field declaration.  */
115538fd1498Szrj 
115638fd1498Szrj static bool
contains_vce_or_bfcref_p(const_tree ref)115738fd1498Szrj contains_vce_or_bfcref_p (const_tree ref)
115838fd1498Szrj {
115938fd1498Szrj   while (handled_component_p (ref))
116038fd1498Szrj     {
116138fd1498Szrj       if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
116238fd1498Szrj 	  || (TREE_CODE (ref) == COMPONENT_REF
116338fd1498Szrj 	      && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
116438fd1498Szrj 	return true;
116538fd1498Szrj       ref = TREE_OPERAND (ref, 0);
116638fd1498Szrj     }
116738fd1498Szrj 
116838fd1498Szrj   if (TREE_CODE (ref) != MEM_REF
116938fd1498Szrj       || TREE_CODE (TREE_OPERAND (ref, 0)) != ADDR_EXPR)
117038fd1498Szrj     return false;
117138fd1498Szrj 
117238fd1498Szrj   tree mem = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
117338fd1498Szrj   if (TYPE_MAIN_VARIANT (TREE_TYPE (ref))
117438fd1498Szrj       != TYPE_MAIN_VARIANT (TREE_TYPE (mem)))
117538fd1498Szrj     return true;
117638fd1498Szrj 
117738fd1498Szrj   return false;
117838fd1498Szrj }
117938fd1498Szrj 
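/* For illustration (hypothetical types): a reference whose base is
   MEM_REF[(struct other *)&u], where U is declared with a different main
   variant than struct other, amounts to type punning of the whole aggregate,
   so the function above treats it like a VIEW_CONVERT_EXPR and returns
   true.  */
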
118038fd1498Szrj /* Search the given tree for a declaration by skipping handled components and
118138fd1498Szrj    exclude it from the candidates.  */
118238fd1498Szrj 
118338fd1498Szrj static void
118438fd1498Szrj disqualify_base_of_expr (tree t, const char *reason)
118538fd1498Szrj {
118638fd1498Szrj   t = get_base_address (t);
118738fd1498Szrj   if (sra_mode == SRA_MODE_EARLY_IPA
118838fd1498Szrj       && TREE_CODE (t) == MEM_REF)
118938fd1498Szrj     t = get_ssa_base_param (TREE_OPERAND (t, 0));
119038fd1498Szrj 
119138fd1498Szrj   if (t && DECL_P (t))
119238fd1498Szrj     disqualify_candidate (t, reason);
119338fd1498Szrj }
119438fd1498Szrj 
119538fd1498Szrj /* Scan expression EXPR and create access structures for all accesses to
119638fd1498Szrj    candidates for scalarization.  Return the created access or NULL if none is
119738fd1498Szrj    created.  */
119838fd1498Szrj 
119938fd1498Szrj static struct access *
120038fd1498Szrj build_access_from_expr_1 (tree expr, gimple *stmt, bool write)
120138fd1498Szrj {
120238fd1498Szrj   struct access *ret = NULL;
120338fd1498Szrj   bool partial_ref;
120438fd1498Szrj 
120538fd1498Szrj   if (TREE_CODE (expr) == BIT_FIELD_REF
120638fd1498Szrj       || TREE_CODE (expr) == IMAGPART_EXPR
120738fd1498Szrj       || TREE_CODE (expr) == REALPART_EXPR)
120838fd1498Szrj     {
120938fd1498Szrj       expr = TREE_OPERAND (expr, 0);
121038fd1498Szrj       partial_ref = true;
121138fd1498Szrj     }
121238fd1498Szrj   else
121338fd1498Szrj     partial_ref = false;
121438fd1498Szrj 
121538fd1498Szrj   if (storage_order_barrier_p (expr))
121638fd1498Szrj     {
121738fd1498Szrj       disqualify_base_of_expr (expr, "storage order barrier.");
121838fd1498Szrj       return NULL;
121938fd1498Szrj     }
122038fd1498Szrj 
122138fd1498Szrj   /* We need to dive through V_C_Es in order to get the size of their operand
122238fd1498Szrj      and not the result type.  Ada produces such statements.  We are also
122338fd1498Szrj      capable of handling the topmost V_C_E but not any of those buried in other
122438fd1498Szrj      handled components.  */
122538fd1498Szrj   if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
122638fd1498Szrj     expr = TREE_OPERAND (expr, 0);
122738fd1498Szrj 
122838fd1498Szrj   if (contains_view_convert_expr_p (expr))
122938fd1498Szrj     {
123038fd1498Szrj       disqualify_base_of_expr (expr, "V_C_E under a different handled "
123138fd1498Szrj 			       "component.");
123238fd1498Szrj       return NULL;
123338fd1498Szrj     }
123438fd1498Szrj   if (TREE_THIS_VOLATILE (expr))
123538fd1498Szrj     {
123638fd1498Szrj       disqualify_base_of_expr (expr, "part of a volatile reference.");
123738fd1498Szrj       return NULL;
123838fd1498Szrj     }
123938fd1498Szrj 
124038fd1498Szrj   switch (TREE_CODE (expr))
124138fd1498Szrj     {
124238fd1498Szrj     case MEM_REF:
124338fd1498Szrj       if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
124438fd1498Szrj 	  && sra_mode != SRA_MODE_EARLY_IPA)
124538fd1498Szrj 	return NULL;
124638fd1498Szrj       /* fall through */
124738fd1498Szrj     case VAR_DECL:
124838fd1498Szrj     case PARM_DECL:
124938fd1498Szrj     case RESULT_DECL:
125038fd1498Szrj     case COMPONENT_REF:
125138fd1498Szrj     case ARRAY_REF:
125238fd1498Szrj     case ARRAY_RANGE_REF:
125338fd1498Szrj       ret = create_access (expr, stmt, write);
125438fd1498Szrj       break;
125538fd1498Szrj 
125638fd1498Szrj     default:
125738fd1498Szrj       break;
125838fd1498Szrj     }
125938fd1498Szrj 
126038fd1498Szrj   if (write && partial_ref && ret)
126138fd1498Szrj     ret->grp_partial_lhs = 1;
126238fd1498Szrj 
126338fd1498Szrj   return ret;
126438fd1498Szrj }
126538fd1498Szrj 
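/* A sketch of the partial-reference handling above (hypothetical example):
   for a store such as __real c.m = 0.0, with M a _Complex double member, the
   REALPART_EXPR is stripped first, an access covering the whole complex
   member is created, and, because only part of it is written, the access is
   marked grp_partial_lhs.  */
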
126638fd1498Szrj /* Scan expression EXPR and create access structures for all accesses to
126738fd1498Szrj    candidates for scalarization.  Return true if any access has been inserted.
126838fd1498Szrj    STMT must be the statement from which the expression is taken, WRITE must be
126938fd1498Szrj    true if the expression is a store and false otherwise. */
127038fd1498Szrj 
127138fd1498Szrj static bool
127238fd1498Szrj build_access_from_expr (tree expr, gimple *stmt, bool write)
127338fd1498Szrj {
127438fd1498Szrj   struct access *access;
127538fd1498Szrj 
127638fd1498Szrj   access = build_access_from_expr_1 (expr, stmt, write);
127738fd1498Szrj   if (access)
127838fd1498Szrj     {
127938fd1498Szrj       /* This means the aggregate is accessed as a whole in a way other than an
128038fd1498Szrj 	 assign statement and thus cannot be removed even if we had a scalar
128138fd1498Szrj 	 replacement for everything.  */
128238fd1498Szrj       if (cannot_scalarize_away_bitmap)
128338fd1498Szrj 	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
128438fd1498Szrj       return true;
128538fd1498Szrj     }
128638fd1498Szrj   return false;
128738fd1498Szrj }
128838fd1498Szrj 
128938fd1498Szrj /* Return the single non-EH successor edge of BB or NULL if there is none or
129038fd1498Szrj    more than one.  */
129138fd1498Szrj 
129238fd1498Szrj static edge
129338fd1498Szrj single_non_eh_succ (basic_block bb)
129438fd1498Szrj {
129538fd1498Szrj   edge e, res = NULL;
129638fd1498Szrj   edge_iterator ei;
129738fd1498Szrj 
129838fd1498Szrj   FOR_EACH_EDGE (e, ei, bb->succs)
129938fd1498Szrj     if (!(e->flags & EDGE_EH))
130038fd1498Szrj       {
130138fd1498Szrj 	if (res)
130238fd1498Szrj 	  return NULL;
130338fd1498Szrj 	res = e;
130438fd1498Szrj       }
130538fd1498Szrj 
130638fd1498Szrj   return res;
130738fd1498Szrj }
130838fd1498Szrj 
130938fd1498Szrj /* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
131038fd1498Szrj    there is no alternative spot in which to put statements SRA might need to
131138fd1498Szrj    generate after it.  The spot we are looking for is an edge leading to a
131238fd1498Szrj    single non-EH successor, if it exists and is indeed single.  RHS may be
131338fd1498Szrj    NULL, in that case ignore it.  */
131438fd1498Szrj 
131538fd1498Szrj static bool
131638fd1498Szrj disqualify_if_bad_bb_terminating_stmt (gimple *stmt, tree lhs, tree rhs)
131738fd1498Szrj {
131838fd1498Szrj   if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
131938fd1498Szrj       && stmt_ends_bb_p (stmt))
132038fd1498Szrj     {
132138fd1498Szrj       if (single_non_eh_succ (gimple_bb (stmt)))
132238fd1498Szrj 	return false;
132338fd1498Szrj 
132438fd1498Szrj       disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
132538fd1498Szrj       if (rhs)
132638fd1498Szrj 	disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
132738fd1498Szrj       return true;
132838fd1498Szrj     }
132938fd1498Szrj   return false;
133038fd1498Szrj }
133138fd1498Szrj 
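/* For example (illustrative): if an assignment whose RHS is a call that can
   throw ends its basic block and that block does not have exactly one non-EH
   successor edge, there is no safe spot for the loads and stores SRA might
   have to emit after the statement, so both the LHS and the RHS bases are
   disqualified above.  */
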
133238fd1498Szrj /* Return true if the nature of BASE is such that it contains data even if
133338fd1498Szrj    there is no write to it in the function.  */
133438fd1498Szrj 
133538fd1498Szrj static bool
133638fd1498Szrj comes_initialized_p (tree base)
133738fd1498Szrj {
133838fd1498Szrj   return TREE_CODE (base) == PARM_DECL || constant_decl_p (base);
133938fd1498Szrj }
134038fd1498Szrj 
134138fd1498Szrj /* Scan expressions occurring in STMT, create access structures for all accesses
134238fd1498Szrj    to candidates for scalarization and remove those candidates which occur in
134338fd1498Szrj    statements or expressions that prevent them from being split apart.  Return
134438fd1498Szrj    true if any access has been inserted.  */
134538fd1498Szrj 
134638fd1498Szrj static bool
134738fd1498Szrj build_accesses_from_assign (gimple *stmt)
134838fd1498Szrj {
134938fd1498Szrj   tree lhs, rhs;
135038fd1498Szrj   struct access *lacc, *racc;
135138fd1498Szrj 
135238fd1498Szrj   if (!gimple_assign_single_p (stmt)
135338fd1498Szrj       /* Scope clobbers don't influence scalarization.  */
135438fd1498Szrj       || gimple_clobber_p (stmt))
135538fd1498Szrj     return false;
135638fd1498Szrj 
135738fd1498Szrj   lhs = gimple_assign_lhs (stmt);
135838fd1498Szrj   rhs = gimple_assign_rhs1 (stmt);
135938fd1498Szrj 
136038fd1498Szrj   if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
136138fd1498Szrj     return false;
136238fd1498Szrj 
136338fd1498Szrj   racc = build_access_from_expr_1 (rhs, stmt, false);
136438fd1498Szrj   lacc = build_access_from_expr_1 (lhs, stmt, true);
136538fd1498Szrj 
136638fd1498Szrj   if (lacc)
136738fd1498Szrj     {
136838fd1498Szrj       lacc->grp_assignment_write = 1;
136938fd1498Szrj       if (storage_order_barrier_p (rhs))
137038fd1498Szrj 	lacc->grp_unscalarizable_region = 1;
137138fd1498Szrj     }
137238fd1498Szrj 
137338fd1498Szrj   if (racc)
137438fd1498Szrj     {
137538fd1498Szrj       racc->grp_assignment_read = 1;
137638fd1498Szrj       if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
137738fd1498Szrj 	  && !is_gimple_reg_type (racc->type))
137838fd1498Szrj 	{
137938fd1498Szrj 	  if (contains_vce_or_bfcref_p (rhs))
138038fd1498Szrj 	    bitmap_set_bit (cannot_scalarize_away_bitmap,
138138fd1498Szrj 			    DECL_UID (racc->base));
138238fd1498Szrj 	  else
138338fd1498Szrj 	    bitmap_set_bit (should_scalarize_away_bitmap,
138438fd1498Szrj 			    DECL_UID (racc->base));
138538fd1498Szrj 	}
138638fd1498Szrj       if (storage_order_barrier_p (lhs))
138738fd1498Szrj 	racc->grp_unscalarizable_region = 1;
138838fd1498Szrj     }
138938fd1498Szrj 
139038fd1498Szrj   if (lacc && racc
139138fd1498Szrj       && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
139238fd1498Szrj       && !lacc->grp_unscalarizable_region
139338fd1498Szrj       && !racc->grp_unscalarizable_region
139438fd1498Szrj       && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
139538fd1498Szrj       && lacc->size == racc->size
139638fd1498Szrj       && useless_type_conversion_p (lacc->type, racc->type))
139738fd1498Szrj     {
139838fd1498Szrj       struct assign_link *link;
139938fd1498Szrj 
140038fd1498Szrj       link = assign_link_pool.allocate ();
140138fd1498Szrj       memset (link, 0, sizeof (struct assign_link));
140238fd1498Szrj 
140338fd1498Szrj       link->lacc = lacc;
140438fd1498Szrj       link->racc = racc;
140538fd1498Szrj       add_link_to_rhs (racc, link);
140638fd1498Szrj       add_access_to_work_queue (racc);
140738fd1498Szrj 
140838fd1498Szrj       /* Let's delay marking the areas as written until propagation of accesses
140938fd1498Szrj 	 across link, unless the nature of rhs tells us that its data comes
141038fd1498Szrj 	 from elsewhere.  */
141138fd1498Szrj       if (!comes_initialized_p (racc->base))
141238fd1498Szrj 	lacc->write = false;
141338fd1498Szrj     }
141438fd1498Szrj 
141538fd1498Szrj   return lacc || racc;
141638fd1498Szrj }
141738fd1498Szrj 
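/* Rough sketch of the assign links created above (hypothetical candidates):
   for an aggregate copy dst = src where both sides are candidates of the
   same type, the RHS access is linked to the LHS access through an
   assign_link and queued for propagation, which can later create matching
   sub-accesses on the LHS so the copy can be performed replacement by
   replacement.  */
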
141838fd1498Szrj /* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
141938fd1498Szrj    GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */
142038fd1498Szrj 
142138fd1498Szrj static bool
142238fd1498Szrj asm_visit_addr (gimple *, tree op, tree, void *)
142338fd1498Szrj {
142438fd1498Szrj   op = get_base_address (op);
142538fd1498Szrj   if (op
142638fd1498Szrj       && DECL_P (op))
142738fd1498Szrj     disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");
142838fd1498Szrj 
142938fd1498Szrj   return false;
143038fd1498Szrj }
143138fd1498Szrj 
143238fd1498Szrj /* Return true iff callsite CALL has at least as many actual arguments as there
143338fd1498Szrj    are formal parameters of the function currently processed by IPA-SRA and
143438fd1498Szrj    that their types match.  */
143538fd1498Szrj 
143638fd1498Szrj static inline bool
143738fd1498Szrj callsite_arguments_match_p (gimple *call)
143838fd1498Szrj {
143938fd1498Szrj   if (gimple_call_num_args (call) < (unsigned) func_param_count)
144038fd1498Szrj     return false;
144138fd1498Szrj 
144238fd1498Szrj   tree parm;
144338fd1498Szrj   int i;
144438fd1498Szrj   for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
144538fd1498Szrj        parm;
144638fd1498Szrj        parm = DECL_CHAIN (parm), i++)
144738fd1498Szrj     {
144838fd1498Szrj       tree arg = gimple_call_arg (call, i);
144938fd1498Szrj       if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
145038fd1498Szrj 	return false;
145138fd1498Szrj     }
145238fd1498Szrj   return true;
145338fd1498Szrj }
145438fd1498Szrj 
145538fd1498Szrj /* Scan function and look for interesting expressions and create access
145638fd1498Szrj    structures for them.  Return true iff any access is created.  */
145738fd1498Szrj 
145838fd1498Szrj static bool
145938fd1498Szrj scan_function (void)
146038fd1498Szrj {
146138fd1498Szrj   basic_block bb;
146238fd1498Szrj   bool ret = false;
146338fd1498Szrj 
146438fd1498Szrj   FOR_EACH_BB_FN (bb, cfun)
146538fd1498Szrj     {
146638fd1498Szrj       gimple_stmt_iterator gsi;
146738fd1498Szrj       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
146838fd1498Szrj 	{
146938fd1498Szrj 	  gimple *stmt = gsi_stmt (gsi);
147038fd1498Szrj 	  tree t;
147138fd1498Szrj 	  unsigned i;
147238fd1498Szrj 
147338fd1498Szrj 	  if (final_bbs && stmt_can_throw_external (stmt))
147438fd1498Szrj 	    bitmap_set_bit (final_bbs, bb->index);
147538fd1498Szrj 	  switch (gimple_code (stmt))
147638fd1498Szrj 	    {
147738fd1498Szrj 	    case GIMPLE_RETURN:
147838fd1498Szrj 	      t = gimple_return_retval (as_a <greturn *> (stmt));
147938fd1498Szrj 	      if (t != NULL_TREE)
148038fd1498Szrj 		ret |= build_access_from_expr (t, stmt, false);
148138fd1498Szrj 	      if (final_bbs)
148238fd1498Szrj 		bitmap_set_bit (final_bbs, bb->index);
148338fd1498Szrj 	      break;
148438fd1498Szrj 
148538fd1498Szrj 	    case GIMPLE_ASSIGN:
148638fd1498Szrj 	      ret |= build_accesses_from_assign (stmt);
148738fd1498Szrj 	      break;
148838fd1498Szrj 
148938fd1498Szrj 	    case GIMPLE_CALL:
149038fd1498Szrj 	      for (i = 0; i < gimple_call_num_args (stmt); i++)
149138fd1498Szrj 		ret |= build_access_from_expr (gimple_call_arg (stmt, i),
149238fd1498Szrj 					       stmt, false);
149338fd1498Szrj 
149438fd1498Szrj 	      if (sra_mode == SRA_MODE_EARLY_IPA)
149538fd1498Szrj 		{
149638fd1498Szrj 		  tree dest = gimple_call_fndecl (stmt);
149738fd1498Szrj 		  int flags = gimple_call_flags (stmt);
149838fd1498Szrj 
149938fd1498Szrj 		  if (dest)
150038fd1498Szrj 		    {
150138fd1498Szrj 		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
150238fd1498Szrj 			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
150338fd1498Szrj 			encountered_apply_args = true;
150438fd1498Szrj 		      if (recursive_call_p (current_function_decl, dest))
150538fd1498Szrj 			{
150638fd1498Szrj 			  encountered_recursive_call = true;
150738fd1498Szrj 			  if (!callsite_arguments_match_p (stmt))
150838fd1498Szrj 			    encountered_unchangable_recursive_call = true;
150938fd1498Szrj 			}
151038fd1498Szrj 		    }
151138fd1498Szrj 
151238fd1498Szrj 		  if (final_bbs
151338fd1498Szrj 		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
151438fd1498Szrj 		    bitmap_set_bit (final_bbs, bb->index);
151538fd1498Szrj 		}
151638fd1498Szrj 
151738fd1498Szrj 	      t = gimple_call_lhs (stmt);
151838fd1498Szrj 	      if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
151938fd1498Szrj 		ret |= build_access_from_expr (t, stmt, true);
152038fd1498Szrj 	      break;
152138fd1498Szrj 
152238fd1498Szrj 	    case GIMPLE_ASM:
152338fd1498Szrj 	      {
152438fd1498Szrj 		gasm *asm_stmt = as_a <gasm *> (stmt);
152538fd1498Szrj 		walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
152638fd1498Szrj 					       asm_visit_addr);
152738fd1498Szrj 		if (final_bbs)
152838fd1498Szrj 		  bitmap_set_bit (final_bbs, bb->index);
152938fd1498Szrj 
153038fd1498Szrj 		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
153138fd1498Szrj 		  {
153238fd1498Szrj 		    t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
153338fd1498Szrj 		    ret |= build_access_from_expr (t, asm_stmt, false);
153438fd1498Szrj 		  }
153538fd1498Szrj 		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
153638fd1498Szrj 		  {
153738fd1498Szrj 		    t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
153838fd1498Szrj 		    ret |= build_access_from_expr (t, asm_stmt, true);
153938fd1498Szrj 		  }
154038fd1498Szrj 	      }
154138fd1498Szrj 	      break;
154238fd1498Szrj 
154338fd1498Szrj 	    default:
154438fd1498Szrj 	      break;
154538fd1498Szrj 	    }
154638fd1498Szrj 	}
154738fd1498Szrj     }
154838fd1498Szrj 
154938fd1498Szrj   return ret;
155038fd1498Szrj }
155138fd1498Szrj 
155238fd1498Szrj /* Helper of QSORT function. There are pointers to accesses in the array.  An
155338fd1498Szrj    access is considered smaller than another if it has smaller offset or if the
155438fd1498Szrj    offsets are the same but its size is bigger. */
155538fd1498Szrj 
155638fd1498Szrj static int
155738fd1498Szrj compare_access_positions (const void *a, const void *b)
155838fd1498Szrj {
155938fd1498Szrj   const access_p *fp1 = (const access_p *) a;
156038fd1498Szrj   const access_p *fp2 = (const access_p *) b;
156138fd1498Szrj   const access_p f1 = *fp1;
156238fd1498Szrj   const access_p f2 = *fp2;
156338fd1498Szrj 
156438fd1498Szrj   if (f1->offset != f2->offset)
156538fd1498Szrj     return f1->offset < f2->offset ? -1 : 1;
156638fd1498Szrj 
156738fd1498Szrj   if (f1->size == f2->size)
156838fd1498Szrj     {
156938fd1498Szrj       if (f1->type == f2->type)
157038fd1498Szrj 	return 0;
157138fd1498Szrj       /* Put any non-aggregate type before any aggregate type.  */
157238fd1498Szrj       else if (!is_gimple_reg_type (f1->type)
157338fd1498Szrj 	  && is_gimple_reg_type (f2->type))
157438fd1498Szrj 	return 1;
157538fd1498Szrj       else if (is_gimple_reg_type (f1->type)
157638fd1498Szrj 	       && !is_gimple_reg_type (f2->type))
157738fd1498Szrj 	return -1;
157838fd1498Szrj       /* Put any complex or vector type before any other scalar type.  */
157938fd1498Szrj       else if (TREE_CODE (f1->type) != COMPLEX_TYPE
158038fd1498Szrj 	       && TREE_CODE (f1->type) != VECTOR_TYPE
158138fd1498Szrj 	       && (TREE_CODE (f2->type) == COMPLEX_TYPE
158238fd1498Szrj 		   || TREE_CODE (f2->type) == VECTOR_TYPE))
158338fd1498Szrj 	return 1;
158438fd1498Szrj       else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
158538fd1498Szrj 		|| TREE_CODE (f1->type) == VECTOR_TYPE)
158638fd1498Szrj 	       && TREE_CODE (f2->type) != COMPLEX_TYPE
158738fd1498Szrj 	       && TREE_CODE (f2->type) != VECTOR_TYPE)
158838fd1498Szrj 	return -1;
158938fd1498Szrj       /* Put any integral type before any non-integral type.  When splicing, we
159038fd1498Szrj 	 make sure that those with insufficient precision and occupying the
159138fd1498Szrj 	 same space are not scalarized.  */
159238fd1498Szrj       else if (INTEGRAL_TYPE_P (f1->type)
159338fd1498Szrj 	       && !INTEGRAL_TYPE_P (f2->type))
159438fd1498Szrj 	return -1;
159538fd1498Szrj       else if (!INTEGRAL_TYPE_P (f1->type)
159638fd1498Szrj 	       && INTEGRAL_TYPE_P (f2->type))
159738fd1498Szrj 	return 1;
159838fd1498Szrj       /* Put the integral type with the bigger precision first.  */
159938fd1498Szrj       else if (INTEGRAL_TYPE_P (f1->type)
160038fd1498Szrj 	       && INTEGRAL_TYPE_P (f2->type)
160138fd1498Szrj 	       && (TYPE_PRECISION (f2->type) != TYPE_PRECISION (f1->type)))
160238fd1498Szrj 	return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
160338fd1498Szrj       /* Stabilize the sort.  */
160438fd1498Szrj       return TYPE_UID (f1->type) - TYPE_UID (f2->type);
160538fd1498Szrj     }
160638fd1498Szrj 
160738fd1498Szrj   /* We want the bigger accesses first, thus the opposite operator in the next
160838fd1498Szrj      line: */
160938fd1498Szrj   return f1->size > f2->size ? -1 : 1;
161038fd1498Szrj }
161138fd1498Szrj 
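/* Illustrative ordering (hypothetical types, sizes in bits): accesses
   <0, 64, struct pair>, <0, 32, struct half> and <0, 32, int> would sort as

     <0, 64, struct pair>, <0, 32, int>, <0, 32, struct half>

   i.e. bigger accesses first, and among accesses with equal offset and size
   the scalar (register) type before the aggregate one.  */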
161238fd1498Szrj 
161338fd1498Szrj /* Append the name of the declaration to the name obstack.  A helper function for
161438fd1498Szrj    make_fancy_name.  */
161538fd1498Szrj 
161638fd1498Szrj static void
161738fd1498Szrj make_fancy_decl_name (tree decl)
161838fd1498Szrj {
161938fd1498Szrj   char buffer[32];
162038fd1498Szrj 
162138fd1498Szrj   tree name = DECL_NAME (decl);
162238fd1498Szrj   if (name)
162338fd1498Szrj     obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
162438fd1498Szrj 		  IDENTIFIER_LENGTH (name));
162538fd1498Szrj   else
162638fd1498Szrj     {
162738fd1498Szrj       sprintf (buffer, "D%u", DECL_UID (decl));
162838fd1498Szrj       obstack_grow (&name_obstack, buffer, strlen (buffer));
162938fd1498Szrj     }
163038fd1498Szrj }
163138fd1498Szrj 
163238fd1498Szrj /* Helper for make_fancy_name.  */
163338fd1498Szrj 
163438fd1498Szrj static void
163538fd1498Szrj make_fancy_name_1 (tree expr)
163638fd1498Szrj {
163738fd1498Szrj   char buffer[32];
163838fd1498Szrj   tree index;
163938fd1498Szrj 
164038fd1498Szrj   if (DECL_P (expr))
164138fd1498Szrj     {
164238fd1498Szrj       make_fancy_decl_name (expr);
164338fd1498Szrj       return;
164438fd1498Szrj     }
164538fd1498Szrj 
164638fd1498Szrj   switch (TREE_CODE (expr))
164738fd1498Szrj     {
164838fd1498Szrj     case COMPONENT_REF:
164938fd1498Szrj       make_fancy_name_1 (TREE_OPERAND (expr, 0));
165038fd1498Szrj       obstack_1grow (&name_obstack, '$');
165138fd1498Szrj       make_fancy_decl_name (TREE_OPERAND (expr, 1));
165238fd1498Szrj       break;
165338fd1498Szrj 
165438fd1498Szrj     case ARRAY_REF:
165538fd1498Szrj       make_fancy_name_1 (TREE_OPERAND (expr, 0));
165638fd1498Szrj       obstack_1grow (&name_obstack, '$');
165738fd1498Szrj       /* Arrays with only one element may not have a constant as their
165838fd1498Szrj 	 index. */
165938fd1498Szrj       index = TREE_OPERAND (expr, 1);
166038fd1498Szrj       if (TREE_CODE (index) != INTEGER_CST)
166138fd1498Szrj 	break;
166238fd1498Szrj       sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
166338fd1498Szrj       obstack_grow (&name_obstack, buffer, strlen (buffer));
166438fd1498Szrj       break;
166538fd1498Szrj 
166638fd1498Szrj     case ADDR_EXPR:
166738fd1498Szrj       make_fancy_name_1 (TREE_OPERAND (expr, 0));
166838fd1498Szrj       break;
166938fd1498Szrj 
167038fd1498Szrj     case MEM_REF:
167138fd1498Szrj       make_fancy_name_1 (TREE_OPERAND (expr, 0));
167238fd1498Szrj       if (!integer_zerop (TREE_OPERAND (expr, 1)))
167338fd1498Szrj 	{
167438fd1498Szrj 	  obstack_1grow (&name_obstack, '$');
167538fd1498Szrj 	  sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
167638fd1498Szrj 		   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
167738fd1498Szrj 	  obstack_grow (&name_obstack, buffer, strlen (buffer));
167838fd1498Szrj 	}
167938fd1498Szrj       break;
168038fd1498Szrj 
168138fd1498Szrj     case BIT_FIELD_REF:
168238fd1498Szrj     case REALPART_EXPR:
168338fd1498Szrj     case IMAGPART_EXPR:
168438fd1498Szrj       gcc_unreachable (); 	/* we treat these as scalars.  */
168538fd1498Szrj       break;
168638fd1498Szrj     default:
168738fd1498Szrj       break;
168838fd1498Szrj     }
168938fd1498Szrj }
169038fd1498Szrj 
169138fd1498Szrj /* Create a human readable name for the replacement variable of ACCESS.  */
169238fd1498Szrj 
169338fd1498Szrj static char *
169438fd1498Szrj make_fancy_name (tree expr)
169538fd1498Szrj {
169638fd1498Szrj   make_fancy_name_1 (expr);
169738fd1498Szrj   obstack_1grow (&name_obstack, '\0');
169838fd1498Szrj   return XOBFINISH (&name_obstack, char *);
169938fd1498Szrj }
170038fd1498Szrj 
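/* For instance (illustrative), the replacement for an access whose expression
   is var.inner[3].val would typically get a name along the lines of
   var$inner$3$val, which keeps dumps and debug information readable.  */
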
170138fd1498Szrj /* Construct a MEM_REF that would reference a part of aggregate BASE of type
170238fd1498Szrj    EXP_TYPE at the given OFFSET and with storage order REVERSE.  If BASE is
170338fd1498Szrj    something for which get_addr_base_and_unit_offset returns NULL, gsi must
170438fd1498Szrj    be non-NULL and is used to insert new statements either before or below
170538fd1498Szrj    the current one as specified by INSERT_AFTER.  This function is not capable
170638fd1498Szrj    of handling bitfields.  */
170738fd1498Szrj 
170838fd1498Szrj tree
170938fd1498Szrj build_ref_for_offset (location_t loc, tree base, poly_int64 offset,
171038fd1498Szrj 		      bool reverse, tree exp_type, gimple_stmt_iterator *gsi,
171138fd1498Szrj 		      bool insert_after)
171238fd1498Szrj {
171338fd1498Szrj   tree prev_base = base;
171438fd1498Szrj   tree off;
171538fd1498Szrj   tree mem_ref;
171638fd1498Szrj   poly_int64 base_offset;
171738fd1498Szrj   unsigned HOST_WIDE_INT misalign;
171838fd1498Szrj   unsigned int align;
171938fd1498Szrj 
172038fd1498Szrj   /* Preserve address-space information.  */
172138fd1498Szrj   addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
172238fd1498Szrj   if (as != TYPE_ADDR_SPACE (exp_type))
172338fd1498Szrj     exp_type = build_qualified_type (exp_type,
172438fd1498Szrj 				     TYPE_QUALS (exp_type)
172538fd1498Szrj 				     | ENCODE_QUAL_ADDR_SPACE (as));
172638fd1498Szrj 
172738fd1498Szrj   poly_int64 byte_offset = exact_div (offset, BITS_PER_UNIT);
172838fd1498Szrj   get_object_alignment_1 (base, &align, &misalign);
172938fd1498Szrj   base = get_addr_base_and_unit_offset (base, &base_offset);
173038fd1498Szrj 
173138fd1498Szrj   /* get_addr_base_and_unit_offset returns NULL for references with a variable
173238fd1498Szrj      offset such as array[var_index].  */
173338fd1498Szrj   if (!base)
173438fd1498Szrj     {
173538fd1498Szrj       gassign *stmt;
173638fd1498Szrj       tree tmp, addr;
173738fd1498Szrj 
173838fd1498Szrj       gcc_checking_assert (gsi);
173938fd1498Szrj       tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
174038fd1498Szrj       addr = build_fold_addr_expr (unshare_expr (prev_base));
174138fd1498Szrj       STRIP_USELESS_TYPE_CONVERSION (addr);
174238fd1498Szrj       stmt = gimple_build_assign (tmp, addr);
174338fd1498Szrj       gimple_set_location (stmt, loc);
174438fd1498Szrj       if (insert_after)
174538fd1498Szrj 	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
174638fd1498Szrj       else
174738fd1498Szrj 	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
174838fd1498Szrj 
174938fd1498Szrj       off = build_int_cst (reference_alias_ptr_type (prev_base), byte_offset);
175038fd1498Szrj       base = tmp;
175138fd1498Szrj     }
175238fd1498Szrj   else if (TREE_CODE (base) == MEM_REF)
175338fd1498Szrj     {
175438fd1498Szrj       off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
175538fd1498Szrj 			   base_offset + byte_offset);
175638fd1498Szrj       off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
175738fd1498Szrj       base = unshare_expr (TREE_OPERAND (base, 0));
175838fd1498Szrj     }
175938fd1498Szrj   else
176038fd1498Szrj     {
176138fd1498Szrj       off = build_int_cst (reference_alias_ptr_type (prev_base),
176238fd1498Szrj 			   base_offset + byte_offset);
176338fd1498Szrj       base = build_fold_addr_expr (unshare_expr (base));
176438fd1498Szrj     }
176538fd1498Szrj 
176638fd1498Szrj   unsigned int align_bound = known_alignment (misalign + offset);
176738fd1498Szrj   if (align_bound != 0)
176838fd1498Szrj     align = MIN (align, align_bound);
176938fd1498Szrj   if (align != TYPE_ALIGN (exp_type))
177038fd1498Szrj     exp_type = build_aligned_type (exp_type, align);
177138fd1498Szrj 
177238fd1498Szrj   mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
177338fd1498Szrj   REF_REVERSE_STORAGE_ORDER (mem_ref) = reverse;
177438fd1498Szrj   if (TREE_THIS_VOLATILE (prev_base))
177538fd1498Szrj     TREE_THIS_VOLATILE (mem_ref) = 1;
177638fd1498Szrj   if (TREE_SIDE_EFFECTS (prev_base))
177738fd1498Szrj     TREE_SIDE_EFFECTS (mem_ref) = 1;
177838fd1498Szrj   return mem_ref;
177938fd1498Szrj }
178038fd1498Szrj 
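/* Rough picture of the result (assuming BASE is a plain VAR_DECL v and OFFSET
   is 64 bits): the function returns something along the lines of
   MEM_REF <exp_type> [&v + 8B], with the alignment of EXP_TYPE adjusted if
   the alignment known for V at that offset differs.  */
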
178138fd1498Szrj /* Construct a memory reference to a part of an aggregate BASE at the given
178238fd1498Szrj    OFFSET and of the same type as MODEL.  In case this is a reference to a
178338fd1498Szrj    bit-field, the function will replicate the last component_ref of model's
178438fd1498Szrj    expr to access it.  GSI and INSERT_AFTER have the same meaning as in
178538fd1498Szrj    build_ref_for_offset.  */
178638fd1498Szrj 
178738fd1498Szrj static tree
178838fd1498Szrj build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
178938fd1498Szrj 		     struct access *model, gimple_stmt_iterator *gsi,
179038fd1498Szrj 		     bool insert_after)
179138fd1498Szrj {
179238fd1498Szrj   if (TREE_CODE (model->expr) == COMPONENT_REF
179338fd1498Szrj       && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
179438fd1498Szrj     {
179538fd1498Szrj       /* This access represents a bit-field.  */
179638fd1498Szrj       tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);
179738fd1498Szrj 
179838fd1498Szrj       offset -= int_bit_position (fld);
179938fd1498Szrj       exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
180038fd1498Szrj       t = build_ref_for_offset (loc, base, offset, model->reverse, exp_type,
180138fd1498Szrj 				gsi, insert_after);
180238fd1498Szrj       /* The flag will be set on the record type.  */
180338fd1498Szrj       REF_REVERSE_STORAGE_ORDER (t) = 0;
180438fd1498Szrj       return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
180538fd1498Szrj 			      NULL_TREE);
180638fd1498Szrj     }
180738fd1498Szrj   else
180838fd1498Szrj     return
180938fd1498Szrj       build_ref_for_offset (loc, base, offset, model->reverse, model->type,
181038fd1498Szrj 			    gsi, insert_after);
181138fd1498Szrj }
181238fd1498Szrj 
181338fd1498Szrj /* Attempt to build a memory reference that we could put into a gimple
181438fd1498Szrj    debug_bind statement.  Similar to build_ref_for_model but punts if it has to
181538fd1498Szrj    create statements and returns NULL instead.  This function also ignores
181638fd1498Szrj    alignment issues and so its results should never end up in non-debug
181738fd1498Szrj    statements.  */
181838fd1498Szrj 
181938fd1498Szrj static tree
182038fd1498Szrj build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
182138fd1498Szrj 			   struct access *model)
182238fd1498Szrj {
182338fd1498Szrj   poly_int64 base_offset;
182438fd1498Szrj   tree off;
182538fd1498Szrj 
182638fd1498Szrj   if (TREE_CODE (model->expr) == COMPONENT_REF
182738fd1498Szrj       && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
182838fd1498Szrj     return NULL_TREE;
182938fd1498Szrj 
183038fd1498Szrj   base = get_addr_base_and_unit_offset (base, &base_offset);
183138fd1498Szrj   if (!base)
183238fd1498Szrj     return NULL_TREE;
183338fd1498Szrj   if (TREE_CODE (base) == MEM_REF)
183438fd1498Szrj     {
183538fd1498Szrj       off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
183638fd1498Szrj 			   base_offset + offset / BITS_PER_UNIT);
183738fd1498Szrj       off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
183838fd1498Szrj       base = unshare_expr (TREE_OPERAND (base, 0));
183938fd1498Szrj     }
184038fd1498Szrj   else
184138fd1498Szrj     {
184238fd1498Szrj       off = build_int_cst (reference_alias_ptr_type (base),
184338fd1498Szrj 			   base_offset + offset / BITS_PER_UNIT);
184438fd1498Szrj       base = build_fold_addr_expr (unshare_expr (base));
184538fd1498Szrj     }
184638fd1498Szrj 
184738fd1498Szrj   return fold_build2_loc (loc, MEM_REF, model->type, base, off);
184838fd1498Szrj }
184938fd1498Szrj 
185038fd1498Szrj /* Construct a memory reference consisting of component_refs and array_refs to
185138fd1498Szrj    a part of an aggregate *RES (which is of type TYPE).  The requested part
185238fd1498Szrj    should have type EXP_TYPE and be at the given OFFSET.  This function might not
185338fd1498Szrj    succeed, it returns true when it does and only then *RES points to something
185438fd1498Szrj    meaningful.  This function should be used only to build expressions that we
185538fd1498Szrj    might need to present to the user (e.g. in warnings).  In all other situations,
185638fd1498Szrj    build_ref_for_model or build_ref_for_offset should be used instead.  */
185738fd1498Szrj 
185838fd1498Szrj static bool
185938fd1498Szrj build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
186038fd1498Szrj 				    tree exp_type)
186138fd1498Szrj {
186238fd1498Szrj   while (1)
186338fd1498Szrj     {
186438fd1498Szrj       tree fld;
186538fd1498Szrj       tree tr_size, index, minidx;
186638fd1498Szrj       HOST_WIDE_INT el_size;
186738fd1498Szrj 
186838fd1498Szrj       if (offset == 0 && exp_type
186938fd1498Szrj 	  && types_compatible_p (exp_type, type))
187038fd1498Szrj 	return true;
187138fd1498Szrj 
187238fd1498Szrj       switch (TREE_CODE (type))
187338fd1498Szrj 	{
187438fd1498Szrj 	case UNION_TYPE:
187538fd1498Szrj 	case QUAL_UNION_TYPE:
187638fd1498Szrj 	case RECORD_TYPE:
187738fd1498Szrj 	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
187838fd1498Szrj 	    {
187938fd1498Szrj 	      HOST_WIDE_INT pos, size;
188038fd1498Szrj 	      tree tr_pos, expr, *expr_ptr;
188138fd1498Szrj 
188238fd1498Szrj 	      if (TREE_CODE (fld) != FIELD_DECL)
188338fd1498Szrj 		continue;
188438fd1498Szrj 
188538fd1498Szrj 	      tr_pos = bit_position (fld);
188638fd1498Szrj 	      if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
188738fd1498Szrj 		continue;
188838fd1498Szrj 	      pos = tree_to_uhwi (tr_pos);
188938fd1498Szrj 	      gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
189038fd1498Szrj 	      tr_size = DECL_SIZE (fld);
189138fd1498Szrj 	      if (!tr_size || !tree_fits_uhwi_p (tr_size))
189238fd1498Szrj 		continue;
189338fd1498Szrj 	      size = tree_to_uhwi (tr_size);
189438fd1498Szrj 	      if (size == 0)
189538fd1498Szrj 		{
189638fd1498Szrj 		  if (pos != offset)
189738fd1498Szrj 		    continue;
189838fd1498Szrj 		}
189938fd1498Szrj 	      else if (pos > offset || (pos + size) <= offset)
190038fd1498Szrj 		continue;
190138fd1498Szrj 
190238fd1498Szrj 	      expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
190338fd1498Szrj 			     NULL_TREE);
190438fd1498Szrj 	      expr_ptr = &expr;
190538fd1498Szrj 	      if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
190638fd1498Szrj 						      offset - pos, exp_type))
190738fd1498Szrj 		{
190838fd1498Szrj 		  *res = expr;
190938fd1498Szrj 		  return true;
191038fd1498Szrj 		}
191138fd1498Szrj 	    }
191238fd1498Szrj 	  return false;
191338fd1498Szrj 
191438fd1498Szrj 	case ARRAY_TYPE:
191538fd1498Szrj 	  tr_size = TYPE_SIZE (TREE_TYPE (type));
191638fd1498Szrj 	  if (!tr_size || !tree_fits_uhwi_p (tr_size))
191738fd1498Szrj 	    return false;
191838fd1498Szrj 	  el_size = tree_to_uhwi (tr_size);
191938fd1498Szrj 
192038fd1498Szrj 	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
192138fd1498Szrj 	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
192238fd1498Szrj 	    return false;
192338fd1498Szrj 	  index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
192438fd1498Szrj 	  if (!integer_zerop (minidx))
192538fd1498Szrj 	    index = int_const_binop (PLUS_EXPR, index, minidx);
192638fd1498Szrj 	  *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
192738fd1498Szrj 			 NULL_TREE, NULL_TREE);
192838fd1498Szrj 	  offset = offset % el_size;
192938fd1498Szrj 	  type = TREE_TYPE (type);
193038fd1498Szrj 	  break;
193138fd1498Szrj 
193238fd1498Szrj 	default:
193338fd1498Szrj 	  if (offset != 0)
193438fd1498Szrj 	    return false;
193538fd1498Szrj 
193638fd1498Szrj 	  if (exp_type)
193738fd1498Szrj 	    return false;
193838fd1498Szrj 	  else
193938fd1498Szrj 	    return true;
194038fd1498Szrj 	}
194138fd1498Szrj     }
194238fd1498Szrj }
194338fd1498Szrj 
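/* For illustration only (assuming 32-bit int): with *RES denoting a variable
   of type struct { int a; int b[4]; } and OFFSET equal to the bit position
   of b[2], the function rewrites *RES into (*RES).b[2] provided EXP_TYPE is
   compatible with int; otherwise it gives up and returns false.  */
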
194438fd1498Szrj /* Return true iff TYPE is the stdarg va_list type.  */
194538fd1498Szrj 
194638fd1498Szrj static inline bool
194738fd1498Szrj is_va_list_type (tree type)
194838fd1498Szrj {
194938fd1498Szrj   return TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (va_list_type_node);
195038fd1498Szrj }
195138fd1498Szrj 
195238fd1498Szrj /* Print a message to the dump file explaining why a variable was rejected.  */
195338fd1498Szrj 
195438fd1498Szrj static void
195538fd1498Szrj reject (tree var, const char *msg)
195638fd1498Szrj {
195738fd1498Szrj   if (dump_file && (dump_flags & TDF_DETAILS))
195838fd1498Szrj     {
195938fd1498Szrj       fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
196038fd1498Szrj       print_generic_expr (dump_file, var);
196138fd1498Szrj       fprintf (dump_file, "\n");
196238fd1498Szrj     }
196338fd1498Szrj }
196438fd1498Szrj 
196538fd1498Szrj /* Return true if VAR is a candidate for SRA.  */
196638fd1498Szrj 
196738fd1498Szrj static bool
196838fd1498Szrj maybe_add_sra_candidate (tree var)
196938fd1498Szrj {
197038fd1498Szrj   tree type = TREE_TYPE (var);
197138fd1498Szrj   const char *msg;
197238fd1498Szrj   tree_node **slot;
197338fd1498Szrj 
197438fd1498Szrj   if (!AGGREGATE_TYPE_P (type))
197538fd1498Szrj     {
197638fd1498Szrj       reject (var, "not aggregate");
197738fd1498Szrj       return false;
197838fd1498Szrj     }
197938fd1498Szrj   /* Allow constant-pool entries (that "need to live in memory")
198038fd1498Szrj      unless we are doing IPA SRA.  */
198138fd1498Szrj   if (needs_to_live_in_memory (var)
198238fd1498Szrj       && (sra_mode == SRA_MODE_EARLY_IPA || !constant_decl_p (var)))
198338fd1498Szrj     {
198438fd1498Szrj       reject (var, "needs to live in memory");
198538fd1498Szrj       return false;
198638fd1498Szrj     }
198738fd1498Szrj   if (TREE_THIS_VOLATILE (var))
198838fd1498Szrj     {
198938fd1498Szrj       reject (var, "is volatile");
199038fd1498Szrj       return false;
199138fd1498Szrj     }
199238fd1498Szrj   if (!COMPLETE_TYPE_P (type))
199338fd1498Szrj     {
199438fd1498Szrj       reject (var, "has incomplete type");
199538fd1498Szrj       return false;
199638fd1498Szrj     }
199738fd1498Szrj   if (!tree_fits_uhwi_p (TYPE_SIZE (type)))
199838fd1498Szrj     {
199938fd1498Szrj       reject (var, "type size not fixed");
200038fd1498Szrj       return false;
200138fd1498Szrj     }
200238fd1498Szrj   if (tree_to_uhwi (TYPE_SIZE (type)) == 0)
200338fd1498Szrj     {
200438fd1498Szrj       reject (var, "type size is zero");
200538fd1498Szrj       return false;
200638fd1498Szrj     }
200738fd1498Szrj   if (type_internals_preclude_sra_p (type, &msg))
200838fd1498Szrj     {
200938fd1498Szrj       reject (var, msg);
201038fd1498Szrj       return false;
201138fd1498Szrj     }
201238fd1498Szrj   if (/* Fix for PR 41089.  tree-stdarg.c needs to have va_lists intact but
201338fd1498Szrj 	 we also want to schedule it rather late.  Thus we ignore it in
201438fd1498Szrj 	 the early pass. */
201538fd1498Szrj       (sra_mode == SRA_MODE_EARLY_INTRA
201638fd1498Szrj        && is_va_list_type (type)))
201738fd1498Szrj     {
201838fd1498Szrj       reject (var, "is va_list");
201938fd1498Szrj       return false;
202038fd1498Szrj     }
202138fd1498Szrj 
202238fd1498Szrj   bitmap_set_bit (candidate_bitmap, DECL_UID (var));
202338fd1498Szrj   slot = candidates->find_slot_with_hash (var, DECL_UID (var), INSERT);
202438fd1498Szrj   *slot = var;
202538fd1498Szrj 
202638fd1498Szrj   if (dump_file && (dump_flags & TDF_DETAILS))
202738fd1498Szrj     {
202838fd1498Szrj       fprintf (dump_file, "Candidate (%d): ", DECL_UID (var));
202938fd1498Szrj       print_generic_expr (dump_file, var);
203038fd1498Szrj       fprintf (dump_file, "\n");
203138fd1498Szrj     }
203238fd1498Szrj 
203338fd1498Szrj   return true;
203438fd1498Szrj }
203538fd1498Szrj 
203638fd1498Szrj /* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
203738fd1498Szrj    those declarations whose type is suitable for scalarization.  */
203838fd1498Szrj 
203938fd1498Szrj static bool
204038fd1498Szrj find_var_candidates (void)
204138fd1498Szrj {
204238fd1498Szrj   tree var, parm;
204338fd1498Szrj   unsigned int i;
204438fd1498Szrj   bool ret = false;
204538fd1498Szrj 
204638fd1498Szrj   for (parm = DECL_ARGUMENTS (current_function_decl);
204738fd1498Szrj        parm;
204838fd1498Szrj        parm = DECL_CHAIN (parm))
204938fd1498Szrj     ret |= maybe_add_sra_candidate (parm);
205038fd1498Szrj 
205138fd1498Szrj   FOR_EACH_LOCAL_DECL (cfun, i, var)
205238fd1498Szrj     {
205338fd1498Szrj       if (!VAR_P (var))
205438fd1498Szrj         continue;
205538fd1498Szrj 
205638fd1498Szrj       ret |= maybe_add_sra_candidate (var);
205738fd1498Szrj     }
205838fd1498Szrj 
205938fd1498Szrj   return ret;
206038fd1498Szrj }
206138fd1498Szrj 
206238fd1498Szrj /* Sort all accesses for the given variable, check for partial overlaps and
206338fd1498Szrj    return NULL if there are any.  If there are none, pick a representative for
206438fd1498Szrj    each combination of offset and size and create a linked list out of them.
206538fd1498Szrj    Return the pointer to the first representative and make sure it is the first
206638fd1498Szrj    one in the vector of accesses.  */
206738fd1498Szrj 
206838fd1498Szrj static struct access *
206938fd1498Szrj sort_and_splice_var_accesses (tree var)
207038fd1498Szrj {
207138fd1498Szrj   int i, j, access_count;
207238fd1498Szrj   struct access *res, **prev_acc_ptr = &res;
207338fd1498Szrj   vec<access_p> *access_vec;
207438fd1498Szrj   bool first = true;
207538fd1498Szrj   HOST_WIDE_INT low = -1, high = 0;
207638fd1498Szrj 
207738fd1498Szrj   access_vec = get_base_access_vector (var);
207838fd1498Szrj   if (!access_vec)
207938fd1498Szrj     return NULL;
208038fd1498Szrj   access_count = access_vec->length ();
208138fd1498Szrj 
208238fd1498Szrj   /* Sort by <OFFSET, SIZE>.  */
208338fd1498Szrj   access_vec->qsort (compare_access_positions);
208438fd1498Szrj 
208538fd1498Szrj   i = 0;
208638fd1498Szrj   while (i < access_count)
208738fd1498Szrj     {
208838fd1498Szrj       struct access *access = (*access_vec)[i];
208938fd1498Szrj       bool grp_write = access->write;
209038fd1498Szrj       bool grp_read = !access->write;
209138fd1498Szrj       bool grp_scalar_write = access->write
209238fd1498Szrj 	&& is_gimple_reg_type (access->type);
209338fd1498Szrj       bool grp_scalar_read = !access->write
209438fd1498Szrj 	&& is_gimple_reg_type (access->type);
209538fd1498Szrj       bool grp_assignment_read = access->grp_assignment_read;
209638fd1498Szrj       bool grp_assignment_write = access->grp_assignment_write;
209738fd1498Szrj       bool multiple_scalar_reads = false;
209838fd1498Szrj       bool total_scalarization = access->grp_total_scalarization;
209938fd1498Szrj       bool grp_partial_lhs = access->grp_partial_lhs;
210038fd1498Szrj       bool first_scalar = is_gimple_reg_type (access->type);
210138fd1498Szrj       bool unscalarizable_region = access->grp_unscalarizable_region;
210238fd1498Szrj       bool bf_non_full_precision
210338fd1498Szrj 	= (INTEGRAL_TYPE_P (access->type)
210438fd1498Szrj 	   && TYPE_PRECISION (access->type) != access->size
210538fd1498Szrj 	   && TREE_CODE (access->expr) == COMPONENT_REF
210638fd1498Szrj 	   && DECL_BIT_FIELD (TREE_OPERAND (access->expr, 1)));
210738fd1498Szrj 
210838fd1498Szrj       if (first || access->offset >= high)
210938fd1498Szrj 	{
211038fd1498Szrj 	  first = false;
211138fd1498Szrj 	  low = access->offset;
211238fd1498Szrj 	  high = access->offset + access->size;
211338fd1498Szrj 	}
211438fd1498Szrj       else if (access->offset > low && access->offset + access->size > high)
211538fd1498Szrj 	return NULL;
211638fd1498Szrj       else
211738fd1498Szrj 	gcc_assert (access->offset >= low
211838fd1498Szrj 		    && access->offset + access->size <= high);
211938fd1498Szrj 
212038fd1498Szrj       j = i + 1;
212138fd1498Szrj       while (j < access_count)
212238fd1498Szrj 	{
212338fd1498Szrj 	  struct access *ac2 = (*access_vec)[j];
212438fd1498Szrj 	  if (ac2->offset != access->offset || ac2->size != access->size)
212538fd1498Szrj 	    break;
212638fd1498Szrj 	  if (ac2->write)
212738fd1498Szrj 	    {
212838fd1498Szrj 	      grp_write = true;
212938fd1498Szrj 	      grp_scalar_write = (grp_scalar_write
213038fd1498Szrj 				  || is_gimple_reg_type (ac2->type));
213138fd1498Szrj 	    }
213238fd1498Szrj 	  else
213338fd1498Szrj 	    {
213438fd1498Szrj 	      grp_read = true;
213538fd1498Szrj 	      if (is_gimple_reg_type (ac2->type))
213638fd1498Szrj 		{
213738fd1498Szrj 		  if (grp_scalar_read)
213838fd1498Szrj 		    multiple_scalar_reads = true;
213938fd1498Szrj 		  else
214038fd1498Szrj 		    grp_scalar_read = true;
214138fd1498Szrj 		}
214238fd1498Szrj 	    }
214338fd1498Szrj 	  grp_assignment_read |= ac2->grp_assignment_read;
214438fd1498Szrj 	  grp_assignment_write |= ac2->grp_assignment_write;
214538fd1498Szrj 	  grp_partial_lhs |= ac2->grp_partial_lhs;
214638fd1498Szrj 	  unscalarizable_region |= ac2->grp_unscalarizable_region;
214738fd1498Szrj 	  total_scalarization |= ac2->grp_total_scalarization;
214838fd1498Szrj 	  relink_to_new_repr (access, ac2);
214938fd1498Szrj 
215038fd1498Szrj 	  /* If there are both aggregate-type and scalar-type accesses with
215138fd1498Szrj 	     this combination of size and offset, the comparison function
215238fd1498Szrj 	     should have put the scalars first.  */
215338fd1498Szrj 	  gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
215438fd1498Szrj 	  /* It also prefers integral types to non-integral.  However, when the
215538fd1498Szrj 	     precision of the selected type does not span the entire area and
215638fd1498Szrj 	     should also be used for a non-integer (i.e. float), we must not
215738fd1498Szrj 	     let that happen.  Normally analyze_access_subtree expands the type
215838fd1498Szrj 	     to cover the entire area but for bit-fields it doesn't.  */
215938fd1498Szrj 	  if (bf_non_full_precision && !INTEGRAL_TYPE_P (ac2->type))
216038fd1498Szrj 	    {
216138fd1498Szrj 	      if (dump_file && (dump_flags & TDF_DETAILS))
216238fd1498Szrj 		{
216338fd1498Szrj 		  fprintf (dump_file, "Cannot scalarize the following access "
216438fd1498Szrj 			   "because insufficient precision integer type was "
216538fd1498Szrj 			   "selected.\n  ");
216638fd1498Szrj 		  dump_access (dump_file, access, false);
216738fd1498Szrj 		}
216838fd1498Szrj 	      unscalarizable_region = true;
216938fd1498Szrj 	    }
217038fd1498Szrj 	  ac2->group_representative = access;
217138fd1498Szrj 	  j++;
217238fd1498Szrj 	}
217338fd1498Szrj 
217438fd1498Szrj       i = j;
217538fd1498Szrj 
217638fd1498Szrj       access->group_representative = access;
217738fd1498Szrj       access->grp_write = grp_write;
217838fd1498Szrj       access->grp_read = grp_read;
217938fd1498Szrj       access->grp_scalar_read = grp_scalar_read;
218038fd1498Szrj       access->grp_scalar_write = grp_scalar_write;
218138fd1498Szrj       access->grp_assignment_read = grp_assignment_read;
218238fd1498Szrj       access->grp_assignment_write = grp_assignment_write;
218338fd1498Szrj       access->grp_hint = total_scalarization
218438fd1498Szrj 	|| (multiple_scalar_reads && !constant_decl_p (var));
218538fd1498Szrj       access->grp_total_scalarization = total_scalarization;
218638fd1498Szrj       access->grp_partial_lhs = grp_partial_lhs;
218738fd1498Szrj       access->grp_unscalarizable_region = unscalarizable_region;
218838fd1498Szrj 
218938fd1498Szrj       *prev_acc_ptr = access;
219038fd1498Szrj       prev_acc_ptr = &access->next_grp;
219138fd1498Szrj     }
219238fd1498Szrj 
219338fd1498Szrj   gcc_assert (res == (*access_vec)[0]);
219438fd1498Szrj   return res;
219538fd1498Szrj }
219638fd1498Szrj 
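/* Example of the overlap rule above (offsets and sizes in bits, illustrative):
   accesses <0, 64> and <32, 64> overlap without one containing the other, so
   the function returns NULL and the variable is not scalarized, whereas
   <0, 64> and <32, 32> are acceptable because the second lies entirely
   within the first.  */
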
219738fd1498Szrj /* Create a variable for the given ACCESS which determines the type, name and a
219838fd1498Szrj    few other properties.  Return the variable declaration and store it also to
219938fd1498Szrj    ACCESS->replacement.  */
220038fd1498Szrj 
220138fd1498Szrj static tree
220238fd1498Szrj create_access_replacement (struct access *access)
220338fd1498Szrj {
220438fd1498Szrj   tree repl;
220538fd1498Szrj 
220638fd1498Szrj   if (access->grp_to_be_debug_replaced)
220738fd1498Szrj     {
220838fd1498Szrj       repl = create_tmp_var_raw (access->type);
220938fd1498Szrj       DECL_CONTEXT (repl) = current_function_decl;
221038fd1498Szrj     }
221138fd1498Szrj   else
221238fd1498Szrj     /* Drop any special alignment on the type if it's not on the main
221338fd1498Szrj        variant.  This avoids issues with weirdo ABIs like AAPCS.  */
221438fd1498Szrj     repl = create_tmp_var (build_qualified_type
221538fd1498Szrj 			     (TYPE_MAIN_VARIANT (access->type),
221638fd1498Szrj 			      TYPE_QUALS (access->type)), "SR");
221738fd1498Szrj   if (TREE_CODE (access->type) == COMPLEX_TYPE
221838fd1498Szrj       || TREE_CODE (access->type) == VECTOR_TYPE)
221938fd1498Szrj     {
222038fd1498Szrj       if (!access->grp_partial_lhs)
222138fd1498Szrj 	DECL_GIMPLE_REG_P (repl) = 1;
222238fd1498Szrj     }
222338fd1498Szrj   else if (access->grp_partial_lhs
222438fd1498Szrj 	   && is_gimple_reg_type (access->type))
222538fd1498Szrj     TREE_ADDRESSABLE (repl) = 1;
222638fd1498Szrj 
222738fd1498Szrj   DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
222838fd1498Szrj   DECL_ARTIFICIAL (repl) = 1;
222938fd1498Szrj   DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
223038fd1498Szrj 
223138fd1498Szrj   if (DECL_NAME (access->base)
223238fd1498Szrj       && !DECL_IGNORED_P (access->base)
223338fd1498Szrj       && !DECL_ARTIFICIAL (access->base))
223438fd1498Szrj     {
223538fd1498Szrj       char *pretty_name = make_fancy_name (access->expr);
223638fd1498Szrj       tree debug_expr = unshare_expr_without_location (access->expr), d;
223738fd1498Szrj       bool fail = false;
223838fd1498Szrj 
223938fd1498Szrj       DECL_NAME (repl) = get_identifier (pretty_name);
224038fd1498Szrj       DECL_NAMELESS (repl) = 1;
224138fd1498Szrj       obstack_free (&name_obstack, pretty_name);
224238fd1498Szrj 
224338fd1498Szrj       /* Get rid of any SSA_NAMEs embedded in debug_expr,
224438fd1498Szrj 	 as DECL_DEBUG_EXPR isn't considered when looking for still
224538fd1498Szrj 	 used SSA_NAMEs and thus they could be freed.  All debug info
224638fd1498Szrj 	 generation cares is whether something is constant or variable
224738fd1498Szrj 	 and that get_ref_base_and_extent works properly on the
224838fd1498Szrj 	 expression.  It cannot handle accesses at a non-constant offset
224938fd1498Szrj 	 though, so just give up in those cases.  */
225038fd1498Szrj       for (d = debug_expr;
225138fd1498Szrj 	   !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
225238fd1498Szrj 	   d = TREE_OPERAND (d, 0))
225338fd1498Szrj 	switch (TREE_CODE (d))
225438fd1498Szrj 	  {
225538fd1498Szrj 	  case ARRAY_REF:
225638fd1498Szrj 	  case ARRAY_RANGE_REF:
225738fd1498Szrj 	    if (TREE_OPERAND (d, 1)
225838fd1498Szrj 		&& TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
225938fd1498Szrj 	      fail = true;
226038fd1498Szrj 	    if (TREE_OPERAND (d, 3)
226138fd1498Szrj 		&& TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
226238fd1498Szrj 	      fail = true;
226338fd1498Szrj 	    /* FALLTHRU */
226438fd1498Szrj 	  case COMPONENT_REF:
226538fd1498Szrj 	    if (TREE_OPERAND (d, 2)
226638fd1498Szrj 		&& TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
226738fd1498Szrj 	      fail = true;
226838fd1498Szrj 	    break;
226938fd1498Szrj 	  case MEM_REF:
227038fd1498Szrj 	    if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
227138fd1498Szrj 	      fail = true;
227238fd1498Szrj 	    else
227338fd1498Szrj 	      d = TREE_OPERAND (d, 0);
227438fd1498Szrj 	    break;
227538fd1498Szrj 	  default:
227638fd1498Szrj 	    break;
227738fd1498Szrj 	  }
227838fd1498Szrj       if (!fail)
227938fd1498Szrj 	{
228038fd1498Szrj 	  SET_DECL_DEBUG_EXPR (repl, debug_expr);
228138fd1498Szrj 	  DECL_HAS_DEBUG_EXPR_P (repl) = 1;
228238fd1498Szrj 	}
228338fd1498Szrj       if (access->grp_no_warning)
228438fd1498Szrj 	TREE_NO_WARNING (repl) = 1;
228538fd1498Szrj       else
228638fd1498Szrj 	TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
228738fd1498Szrj     }
228838fd1498Szrj   else
228938fd1498Szrj     TREE_NO_WARNING (repl) = 1;
229038fd1498Szrj 
229138fd1498Szrj   if (dump_file)
229238fd1498Szrj     {
229338fd1498Szrj       if (access->grp_to_be_debug_replaced)
229438fd1498Szrj 	{
229538fd1498Szrj 	  fprintf (dump_file, "Created a debug-only replacement for ");
229638fd1498Szrj 	  print_generic_expr (dump_file, access->base);
229738fd1498Szrj 	  fprintf (dump_file, " offset: %u, size: %u\n",
229838fd1498Szrj 		   (unsigned) access->offset, (unsigned) access->size);
229938fd1498Szrj 	}
230038fd1498Szrj       else
230138fd1498Szrj 	{
230238fd1498Szrj 	  fprintf (dump_file, "Created a replacement for ");
230338fd1498Szrj 	  print_generic_expr (dump_file, access->base);
230438fd1498Szrj 	  fprintf (dump_file, " offset: %u, size: %u: ",
230538fd1498Szrj 		   (unsigned) access->offset, (unsigned) access->size);
230638fd1498Szrj 	  print_generic_expr (dump_file, repl);
230738fd1498Szrj 	  fprintf (dump_file, "\n");
230838fd1498Szrj 	}
230938fd1498Szrj     }
231038fd1498Szrj   sra_stats.replacements++;
231138fd1498Szrj 
231238fd1498Szrj   return repl;
231338fd1498Szrj }
231438fd1498Szrj 
231538fd1498Szrj /* Return ACCESS scalar replacement, which must exist.  */
231638fd1498Szrj 
231738fd1498Szrj static inline tree
231838fd1498Szrj get_access_replacement (struct access *access)
231938fd1498Szrj {
232038fd1498Szrj   gcc_checking_assert (access->replacement_decl);
232138fd1498Szrj   return access->replacement_decl;
232238fd1498Szrj }
232338fd1498Szrj 
232438fd1498Szrj 
232538fd1498Szrj /* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
232638fd1498Szrj    linked list along the way.  Stop when *ACCESS is NULL or the access pointed
232738fd1498Szrj    to by it is not "within" the root.  Return false iff some accesses partially
232838fd1498Szrj    overlap.  */
232938fd1498Szrj 
233038fd1498Szrj static bool
233138fd1498Szrj build_access_subtree (struct access **access)
233238fd1498Szrj {
233338fd1498Szrj   struct access *root = *access, *last_child = NULL;
233438fd1498Szrj   HOST_WIDE_INT limit = root->offset + root->size;
233538fd1498Szrj 
233638fd1498Szrj   *access = (*access)->next_grp;
233738fd1498Szrj   while  (*access && (*access)->offset + (*access)->size <= limit)
233838fd1498Szrj     {
233938fd1498Szrj       if (!last_child)
234038fd1498Szrj 	root->first_child = *access;
234138fd1498Szrj       else
234238fd1498Szrj 	last_child->next_sibling = *access;
234338fd1498Szrj       last_child = *access;
234438fd1498Szrj       (*access)->parent = root;
234538fd1498Szrj       (*access)->grp_write |= root->grp_write;
234638fd1498Szrj 
234738fd1498Szrj       if (!build_access_subtree (access))
234838fd1498Szrj 	return false;
234938fd1498Szrj     }
235038fd1498Szrj 
235138fd1498Szrj   if (*access && (*access)->offset < limit)
235238fd1498Szrj     return false;
235338fd1498Szrj 
235438fd1498Szrj   return true;
235538fd1498Szrj }
235638fd1498Szrj 
235738fd1498Szrj /* Build a tree of access representatives, ACCESS is the pointer to the first
235838fd1498Szrj    one, others are linked in a list by the next_grp field.  Return false iff
235938fd1498Szrj    some accesses partially overlap.  */
236038fd1498Szrj 
236138fd1498Szrj static bool
236238fd1498Szrj build_access_trees (struct access *access)
236338fd1498Szrj {
236438fd1498Szrj   while (access)
236538fd1498Szrj     {
236638fd1498Szrj       struct access *root = access;
236738fd1498Szrj 
236838fd1498Szrj       if (!build_access_subtree (&access))
236938fd1498Szrj 	return false;
237038fd1498Szrj       root->next_grp = access;
237138fd1498Szrj     }
237238fd1498Szrj   return true;
237338fd1498Szrj }
237438fd1498Szrj 
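/* Sketch (offsets and sizes in bits, purely illustrative): representatives
   <0, 128>, <0, 32>, <64, 64>, <64, 32> would form a tree in which <0, 128>
   is a root with children <0, 32> and <64, 64>, and <64, 32> becomes a child
   of <64, 64>; the first representative that does not fit under <0, 128>
   would start a new root.  */
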
237538fd1498Szrj /* Return true if expr contains some ARRAY_REFs into a variable bounded
237638fd1498Szrj    array.  */
237738fd1498Szrj 
237838fd1498Szrj static bool
237938fd1498Szrj expr_with_var_bounded_array_refs_p (tree expr)
238038fd1498Szrj {
238138fd1498Szrj   while (handled_component_p (expr))
238238fd1498Szrj     {
238338fd1498Szrj       if (TREE_CODE (expr) == ARRAY_REF
238438fd1498Szrj 	  && !tree_fits_shwi_p (array_ref_low_bound (expr)))
238538fd1498Szrj 	return true;
238638fd1498Szrj       expr = TREE_OPERAND (expr, 0);
238738fd1498Szrj     }
238838fd1498Szrj   return false;
238938fd1498Szrj }
239038fd1498Szrj 
239138fd1498Szrj /* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
239238fd1498Szrj    doing so seems beneficial and ALLOW_REPLACEMENTS allows it.  Also set all
239338fd1498Szrj    sorts of access flags appropriately along the way, notably propagating
239438fd1498Szrj    grp_read, grp_assignment_read, grp_write, grp_assignment_write and
239538fd1498Szrj    grp_total_scalarization from PARENT to ROOT.
239638fd1498Szrj 
239738fd1498Szrj    Creating a replacement for a scalar access is considered beneficial if its
239838fd1498Szrj    grp_hint is set (this means we are either attempting total scalarization or
239938fd1498Szrj    there is more than one direct read access) or according to the following
240038fd1498Szrj    table:
240138fd1498Szrj 
240238fd1498Szrj    Access written to through a scalar type (once or more times)
240338fd1498Szrj    |
240438fd1498Szrj    |	Written to in an assignment statement
240538fd1498Szrj    |	|
240638fd1498Szrj    |	|	Access read as scalar _once_
240738fd1498Szrj    |	|	|
240838fd1498Szrj    |   	|	|	Read in an assignment statement
240938fd1498Szrj    |	|	|	|
241038fd1498Szrj    |   	|	|	|	Scalarize	Comment
241138fd1498Szrj -----------------------------------------------------------------------------
241238fd1498Szrj    0	0	0	0			No access for the scalar
241338fd1498Szrj    0	0	0	1			No access for the scalar
241438fd1498Szrj    0	0	1	0	No		Single read - won't help
241538fd1498Szrj    0	0	1	1	No		The same case
241638fd1498Szrj    0	1	0	0			No access for the scalar
241738fd1498Szrj    0	1	0	1			No access for the scalar
241838fd1498Szrj    0	1	1	0	Yes		s = *g; return s.i;
241938fd1498Szrj    0	1	1	1       Yes		The same case as above
242038fd1498Szrj    1	0	0	0	No		Won't help
242138fd1498Szrj    1	0	0	1	Yes		s.i = 1; *g = s;
242238fd1498Szrj    1	0	1	0	Yes		s.i = 5; g = s.i;
242338fd1498Szrj    1	0	1	1	Yes		The same case as above
242438fd1498Szrj    1	1	0	0	No		Won't help.
242538fd1498Szrj    1	1	0	1	Yes		s.i = 1; *g = s;
242638fd1498Szrj    1	1	1	0	Yes		s = *g; return s.i;
242738fd1498Szrj    1	1	1	1	Yes		Any of the above yeses  */
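
/* Hedged, illustrative instance of the "0 1 1 0" row above (an assumed
   typical input, not a transcript of the pass):

     struct S { int i; };
     struct S *g;

     int
     f (void)
     {
       struct S s;
       s = *g;
       return s.i;
     }

   Here s is written to only in an assignment statement and s.i is read as
   a scalar exactly once, so creating a scalar replacement for s.i is
   considered beneficial.  */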
242838fd1498Szrj 
242938fd1498Szrj static bool
243038fd1498Szrj analyze_access_subtree (struct access *root, struct access *parent,
243138fd1498Szrj 			bool allow_replacements)
243238fd1498Szrj {
243338fd1498Szrj   struct access *child;
243438fd1498Szrj   HOST_WIDE_INT limit = root->offset + root->size;
243538fd1498Szrj   HOST_WIDE_INT covered_to = root->offset;
243638fd1498Szrj   bool scalar = is_gimple_reg_type (root->type);
243738fd1498Szrj   bool hole = false, sth_created = false;
243838fd1498Szrj 
243938fd1498Szrj   if (parent)
244038fd1498Szrj     {
244138fd1498Szrj       if (parent->grp_read)
244238fd1498Szrj 	root->grp_read = 1;
244338fd1498Szrj       if (parent->grp_assignment_read)
244438fd1498Szrj 	root->grp_assignment_read = 1;
244538fd1498Szrj       if (parent->grp_write)
244638fd1498Szrj 	root->grp_write = 1;
244738fd1498Szrj       if (parent->grp_assignment_write)
244838fd1498Szrj 	root->grp_assignment_write = 1;
244938fd1498Szrj       if (parent->grp_total_scalarization)
245038fd1498Szrj 	root->grp_total_scalarization = 1;
245138fd1498Szrj     }
245238fd1498Szrj 
245338fd1498Szrj   if (root->grp_unscalarizable_region)
245438fd1498Szrj     allow_replacements = false;
245538fd1498Szrj 
245638fd1498Szrj   if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
245738fd1498Szrj     allow_replacements = false;
245838fd1498Szrj 
245938fd1498Szrj   for (child = root->first_child; child; child = child->next_sibling)
246038fd1498Szrj     {
246138fd1498Szrj       hole |= covered_to < child->offset;
246238fd1498Szrj       sth_created |= analyze_access_subtree (child, root,
246338fd1498Szrj 					     allow_replacements && !scalar);
246438fd1498Szrj 
246538fd1498Szrj       root->grp_unscalarized_data |= child->grp_unscalarized_data;
246638fd1498Szrj       root->grp_total_scalarization &= child->grp_total_scalarization;
246738fd1498Szrj       if (child->grp_covered)
246838fd1498Szrj 	covered_to += child->size;
246938fd1498Szrj       else
247038fd1498Szrj 	hole = true;
247138fd1498Szrj     }
247238fd1498Szrj 
247338fd1498Szrj   if (allow_replacements && scalar && !root->first_child
247438fd1498Szrj       && (root->grp_hint
247538fd1498Szrj 	  || ((root->grp_scalar_read || root->grp_assignment_read)
247638fd1498Szrj 	      && (root->grp_scalar_write || root->grp_assignment_write))))
247738fd1498Szrj     {
247838fd1498Szrj       /* Always create access replacements that cover the whole access.
247938fd1498Szrj          For integral types this means the precision has to match.
248038fd1498Szrj 	 Avoid assumptions based on the integral type kind, too.  */
248138fd1498Szrj       if (INTEGRAL_TYPE_P (root->type)
248238fd1498Szrj 	  && (TREE_CODE (root->type) != INTEGER_TYPE
248338fd1498Szrj 	      || TYPE_PRECISION (root->type) != root->size)
248438fd1498Szrj 	  /* But leave bitfield accesses alone.  */
248538fd1498Szrj 	  && (TREE_CODE (root->expr) != COMPONENT_REF
248638fd1498Szrj 	      || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
248738fd1498Szrj 	{
248838fd1498Szrj 	  tree rt = root->type;
248938fd1498Szrj 	  gcc_assert ((root->offset % BITS_PER_UNIT) == 0
249038fd1498Szrj 		      && (root->size % BITS_PER_UNIT) == 0);
249138fd1498Szrj 	  root->type = build_nonstandard_integer_type (root->size,
249238fd1498Szrj 						       TYPE_UNSIGNED (rt));
249338fd1498Szrj 	  root->expr = build_ref_for_offset (UNKNOWN_LOCATION, root->base,
249438fd1498Szrj 					     root->offset, root->reverse,
249538fd1498Szrj 					     root->type, NULL, false);
249638fd1498Szrj 
249738fd1498Szrj 	  if (dump_file && (dump_flags & TDF_DETAILS))
249838fd1498Szrj 	    {
249938fd1498Szrj 	      fprintf (dump_file, "Changing the type of a replacement for ");
250038fd1498Szrj 	      print_generic_expr (dump_file, root->base);
250138fd1498Szrj 	      fprintf (dump_file, " offset: %u, size: %u ",
250238fd1498Szrj 		       (unsigned) root->offset, (unsigned) root->size);
250338fd1498Szrj 	      fprintf (dump_file, " to an integer.\n");
250438fd1498Szrj 	    }
250538fd1498Szrj 	}
250638fd1498Szrj 
250738fd1498Szrj       root->grp_to_be_replaced = 1;
250838fd1498Szrj       root->replacement_decl = create_access_replacement (root);
250938fd1498Szrj       sth_created = true;
251038fd1498Szrj       hole = false;
251138fd1498Szrj     }
251238fd1498Szrj   else
251338fd1498Szrj     {
251438fd1498Szrj       if (allow_replacements
251538fd1498Szrj 	  && scalar && !root->first_child
251638fd1498Szrj 	  && (root->grp_scalar_write || root->grp_assignment_write)
251738fd1498Szrj 	  && !bitmap_bit_p (cannot_scalarize_away_bitmap,
251838fd1498Szrj 			    DECL_UID (root->base)))
251938fd1498Szrj 	{
252038fd1498Szrj 	  gcc_checking_assert (!root->grp_scalar_read
252138fd1498Szrj 			       && !root->grp_assignment_read);
252238fd1498Szrj 	  sth_created = true;
252338fd1498Szrj 	  if (MAY_HAVE_DEBUG_BIND_STMTS)
252438fd1498Szrj 	    {
252538fd1498Szrj 	      root->grp_to_be_debug_replaced = 1;
252638fd1498Szrj 	      root->replacement_decl = create_access_replacement (root);
252738fd1498Szrj 	    }
252838fd1498Szrj 	}
252938fd1498Szrj 
253038fd1498Szrj       if (covered_to < limit)
253138fd1498Szrj 	hole = true;
253238fd1498Szrj       if (scalar || !allow_replacements)
253338fd1498Szrj 	root->grp_total_scalarization = 0;
253438fd1498Szrj     }
253538fd1498Szrj 
253638fd1498Szrj   if (!hole || root->grp_total_scalarization)
253738fd1498Szrj     root->grp_covered = 1;
253838fd1498Szrj   else if (root->grp_write || comes_initialized_p (root->base))
253938fd1498Szrj     root->grp_unscalarized_data = 1; /* not covered and written to */
254038fd1498Szrj   return sth_created;
254138fd1498Szrj }
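
/* Hedged example of the type adjustment above (an assumed typical case,
   not a transcript of the pass): given

     struct S { _Bool flag; int i; };

   a scalar access to the flag member has an integral type that is not an
   INTEGER_TYPE whose precision equals the access size, so its replacement
   gets a nonstandard integer type of exactly root->size bits (8 bits for
   the usual one-byte _Bool) and root->expr is rebuilt accordingly, while
   bitfield accesses are deliberately left alone.  */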
254238fd1498Szrj 
254338fd1498Szrj /* Analyze all access trees linked by next_grp by means of
254438fd1498Szrj    analyze_access_subtree.  */
254538fd1498Szrj static bool
254638fd1498Szrj analyze_access_trees (struct access *access)
254738fd1498Szrj {
254838fd1498Szrj   bool ret = false;
254938fd1498Szrj 
255038fd1498Szrj   while (access)
255138fd1498Szrj     {
255238fd1498Szrj       if (analyze_access_subtree (access, NULL, true))
255338fd1498Szrj 	ret = true;
255438fd1498Szrj       access = access->next_grp;
255538fd1498Szrj     }
255638fd1498Szrj 
255738fd1498Szrj   return ret;
255838fd1498Szrj }
255938fd1498Szrj 
256038fd1498Szrj /* Return true iff a potential new child of LACC at offset NORM_OFFSET and size
256138fd1498Szrj    SIZE would conflict with an already existing one.  If exactly such a child
256238fd1498Szrj    already exists in LACC, store a pointer to it in EXACT_MATCH.  */
256338fd1498Szrj 
256438fd1498Szrj static bool
256538fd1498Szrj child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
256638fd1498Szrj 			      HOST_WIDE_INT size, struct access **exact_match)
256738fd1498Szrj {
256838fd1498Szrj   struct access *child;
256938fd1498Szrj 
257038fd1498Szrj   for (child = lacc->first_child; child; child = child->next_sibling)
257138fd1498Szrj     {
257238fd1498Szrj       if (child->offset == norm_offset && child->size == size)
257338fd1498Szrj 	{
257438fd1498Szrj 	  *exact_match = child;
257538fd1498Szrj 	  return true;
257638fd1498Szrj 	}
257738fd1498Szrj 
257838fd1498Szrj       if (child->offset < norm_offset + size
257938fd1498Szrj 	  && child->offset + child->size > norm_offset)
258038fd1498Szrj 	return true;
258138fd1498Szrj     }
258238fd1498Szrj 
258338fd1498Szrj   return false;
258438fd1498Szrj }
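
/* Stand-alone sketch of the interval test used above, for illustration
   only (offsets and sizes are in bits, as elsewhere in this file):

     static bool
     ranges_conflict_p (HOST_WIDE_INT off1, HOST_WIDE_INT size1,
                        HOST_WIDE_INT off2, HOST_WIDE_INT size2)
     {
       return off1 < off2 + size2 && off2 < off1 + size1;
     }

   An exact match (same offset and size) is reported through EXACT_MATCH
   instead, so callers can reuse the existing child rather than treat it
   as a conflict.  */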
258538fd1498Szrj 
258638fd1498Szrj /* Create a new child access of PARENT, with all properties just like MODEL
258738fd1498Szrj    except for its offset and with its grp_write false and grp_read true.
258838fd1498Szrj    Return the new access or NULL if it cannot be created.  Note that this
258938fd1498Szrj    access is created long after all splicing and sorting, it's not located in
259038fd1498Szrj    any access vector and is automatically a representative of its group.  Set
259138fd1498Szrj    the gpr_write flag of the new accesss if SET_GRP_WRITE is true.  */
259238fd1498Szrj 
259338fd1498Szrj static struct access *
259438fd1498Szrj create_artificial_child_access (struct access *parent, struct access *model,
259538fd1498Szrj 				HOST_WIDE_INT new_offset,
259638fd1498Szrj 				bool set_grp_write)
259738fd1498Szrj {
259838fd1498Szrj   struct access **child;
259938fd1498Szrj   tree expr = parent->base;
260038fd1498Szrj 
260138fd1498Szrj   gcc_assert (!model->grp_unscalarizable_region);
260238fd1498Szrj 
260338fd1498Szrj   struct access *access = access_pool.allocate ();
260438fd1498Szrj   memset (access, 0, sizeof (struct access));
260538fd1498Szrj   if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
260638fd1498Szrj 					   model->type))
260738fd1498Szrj     {
260838fd1498Szrj       access->grp_no_warning = true;
260938fd1498Szrj       expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
261038fd1498Szrj 				  new_offset, model, NULL, false);
261138fd1498Szrj     }
261238fd1498Szrj 
261338fd1498Szrj   access->base = parent->base;
261438fd1498Szrj   access->expr = expr;
261538fd1498Szrj   access->offset = new_offset;
261638fd1498Szrj   access->size = model->size;
261738fd1498Szrj   access->type = model->type;
261838fd1498Szrj   access->grp_write = set_grp_write;
261938fd1498Szrj   access->grp_read = false;
262038fd1498Szrj   access->reverse = model->reverse;
262138fd1498Szrj 
262238fd1498Szrj   child = &parent->first_child;
262338fd1498Szrj   while (*child && (*child)->offset < new_offset)
262438fd1498Szrj     child = &(*child)->next_sibling;
262538fd1498Szrj 
262638fd1498Szrj   access->next_sibling = *child;
262738fd1498Szrj   *child = access;
262838fd1498Szrj 
262938fd1498Szrj   return access;
263038fd1498Szrj }
263138fd1498Szrj 
263238fd1498Szrj 
263338fd1498Szrj /* Beginning with ACCESS, traverse its whole access subtree and mark all
263438fd1498Szrj    sub-trees as written to.  If any of them has not been marked so previously
263538fd1498Szrj    and has assignment links leading from it, re-enqueue it.  */
263638fd1498Szrj 
263738fd1498Szrj static void
263838fd1498Szrj subtree_mark_written_and_enqueue (struct access *access)
263938fd1498Szrj {
264038fd1498Szrj   if (access->grp_write)
264138fd1498Szrj     return;
264238fd1498Szrj   access->grp_write = true;
264338fd1498Szrj   add_access_to_work_queue (access);
264438fd1498Szrj 
264538fd1498Szrj   struct access *child;
264638fd1498Szrj   for (child = access->first_child; child; child = child->next_sibling)
264738fd1498Szrj     subtree_mark_written_and_enqueue (child);
264838fd1498Szrj }
264938fd1498Szrj 
265038fd1498Szrj /* Propagate subaccesses and grp_write flags of RACC across an assignment link
265138fd1498Szrj    to LACC.  Enqueue sub-accesses as necessary so that the write flag is
265238fd1498Szrj    propagated transitively.  Return true if anything changed.  Additionally, if
265338fd1498Szrj    RACC is a scalar access but LACC is not, change the type of the latter, if
265438fd1498Szrj    possible.  */
265538fd1498Szrj 
265638fd1498Szrj static bool
265738fd1498Szrj propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
265838fd1498Szrj {
265938fd1498Szrj   struct access *rchild;
266038fd1498Szrj   HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
266138fd1498Szrj   bool ret = false;
266238fd1498Szrj 
266338fd1498Szrj   /* If the LHS is still not marked as being written to, we only need to do so
266438fd1498Szrj      if the RHS at this level actually was.  */
266538fd1498Szrj   if (!lacc->grp_write)
266638fd1498Szrj     {
266738fd1498Szrj       gcc_checking_assert (!comes_initialized_p (racc->base));
266838fd1498Szrj       if (racc->grp_write)
266938fd1498Szrj 	{
267038fd1498Szrj 	  subtree_mark_written_and_enqueue (lacc);
267138fd1498Szrj 	  ret = true;
267238fd1498Szrj 	}
267338fd1498Szrj     }
267438fd1498Szrj 
267538fd1498Szrj   if (is_gimple_reg_type (lacc->type)
267638fd1498Szrj       || lacc->grp_unscalarizable_region
267738fd1498Szrj       || racc->grp_unscalarizable_region)
267838fd1498Szrj     {
267938fd1498Szrj       if (!lacc->grp_write)
268038fd1498Szrj 	{
268138fd1498Szrj 	  ret = true;
268238fd1498Szrj 	  subtree_mark_written_and_enqueue (lacc);
268338fd1498Szrj 	}
268438fd1498Szrj       return ret;
268538fd1498Szrj     }
268638fd1498Szrj 
268738fd1498Szrj   if (is_gimple_reg_type (racc->type))
268838fd1498Szrj     {
268938fd1498Szrj       if (!lacc->grp_write)
269038fd1498Szrj 	{
269138fd1498Szrj 	  ret = true;
269238fd1498Szrj 	  subtree_mark_written_and_enqueue (lacc);
269338fd1498Szrj 	}
269438fd1498Szrj       if (!lacc->first_child && !racc->first_child)
269538fd1498Szrj 	{
269638fd1498Szrj 	  tree t = lacc->base;
269738fd1498Szrj 
269838fd1498Szrj 	  lacc->type = racc->type;
269938fd1498Szrj 	  if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
270038fd1498Szrj 						  lacc->offset, racc->type))
270138fd1498Szrj 	    lacc->expr = t;
270238fd1498Szrj 	  else
270338fd1498Szrj 	    {
270438fd1498Szrj 	      lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
270538fd1498Szrj 						lacc->base, lacc->offset,
270638fd1498Szrj 						racc, NULL, false);
270738fd1498Szrj 	      lacc->grp_no_warning = true;
270838fd1498Szrj 	    }
270938fd1498Szrj 	}
271038fd1498Szrj       return ret;
271138fd1498Szrj     }
271238fd1498Szrj 
271338fd1498Szrj   for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
271438fd1498Szrj     {
271538fd1498Szrj       struct access *new_acc = NULL;
271638fd1498Szrj       HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;
271738fd1498Szrj 
271838fd1498Szrj       if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
271938fd1498Szrj 					&new_acc))
272038fd1498Szrj 	{
272138fd1498Szrj 	  if (new_acc)
272238fd1498Szrj 	    {
272338fd1498Szrj 	      if (!new_acc->grp_write && rchild->grp_write)
272438fd1498Szrj 		{
272538fd1498Szrj 		  gcc_assert (!lacc->grp_write);
272638fd1498Szrj 		  subtree_mark_written_and_enqueue (new_acc);
272738fd1498Szrj 		  ret = true;
272838fd1498Szrj 		}
272938fd1498Szrj 
273038fd1498Szrj 	      rchild->grp_hint = 1;
273138fd1498Szrj 	      new_acc->grp_hint |= new_acc->grp_read;
2732*e215fc28Szrj 	      if (rchild->first_child
2733*e215fc28Szrj 		  && propagate_subaccesses_across_link (new_acc, rchild))
2734*e215fc28Szrj 		{
2735*e215fc28Szrj 		  ret = 1;
2736*e215fc28Szrj 		  add_access_to_work_queue (new_acc);
2737*e215fc28Szrj 		}
273838fd1498Szrj 	    }
273938fd1498Szrj 	  else
274038fd1498Szrj 	    {
274138fd1498Szrj 	      if (!lacc->grp_write)
274238fd1498Szrj 		{
274338fd1498Szrj 		  ret = true;
274438fd1498Szrj 		  subtree_mark_written_and_enqueue (lacc);
274538fd1498Szrj 		}
274638fd1498Szrj 	    }
274738fd1498Szrj 	  continue;
274838fd1498Szrj 	}
274938fd1498Szrj 
275038fd1498Szrj       if (rchild->grp_unscalarizable_region)
275138fd1498Szrj 	{
275238fd1498Szrj 	  if (rchild->grp_write && !lacc->grp_write)
275338fd1498Szrj 	    {
275438fd1498Szrj 	      ret = true;
275538fd1498Szrj 	      subtree_mark_written_and_enqueue (lacc);
275638fd1498Szrj 	    }
275738fd1498Szrj 	  continue;
275838fd1498Szrj 	}
275938fd1498Szrj 
276038fd1498Szrj       rchild->grp_hint = 1;
276138fd1498Szrj       new_acc = create_artificial_child_access (lacc, rchild, norm_offset,
276238fd1498Szrj 						lacc->grp_write
276338fd1498Szrj 						|| rchild->grp_write);
276438fd1498Szrj       gcc_checking_assert (new_acc);
276538fd1498Szrj       if (racc->first_child)
276638fd1498Szrj 	propagate_subaccesses_across_link (new_acc, rchild);
276738fd1498Szrj 
276838fd1498Szrj       add_access_to_work_queue (lacc);
276938fd1498Szrj       ret = true;
277038fd1498Szrj     }
277138fd1498Szrj 
277238fd1498Szrj   return ret;
277338fd1498Szrj }
277438fd1498Szrj 
277538fd1498Szrj /* Propagate all subaccesses across assignment links.  */
277638fd1498Szrj 
277738fd1498Szrj static void
277838fd1498Szrj propagate_all_subaccesses (void)
277938fd1498Szrj {
278038fd1498Szrj   while (work_queue_head)
278138fd1498Szrj     {
278238fd1498Szrj       struct access *racc = pop_access_from_work_queue ();
278338fd1498Szrj       struct assign_link *link;
278438fd1498Szrj 
278538fd1498Szrj       if (racc->group_representative)
278638fd1498Szrj 	racc = racc->group_representative;
278738fd1498Szrj       gcc_assert (racc->first_link);
278838fd1498Szrj 
278938fd1498Szrj       for (link = racc->first_link; link; link = link->next)
279038fd1498Szrj 	{
279138fd1498Szrj 	  struct access *lacc = link->lacc;
279238fd1498Szrj 
279338fd1498Szrj 	  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
279438fd1498Szrj 	    continue;
279538fd1498Szrj 	  lacc = lacc->group_representative;
279638fd1498Szrj 
279738fd1498Szrj 	  bool reque_parents = false;
279838fd1498Szrj 	  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (racc->base)))
279938fd1498Szrj 	    {
280038fd1498Szrj 	      if (!lacc->grp_write)
280138fd1498Szrj 		{
280238fd1498Szrj 		  subtree_mark_written_and_enqueue (lacc);
280338fd1498Szrj 		  reque_parents = true;
280438fd1498Szrj 		}
280538fd1498Szrj 	    }
280638fd1498Szrj 	  else if (propagate_subaccesses_across_link (lacc, racc))
280738fd1498Szrj 	    reque_parents = true;
280838fd1498Szrj 
280938fd1498Szrj 	  if (reque_parents)
281038fd1498Szrj 	    do
281138fd1498Szrj 	      {
281238fd1498Szrj 		add_access_to_work_queue (lacc);
281338fd1498Szrj 		lacc = lacc->parent;
281438fd1498Szrj 	      }
281538fd1498Szrj 	    while (lacc);
281638fd1498Szrj 	}
281738fd1498Szrj     }
281838fd1498Szrj }
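
/* Hedged sketch of the kind of situation the propagation above handles
   (illustrative input, not a dump of the pass):

     struct S { int i; int j; };
     extern void use (int);

     int
     f (struct S *p)
     {
       struct S b = *p;
       struct S a;
       a = b;
       use (b.i);
       return a.j;
     }

   The assignment a = b creates an assign link from the accesses of b (the
   RHS) to those of a (the LHS).  The scalar subaccess b.i is then mirrored
   as an artificial child access of a, and grp_write flags are propagated
   transitively through the work queue, so that the aggregate copy can
   later be carried out as copies between scalar replacements.  */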
281938fd1498Szrj 
282038fd1498Szrj /* Go through all accesses collected throughout the (intraprocedural) analysis
282138fd1498Szrj    stage, exclude overlapping ones, identify representatives and build trees
282238fd1498Szrj    out of them, making decisions about scalarization on the way.  Return true
282338fd1498Szrj    iff there are any to-be-scalarized variables after this stage. */
282438fd1498Szrj 
282538fd1498Szrj static bool
282638fd1498Szrj analyze_all_variable_accesses (void)
282738fd1498Szrj {
282838fd1498Szrj   int res = 0;
282938fd1498Szrj   bitmap tmp = BITMAP_ALLOC (NULL);
283038fd1498Szrj   bitmap_iterator bi;
283138fd1498Szrj   unsigned i;
283238fd1498Szrj   bool optimize_speed_p = !optimize_function_for_size_p (cfun);
283338fd1498Szrj 
283438fd1498Szrj   enum compiler_param param = optimize_speed_p
283538fd1498Szrj 			? PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED
283638fd1498Szrj 			: PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE;
283738fd1498Szrj 
283838fd1498Szrj   /* If the user didn't set PARAM_SRA_MAX_SCALARIZATION_SIZE_<...>,
283938fd1498Szrj      fall back to a target default.  */
284038fd1498Szrj   unsigned HOST_WIDE_INT max_scalarization_size
284138fd1498Szrj     = global_options_set.x_param_values[param]
284238fd1498Szrj       ? PARAM_VALUE (param)
284338fd1498Szrj       : get_move_ratio (optimize_speed_p) * UNITS_PER_WORD;
284438fd1498Szrj 
284538fd1498Szrj   max_scalarization_size *= BITS_PER_UNIT;
284638fd1498Szrj 
284738fd1498Szrj   EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
284838fd1498Szrj     if (bitmap_bit_p (should_scalarize_away_bitmap, i)
284938fd1498Szrj 	&& !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
285038fd1498Szrj       {
285138fd1498Szrj 	tree var = candidate (i);
285238fd1498Szrj 
285338fd1498Szrj 	if (VAR_P (var) && scalarizable_type_p (TREE_TYPE (var),
285438fd1498Szrj 						constant_decl_p (var)))
285538fd1498Szrj 	  {
285638fd1498Szrj 	    if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
285738fd1498Szrj 		<= max_scalarization_size)
285838fd1498Szrj 	      {
285938fd1498Szrj 		create_total_scalarization_access (var);
286038fd1498Szrj 		completely_scalarize (var, TREE_TYPE (var), 0, var);
286138fd1498Szrj 		statistics_counter_event (cfun,
286238fd1498Szrj 					  "Totally-scalarized aggregates", 1);
286338fd1498Szrj 		if (dump_file && (dump_flags & TDF_DETAILS))
286438fd1498Szrj 		  {
286538fd1498Szrj 		    fprintf (dump_file, "Will attempt to totally scalarize ");
286638fd1498Szrj 		    print_generic_expr (dump_file, var);
286738fd1498Szrj 		    fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
286838fd1498Szrj 		  }
286938fd1498Szrj 	      }
287038fd1498Szrj 	    else if (dump_file && (dump_flags & TDF_DETAILS))
287138fd1498Szrj 	      {
287238fd1498Szrj 		fprintf (dump_file, "Too big to totally scalarize: ");
287338fd1498Szrj 		print_generic_expr (dump_file, var);
287438fd1498Szrj 		fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
287538fd1498Szrj 	      }
287638fd1498Szrj 	  }
287738fd1498Szrj       }
287838fd1498Szrj 
287938fd1498Szrj   bitmap_copy (tmp, candidate_bitmap);
288038fd1498Szrj   EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
288138fd1498Szrj     {
288238fd1498Szrj       tree var = candidate (i);
288338fd1498Szrj       struct access *access;
288438fd1498Szrj 
288538fd1498Szrj       access = sort_and_splice_var_accesses (var);
288638fd1498Szrj       if (!access || !build_access_trees (access))
288738fd1498Szrj 	disqualify_candidate (var,
288838fd1498Szrj 			      "No or inhibitingly overlapping accesses.");
288938fd1498Szrj     }
289038fd1498Szrj 
289138fd1498Szrj   propagate_all_subaccesses ();
289238fd1498Szrj 
289338fd1498Szrj   bitmap_copy (tmp, candidate_bitmap);
289438fd1498Szrj   EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
289538fd1498Szrj     {
289638fd1498Szrj       tree var = candidate (i);
289738fd1498Szrj       struct access *access = get_first_repr_for_decl (var);
289838fd1498Szrj 
289938fd1498Szrj       if (analyze_access_trees (access))
290038fd1498Szrj 	{
290138fd1498Szrj 	  res++;
290238fd1498Szrj 	  if (dump_file && (dump_flags & TDF_DETAILS))
290338fd1498Szrj 	    {
290438fd1498Szrj 	      fprintf (dump_file, "\nAccess trees for ");
290538fd1498Szrj 	      print_generic_expr (dump_file, var);
290638fd1498Szrj 	      fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
290738fd1498Szrj 	      dump_access_tree (dump_file, access);
290838fd1498Szrj 	      fprintf (dump_file, "\n");
290938fd1498Szrj 	    }
291038fd1498Szrj 	}
291138fd1498Szrj       else
291238fd1498Szrj 	disqualify_candidate (var, "No scalar replacements to be created.");
291338fd1498Szrj     }
291438fd1498Szrj 
291538fd1498Szrj   BITMAP_FREE (tmp);
291638fd1498Szrj 
291738fd1498Szrj   if (res)
291838fd1498Szrj     {
291938fd1498Szrj       statistics_counter_event (cfun, "Scalarized aggregates", res);
292038fd1498Szrj       return true;
292138fd1498Szrj     }
292238fd1498Szrj   else
292338fd1498Szrj     return false;
292438fd1498Szrj }
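
/* Illustrative note on the size limit above: the two parameters
   correspond (per the GCC manual) to the user-visible options
   --param sra-max-scalarization-size-Ospeed and
   --param sra-max-scalarization-size-Osize (values in storage units);
   when neither is set, the limit defaults to the target's move ratio
   times UNITS_PER_WORD bytes.  As an assumed example, on common targets
   an aggregate like

     struct big { int a[64]; };

   exceeds the default limit and shows up as "Too big to totally
   scalarize" in detailed dumps, while a struct of a few words is totally
   scalarized.  */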
292538fd1498Szrj 
292638fd1498Szrj /* Generate statements copying scalar replacements of accesses within a subtree
292738fd1498Szrj    into or out of AGG.  ACCESS, all its children, siblings and their children
292838fd1498Szrj    are to be processed.  AGG is an aggregate type expression (can be a
292938fd1498Szrj    declaration but does not have to be, it can for example also be a mem_ref or
293038fd1498Szrj    a series of handled components).  TOP_OFFSET is the offset of the processed
293138fd1498Szrj    subtree which has to be subtracted from offsets of individual accesses to
293238fd1498Szrj    get corresponding offsets for AGG.  If CHUNK_SIZE is non-null, copy only
293338fd1498Szrj    replacements in the interval <start_offset, start_offset + chunk_size>,
293438fd1498Szrj    otherwise copy all.  GSI is a statement iterator used to place the new
293538fd1498Szrj    statements.  WRITE should be true when the statements should write from AGG
293638fd1498Szrj    to the replacement and false if vice versa.  if INSERT_AFTER is true, new
293738fd1498Szrj    statements will be added after the current statement in GSI, they will be
293838fd1498Szrj    added before the statement otherwise.  */
293938fd1498Szrj 
294038fd1498Szrj static void
294138fd1498Szrj generate_subtree_copies (struct access *access, tree agg,
294238fd1498Szrj 			 HOST_WIDE_INT top_offset,
294338fd1498Szrj 			 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
294438fd1498Szrj 			 gimple_stmt_iterator *gsi, bool write,
294538fd1498Szrj 			 bool insert_after, location_t loc)
294638fd1498Szrj {
294738fd1498Szrj   /* Never write anything into constant pool decls.  See PR70602.  */
294838fd1498Szrj   if (!write && constant_decl_p (agg))
294938fd1498Szrj     return;
295038fd1498Szrj   do
295138fd1498Szrj     {
295238fd1498Szrj       if (chunk_size && access->offset >= start_offset + chunk_size)
295338fd1498Szrj 	return;
295438fd1498Szrj 
295538fd1498Szrj       if (access->grp_to_be_replaced
295638fd1498Szrj 	  && (chunk_size == 0
295738fd1498Szrj 	      || access->offset + access->size > start_offset))
295838fd1498Szrj 	{
295938fd1498Szrj 	  tree expr, repl = get_access_replacement (access);
296038fd1498Szrj 	  gassign *stmt;
296138fd1498Szrj 
296238fd1498Szrj 	  expr = build_ref_for_model (loc, agg, access->offset - top_offset,
296338fd1498Szrj 				      access, gsi, insert_after);
296438fd1498Szrj 
296538fd1498Szrj 	  if (write)
296638fd1498Szrj 	    {
296738fd1498Szrj 	      if (access->grp_partial_lhs)
296838fd1498Szrj 		expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
296938fd1498Szrj 						 !insert_after,
297038fd1498Szrj 						 insert_after ? GSI_NEW_STMT
297138fd1498Szrj 						 : GSI_SAME_STMT);
297238fd1498Szrj 	      stmt = gimple_build_assign (repl, expr);
297338fd1498Szrj 	    }
297438fd1498Szrj 	  else
297538fd1498Szrj 	    {
297638fd1498Szrj 	      TREE_NO_WARNING (repl) = 1;
297738fd1498Szrj 	      if (access->grp_partial_lhs)
297838fd1498Szrj 		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
297938fd1498Szrj 						 !insert_after,
298038fd1498Szrj 						 insert_after ? GSI_NEW_STMT
298138fd1498Szrj 						 : GSI_SAME_STMT);
298238fd1498Szrj 	      stmt = gimple_build_assign (expr, repl);
298338fd1498Szrj 	    }
298438fd1498Szrj 	  gimple_set_location (stmt, loc);
298538fd1498Szrj 
298638fd1498Szrj 	  if (insert_after)
298738fd1498Szrj 	    gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
298838fd1498Szrj 	  else
298938fd1498Szrj 	    gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
299038fd1498Szrj 	  update_stmt (stmt);
299138fd1498Szrj 	  sra_stats.subtree_copies++;
299238fd1498Szrj 	}
299338fd1498Szrj       else if (write
299438fd1498Szrj 	       && access->grp_to_be_debug_replaced
299538fd1498Szrj 	       && (chunk_size == 0
299638fd1498Szrj 		   || access->offset + access->size > start_offset))
299738fd1498Szrj 	{
299838fd1498Szrj 	  gdebug *ds;
299938fd1498Szrj 	  tree drhs = build_debug_ref_for_model (loc, agg,
300038fd1498Szrj 						 access->offset - top_offset,
300138fd1498Szrj 						 access);
300238fd1498Szrj 	  ds = gimple_build_debug_bind (get_access_replacement (access),
300338fd1498Szrj 					drhs, gsi_stmt (*gsi));
300438fd1498Szrj 	  if (insert_after)
300538fd1498Szrj 	    gsi_insert_after (gsi, ds, GSI_NEW_STMT);
300638fd1498Szrj 	  else
300738fd1498Szrj 	    gsi_insert_before (gsi, ds, GSI_SAME_STMT);
300838fd1498Szrj 	}
300938fd1498Szrj 
301038fd1498Szrj       if (access->first_child)
301138fd1498Szrj 	generate_subtree_copies (access->first_child, agg, top_offset,
301238fd1498Szrj 				 start_offset, chunk_size, gsi,
301338fd1498Szrj 				 write, insert_after, loc);
301438fd1498Szrj 
301538fd1498Szrj       access = access->next_sibling;
301638fd1498Szrj     }
301738fd1498Szrj   while (access);
301838fd1498Szrj }
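
/* Hedged sketch of the effect of the function above (the "$" names follow
   the naming style visible in SRA dumps and are purely illustrative): if a
   partially scalarized aggregate "a" with scalar replacements a$i and a$j
   must be materialized in memory before a call that takes its address,
   copies along the lines of

     a.i = a$i;
     a.j = a$j;

   are inserted before the call; this is the !WRITE direction, storing the
   replacements back into AGG, whereas with WRITE true the copies go the
   other way and reload the replacements from the aggregate.  */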
301938fd1498Szrj 
302038fd1498Szrj /* Assign zero to all scalar replacements in an access subtree.  ACCESS is the
302138fd1498Szrj    root of the subtree to be processed.  GSI is the statement iterator used
302238fd1498Szrj    for inserting statements which are added after the current statement if
302338fd1498Szrj    INSERT_AFTER is true or before it otherwise.  */
302438fd1498Szrj 
302538fd1498Szrj static void
302638fd1498Szrj init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
302738fd1498Szrj 			bool insert_after, location_t loc)
302838fd1498Szrj 
302938fd1498Szrj {
303038fd1498Szrj   struct access *child;
303138fd1498Szrj 
303238fd1498Szrj   if (access->grp_to_be_replaced)
303338fd1498Szrj     {
303438fd1498Szrj       gassign *stmt;
303538fd1498Szrj 
303638fd1498Szrj       stmt = gimple_build_assign (get_access_replacement (access),
303738fd1498Szrj 				  build_zero_cst (access->type));
303838fd1498Szrj       if (insert_after)
303938fd1498Szrj 	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
304038fd1498Szrj       else
304138fd1498Szrj 	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
304238fd1498Szrj       update_stmt (stmt);
304338fd1498Szrj       gimple_set_location (stmt, loc);
304438fd1498Szrj     }
304538fd1498Szrj   else if (access->grp_to_be_debug_replaced)
304638fd1498Szrj     {
304738fd1498Szrj       gdebug *ds
304838fd1498Szrj 	= gimple_build_debug_bind (get_access_replacement (access),
304938fd1498Szrj 				   build_zero_cst (access->type),
305038fd1498Szrj 				   gsi_stmt (*gsi));
305138fd1498Szrj       if (insert_after)
305238fd1498Szrj 	gsi_insert_after (gsi, ds, GSI_NEW_STMT);
305338fd1498Szrj       else
305438fd1498Szrj 	gsi_insert_before (gsi, ds, GSI_SAME_STMT);
305538fd1498Szrj     }
305638fd1498Szrj 
305738fd1498Szrj   for (child = access->first_child; child; child = child->next_sibling)
305838fd1498Szrj     init_subtree_with_zero (child, gsi, insert_after, loc);
305938fd1498Szrj }
306038fd1498Szrj 
306138fd1498Szrj /* Clobber all scalar replacements in an access subtree.  ACCESS is the
306238fd1498Szrj    root of the subtree to be processed.  GSI is the statement iterator used
306338fd1498Szrj    for inserting statements which are added after the current statement if
306438fd1498Szrj    INSERT_AFTER is true or before it otherwise.  */
306538fd1498Szrj 
306638fd1498Szrj static void
306738fd1498Szrj clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
306838fd1498Szrj 		bool insert_after, location_t loc)
306938fd1498Szrj 
307038fd1498Szrj {
307138fd1498Szrj   struct access *child;
307238fd1498Szrj 
307338fd1498Szrj   if (access->grp_to_be_replaced)
307438fd1498Szrj     {
307538fd1498Szrj       tree rep = get_access_replacement (access);
307638fd1498Szrj       tree clobber = build_constructor (access->type, NULL);
307738fd1498Szrj       TREE_THIS_VOLATILE (clobber) = 1;
307838fd1498Szrj       gimple *stmt = gimple_build_assign (rep, clobber);
307938fd1498Szrj 
308038fd1498Szrj       if (insert_after)
308138fd1498Szrj 	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
308238fd1498Szrj       else
308338fd1498Szrj 	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
308438fd1498Szrj       update_stmt (stmt);
308538fd1498Szrj       gimple_set_location (stmt, loc);
308638fd1498Szrj     }
308738fd1498Szrj 
308838fd1498Szrj   for (child = access->first_child; child; child = child->next_sibling)
308938fd1498Szrj     clobber_subtree (child, gsi, insert_after, loc);
309038fd1498Szrj }
309138fd1498Szrj 
309238fd1498Szrj /* Search for an access representative for the given expression EXPR and
309338fd1498Szrj    return it or NULL if it cannot be found.  */
309438fd1498Szrj 
309538fd1498Szrj static struct access *
309638fd1498Szrj get_access_for_expr (tree expr)
309738fd1498Szrj {
309838fd1498Szrj   poly_int64 poffset, psize, pmax_size;
309938fd1498Szrj   HOST_WIDE_INT offset, max_size;
310038fd1498Szrj   tree base;
310138fd1498Szrj   bool reverse;
310238fd1498Szrj 
310338fd1498Szrj   /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
310438fd1498Szrj      a different size than the size of its argument and we need the latter
310538fd1498Szrj      one.  */
310638fd1498Szrj   if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
310738fd1498Szrj     expr = TREE_OPERAND (expr, 0);
310838fd1498Szrj 
310938fd1498Szrj   base = get_ref_base_and_extent (expr, &poffset, &psize, &pmax_size,
311038fd1498Szrj 				  &reverse);
311138fd1498Szrj   if (!known_size_p (pmax_size)
311238fd1498Szrj       || !pmax_size.is_constant (&max_size)
311338fd1498Szrj       || !poffset.is_constant (&offset)
311438fd1498Szrj       || !DECL_P (base))
311538fd1498Szrj     return NULL;
311638fd1498Szrj 
311738fd1498Szrj   if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
311838fd1498Szrj     return NULL;
311938fd1498Szrj 
312038fd1498Szrj   return get_var_base_offset_size_access (base, offset, max_size);
312138fd1498Szrj }
312238fd1498Szrj 
312338fd1498Szrj /* Replace the expression EXPR with a scalar replacement if there is one and
312438fd1498Szrj    generate other statements to do type conversion or subtree copying if
312538fd1498Szrj    necessary.  GSI is used to place newly created statements, WRITE is true if
312638fd1498Szrj    the expression is being written to (it is on a LHS of a statement or output
312738fd1498Szrj    in an assembly statement).  */
312838fd1498Szrj 
312938fd1498Szrj static bool
313038fd1498Szrj sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
313138fd1498Szrj {
313238fd1498Szrj   location_t loc;
313338fd1498Szrj   struct access *access;
313438fd1498Szrj   tree type, bfr, orig_expr;
313538fd1498Szrj 
313638fd1498Szrj   if (TREE_CODE (*expr) == BIT_FIELD_REF)
313738fd1498Szrj     {
313838fd1498Szrj       bfr = *expr;
313938fd1498Szrj       expr = &TREE_OPERAND (*expr, 0);
314038fd1498Szrj     }
314138fd1498Szrj   else
314238fd1498Szrj     bfr = NULL_TREE;
314338fd1498Szrj 
314438fd1498Szrj   if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
314538fd1498Szrj     expr = &TREE_OPERAND (*expr, 0);
314638fd1498Szrj   access = get_access_for_expr (*expr);
314738fd1498Szrj   if (!access)
314838fd1498Szrj     return false;
314938fd1498Szrj   type = TREE_TYPE (*expr);
315038fd1498Szrj   orig_expr = *expr;
315138fd1498Szrj 
315238fd1498Szrj   loc = gimple_location (gsi_stmt (*gsi));
315338fd1498Szrj   gimple_stmt_iterator alt_gsi = gsi_none ();
315438fd1498Szrj   if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
315538fd1498Szrj     {
315638fd1498Szrj       alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
315738fd1498Szrj       gsi = &alt_gsi;
315838fd1498Szrj     }
315938fd1498Szrj 
316038fd1498Szrj   if (access->grp_to_be_replaced)
316138fd1498Szrj     {
316238fd1498Szrj       tree repl = get_access_replacement (access);
316338fd1498Szrj       /* If we replace a non-register typed access simply use the original
316438fd1498Szrj          access expression to extract the scalar component afterwards.
316538fd1498Szrj 	 This happens if scalarizing a function return value or parameter
316638fd1498Szrj 	 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
316738fd1498Szrj 	 gcc.c-torture/compile/20011217-1.c.
316838fd1498Szrj 
316938fd1498Szrj          We also want to use this when accessing a complex or vector which can
317038fd1498Szrj          be accessed as a different type too, potentially creating a need for
317138fd1498Szrj          type conversion (see PR42196) and when scalarized unions are involved
317238fd1498Szrj          in assembler statements (see PR42398).  */
317338fd1498Szrj       if (!useless_type_conversion_p (type, access->type))
317438fd1498Szrj 	{
317538fd1498Szrj 	  tree ref;
317638fd1498Szrj 
317738fd1498Szrj 	  ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);
317838fd1498Szrj 
317938fd1498Szrj 	  if (write)
318038fd1498Szrj 	    {
318138fd1498Szrj 	      gassign *stmt;
318238fd1498Szrj 
318338fd1498Szrj 	      if (access->grp_partial_lhs)
318438fd1498Szrj 		ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
318538fd1498Szrj 						 false, GSI_NEW_STMT);
318638fd1498Szrj 	      stmt = gimple_build_assign (repl, ref);
318738fd1498Szrj 	      gimple_set_location (stmt, loc);
318838fd1498Szrj 	      gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
318938fd1498Szrj 	    }
319038fd1498Szrj 	  else
319138fd1498Szrj 	    {
319238fd1498Szrj 	      gassign *stmt;
319338fd1498Szrj 
319438fd1498Szrj 	      if (access->grp_partial_lhs)
319538fd1498Szrj 		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
319638fd1498Szrj 						 true, GSI_SAME_STMT);
319738fd1498Szrj 	      stmt = gimple_build_assign (ref, repl);
319838fd1498Szrj 	      gimple_set_location (stmt, loc);
319938fd1498Szrj 	      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
320038fd1498Szrj 	    }
320138fd1498Szrj 	}
320238fd1498Szrj       else
320338fd1498Szrj 	*expr = repl;
320438fd1498Szrj       sra_stats.exprs++;
320538fd1498Szrj     }
320638fd1498Szrj   else if (write && access->grp_to_be_debug_replaced)
320738fd1498Szrj     {
320838fd1498Szrj       gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
320938fd1498Szrj 					    NULL_TREE,
321038fd1498Szrj 					    gsi_stmt (*gsi));
321138fd1498Szrj       gsi_insert_after (gsi, ds, GSI_NEW_STMT);
321238fd1498Szrj     }
321338fd1498Szrj 
321438fd1498Szrj   if (access->first_child)
321538fd1498Szrj     {
321638fd1498Szrj       HOST_WIDE_INT start_offset, chunk_size;
321738fd1498Szrj       if (bfr
321838fd1498Szrj 	  && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
321938fd1498Szrj 	  && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
322038fd1498Szrj 	{
322138fd1498Szrj 	  chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
322238fd1498Szrj 	  start_offset = access->offset
322338fd1498Szrj 	    + tree_to_uhwi (TREE_OPERAND (bfr, 2));
322438fd1498Szrj 	}
322538fd1498Szrj       else
322638fd1498Szrj 	start_offset = chunk_size = 0;
322738fd1498Szrj 
322838fd1498Szrj       generate_subtree_copies (access->first_child, orig_expr, access->offset,
322938fd1498Szrj 			       start_offset, chunk_size, gsi, write, write,
323038fd1498Szrj 			       loc);
323138fd1498Szrj     }
323238fd1498Szrj   return true;
323338fd1498Szrj }
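
/* Hedged illustration of the type-mismatch case handled above (assumed
   input in the spirit of the PRs cited in the comment, not a transcript
   of the pass):

     union U { int i; float f; };

     void
     g (void)
     {
       union U u;
       u.f = 1.0f;
       __asm__ ("" : "+r" (u.i));
     }

   If u is scalarized, the type used in the asm operand need not match the
   type of the chosen replacement; in that case the original reference is
   kept, rebuilt with build_ref_for_model, and an explicit copy to or from
   the replacement is emitted instead of substituting the replacement into
   the expression directly.  */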
323438fd1498Szrj 
323538fd1498Szrj /* Where scalar replacements of the RHS have been written to when a replacement
323638fd1498Szrj    of the LHS of an assignment cannot be directly loaded from a replacement of
323738fd1498Szrj    the RHS. */
323838fd1498Szrj enum unscalarized_data_handling { SRA_UDH_NONE,  /* Nothing done so far. */
323938fd1498Szrj 				  SRA_UDH_RIGHT, /* Data flushed to the RHS. */
324038fd1498Szrj 				  SRA_UDH_LEFT }; /* Data flushed to the LHS. */
324138fd1498Szrj 
324238fd1498Szrj struct subreplacement_assignment_data
324338fd1498Szrj {
324438fd1498Szrj   /* Offset of the access representing the lhs of the assignment.  */
324538fd1498Szrj   HOST_WIDE_INT left_offset;
324638fd1498Szrj 
324738fd1498Szrj   /* LHS and RHS of the original assignment.  */
324838fd1498Szrj   tree assignment_lhs, assignment_rhs;
324938fd1498Szrj 
325038fd1498Szrj   /* Access representing the rhs of the whole assignment.  */
325138fd1498Szrj   struct access *top_racc;
325238fd1498Szrj 
325338fd1498Szrj   /* Stmt iterator used for statement insertions after the original assignment.
325438fd1498Szrj    It points to the main GSI used to traverse a BB during function body
325538fd1498Szrj    modification.  */
325638fd1498Szrj   gimple_stmt_iterator *new_gsi;
325738fd1498Szrj 
325838fd1498Szrj   /* Stmt iterator used for statement insertions before the original
325938fd1498Szrj    assignment.  Keeps on pointing to the original statement.  */
326038fd1498Szrj   gimple_stmt_iterator old_gsi;
326138fd1498Szrj 
326238fd1498Szrj   /* Location of the assignment.   */
326338fd1498Szrj   location_t loc;
326438fd1498Szrj 
326538fd1498Szrj   /* Keeps the information whether we have needed to refresh replacements of
326638fd1498Szrj    the LHS and from which side of the assignments this takes place.  */
326738fd1498Szrj   enum unscalarized_data_handling refreshed;
326838fd1498Szrj };
326938fd1498Szrj 
327038fd1498Szrj /* Store all replacements in the access tree rooted in TOP_RACC either to their
327138fd1498Szrj    base aggregate if there are unscalarized data or directly to the LHS of the
327238fd1498Szrj    statement that is pointed to by GSI otherwise.  */
327338fd1498Szrj 
327438fd1498Szrj static void
327538fd1498Szrj handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
327638fd1498Szrj {
327738fd1498Szrj   tree src;
327838fd1498Szrj   if (sad->top_racc->grp_unscalarized_data)
327938fd1498Szrj     {
328038fd1498Szrj       src = sad->assignment_rhs;
328138fd1498Szrj       sad->refreshed = SRA_UDH_RIGHT;
328238fd1498Szrj     }
328338fd1498Szrj   else
328438fd1498Szrj     {
328538fd1498Szrj       src = sad->assignment_lhs;
328638fd1498Szrj       sad->refreshed = SRA_UDH_LEFT;
328738fd1498Szrj     }
328838fd1498Szrj   generate_subtree_copies (sad->top_racc->first_child, src,
328938fd1498Szrj 			   sad->top_racc->offset, 0, 0,
329038fd1498Szrj 			   &sad->old_gsi, false, false, sad->loc);
329138fd1498Szrj }
329238fd1498Szrj 
329338fd1498Szrj /* Try to generate statements to load all sub-replacements in an access subtree
329438fd1498Szrj    formed by children of LACC from scalar replacements in the SAD->top_racc
329538fd1498Szrj    subtree.  If that is not possible, refresh the SAD->top_racc base aggregate
329638fd1498Szrj    and load the accesses from it.  */
329738fd1498Szrj 
329838fd1498Szrj static void
329938fd1498Szrj load_assign_lhs_subreplacements (struct access *lacc,
330038fd1498Szrj 				 struct subreplacement_assignment_data *sad)
330138fd1498Szrj {
330238fd1498Szrj   for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
330338fd1498Szrj     {
330438fd1498Szrj       HOST_WIDE_INT offset;
330538fd1498Szrj       offset = lacc->offset - sad->left_offset + sad->top_racc->offset;
330638fd1498Szrj 
330738fd1498Szrj       if (lacc->grp_to_be_replaced)
330838fd1498Szrj 	{
330938fd1498Szrj 	  struct access *racc;
331038fd1498Szrj 	  gassign *stmt;
331138fd1498Szrj 	  tree rhs;
331238fd1498Szrj 
331338fd1498Szrj 	  racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
331438fd1498Szrj 	  if (racc && racc->grp_to_be_replaced)
331538fd1498Szrj 	    {
331638fd1498Szrj 	      rhs = get_access_replacement (racc);
331738fd1498Szrj 	      if (!useless_type_conversion_p (lacc->type, racc->type))
331838fd1498Szrj 		rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
331938fd1498Szrj 				       lacc->type, rhs);
332038fd1498Szrj 
332138fd1498Szrj 	      if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
332238fd1498Szrj 		rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
332338fd1498Szrj 						NULL_TREE, true, GSI_SAME_STMT);
332438fd1498Szrj 	    }
332538fd1498Szrj 	  else
332638fd1498Szrj 	    {
332738fd1498Szrj 	      /* No suitable access on the right hand side, need to load from
332838fd1498Szrj 		 the aggregate.  See if we have to update it first... */
332938fd1498Szrj 	      if (sad->refreshed == SRA_UDH_NONE)
333038fd1498Szrj 		handle_unscalarized_data_in_subtree (sad);
333138fd1498Szrj 
333238fd1498Szrj 	      if (sad->refreshed == SRA_UDH_LEFT)
333338fd1498Szrj 		rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
333438fd1498Szrj 					   lacc->offset - sad->left_offset,
333538fd1498Szrj 					   lacc, sad->new_gsi, true);
333638fd1498Szrj 	      else
333738fd1498Szrj 		rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
333838fd1498Szrj 					   lacc->offset - sad->left_offset,
333938fd1498Szrj 					   lacc, sad->new_gsi, true);
334038fd1498Szrj 	      if (lacc->grp_partial_lhs)
334138fd1498Szrj 		rhs = force_gimple_operand_gsi (sad->new_gsi,
334238fd1498Szrj 						rhs, true, NULL_TREE,
334338fd1498Szrj 						false, GSI_NEW_STMT);
334438fd1498Szrj 	    }
334538fd1498Szrj 
334638fd1498Szrj 	  stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
334738fd1498Szrj 	  gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
334838fd1498Szrj 	  gimple_set_location (stmt, sad->loc);
334938fd1498Szrj 	  update_stmt (stmt);
335038fd1498Szrj 	  sra_stats.subreplacements++;
335138fd1498Szrj 	}
335238fd1498Szrj       else
335338fd1498Szrj 	{
335438fd1498Szrj 	  if (sad->refreshed == SRA_UDH_NONE
335538fd1498Szrj 	      && lacc->grp_read && !lacc->grp_covered)
335638fd1498Szrj 	    handle_unscalarized_data_in_subtree (sad);
335738fd1498Szrj 
335838fd1498Szrj 	  if (lacc && lacc->grp_to_be_debug_replaced)
335938fd1498Szrj 	    {
336038fd1498Szrj 	      gdebug *ds;
336138fd1498Szrj 	      tree drhs;
336238fd1498Szrj 	      struct access *racc = find_access_in_subtree (sad->top_racc,
336338fd1498Szrj 							    offset,
336438fd1498Szrj 							    lacc->size);
336538fd1498Szrj 
336638fd1498Szrj 	      if (racc && racc->grp_to_be_replaced)
336738fd1498Szrj 		{
336838fd1498Szrj 		  if (racc->grp_write || constant_decl_p (racc->base))
336938fd1498Szrj 		    drhs = get_access_replacement (racc);
337038fd1498Szrj 		  else
337138fd1498Szrj 		    drhs = NULL;
337238fd1498Szrj 		}
337338fd1498Szrj 	      else if (sad->refreshed == SRA_UDH_LEFT)
337438fd1498Szrj 		drhs = build_debug_ref_for_model (sad->loc, lacc->base,
337538fd1498Szrj 						  lacc->offset, lacc);
337638fd1498Szrj 	      else if (sad->refreshed == SRA_UDH_RIGHT)
337738fd1498Szrj 		drhs = build_debug_ref_for_model (sad->loc, sad->top_racc->base,
337838fd1498Szrj 						  offset, lacc);
337938fd1498Szrj 	      else
338038fd1498Szrj 		drhs = NULL_TREE;
338138fd1498Szrj 	      if (drhs
338238fd1498Szrj 		  && !useless_type_conversion_p (lacc->type, TREE_TYPE (drhs)))
338338fd1498Szrj 		drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
338438fd1498Szrj 					lacc->type, drhs);
338538fd1498Szrj 	      ds = gimple_build_debug_bind (get_access_replacement (lacc),
338638fd1498Szrj 					    drhs, gsi_stmt (sad->old_gsi));
338738fd1498Szrj 	      gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
338838fd1498Szrj 	    }
338938fd1498Szrj 	}
339038fd1498Szrj 
339138fd1498Szrj       if (lacc->first_child)
339238fd1498Szrj 	load_assign_lhs_subreplacements (lacc, sad);
339338fd1498Szrj     }
339438fd1498Szrj }
339538fd1498Szrj 
339638fd1498Szrj /* Result code for SRA assignment modification.  */
339738fd1498Szrj enum assignment_mod_result { SRA_AM_NONE,       /* nothing done for the stmt */
339838fd1498Szrj 			     SRA_AM_MODIFIED,  /* stmt changed but not
339938fd1498Szrj 						  removed */
340038fd1498Szrj 			     SRA_AM_REMOVED };  /* stmt eliminated */
340138fd1498Szrj 
340238fd1498Szrj /* Modify assignments with a CONSTRUCTOR on their RHS.  STMT contains a pointer
340338fd1498Szrj    to the assignment and GSI is the statement iterator pointing at it.  Returns
340438fd1498Szrj    the same values as sra_modify_assign.  */
340538fd1498Szrj 
340638fd1498Szrj static enum assignment_mod_result
340738fd1498Szrj sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
340838fd1498Szrj {
340938fd1498Szrj   tree lhs = gimple_assign_lhs (stmt);
341038fd1498Szrj   struct access *acc = get_access_for_expr (lhs);
341138fd1498Szrj   if (!acc)
341238fd1498Szrj     return SRA_AM_NONE;
341338fd1498Szrj   location_t loc = gimple_location (stmt);
341438fd1498Szrj 
341538fd1498Szrj   if (gimple_clobber_p (stmt))
341638fd1498Szrj     {
341738fd1498Szrj       /* Clobber the replacement variable.  */
341838fd1498Szrj       clobber_subtree (acc, gsi, !acc->grp_covered, loc);
341938fd1498Szrj       /* Remove clobbers of fully scalarized variables, they are dead.  */
342038fd1498Szrj       if (acc->grp_covered)
342138fd1498Szrj 	{
342238fd1498Szrj 	  unlink_stmt_vdef (stmt);
342338fd1498Szrj 	  gsi_remove (gsi, true);
342438fd1498Szrj 	  release_defs (stmt);
342538fd1498Szrj 	  return SRA_AM_REMOVED;
342638fd1498Szrj 	}
342738fd1498Szrj       else
342838fd1498Szrj 	return SRA_AM_MODIFIED;
342938fd1498Szrj     }
343038fd1498Szrj 
343138fd1498Szrj   if (CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt)) > 0)
343238fd1498Szrj     {
343338fd1498Szrj       /* I have never seen this code path trigger but if it can happen the
343438fd1498Szrj 	 following should handle it gracefully.  */
343538fd1498Szrj       if (access_has_children_p (acc))
343638fd1498Szrj 	generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
343738fd1498Szrj 				 true, true, loc);
343838fd1498Szrj       return SRA_AM_MODIFIED;
343938fd1498Szrj     }
344038fd1498Szrj 
344138fd1498Szrj   if (acc->grp_covered)
344238fd1498Szrj     {
344338fd1498Szrj       init_subtree_with_zero (acc, gsi, false, loc);
344438fd1498Szrj       unlink_stmt_vdef (stmt);
344538fd1498Szrj       gsi_remove (gsi, true);
344638fd1498Szrj       release_defs (stmt);
344738fd1498Szrj       return SRA_AM_REMOVED;
344838fd1498Szrj     }
344938fd1498Szrj   else
345038fd1498Szrj     {
345138fd1498Szrj       init_subtree_with_zero (acc, gsi, true, loc);
345238fd1498Szrj       return SRA_AM_MODIFIED;
345338fd1498Szrj     }
345438fd1498Szrj }
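
/* Illustrative sketch (assumed source, not a dump of the pass): for

     struct S { int i; int j; };

     int
     h (void)
     {
       struct S s = { 0 };
       s.i = 3;
       return s.i + s.j;
     }

   the all-zero initializer is typically gimplified to an assignment of an
   empty CONSTRUCTOR (s = {}), which is handled above: if the access tree
   of s is fully covered by scalar replacements, the replacements are
   zero-initialized (s$i = 0; s$j = 0; using dump-style names) and the
   aggregate store itself is removed; otherwise the zero-initialization is
   inserted after the statement and the statement is kept.  */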
345538fd1498Szrj 
345638fd1498Szrj /* Create and return a new suitable default definition SSA_NAME for RACC which
345738fd1498Szrj    is an access describing an uninitialized part of an aggregate that is being
345838fd1498Szrj    loaded.  */
345938fd1498Szrj 
346038fd1498Szrj static tree
346138fd1498Szrj get_repl_default_def_ssa_name (struct access *racc)
346238fd1498Szrj {
346338fd1498Szrj   gcc_checking_assert (!racc->grp_to_be_replaced
346438fd1498Szrj 		       && !racc->grp_to_be_debug_replaced);
346538fd1498Szrj   if (!racc->replacement_decl)
346638fd1498Szrj     racc->replacement_decl = create_access_replacement (racc);
346738fd1498Szrj   return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
346838fd1498Szrj }
346938fd1498Szrj 
347038fd1498Szrj /* Examine both sides of the assignment statement pointed to by STMT, replace
347138fd1498Szrj    them with a scalar replacement if there is one and generate copying of
347238fd1498Szrj    replacements if scalarized aggregates have been used in the assignment.  GSI
347338fd1498Szrj    is used to hold generated statements for type conversions and subtree
347438fd1498Szrj    copying.  */
347538fd1498Szrj 
347638fd1498Szrj static enum assignment_mod_result
347738fd1498Szrj sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi)
347838fd1498Szrj {
347938fd1498Szrj   struct access *lacc, *racc;
348038fd1498Szrj   tree lhs, rhs;
348138fd1498Szrj   bool modify_this_stmt = false;
348238fd1498Szrj   bool force_gimple_rhs = false;
348338fd1498Szrj   location_t loc;
348438fd1498Szrj   gimple_stmt_iterator orig_gsi = *gsi;
348538fd1498Szrj 
348638fd1498Szrj   if (!gimple_assign_single_p (stmt))
348738fd1498Szrj     return SRA_AM_NONE;
348838fd1498Szrj   lhs = gimple_assign_lhs (stmt);
348938fd1498Szrj   rhs = gimple_assign_rhs1 (stmt);
349038fd1498Szrj 
349138fd1498Szrj   if (TREE_CODE (rhs) == CONSTRUCTOR)
349238fd1498Szrj     return sra_modify_constructor_assign (stmt, gsi);
349338fd1498Szrj 
349438fd1498Szrj   if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
349538fd1498Szrj       || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
349638fd1498Szrj       || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
349738fd1498Szrj     {
349838fd1498Szrj       modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
349938fd1498Szrj 					  gsi, false);
350038fd1498Szrj       modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
350138fd1498Szrj 					   gsi, true);
350238fd1498Szrj       return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
350338fd1498Szrj     }
350438fd1498Szrj 
350538fd1498Szrj   lacc = get_access_for_expr (lhs);
350638fd1498Szrj   racc = get_access_for_expr (rhs);
350738fd1498Szrj   if (!lacc && !racc)
350838fd1498Szrj     return SRA_AM_NONE;
350938fd1498Szrj   /* Avoid modifying initializations of constant-pool replacements.  */
351038fd1498Szrj   if (racc && (racc->replacement_decl == lhs))
351138fd1498Szrj     return SRA_AM_NONE;
351238fd1498Szrj 
351338fd1498Szrj   loc = gimple_location (stmt);
351438fd1498Szrj   if (lacc && lacc->grp_to_be_replaced)
351538fd1498Szrj     {
351638fd1498Szrj       lhs = get_access_replacement (lacc);
351738fd1498Szrj       gimple_assign_set_lhs (stmt, lhs);
351838fd1498Szrj       modify_this_stmt = true;
351938fd1498Szrj       if (lacc->grp_partial_lhs)
352038fd1498Szrj 	force_gimple_rhs = true;
352138fd1498Szrj       sra_stats.exprs++;
352238fd1498Szrj     }
352338fd1498Szrj 
352438fd1498Szrj   if (racc && racc->grp_to_be_replaced)
352538fd1498Szrj     {
352638fd1498Szrj       rhs = get_access_replacement (racc);
352738fd1498Szrj       modify_this_stmt = true;
352838fd1498Szrj       if (racc->grp_partial_lhs)
352938fd1498Szrj 	force_gimple_rhs = true;
353038fd1498Szrj       sra_stats.exprs++;
353138fd1498Szrj     }
353238fd1498Szrj   else if (racc
353338fd1498Szrj 	   && !racc->grp_unscalarized_data
353438fd1498Szrj 	   && !racc->grp_unscalarizable_region
353538fd1498Szrj 	   && TREE_CODE (lhs) == SSA_NAME
353638fd1498Szrj 	   && !access_has_replacements_p (racc))
353738fd1498Szrj     {
353838fd1498Szrj       rhs = get_repl_default_def_ssa_name (racc);
353938fd1498Szrj       modify_this_stmt = true;
354038fd1498Szrj       sra_stats.exprs++;
354138fd1498Szrj     }
354238fd1498Szrj 
354338fd1498Szrj   if (modify_this_stmt)
354438fd1498Szrj     {
354538fd1498Szrj       if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
354638fd1498Szrj 	{
354738fd1498Szrj 	  /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
354838fd1498Szrj 	     ???  This should move to fold_stmt which we simply should
354938fd1498Szrj 	     call after building a VIEW_CONVERT_EXPR here.  */
355038fd1498Szrj 	  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
355138fd1498Szrj 	      && !contains_bitfld_component_ref_p (lhs))
355238fd1498Szrj 	    {
355338fd1498Szrj 	      lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
355438fd1498Szrj 	      gimple_assign_set_lhs (stmt, lhs);
355538fd1498Szrj 	    }
355638fd1498Szrj 	  else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
355738fd1498Szrj 		   && !contains_vce_or_bfcref_p (rhs))
355838fd1498Szrj 	    rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);
355938fd1498Szrj 
356038fd1498Szrj 	  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
356138fd1498Szrj 	    {
356238fd1498Szrj 	      rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
356338fd1498Szrj 				     rhs);
356438fd1498Szrj 	      if (is_gimple_reg_type (TREE_TYPE (lhs))
356538fd1498Szrj 		  && TREE_CODE (lhs) != SSA_NAME)
356638fd1498Szrj 		force_gimple_rhs = true;
356738fd1498Szrj 	    }
356838fd1498Szrj 	}
356938fd1498Szrj     }
357038fd1498Szrj 
357138fd1498Szrj   if (lacc && lacc->grp_to_be_debug_replaced)
357238fd1498Szrj     {
357338fd1498Szrj       tree dlhs = get_access_replacement (lacc);
357438fd1498Szrj       tree drhs = unshare_expr (rhs);
357538fd1498Szrj       if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
357638fd1498Szrj 	{
357738fd1498Szrj 	  if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
357838fd1498Szrj 	      && !contains_vce_or_bfcref_p (drhs))
357938fd1498Szrj 	    drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
358038fd1498Szrj 	  if (drhs
358138fd1498Szrj 	      && !useless_type_conversion_p (TREE_TYPE (dlhs),
358238fd1498Szrj 					     TREE_TYPE (drhs)))
358338fd1498Szrj 	    drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
358438fd1498Szrj 				    TREE_TYPE (dlhs), drhs);
358538fd1498Szrj 	}
358638fd1498Szrj       gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
358738fd1498Szrj       gsi_insert_before (gsi, ds, GSI_SAME_STMT);
358838fd1498Szrj     }
358938fd1498Szrj 
359038fd1498Szrj   /* From this point on, the function deals with assignments in between
359138fd1498Szrj      aggregates when at least one has scalar reductions of some of its
359238fd1498Szrj      components.  There are three possible scenarios: 1) both the LHS and RHS
359338fd1498Szrj      have to-be-scalarized components, 2) only the RHS has or 3) only the LHS has.
359438fd1498Szrj 
359538fd1498Szrj      In the first case, we would like to load the LHS components from RHS
359638fd1498Szrj      components whenever possible.  If that is not possible, we would like to
359738fd1498Szrj      read it directly from the RHS (after updating it by storing in it its own
359838fd1498Szrj      components).  If there are some necessary unscalarized data in the LHS,
359938fd1498Szrj      those will be loaded by the original assignment too.  If neither of these
360038fd1498Szrj      cases happen, the original statement can be removed.  Most of this is done
360138fd1498Szrj      by load_assign_lhs_subreplacements.
360238fd1498Szrj 
360338fd1498Szrj      In the second case, we would like to store all RHS scalarized components
360438fd1498Szrj      directly into LHS and if they cover the aggregate completely, remove the
360538fd1498Szrj      statement too.  In the third case, we want the LHS components to be loaded
360638fd1498Szrj      directly from the RHS (DSE will remove the original statement if it
360738fd1498Szrj      becomes redundant).
360838fd1498Szrj 
360938fd1498Szrj      This is a bit complex but manageable when types match and when unions do
361038fd1498Szrj      not cause confusion in a way that we cannot really load a component of LHS
361138fd1498Szrj      from the RHS or vice versa (the access representing this level can have
361238fd1498Szrj      subaccesses that are accessible only through a different union field at a
361338fd1498Szrj      higher level - different from the one used in the examined expression).
361438fd1498Szrj      Unions are fun.
361538fd1498Szrj 
361638fd1498Szrj      Therefore, I specially handle a fourth case, happening when there is a
361738fd1498Szrj      specific type cast or it is impossible to locate a scalarized subaccess on
361838fd1498Szrj      the other side of the expression.  If that happens, I simply "refresh" the
361938fd1498Szrj      RHS by storing in it its scalarized components, leave the original statement
362038fd1498Szrj      there to do the copying and then load the scalar replacements of the LHS.
362138fd1498Szrj      This is what the first branch does.  */
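
  /* An illustrative sketch (not taken from any testcase; the SR_* names are
     hypothetical replacement decls).  For

	struct S { int a; int b; } x, y;
	x = y;

     scenario 1), where both sides are scalarized, roughly becomes

	SR_x_a = SR_y_a;
	SR_x_b = SR_y_b;

     with the original copy kept only if some data stays unscalarized, while
     in scenario 3), where only the LHS has replacements, they are loaded
     from the RHS aggregate instead and the original copy is left for DSE:

	x = y;
	SR_x_a = y.a;
	SR_x_b = y.b;
  */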
362238fd1498Szrj 
362338fd1498Szrj   if (modify_this_stmt
362438fd1498Szrj       || gimple_has_volatile_ops (stmt)
362538fd1498Szrj       || contains_vce_or_bfcref_p (rhs)
362638fd1498Szrj       || contains_vce_or_bfcref_p (lhs)
362738fd1498Szrj       || stmt_ends_bb_p (stmt))
362838fd1498Szrj     {
362938fd1498Szrj       /* No need to copy into a constant-pool, it comes pre-initialized.  */
363038fd1498Szrj       if (access_has_children_p (racc) && !constant_decl_p (racc->base))
363138fd1498Szrj 	generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
363238fd1498Szrj 				 gsi, false, false, loc);
363338fd1498Szrj       if (access_has_children_p (lacc))
363438fd1498Szrj 	{
363538fd1498Szrj 	  gimple_stmt_iterator alt_gsi = gsi_none ();
363638fd1498Szrj 	  if (stmt_ends_bb_p (stmt))
363738fd1498Szrj 	    {
363838fd1498Szrj 	      alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
363938fd1498Szrj 	      gsi = &alt_gsi;
364038fd1498Szrj 	    }
364138fd1498Szrj 	  generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
364238fd1498Szrj 				   gsi, true, true, loc);
364338fd1498Szrj 	}
364438fd1498Szrj       sra_stats.separate_lhs_rhs_handling++;
364538fd1498Szrj 
364638fd1498Szrj       /* This gimplification must be done after generate_subtree_copies,
364738fd1498Szrj 	 lest we insert the subtree copies in the middle of the gimplified
364838fd1498Szrj 	 sequence.  */
364938fd1498Szrj       if (force_gimple_rhs)
365038fd1498Szrj 	rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
365138fd1498Szrj 					true, GSI_SAME_STMT);
365238fd1498Szrj       if (gimple_assign_rhs1 (stmt) != rhs)
365338fd1498Szrj 	{
365438fd1498Szrj 	  modify_this_stmt = true;
365538fd1498Szrj 	  gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
365638fd1498Szrj 	  gcc_assert (stmt == gsi_stmt (orig_gsi));
365738fd1498Szrj 	}
365838fd1498Szrj 
365938fd1498Szrj       return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
366038fd1498Szrj     }
366138fd1498Szrj   else
366238fd1498Szrj     {
366338fd1498Szrj       if (access_has_children_p (lacc)
366438fd1498Szrj 	  && access_has_children_p (racc)
366538fd1498Szrj 	  /* When an access represents an unscalarizable region, it usually
366638fd1498Szrj 	     represents accesses with variable offset and thus must not be used
366738fd1498Szrj 	     to generate new memory accesses.  */
366838fd1498Szrj 	  && !lacc->grp_unscalarizable_region
366938fd1498Szrj 	  && !racc->grp_unscalarizable_region)
367038fd1498Szrj 	{
367138fd1498Szrj 	  struct subreplacement_assignment_data sad;
367238fd1498Szrj 
367338fd1498Szrj 	  sad.left_offset = lacc->offset;
367438fd1498Szrj 	  sad.assignment_lhs = lhs;
367538fd1498Szrj 	  sad.assignment_rhs = rhs;
367638fd1498Szrj 	  sad.top_racc = racc;
367738fd1498Szrj 	  sad.old_gsi = *gsi;
367838fd1498Szrj 	  sad.new_gsi = gsi;
367938fd1498Szrj 	  sad.loc = gimple_location (stmt);
368038fd1498Szrj 	  sad.refreshed = SRA_UDH_NONE;
368138fd1498Szrj 
368238fd1498Szrj 	  if (lacc->grp_read && !lacc->grp_covered)
368338fd1498Szrj 	    handle_unscalarized_data_in_subtree (&sad);
368438fd1498Szrj 
368538fd1498Szrj 	  load_assign_lhs_subreplacements (lacc, &sad);
368638fd1498Szrj 	  if (sad.refreshed != SRA_UDH_RIGHT)
368738fd1498Szrj 	    {
368838fd1498Szrj 	      gsi_next (gsi);
368938fd1498Szrj 	      unlink_stmt_vdef (stmt);
369038fd1498Szrj 	      gsi_remove (&sad.old_gsi, true);
369138fd1498Szrj 	      release_defs (stmt);
369238fd1498Szrj 	      sra_stats.deleted++;
369338fd1498Szrj 	      return SRA_AM_REMOVED;
369438fd1498Szrj 	    }
369538fd1498Szrj 	}
369638fd1498Szrj       else
369738fd1498Szrj 	{
369838fd1498Szrj 	  if (access_has_children_p (racc)
369938fd1498Szrj 	      && !racc->grp_unscalarized_data
370038fd1498Szrj 	      && TREE_CODE (lhs) != SSA_NAME)
370138fd1498Szrj 	    {
370238fd1498Szrj 	      if (dump_file)
370338fd1498Szrj 		{
370438fd1498Szrj 		  fprintf (dump_file, "Removing load: ");
370538fd1498Szrj 		  print_gimple_stmt (dump_file, stmt, 0);
370638fd1498Szrj 		}
370738fd1498Szrj 	      generate_subtree_copies (racc->first_child, lhs,
370838fd1498Szrj 				       racc->offset, 0, 0, gsi,
370938fd1498Szrj 				       false, false, loc);
371038fd1498Szrj 	      gcc_assert (stmt == gsi_stmt (*gsi));
371138fd1498Szrj 	      unlink_stmt_vdef (stmt);
371238fd1498Szrj 	      gsi_remove (gsi, true);
371338fd1498Szrj 	      release_defs (stmt);
371438fd1498Szrj 	      sra_stats.deleted++;
371538fd1498Szrj 	      return SRA_AM_REMOVED;
371638fd1498Szrj 	    }
371738fd1498Szrj 	  /* Restore the aggregate RHS from its components so the
371838fd1498Szrj 	     prevailing aggregate copy does the right thing.  */
371938fd1498Szrj 	  if (access_has_children_p (racc))
372038fd1498Szrj 	    generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
372138fd1498Szrj 				     gsi, false, false, loc);
372238fd1498Szrj 	  /* Re-load the components of the aggregate copy destination.
372338fd1498Szrj 	     But use the RHS aggregate to load from to expose more
372438fd1498Szrj 	     optimization opportunities.  */
372538fd1498Szrj 	  if (access_has_children_p (lacc))
372638fd1498Szrj 	    generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
372738fd1498Szrj 				     0, 0, gsi, true, true, loc);
372838fd1498Szrj 	}
372938fd1498Szrj 
373038fd1498Szrj       return SRA_AM_NONE;
373138fd1498Szrj     }
373238fd1498Szrj }
373338fd1498Szrj 
373438fd1498Szrj /* Set any scalar replacements of values in the constant pool to the initial
373538fd1498Szrj    value of the constant.  (Constant-pool decls like *.LC0 have effectively
373638fd1498Szrj    been initialized before the program starts, we must do the same for their
373738fd1498Szrj    replacements.)  Thus, we output statements like 'SR.1 = *.LC0[0];' into
373838fd1498Szrj    the function's entry block.  */
373938fd1498Szrj 
374038fd1498Szrj static void
374138fd1498Szrj initialize_constant_pool_replacements (void)
374238fd1498Szrj {
374338fd1498Szrj   gimple_seq seq = NULL;
374438fd1498Szrj   gimple_stmt_iterator gsi = gsi_start (seq);
374538fd1498Szrj   bitmap_iterator bi;
374638fd1498Szrj   unsigned i;
374738fd1498Szrj 
374838fd1498Szrj   EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
374938fd1498Szrj     {
375038fd1498Szrj       tree var = candidate (i);
375138fd1498Szrj       if (!constant_decl_p (var))
375238fd1498Szrj 	continue;
375338fd1498Szrj       vec<access_p> *access_vec = get_base_access_vector (var);
375438fd1498Szrj       if (!access_vec)
375538fd1498Szrj 	continue;
375638fd1498Szrj       for (unsigned i = 0; i < access_vec->length (); i++)
375738fd1498Szrj 	{
375838fd1498Szrj 	  struct access *access = (*access_vec)[i];
375938fd1498Szrj 	  if (!access->replacement_decl)
376038fd1498Szrj 	    continue;
376138fd1498Szrj 	  gassign *stmt
376238fd1498Szrj 	    = gimple_build_assign (get_access_replacement (access),
376338fd1498Szrj 				   unshare_expr (access->expr));
376438fd1498Szrj 	  if (dump_file && (dump_flags & TDF_DETAILS))
376538fd1498Szrj 	    {
376638fd1498Szrj 	      fprintf (dump_file, "Generating constant initializer: ");
376738fd1498Szrj 	      print_gimple_stmt (dump_file, stmt, 0);
376838fd1498Szrj 	      fprintf (dump_file, "\n");
376938fd1498Szrj 	    }
377038fd1498Szrj 	  gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
377138fd1498Szrj 	  update_stmt (stmt);
377238fd1498Szrj 	}
377338fd1498Szrj     }
377438fd1498Szrj 
377538fd1498Szrj   seq = gsi_seq (gsi);
377638fd1498Szrj   if (seq)
377738fd1498Szrj     gsi_insert_seq_on_edge_immediate (
377838fd1498Szrj       single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
377938fd1498Szrj }
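
/* Illustrative only (decl and temporary names are hypothetical): a candidate
   constant-pool base may come from code such as

	int f (int i)
	{
	  int a[2] = { 1, 2 };
	  return a[i & 1];
	}

   where the initializer can be emitted as a *.LC0-style constant-pool decl.
   The loop above then adds entry-block statements of the form

	SR.1 = *.LC0[0];
	SR.2 = *.LC0[1];

   so that the scalar replacements start out holding the constant's value.  */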
378038fd1498Szrj 
378138fd1498Szrj /* Traverse the function body and make all modifications as decided in
378238fd1498Szrj    analyze_all_variable_accesses.  Return true iff the CFG has been
378338fd1498Szrj    changed.  */
378438fd1498Szrj 
378538fd1498Szrj static bool
378638fd1498Szrj sra_modify_function_body (void)
378738fd1498Szrj {
378838fd1498Szrj   bool cfg_changed = false;
378938fd1498Szrj   basic_block bb;
379038fd1498Szrj 
379138fd1498Szrj   initialize_constant_pool_replacements ();
379238fd1498Szrj 
379338fd1498Szrj   FOR_EACH_BB_FN (bb, cfun)
379438fd1498Szrj     {
379538fd1498Szrj       gimple_stmt_iterator gsi = gsi_start_bb (bb);
379638fd1498Szrj       while (!gsi_end_p (gsi))
379738fd1498Szrj 	{
379838fd1498Szrj 	  gimple *stmt = gsi_stmt (gsi);
379938fd1498Szrj 	  enum assignment_mod_result assign_result;
380038fd1498Szrj 	  bool modified = false, deleted = false;
380138fd1498Szrj 	  tree *t;
380238fd1498Szrj 	  unsigned i;
380338fd1498Szrj 
380438fd1498Szrj 	  switch (gimple_code (stmt))
380538fd1498Szrj 	    {
380638fd1498Szrj 	    case GIMPLE_RETURN:
380738fd1498Szrj 	      t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
380838fd1498Szrj 	      if (*t != NULL_TREE)
380938fd1498Szrj 		modified |= sra_modify_expr (t, &gsi, false);
381038fd1498Szrj 	      break;
381138fd1498Szrj 
381238fd1498Szrj 	    case GIMPLE_ASSIGN:
381338fd1498Szrj 	      assign_result = sra_modify_assign (stmt, &gsi);
381438fd1498Szrj 	      modified |= assign_result == SRA_AM_MODIFIED;
381538fd1498Szrj 	      deleted = assign_result == SRA_AM_REMOVED;
381638fd1498Szrj 	      break;
381738fd1498Szrj 
381838fd1498Szrj 	    case GIMPLE_CALL:
381938fd1498Szrj 	      /* Operands must be processed before the lhs.  */
382038fd1498Szrj 	      for (i = 0; i < gimple_call_num_args (stmt); i++)
382138fd1498Szrj 		{
382238fd1498Szrj 		  t = gimple_call_arg_ptr (stmt, i);
382338fd1498Szrj 		  modified |= sra_modify_expr (t, &gsi, false);
382438fd1498Szrj 		}
382538fd1498Szrj 
382638fd1498Szrj 	      if (gimple_call_lhs (stmt))
382738fd1498Szrj 		{
382838fd1498Szrj 		  t = gimple_call_lhs_ptr (stmt);
382938fd1498Szrj 		  modified |= sra_modify_expr (t, &gsi, true);
383038fd1498Szrj 		}
383138fd1498Szrj 	      break;
383238fd1498Szrj 
383338fd1498Szrj 	    case GIMPLE_ASM:
383438fd1498Szrj 	      {
383538fd1498Szrj 		gasm *asm_stmt = as_a <gasm *> (stmt);
383638fd1498Szrj 		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
383738fd1498Szrj 		  {
383838fd1498Szrj 		    t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
383938fd1498Szrj 		    modified |= sra_modify_expr (t, &gsi, false);
384038fd1498Szrj 		  }
384138fd1498Szrj 		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
384238fd1498Szrj 		  {
384338fd1498Szrj 		    t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
384438fd1498Szrj 		    modified |= sra_modify_expr (t, &gsi, true);
384538fd1498Szrj 		  }
384638fd1498Szrj 	      }
384738fd1498Szrj 	      break;
384838fd1498Szrj 
384938fd1498Szrj 	    default:
385038fd1498Szrj 	      break;
385138fd1498Szrj 	    }
385238fd1498Szrj 
385338fd1498Szrj 	  if (modified)
385438fd1498Szrj 	    {
385538fd1498Szrj 	      update_stmt (stmt);
385638fd1498Szrj 	      if (maybe_clean_eh_stmt (stmt)
385738fd1498Szrj 		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
385838fd1498Szrj 		cfg_changed = true;
385938fd1498Szrj 	    }
386038fd1498Szrj 	  if (!deleted)
386138fd1498Szrj 	    gsi_next (&gsi);
386238fd1498Szrj 	}
386338fd1498Szrj     }
386438fd1498Szrj 
386538fd1498Szrj   gsi_commit_edge_inserts ();
386638fd1498Szrj   return cfg_changed;
386738fd1498Szrj }
386838fd1498Szrj 
386938fd1498Szrj /* Generate statements initializing scalar replacements of parts of function
387038fd1498Szrj    parameters.  */
387138fd1498Szrj 
387238fd1498Szrj static void
387338fd1498Szrj initialize_parameter_reductions (void)
387438fd1498Szrj {
387538fd1498Szrj   gimple_stmt_iterator gsi;
387638fd1498Szrj   gimple_seq seq = NULL;
387738fd1498Szrj   tree parm;
387838fd1498Szrj 
387938fd1498Szrj   gsi = gsi_start (seq);
388038fd1498Szrj   for (parm = DECL_ARGUMENTS (current_function_decl);
388138fd1498Szrj        parm;
388238fd1498Szrj        parm = DECL_CHAIN (parm))
388338fd1498Szrj     {
388438fd1498Szrj       vec<access_p> *access_vec;
388538fd1498Szrj       struct access *access;
388638fd1498Szrj 
388738fd1498Szrj       if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
388838fd1498Szrj 	continue;
388938fd1498Szrj       access_vec = get_base_access_vector (parm);
389038fd1498Szrj       if (!access_vec)
389138fd1498Szrj 	continue;
389238fd1498Szrj 
389338fd1498Szrj       for (access = (*access_vec)[0];
389438fd1498Szrj 	   access;
389538fd1498Szrj 	   access = access->next_grp)
389638fd1498Szrj 	generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
389738fd1498Szrj 				 EXPR_LOCATION (parm));
389838fd1498Szrj     }
389938fd1498Szrj 
390038fd1498Szrj   seq = gsi_seq (gsi);
390138fd1498Szrj   if (seq)
390238fd1498Szrj     gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
390338fd1498Szrj }
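
/* Sketch of the effect (the SR_* names are hypothetical): for

	struct S { int a; int b; };
	int f (struct S s) { return s.a + s.b; }

   with the parameter scalarized, the sequence built here is inserted on the
   edge out of the entry block and looks roughly like

	SR_s_a = s.a;
	SR_s_b = s.b;

   so that every later use of the replacements sees the incoming parameter
   values.  */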
390438fd1498Szrj 
390538fd1498Szrj /* The "main" function of intraprocedural SRA passes.  Runs the analysis and if
390638fd1498Szrj    it reveals there are components of some aggregates to be scalarized, it runs
390738fd1498Szrj    the required transformations.  */
390838fd1498Szrj static unsigned int
390938fd1498Szrj perform_intra_sra (void)
391038fd1498Szrj {
391138fd1498Szrj   int ret = 0;
391238fd1498Szrj   sra_initialize ();
391338fd1498Szrj 
391438fd1498Szrj   if (!find_var_candidates ())
391538fd1498Szrj     goto out;
391638fd1498Szrj 
391738fd1498Szrj   if (!scan_function ())
391838fd1498Szrj     goto out;
391938fd1498Szrj 
392038fd1498Szrj   if (!analyze_all_variable_accesses ())
392138fd1498Szrj     goto out;
392238fd1498Szrj 
392338fd1498Szrj   if (sra_modify_function_body ())
392438fd1498Szrj     ret = TODO_update_ssa | TODO_cleanup_cfg;
392538fd1498Szrj   else
392638fd1498Szrj     ret = TODO_update_ssa;
392738fd1498Szrj   initialize_parameter_reductions ();
392838fd1498Szrj 
392938fd1498Szrj   statistics_counter_event (cfun, "Scalar replacements created",
393038fd1498Szrj 			    sra_stats.replacements);
393138fd1498Szrj   statistics_counter_event (cfun, "Modified expressions", sra_stats.exprs);
393238fd1498Szrj   statistics_counter_event (cfun, "Subtree copy stmts",
393338fd1498Szrj 			    sra_stats.subtree_copies);
393438fd1498Szrj   statistics_counter_event (cfun, "Subreplacement stmts",
393538fd1498Szrj 			    sra_stats.subreplacements);
393638fd1498Szrj   statistics_counter_event (cfun, "Deleted stmts", sra_stats.deleted);
393738fd1498Szrj   statistics_counter_event (cfun, "Separate LHS and RHS handling",
393838fd1498Szrj 			    sra_stats.separate_lhs_rhs_handling);
393938fd1498Szrj 
394038fd1498Szrj  out:
394138fd1498Szrj   sra_deinitialize ();
394238fd1498Szrj   return ret;
394338fd1498Szrj }
394438fd1498Szrj 
394538fd1498Szrj /* Perform early intraprocedural SRA.  */
394638fd1498Szrj static unsigned int
394738fd1498Szrj early_intra_sra (void)
394838fd1498Szrj {
394938fd1498Szrj   sra_mode = SRA_MODE_EARLY_INTRA;
395038fd1498Szrj   return perform_intra_sra ();
395138fd1498Szrj }
395238fd1498Szrj 
395338fd1498Szrj /* Perform "late" intraprocedural SRA.  */
395438fd1498Szrj static unsigned int
395538fd1498Szrj late_intra_sra (void)
395638fd1498Szrj {
395738fd1498Szrj   sra_mode = SRA_MODE_INTRA;
395838fd1498Szrj   return perform_intra_sra ();
395938fd1498Szrj }
396038fd1498Szrj 
396138fd1498Szrj 
396238fd1498Szrj static bool
396338fd1498Szrj gate_intra_sra (void)
396438fd1498Szrj {
396538fd1498Szrj   return flag_tree_sra != 0 && dbg_cnt (tree_sra);
396638fd1498Szrj }
396738fd1498Szrj 
396838fd1498Szrj 
396938fd1498Szrj namespace {
397038fd1498Szrj 
397138fd1498Szrj const pass_data pass_data_sra_early =
397238fd1498Szrj {
397338fd1498Szrj   GIMPLE_PASS, /* type */
397438fd1498Szrj   "esra", /* name */
397538fd1498Szrj   OPTGROUP_NONE, /* optinfo_flags */
397638fd1498Szrj   TV_TREE_SRA, /* tv_id */
397738fd1498Szrj   ( PROP_cfg | PROP_ssa ), /* properties_required */
397838fd1498Szrj   0, /* properties_provided */
397938fd1498Szrj   0, /* properties_destroyed */
398038fd1498Szrj   0, /* todo_flags_start */
398138fd1498Szrj   TODO_update_ssa, /* todo_flags_finish */
398238fd1498Szrj };
398338fd1498Szrj 
398438fd1498Szrj class pass_sra_early : public gimple_opt_pass
398538fd1498Szrj {
398638fd1498Szrj public:
398738fd1498Szrj   pass_sra_early (gcc::context *ctxt)
398838fd1498Szrj     : gimple_opt_pass (pass_data_sra_early, ctxt)
398938fd1498Szrj   {}
399038fd1498Szrj 
399138fd1498Szrj   /* opt_pass methods: */
399238fd1498Szrj   virtual bool gate (function *) { return gate_intra_sra (); }
399338fd1498Szrj   virtual unsigned int execute (function *) { return early_intra_sra (); }
399438fd1498Szrj 
399538fd1498Szrj }; // class pass_sra_early
399638fd1498Szrj 
399738fd1498Szrj } // anon namespace
399838fd1498Szrj 
399938fd1498Szrj gimple_opt_pass *
400038fd1498Szrj make_pass_sra_early (gcc::context *ctxt)
400138fd1498Szrj {
400238fd1498Szrj   return new pass_sra_early (ctxt);
400338fd1498Szrj }
400438fd1498Szrj 
400538fd1498Szrj namespace {
400638fd1498Szrj 
400738fd1498Szrj const pass_data pass_data_sra =
400838fd1498Szrj {
400938fd1498Szrj   GIMPLE_PASS, /* type */
401038fd1498Szrj   "sra", /* name */
401138fd1498Szrj   OPTGROUP_NONE, /* optinfo_flags */
401238fd1498Szrj   TV_TREE_SRA, /* tv_id */
401338fd1498Szrj   ( PROP_cfg | PROP_ssa ), /* properties_required */
401438fd1498Szrj   0, /* properties_provided */
401538fd1498Szrj   0, /* properties_destroyed */
401638fd1498Szrj   TODO_update_address_taken, /* todo_flags_start */
401738fd1498Szrj   TODO_update_ssa, /* todo_flags_finish */
401838fd1498Szrj };
401938fd1498Szrj 
402038fd1498Szrj class pass_sra : public gimple_opt_pass
402138fd1498Szrj {
402238fd1498Szrj public:
402338fd1498Szrj   pass_sra (gcc::context *ctxt)
402438fd1498Szrj     : gimple_opt_pass (pass_data_sra, ctxt)
402538fd1498Szrj   {}
402638fd1498Szrj 
402738fd1498Szrj   /* opt_pass methods: */
402838fd1498Szrj   virtual bool gate (function *) { return gate_intra_sra (); }
402938fd1498Szrj   virtual unsigned int execute (function *) { return late_intra_sra (); }
403038fd1498Szrj 
403138fd1498Szrj }; // class pass_sra
403238fd1498Szrj 
403338fd1498Szrj } // anon namespace
403438fd1498Szrj 
403538fd1498Szrj gimple_opt_pass *
403638fd1498Szrj make_pass_sra (gcc::context *ctxt)
403738fd1498Szrj {
403838fd1498Szrj   return new pass_sra (ctxt);
403938fd1498Szrj }
404038fd1498Szrj 
404138fd1498Szrj 
404238fd1498Szrj /* Return true iff PARM (which must be a parm_decl) is an unused scalar
404338fd1498Szrj    parameter.  */
404438fd1498Szrj 
404538fd1498Szrj static bool
404638fd1498Szrj is_unused_scalar_param (tree parm)
404738fd1498Szrj {
404838fd1498Szrj   tree name;
404938fd1498Szrj   return (is_gimple_reg (parm)
405038fd1498Szrj 	  && (!(name = ssa_default_def (cfun, parm))
405138fd1498Szrj 	      || has_zero_uses (name)));
405238fd1498Szrj }
405338fd1498Szrj 
405438fd1498Szrj /* Scan immediate uses of a default definition SSA name of a parameter PARM and
405538fd1498Szrj    examine whether there are any direct or otherwise infeasible ones.  If so,
405638fd1498Szrj    return true, otherwise return false.  PARM must be a gimple register with a
405738fd1498Szrj    non-NULL default definition.  */
405838fd1498Szrj 
405938fd1498Szrj static bool
406038fd1498Szrj ptr_parm_has_direct_uses (tree parm)
406138fd1498Szrj {
406238fd1498Szrj   imm_use_iterator ui;
406338fd1498Szrj   gimple *stmt;
406438fd1498Szrj   tree name = ssa_default_def (cfun, parm);
406538fd1498Szrj   bool ret = false;
406638fd1498Szrj 
406738fd1498Szrj   FOR_EACH_IMM_USE_STMT (stmt, ui, name)
406838fd1498Szrj     {
406938fd1498Szrj       int uses_ok = 0;
407038fd1498Szrj       use_operand_p use_p;
407138fd1498Szrj 
407238fd1498Szrj       if (is_gimple_debug (stmt))
407338fd1498Szrj 	continue;
407438fd1498Szrj 
407538fd1498Szrj       /* Valid uses include dereferences on the lhs and the rhs.  */
407638fd1498Szrj       if (gimple_has_lhs (stmt))
407738fd1498Szrj 	{
407838fd1498Szrj 	  tree lhs = gimple_get_lhs (stmt);
407938fd1498Szrj 	  while (handled_component_p (lhs))
408038fd1498Szrj 	    lhs = TREE_OPERAND (lhs, 0);
408138fd1498Szrj 	  if (TREE_CODE (lhs) == MEM_REF
408238fd1498Szrj 	      && TREE_OPERAND (lhs, 0) == name
408338fd1498Szrj 	      && integer_zerop (TREE_OPERAND (lhs, 1))
408438fd1498Szrj 	      && types_compatible_p (TREE_TYPE (lhs),
408538fd1498Szrj 				     TREE_TYPE (TREE_TYPE (name)))
408638fd1498Szrj 	      && !TREE_THIS_VOLATILE (lhs))
408738fd1498Szrj 	    uses_ok++;
408838fd1498Szrj 	}
408938fd1498Szrj       if (gimple_assign_single_p (stmt))
409038fd1498Szrj 	{
409138fd1498Szrj 	  tree rhs = gimple_assign_rhs1 (stmt);
409238fd1498Szrj 	  while (handled_component_p (rhs))
409338fd1498Szrj 	    rhs = TREE_OPERAND (rhs, 0);
409438fd1498Szrj 	  if (TREE_CODE (rhs) == MEM_REF
409538fd1498Szrj 	      && TREE_OPERAND (rhs, 0) == name
409638fd1498Szrj 	      && integer_zerop (TREE_OPERAND (rhs, 1))
409738fd1498Szrj 	      && types_compatible_p (TREE_TYPE (rhs),
409838fd1498Szrj 				     TREE_TYPE (TREE_TYPE (name)))
409938fd1498Szrj 	      && !TREE_THIS_VOLATILE (rhs))
410038fd1498Szrj 	    uses_ok++;
410138fd1498Szrj 	}
410238fd1498Szrj       else if (is_gimple_call (stmt))
410338fd1498Szrj 	{
410438fd1498Szrj 	  unsigned i;
410538fd1498Szrj 	  for (i = 0; i < gimple_call_num_args (stmt); ++i)
410638fd1498Szrj 	    {
410738fd1498Szrj 	      tree arg = gimple_call_arg (stmt, i);
410838fd1498Szrj 	      while (handled_component_p (arg))
410938fd1498Szrj 		arg = TREE_OPERAND (arg, 0);
411038fd1498Szrj 	      if (TREE_CODE (arg) == MEM_REF
411138fd1498Szrj 		  && TREE_OPERAND (arg, 0) == name
411238fd1498Szrj 		  && integer_zerop (TREE_OPERAND (arg, 1))
411338fd1498Szrj 		  && types_compatible_p (TREE_TYPE (arg),
411438fd1498Szrj 					 TREE_TYPE (TREE_TYPE (name)))
411538fd1498Szrj 		  && !TREE_THIS_VOLATILE (arg))
411638fd1498Szrj 		uses_ok++;
411738fd1498Szrj 	    }
411838fd1498Szrj 	}
411938fd1498Szrj 
412038fd1498Szrj       /* If the number of valid uses does not match the number of
412138fd1498Szrj          uses in this stmt there is an unhandled use.  */
412238fd1498Szrj       FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
412338fd1498Szrj 	--uses_ok;
412438fd1498Szrj 
412538fd1498Szrj       if (uses_ok != 0)
412638fd1498Szrj 	ret = true;
412738fd1498Szrj 
412838fd1498Szrj       if (ret)
412938fd1498Szrj 	BREAK_FROM_IMM_USE_STMT (ui);
413038fd1498Szrj     }
413138fd1498Szrj 
413238fd1498Szrj   return ret;
413338fd1498Szrj }
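
/* For illustration (hypothetical functions):

	int g (int *p) { return *p; }	   - only dereferenced, no direct use
	int *h (int *p) { return p; }	   - the pointer value itself escapes

   In g every use of the default definition is a MEM_REF that matches the
   checks above, so this function returns false; in h the use is not counted
   in uses_ok, so it returns true and the parameter is not considered a
   candidate for IPA-SRA.  */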
413438fd1498Szrj 
413538fd1498Szrj /* Identify candidates for reduction for IPA-SRA based on their type and mark
413638fd1498Szrj    them in candidate_bitmap.  Note that these do not necessarily include
413738fd1498Szrj    parameters which are unused and thus can be removed.  Return true iff any
413838fd1498Szrj    such candidate has been found.  */
413938fd1498Szrj 
414038fd1498Szrj static bool
414138fd1498Szrj find_param_candidates (void)
414238fd1498Szrj {
414338fd1498Szrj   tree parm;
414438fd1498Szrj   int count = 0;
414538fd1498Szrj   bool ret = false;
414638fd1498Szrj   const char *msg;
414738fd1498Szrj 
414838fd1498Szrj   for (parm = DECL_ARGUMENTS (current_function_decl);
414938fd1498Szrj        parm;
415038fd1498Szrj        parm = DECL_CHAIN (parm))
415138fd1498Szrj     {
415238fd1498Szrj       tree type = TREE_TYPE (parm);
415338fd1498Szrj       tree_node **slot;
415438fd1498Szrj 
415538fd1498Szrj       count++;
415638fd1498Szrj 
415738fd1498Szrj       if (TREE_THIS_VOLATILE (parm)
415838fd1498Szrj 	  || TREE_ADDRESSABLE (parm)
415938fd1498Szrj 	  || (!is_gimple_reg_type (type) && is_va_list_type (type)))
416038fd1498Szrj 	continue;
416138fd1498Szrj 
416238fd1498Szrj       if (is_unused_scalar_param (parm))
416338fd1498Szrj 	{
416438fd1498Szrj 	  ret = true;
416538fd1498Szrj 	  continue;
416638fd1498Szrj 	}
416738fd1498Szrj 
416838fd1498Szrj       if (POINTER_TYPE_P (type))
416938fd1498Szrj 	{
417038fd1498Szrj 	  type = TREE_TYPE (type);
417138fd1498Szrj 
417238fd1498Szrj 	  if (TREE_CODE (type) == FUNCTION_TYPE
417338fd1498Szrj 	      || TYPE_VOLATILE (type)
417438fd1498Szrj 	      || (TREE_CODE (type) == ARRAY_TYPE
417538fd1498Szrj 		  && TYPE_NONALIASED_COMPONENT (type))
417638fd1498Szrj 	      || !is_gimple_reg (parm)
417738fd1498Szrj 	      || is_va_list_type (type)
417838fd1498Szrj 	      || ptr_parm_has_direct_uses (parm))
417938fd1498Szrj 	    continue;
418038fd1498Szrj 	}
418138fd1498Szrj       else if (!AGGREGATE_TYPE_P (type))
418238fd1498Szrj 	continue;
418338fd1498Szrj 
418438fd1498Szrj       if (!COMPLETE_TYPE_P (type)
418538fd1498Szrj 	  || !tree_fits_uhwi_p (TYPE_SIZE (type))
418638fd1498Szrj           || tree_to_uhwi (TYPE_SIZE (type)) == 0
418738fd1498Szrj 	  || (AGGREGATE_TYPE_P (type)
418838fd1498Szrj 	      && type_internals_preclude_sra_p (type, &msg)))
418938fd1498Szrj 	continue;
419038fd1498Szrj 
419138fd1498Szrj       bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
419238fd1498Szrj       slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
419338fd1498Szrj       *slot = parm;
419438fd1498Szrj 
419538fd1498Szrj       ret = true;
419638fd1498Szrj       if (dump_file && (dump_flags & TDF_DETAILS))
419738fd1498Szrj 	{
419838fd1498Szrj 	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
419938fd1498Szrj 	  print_generic_expr (dump_file, parm);
420038fd1498Szrj 	  fprintf (dump_file, "\n");
420138fd1498Szrj 	}
420238fd1498Szrj     }
420338fd1498Szrj 
420438fd1498Szrj   func_param_count = count;
420538fd1498Szrj   return ret;
420638fd1498Szrj }
420738fd1498Szrj 
420838fd1498Szrj /* Callback of walk_aliased_vdefs, marks the access passed as DATA as
420938fd1498Szrj    maybe_modified. */
421038fd1498Szrj 
421138fd1498Szrj static bool
421238fd1498Szrj mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
421338fd1498Szrj 		     void *data)
421438fd1498Szrj {
421538fd1498Szrj   struct access *repr = (struct access *) data;
421638fd1498Szrj 
421738fd1498Szrj   repr->grp_maybe_modified = 1;
421838fd1498Szrj   return true;
421938fd1498Szrj }
422038fd1498Szrj 
422138fd1498Szrj /* Analyze what representatives (in linked lists accessible from
422238fd1498Szrj    REPRESENTATIVES) can be modified by side effects of statements in the
422338fd1498Szrj    current function.  */
422438fd1498Szrj 
422538fd1498Szrj static void
422638fd1498Szrj analyze_modified_params (vec<access_p> representatives)
422738fd1498Szrj {
422838fd1498Szrj   int i;
422938fd1498Szrj 
423038fd1498Szrj   for (i = 0; i < func_param_count; i++)
423138fd1498Szrj     {
423238fd1498Szrj       struct access *repr;
423338fd1498Szrj 
423438fd1498Szrj       for (repr = representatives[i];
423538fd1498Szrj 	   repr;
423638fd1498Szrj 	   repr = repr->next_grp)
423738fd1498Szrj 	{
423838fd1498Szrj 	  struct access *access;
423938fd1498Szrj 	  bitmap visited;
424038fd1498Szrj 	  ao_ref ar;
424138fd1498Szrj 
424238fd1498Szrj 	  if (no_accesses_p (repr))
424338fd1498Szrj 	    continue;
424438fd1498Szrj 	  if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
424538fd1498Szrj 	      || repr->grp_maybe_modified)
424638fd1498Szrj 	    continue;
424738fd1498Szrj 
424838fd1498Szrj 	  ao_ref_init (&ar, repr->expr);
424938fd1498Szrj 	  visited = BITMAP_ALLOC (NULL);
425038fd1498Szrj 	  for (access = repr; access; access = access->next_sibling)
425138fd1498Szrj 	    {
425238fd1498Szrj 	      /* All accesses are read ones, otherwise grp_maybe_modified would
425338fd1498Szrj 		 be trivially set.  */
425438fd1498Szrj 	      walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
425538fd1498Szrj 				  mark_maybe_modified, repr, &visited);
425638fd1498Szrj 	      if (repr->grp_maybe_modified)
425738fd1498Szrj 		break;
425838fd1498Szrj 	    }
425938fd1498Szrj 	  BITMAP_FREE (visited);
426038fd1498Szrj 	}
426138fd1498Szrj     }
426238fd1498Szrj }
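
/* A hedged example of what this walk catches (hypothetical code):

	int *global;
	void touch (void);
	int f (int *p) { touch (); return *p; }

   If the alias oracle cannot prove that the call to touch does not store
   through something aliasing *p, walk_aliased_vdefs reaches that potential
   store, mark_maybe_modified sets grp_maybe_modified, and the representative
   is no longer treated as an unmodified by-reference scalar.  */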
426338fd1498Szrj 
426438fd1498Szrj /* Propagate distances in bb_dereferences in the opposite direction than the
426538fd1498Szrj    control flow edges, in each step storing the maximum of the current value
426638fd1498Szrj    and the minimum of all successors.  These steps are repeated until the table
426738fd1498Szrj    stabilizes.  Note that BBs which might terminate the function (according to
426838fd1498Szrj    the final_bbs bitmap) are never updated in this way.  */
426938fd1498Szrj 
427038fd1498Szrj static void
427138fd1498Szrj propagate_dereference_distances (void)
427238fd1498Szrj {
427338fd1498Szrj   basic_block bb;
427438fd1498Szrj 
427538fd1498Szrj   auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
427638fd1498Szrj   queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
427738fd1498Szrj   FOR_EACH_BB_FN (bb, cfun)
427838fd1498Szrj     {
427938fd1498Szrj       queue.quick_push (bb);
428038fd1498Szrj       bb->aux = bb;
428138fd1498Szrj     }
428238fd1498Szrj 
428338fd1498Szrj   while (!queue.is_empty ())
428438fd1498Szrj     {
428538fd1498Szrj       edge_iterator ei;
428638fd1498Szrj       edge e;
428738fd1498Szrj       bool change = false;
428838fd1498Szrj       int i;
428938fd1498Szrj 
429038fd1498Szrj       bb = queue.pop ();
429138fd1498Szrj       bb->aux = NULL;
429238fd1498Szrj 
429338fd1498Szrj       if (bitmap_bit_p (final_bbs, bb->index))
429438fd1498Szrj 	continue;
429538fd1498Szrj 
429638fd1498Szrj       for (i = 0; i < func_param_count; i++)
429738fd1498Szrj 	{
429838fd1498Szrj 	  int idx = bb->index * func_param_count + i;
429938fd1498Szrj 	  bool first = true;
430038fd1498Szrj 	  HOST_WIDE_INT inh = 0;
430138fd1498Szrj 
430238fd1498Szrj 	  FOR_EACH_EDGE (e, ei, bb->succs)
430338fd1498Szrj 	  {
430438fd1498Szrj 	    int succ_idx = e->dest->index * func_param_count + i;
430538fd1498Szrj 
430638fd1498Szrj 	    if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
430738fd1498Szrj 	      continue;
430838fd1498Szrj 
430938fd1498Szrj 	    if (first)
431038fd1498Szrj 	      {
431138fd1498Szrj 		first = false;
431238fd1498Szrj 		inh = bb_dereferences [succ_idx];
431338fd1498Szrj 	      }
431438fd1498Szrj 	    else if (bb_dereferences [succ_idx] < inh)
431538fd1498Szrj 	      inh = bb_dereferences [succ_idx];
431638fd1498Szrj 	  }
431738fd1498Szrj 
431838fd1498Szrj 	  if (!first && bb_dereferences[idx] < inh)
431938fd1498Szrj 	    {
432038fd1498Szrj 	      bb_dereferences[idx] = inh;
432138fd1498Szrj 	      change = true;
432238fd1498Szrj 	    }
432338fd1498Szrj 	}
432438fd1498Szrj 
432538fd1498Szrj       if (change && !bitmap_bit_p (final_bbs, bb->index))
432638fd1498Szrj 	FOR_EACH_EDGE (e, ei, bb->preds)
432738fd1498Szrj 	  {
432838fd1498Szrj 	    if (e->src->aux)
432938fd1498Szrj 	      continue;
433038fd1498Szrj 
433138fd1498Szrj 	    e->src->aux = e->src;
433238fd1498Szrj 	    queue.quick_push (e->src);
433338fd1498Szrj 	  }
433438fd1498Szrj     }
433538fd1498Szrj }
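
/* A small worked instance of the propagation (made-up distances), for one
   pointer parameter and a branch A -> {B, C}: if B certainly dereferences
   the first 8 units and C the first 4, while A itself dereferences nothing,
   then processing A sets

	bb_dereferences[A] = MAX (0, MIN (8, 4)) = 4

   i.e. at least 4 units are dereferenced on every path out of A.  Blocks in
   final_bbs keep their own value and never inherit from successors.  */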
433638fd1498Szrj 
433738fd1498Szrj /* Dump a dereferences TABLE with heading STR to file F.  */
433838fd1498Szrj 
433938fd1498Szrj static void
434038fd1498Szrj dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
434138fd1498Szrj {
434238fd1498Szrj   basic_block bb;
434338fd1498Szrj 
434438fd1498Szrj   fprintf (f, "%s", str);
434538fd1498Szrj   FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
434638fd1498Szrj 		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
434738fd1498Szrj     {
434838fd1498Szrj       fprintf (f, "%4i  %i   ", bb->index, bitmap_bit_p (final_bbs, bb->index));
434938fd1498Szrj       if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
435038fd1498Szrj 	{
435138fd1498Szrj 	  int i;
435238fd1498Szrj 	  for (i = 0; i < func_param_count; i++)
435338fd1498Szrj 	    {
435438fd1498Szrj 	      int idx = bb->index * func_param_count + i;
435538fd1498Szrj 	      fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
435638fd1498Szrj 	    }
435738fd1498Szrj 	}
435838fd1498Szrj       fprintf (f, "\n");
435938fd1498Szrj     }
436038fd1498Szrj   fprintf (f, "\n");
436138fd1498Szrj }
436238fd1498Szrj 
436338fd1498Szrj /* Determine which (parts of) parameters passed by reference that are not
436438fd1498Szrj    assigned to are not certainly dereferenced in this function, and thus the
436538fd1498Szrj    dereferencing cannot be safely moved to the caller without potentially
436638fd1498Szrj    introducing a segfault.  Mark such REPRESENTATIVES as
436738fd1498Szrj    grp_not_necessarilly_dereferenced.
436838fd1498Szrj 
436938fd1498Szrj    The dereferenced maximum "distance," i.e. the offset + size of the accessed
437038fd1498Szrj    part is calculated rather than simple booleans are calculated for each
437138fd1498Szrj    pointer parameter to handle cases when only a fraction of the whole
437238fd1498Szrj    aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
437338fd1498Szrj    an example).
437438fd1498Szrj 
437538fd1498Szrj    The maximum dereference distances for each pointer parameter and BB are
437638fd1498Szrj    already stored in bb_dereference.  This routine simply propagates these
437738fd1498Szrj    values upwards by propagate_dereference_distances and then compares the
437838fd1498Szrj    distances of individual parameters in the ENTRY BB to the equivalent
437938fd1498Szrj    distances of each representative of a (fraction of a) parameter.  */
438038fd1498Szrj 
438138fd1498Szrj static void
438238fd1498Szrj analyze_caller_dereference_legality (vec<access_p> representatives)
438338fd1498Szrj {
438438fd1498Szrj   int i;
438538fd1498Szrj 
438638fd1498Szrj   if (dump_file && (dump_flags & TDF_DETAILS))
438738fd1498Szrj     dump_dereferences_table (dump_file,
438838fd1498Szrj 			     "Dereference table before propagation:\n",
438938fd1498Szrj 			     bb_dereferences);
439038fd1498Szrj 
439138fd1498Szrj   propagate_dereference_distances ();
439238fd1498Szrj 
439338fd1498Szrj   if (dump_file && (dump_flags & TDF_DETAILS))
439438fd1498Szrj     dump_dereferences_table (dump_file,
439538fd1498Szrj 			     "Dereference table after propagation:\n",
439638fd1498Szrj 			     bb_dereferences);
439738fd1498Szrj 
439838fd1498Szrj   for (i = 0; i < func_param_count; i++)
439938fd1498Szrj     {
440038fd1498Szrj       struct access *repr = representatives[i];
440138fd1498Szrj       int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;
440238fd1498Szrj 
440338fd1498Szrj       if (!repr || no_accesses_p (repr))
440438fd1498Szrj 	continue;
440538fd1498Szrj 
440638fd1498Szrj       do
440738fd1498Szrj 	{
440838fd1498Szrj 	  if ((repr->offset + repr->size) > bb_dereferences[idx])
440938fd1498Szrj 	    repr->grp_not_necessarilly_dereferenced = 1;
441038fd1498Szrj 	  repr = repr->next_grp;
441138fd1498Szrj 	}
441238fd1498Szrj       while (repr);
441338fd1498Szrj     }
441438fd1498Szrj }
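
/* Source-level intuition for this check (illustrative only):

	int f (int *p, int flag) { return flag ? *p : 0; }

   Here *p is not dereferenced on the flag == 0 path, so the distance recorded
   for the entry block stays at 0, repr->offset + repr->size exceeds it and
   grp_not_necessarilly_dereferenced is set; the caller is therefore never
   asked to load *p unconditionally, which could fault.  */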
441538fd1498Szrj 
441638fd1498Szrj /* Return the representative access for the parameter declaration PARM if it is
441738fd1498Szrj    a scalar passed by reference which is not written to and the pointer value
441838fd1498Szrj    is not used directly.  Thus, if it is legal to dereference it in the caller
441938fd1498Szrj    and we can rule out modifications through aliases, such parameter should be
442038fd1498Szrj    turned into one passed by value.  Return NULL otherwise.  */
442138fd1498Szrj 
442238fd1498Szrj static struct access *
442338fd1498Szrj unmodified_by_ref_scalar_representative (tree parm)
442438fd1498Szrj {
442538fd1498Szrj   int i, access_count;
442638fd1498Szrj   struct access *repr;
442738fd1498Szrj   vec<access_p> *access_vec;
442838fd1498Szrj 
442938fd1498Szrj   access_vec = get_base_access_vector (parm);
443038fd1498Szrj   gcc_assert (access_vec);
443138fd1498Szrj   repr = (*access_vec)[0];
443238fd1498Szrj   if (repr->write)
443338fd1498Szrj     return NULL;
443438fd1498Szrj   repr->group_representative = repr;
443538fd1498Szrj 
443638fd1498Szrj   access_count = access_vec->length ();
443738fd1498Szrj   for (i = 1; i < access_count; i++)
443838fd1498Szrj     {
443938fd1498Szrj       struct access *access = (*access_vec)[i];
444038fd1498Szrj       if (access->write)
444138fd1498Szrj 	return NULL;
444238fd1498Szrj       access->group_representative = repr;
444338fd1498Szrj       access->next_sibling = repr->next_sibling;
444438fd1498Szrj       repr->next_sibling = access;
444538fd1498Szrj     }
444638fd1498Szrj 
444738fd1498Szrj   repr->grp_read = 1;
444838fd1498Szrj   repr->grp_scalar_ptr = 1;
444938fd1498Szrj   return repr;
445038fd1498Szrj }
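
/* The transformation this representative ultimately enables, sketched at the
   source level (names illustrative):

	int f (const int *p) { return *p + 1; }
	  becomes
	int f (int p_val) { return p_val + 1; }

   provided the analyses above also show that the pointee cannot be changed
   through aliases and that dereferencing it in the callers is safe.  */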
445138fd1498Szrj 
445238fd1498Szrj /* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
445338fd1498Szrj    associated with.  REQ_ALIGN is the minimum required alignment.  */
445438fd1498Szrj 
445538fd1498Szrj static bool
445638fd1498Szrj access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
445738fd1498Szrj {
445838fd1498Szrj   unsigned int exp_align;
445938fd1498Szrj   /* Avoid issues such as the second simple testcase in PR 42025.  The problem
446038fd1498Szrj      is an incompatible assignment in a call statement (and possibly even in asm
446138fd1498Szrj      statements).  This can be relaxed by using a new temporary but only for
446238fd1498Szrj      non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
446338fd1498Szrj      intraprocedural SRA we deal with this by keeping the old aggregate around,
446438fd1498Szrj      something we cannot do in IPA-SRA.)  */
446538fd1498Szrj   if (access->write
446638fd1498Szrj       && (is_gimple_call (access->stmt)
446738fd1498Szrj 	  || gimple_code (access->stmt) == GIMPLE_ASM))
446838fd1498Szrj     return true;
446938fd1498Szrj 
447038fd1498Szrj   exp_align = get_object_alignment (access->expr);
447138fd1498Szrj   if (exp_align < req_align)
447238fd1498Szrj     return true;
447338fd1498Szrj 
447438fd1498Szrj   return false;
447538fd1498Szrj }
447638fd1498Szrj 
447738fd1498Szrj 
447838fd1498Szrj /* Sort collected accesses for parameter PARM, identify representatives for
447938fd1498Szrj    each accessed region and link them together.  Return NULL if there are
448038fd1498Szrj    different but overlapping accesses, return the special ptr value meaning
448138fd1498Szrj    there are no accesses for this parameter if that is the case and return the
448238fd1498Szrj    first representative otherwise.  Set *RO_GRP if there is a group of accesses
448338fd1498Szrj    with only read (i.e. no write) accesses.  */
448438fd1498Szrj 
448538fd1498Szrj static struct access *
448638fd1498Szrj splice_param_accesses (tree parm, bool *ro_grp)
448738fd1498Szrj {
448838fd1498Szrj   int i, j, access_count, group_count;
448938fd1498Szrj   int total_size = 0;
449038fd1498Szrj   struct access *access, *res, **prev_acc_ptr = &res;
449138fd1498Szrj   vec<access_p> *access_vec;
449238fd1498Szrj 
449338fd1498Szrj   access_vec = get_base_access_vector (parm);
449438fd1498Szrj   if (!access_vec)
449538fd1498Szrj     return &no_accesses_representant;
449638fd1498Szrj   access_count = access_vec->length ();
449738fd1498Szrj 
449838fd1498Szrj   access_vec->qsort (compare_access_positions);
449938fd1498Szrj 
450038fd1498Szrj   i = 0;
450138fd1498Szrj   total_size = 0;
450238fd1498Szrj   group_count = 0;
450338fd1498Szrj   while (i < access_count)
450438fd1498Szrj     {
450538fd1498Szrj       bool modification;
450638fd1498Szrj       tree a1_alias_type;
450738fd1498Szrj       access = (*access_vec)[i];
450838fd1498Szrj       modification = access->write;
450938fd1498Szrj       if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
451038fd1498Szrj 	return NULL;
451138fd1498Szrj       a1_alias_type = reference_alias_ptr_type (access->expr);
451238fd1498Szrj 
451338fd1498Szrj       /* Access is about to become group representative unless we find some
451438fd1498Szrj 	 nasty overlap which would preclude us from breaking this parameter
451538fd1498Szrj 	 apart. */
451638fd1498Szrj 
451738fd1498Szrj       j = i + 1;
451838fd1498Szrj       while (j < access_count)
451938fd1498Szrj 	{
452038fd1498Szrj 	  struct access *ac2 = (*access_vec)[j];
452138fd1498Szrj 	  if (ac2->offset != access->offset)
452238fd1498Szrj 	    {
452338fd1498Szrj 	      /* All or nothing law for parameters. */
452438fd1498Szrj 	      if (access->offset + access->size > ac2->offset)
452538fd1498Szrj 		return NULL;
452638fd1498Szrj 	      else
452738fd1498Szrj 		break;
452838fd1498Szrj 	    }
452938fd1498Szrj 	  else if (ac2->size != access->size)
453038fd1498Szrj 	    return NULL;
453138fd1498Szrj 
453238fd1498Szrj 	  if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
453338fd1498Szrj 	      || (ac2->type != access->type
453438fd1498Szrj 		  && (TREE_ADDRESSABLE (ac2->type)
453538fd1498Szrj 		      || TREE_ADDRESSABLE (access->type)))
453638fd1498Szrj 	      || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
453738fd1498Szrj 	    return NULL;
453838fd1498Szrj 
453938fd1498Szrj 	  modification |= ac2->write;
454038fd1498Szrj 	  ac2->group_representative = access;
454138fd1498Szrj 	  ac2->next_sibling = access->next_sibling;
454238fd1498Szrj 	  access->next_sibling = ac2;
454338fd1498Szrj 	  j++;
454438fd1498Szrj 	}
454538fd1498Szrj 
454638fd1498Szrj       group_count++;
454738fd1498Szrj       access->grp_maybe_modified = modification;
454838fd1498Szrj       if (!modification)
454938fd1498Szrj 	*ro_grp = true;
455038fd1498Szrj       *prev_acc_ptr = access;
455138fd1498Szrj       prev_acc_ptr = &access->next_grp;
455238fd1498Szrj       total_size += access->size;
455338fd1498Szrj       i = j;
455438fd1498Szrj     }
455538fd1498Szrj 
455638fd1498Szrj   gcc_assert (group_count > 0);
455738fd1498Szrj   return res;
455838fd1498Szrj }
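
/* Illustrative grouping (hypothetical accesses):

	struct S { int a; int b; };
	int f (struct S *p) { return p->a + p->a + p->b; }

   The two p->a reads share offset and size and are spliced into one group
   under a single representative, p->b forms a second group, and the groups
   are chained through next_grp.  An additional access covering the whole *p
   at offset 0 would overlap both fields and make this function return
   NULL.  */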
455938fd1498Szrj 
456038fd1498Szrj /* Decide whether parameters with representative accesses given by REPR should
456138fd1498Szrj    be reduced into components.  */
456238fd1498Szrj 
456338fd1498Szrj static int
456438fd1498Szrj decide_one_param_reduction (struct access *repr)
456538fd1498Szrj {
456638fd1498Szrj   HOST_WIDE_INT total_size, cur_parm_size;
456738fd1498Szrj   bool by_ref;
456838fd1498Szrj   tree parm;
456938fd1498Szrj 
457038fd1498Szrj   parm = repr->base;
457138fd1498Szrj   cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
457238fd1498Szrj   gcc_assert (cur_parm_size > 0);
457338fd1498Szrj 
457438fd1498Szrj   if (POINTER_TYPE_P (TREE_TYPE (parm)))
457538fd1498Szrj     by_ref = true;
457638fd1498Szrj   else
457738fd1498Szrj     by_ref = false;
457838fd1498Szrj 
457938fd1498Szrj   if (dump_file)
458038fd1498Szrj     {
458138fd1498Szrj       struct access *acc;
458238fd1498Szrj       fprintf (dump_file, "Evaluating PARAM group sizes for ");
458338fd1498Szrj       print_generic_expr (dump_file, parm);
458438fd1498Szrj       fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
458538fd1498Szrj       for (acc = repr; acc; acc = acc->next_grp)
458638fd1498Szrj 	dump_access (dump_file, acc, true);
458738fd1498Szrj     }
458838fd1498Szrj 
458938fd1498Szrj   total_size = 0;
459038fd1498Szrj   int new_param_count = 0;
459138fd1498Szrj 
459238fd1498Szrj   for (; repr; repr = repr->next_grp)
459338fd1498Szrj     {
459438fd1498Szrj       gcc_assert (parm == repr->base);
459538fd1498Szrj 
459638fd1498Szrj       /* Taking the address of a non-addressable field is verboten.  */
459738fd1498Szrj       if (by_ref && repr->non_addressable)
459838fd1498Szrj 	return 0;
459938fd1498Szrj 
460038fd1498Szrj       /* Do not decompose a non-BLKmode param in a way that would
460138fd1498Szrj          create BLKmode params.  Especially for by-reference passing
460238fd1498Szrj 	 (thus, pointer-type param) this is hardly worthwhile.  */
460338fd1498Szrj       if (DECL_MODE (parm) != BLKmode
460438fd1498Szrj 	  && TYPE_MODE (repr->type) == BLKmode)
460538fd1498Szrj 	return 0;
460638fd1498Szrj 
460738fd1498Szrj       if (!by_ref || (!repr->grp_maybe_modified
460838fd1498Szrj 		      && !repr->grp_not_necessarilly_dereferenced))
460938fd1498Szrj 	total_size += repr->size;
461038fd1498Szrj       else
461138fd1498Szrj 	total_size += cur_parm_size;
461238fd1498Szrj 
461338fd1498Szrj       new_param_count++;
461438fd1498Szrj     }
461538fd1498Szrj 
461638fd1498Szrj   gcc_assert (new_param_count > 0);
461738fd1498Szrj 
461838fd1498Szrj   if (!by_ref)
461938fd1498Szrj     {
462038fd1498Szrj       if (total_size >= cur_parm_size)
462138fd1498Szrj 	return 0;
462238fd1498Szrj     }
462338fd1498Szrj   else
462438fd1498Szrj     {
462538fd1498Szrj       int parm_num_limit;
462638fd1498Szrj       if (optimize_function_for_size_p (cfun))
462738fd1498Szrj 	parm_num_limit = 1;
462838fd1498Szrj       else
462938fd1498Szrj 	parm_num_limit = PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR);
463038fd1498Szrj 
463138fd1498Szrj       if (new_param_count > parm_num_limit
463238fd1498Szrj 	  || total_size > (parm_num_limit * cur_parm_size))
463338fd1498Szrj 	return 0;
463438fd1498Szrj     }
463538fd1498Szrj 
463638fd1498Szrj   if (dump_file)
463738fd1498Szrj     fprintf (dump_file, "    ....will be split into %i components\n",
463838fd1498Szrj 	     new_param_count);
463938fd1498Szrj   return new_param_count;
464038fd1498Szrj }
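
/* A worked instance of the size check (made-up layout): a by-value struct
   parameter with TYPE_SIZE of 128 bits of which only two 32-bit fields are
   accessed gives total_size == 64 < 128, so it is split into two components.
   For a by-reference parameter the totals are instead compared against the
   parameter (pointer) size multiplied by the limit chosen above, i.e.
   --param ipa-sra-ptr-growth-factor, or 1 when optimizing for size.  */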
464138fd1498Szrj 
464238fd1498Szrj /* The order of the following enums is important, we need to do extra work for
464338fd1498Szrj    UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
464438fd1498Szrj enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
464538fd1498Szrj 			  MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
464638fd1498Szrj 
464738fd1498Szrj /* Identify representatives of all accesses to all candidate parameters for
464838fd1498Szrj    IPA-SRA.  Return result based on what representatives have been found. */
464938fd1498Szrj 
465038fd1498Szrj static enum ipa_splicing_result
465138fd1498Szrj splice_all_param_accesses (vec<access_p> &representatives)
465238fd1498Szrj {
465338fd1498Szrj   enum ipa_splicing_result result = NO_GOOD_ACCESS;
465438fd1498Szrj   tree parm;
465538fd1498Szrj   struct access *repr;
465638fd1498Szrj 
465738fd1498Szrj   representatives.create (func_param_count);
465838fd1498Szrj 
465938fd1498Szrj   for (parm = DECL_ARGUMENTS (current_function_decl);
466038fd1498Szrj        parm;
466138fd1498Szrj        parm = DECL_CHAIN (parm))
466238fd1498Szrj     {
466338fd1498Szrj       if (is_unused_scalar_param (parm))
466438fd1498Szrj 	{
466538fd1498Szrj 	  representatives.quick_push (&no_accesses_representant);
466638fd1498Szrj 	  if (result == NO_GOOD_ACCESS)
466738fd1498Szrj 	    result = UNUSED_PARAMS;
466838fd1498Szrj 	}
466938fd1498Szrj       else if (POINTER_TYPE_P (TREE_TYPE (parm))
467038fd1498Szrj 	       && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
467138fd1498Szrj 	       && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
467238fd1498Szrj 	{
467338fd1498Szrj 	  repr = unmodified_by_ref_scalar_representative (parm);
467438fd1498Szrj 	  representatives.quick_push (repr);
467538fd1498Szrj 	  if (repr)
467638fd1498Szrj 	    result = UNMODIF_BY_REF_ACCESSES;
467738fd1498Szrj 	}
467838fd1498Szrj       else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
467938fd1498Szrj 	{
468038fd1498Szrj 	  bool ro_grp = false;
468138fd1498Szrj 	  repr = splice_param_accesses (parm, &ro_grp);
468238fd1498Szrj 	  representatives.quick_push (repr);
468338fd1498Szrj 
468438fd1498Szrj 	  if (repr && !no_accesses_p (repr))
468538fd1498Szrj 	    {
468638fd1498Szrj 	      if (POINTER_TYPE_P (TREE_TYPE (parm)))
468738fd1498Szrj 		{
468838fd1498Szrj 		  if (ro_grp)
468938fd1498Szrj 		    result = UNMODIF_BY_REF_ACCESSES;
469038fd1498Szrj 		  else if (result < MODIF_BY_REF_ACCESSES)
469138fd1498Szrj 		    result = MODIF_BY_REF_ACCESSES;
469238fd1498Szrj 		}
469338fd1498Szrj 	      else if (result < BY_VAL_ACCESSES)
469438fd1498Szrj 		result = BY_VAL_ACCESSES;
469538fd1498Szrj 	    }
469638fd1498Szrj 	  else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
469738fd1498Szrj 	    result = UNUSED_PARAMS;
469838fd1498Szrj 	}
469938fd1498Szrj       else
470038fd1498Szrj 	representatives.quick_push (NULL);
470138fd1498Szrj     }
470238fd1498Szrj 
470338fd1498Szrj   if (result == NO_GOOD_ACCESS)
470438fd1498Szrj     {
470538fd1498Szrj       representatives.release ();
470638fd1498Szrj       return NO_GOOD_ACCESS;
470738fd1498Szrj     }
470838fd1498Szrj 
470938fd1498Szrj   return result;
471038fd1498Szrj }
471138fd1498Szrj 
471238fd1498Szrj /* Return the index of BASE in PARMS.  Abort if it is not found.  */
471338fd1498Szrj 
471438fd1498Szrj static inline int
471538fd1498Szrj get_param_index (tree base, vec<tree> parms)
471638fd1498Szrj {
471738fd1498Szrj   int i, len;
471838fd1498Szrj 
471938fd1498Szrj   len = parms.length ();
472038fd1498Szrj   for (i = 0; i < len; i++)
472138fd1498Szrj     if (parms[i] == base)
472238fd1498Szrj       return i;
472338fd1498Szrj   gcc_unreachable ();
472438fd1498Szrj }
472538fd1498Szrj 
472638fd1498Szrj /* Convert the decisions made at the representative level into compact
472738fd1498Szrj    parameter adjustments.  REPRESENTATIVES are pointers to first
472838fd1498Szrj    representatives of each param accesses, ADJUSTMENTS_COUNT is the expected
472938fd1498Szrj    final number of adjustments.  */
473038fd1498Szrj 
473138fd1498Szrj static ipa_parm_adjustment_vec
473238fd1498Szrj turn_representatives_into_adjustments (vec<access_p> representatives,
473338fd1498Szrj 				       int adjustments_count)
473438fd1498Szrj {
473538fd1498Szrj   vec<tree> parms;
473638fd1498Szrj   ipa_parm_adjustment_vec adjustments;
473738fd1498Szrj   tree parm;
473838fd1498Szrj   int i;
473938fd1498Szrj 
474038fd1498Szrj   gcc_assert (adjustments_count > 0);
474138fd1498Szrj   parms = ipa_get_vector_of_formal_parms (current_function_decl);
474238fd1498Szrj   adjustments.create (adjustments_count);
474338fd1498Szrj   parm = DECL_ARGUMENTS (current_function_decl);
474438fd1498Szrj   for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
474538fd1498Szrj     {
474638fd1498Szrj       struct access *repr = representatives[i];
474738fd1498Szrj 
474838fd1498Szrj       if (!repr || no_accesses_p (repr))
474938fd1498Szrj 	{
475038fd1498Szrj 	  struct ipa_parm_adjustment adj;
475138fd1498Szrj 
475238fd1498Szrj 	  memset (&adj, 0, sizeof (adj));
475338fd1498Szrj 	  adj.base_index = get_param_index (parm, parms);
475438fd1498Szrj 	  adj.base = parm;
475538fd1498Szrj 	  if (!repr)
475638fd1498Szrj 	    adj.op = IPA_PARM_OP_COPY;
475738fd1498Szrj 	  else
475838fd1498Szrj 	    adj.op = IPA_PARM_OP_REMOVE;
475938fd1498Szrj 	  adj.arg_prefix = "ISRA";
476038fd1498Szrj 	  adjustments.quick_push (adj);
476138fd1498Szrj 	}
476238fd1498Szrj       else
476338fd1498Szrj 	{
476438fd1498Szrj 	  struct ipa_parm_adjustment adj;
476538fd1498Szrj 	  int index = get_param_index (parm, parms);
476638fd1498Szrj 
476738fd1498Szrj 	  for (; repr; repr = repr->next_grp)
476838fd1498Szrj 	    {
476938fd1498Szrj 	      memset (&adj, 0, sizeof (adj));
477038fd1498Szrj 	      gcc_assert (repr->base == parm);
477138fd1498Szrj 	      adj.base_index = index;
477238fd1498Szrj 	      adj.base = repr->base;
477338fd1498Szrj 	      adj.type = repr->type;
477438fd1498Szrj 	      adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
477538fd1498Szrj 	      adj.offset = repr->offset;
477638fd1498Szrj 	      adj.reverse = repr->reverse;
477738fd1498Szrj 	      adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
477838fd1498Szrj 			    && (repr->grp_maybe_modified
477938fd1498Szrj 				|| repr->grp_not_necessarilly_dereferenced));
478038fd1498Szrj 	      adj.arg_prefix = "ISRA";
478138fd1498Szrj 	      adjustments.quick_push (adj);
478238fd1498Szrj 	    }
478338fd1498Szrj 	}
478438fd1498Szrj     }
478538fd1498Szrj   parms.release ();
478638fd1498Szrj   return adjustments;
478738fd1498Szrj }
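
/* Illustration on a hypothetical function (names invented): for

     static int f (int unused, struct S *p);   // S has a leading int field x

   where UNUSED has no accesses at all and P is only ever read through p->x,
   the vector built above would contain one IPA_PARM_OP_REMOVE entry for
   UNUSED and one new entry for P with type int and offset 0; by_ref stays
   false unless the pointee may be modified or the pointer is not necessarily
   dereferenced, in which case the replacement remains a pointer.  */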
478838fd1498Szrj 
478938fd1498Szrj /* Analyze the collected accesses and produce a plan for what to do with the
479038fd1498Szrj    parameters in the form of adjustments; NULL means nothing is to be done.  */
479138fd1498Szrj 
479238fd1498Szrj static ipa_parm_adjustment_vec
479338fd1498Szrj analyze_all_param_acesses (void)
479438fd1498Szrj {
479538fd1498Szrj   enum ipa_splicing_result repr_state;
479638fd1498Szrj   bool proceed = false;
479738fd1498Szrj   int i, adjustments_count = 0;
479838fd1498Szrj   vec<access_p> representatives;
479938fd1498Szrj   ipa_parm_adjustment_vec adjustments;
480038fd1498Szrj 
480138fd1498Szrj   repr_state = splice_all_param_accesses (representatives);
480238fd1498Szrj   if (repr_state == NO_GOOD_ACCESS)
480338fd1498Szrj     return ipa_parm_adjustment_vec ();
480438fd1498Szrj 
480538fd1498Szrj   /* If there are any parameters passed by reference which are not modified
480638fd1498Szrj      directly, we need to check whether they can be modified indirectly.  */
480738fd1498Szrj   if (repr_state == UNMODIF_BY_REF_ACCESSES)
480838fd1498Szrj     {
480938fd1498Szrj       analyze_caller_dereference_legality (representatives);
481038fd1498Szrj       analyze_modified_params (representatives);
481138fd1498Szrj     }
481238fd1498Szrj 
481338fd1498Szrj   for (i = 0; i < func_param_count; i++)
481438fd1498Szrj     {
481538fd1498Szrj       struct access *repr = representatives[i];
481638fd1498Szrj 
481738fd1498Szrj       if (repr && !no_accesses_p (repr))
481838fd1498Szrj 	{
481938fd1498Szrj 	  if (repr->grp_scalar_ptr)
482038fd1498Szrj 	    {
482138fd1498Szrj 	      adjustments_count++;
482238fd1498Szrj 	      if (repr->grp_not_necessarilly_dereferenced
482338fd1498Szrj 		  || repr->grp_maybe_modified)
482438fd1498Szrj 		representatives[i] = NULL;
482538fd1498Szrj 	      else
482638fd1498Szrj 		{
482738fd1498Szrj 		  proceed = true;
482838fd1498Szrj 		  sra_stats.scalar_by_ref_to_by_val++;
482938fd1498Szrj 		}
483038fd1498Szrj 	    }
483138fd1498Szrj 	  else
483238fd1498Szrj 	    {
483338fd1498Szrj 	      int new_components = decide_one_param_reduction (repr);
483438fd1498Szrj 
483538fd1498Szrj 	      if (new_components == 0)
483638fd1498Szrj 		{
483738fd1498Szrj 		  representatives[i] = NULL;
483838fd1498Szrj 		  adjustments_count++;
483938fd1498Szrj 		}
484038fd1498Szrj 	      else
484138fd1498Szrj 		{
484238fd1498Szrj 		  adjustments_count += new_components;
484338fd1498Szrj 		  sra_stats.aggregate_params_reduced++;
484438fd1498Szrj 		  sra_stats.param_reductions_created += new_components;
484538fd1498Szrj 		  proceed = true;
484638fd1498Szrj 		}
484738fd1498Szrj 	    }
484838fd1498Szrj 	}
484938fd1498Szrj       else
485038fd1498Szrj 	{
485138fd1498Szrj 	  if (no_accesses_p (repr))
485238fd1498Szrj 	    {
485338fd1498Szrj 	      proceed = true;
485438fd1498Szrj 	      sra_stats.deleted_unused_parameters++;
485538fd1498Szrj 	    }
485638fd1498Szrj 	  adjustments_count++;
485738fd1498Szrj 	}
485838fd1498Szrj     }
485938fd1498Szrj 
486038fd1498Szrj   if (!proceed && dump_file)
486138fd1498Szrj     fprintf (dump_file, "NOT proceeding to change params.\n");
486238fd1498Szrj 
486338fd1498Szrj   if (proceed)
486438fd1498Szrj     adjustments = turn_representatives_into_adjustments (representatives,
486538fd1498Szrj 							 adjustments_count);
486638fd1498Szrj   else
486738fd1498Szrj     adjustments = ipa_parm_adjustment_vec ();
486838fd1498Szrj 
486938fd1498Szrj   representatives.release ();
487038fd1498Szrj   return adjustments;
487138fd1498Szrj }
487238fd1498Szrj 
487338fd1498Szrj /* If a parameter replacement identified by ADJ does not yet exist in the form
487438fd1498Szrj    of a declaration, create and record it, otherwise return the previously
487538fd1498Szrj    created one.  */
487638fd1498Szrj 
487738fd1498Szrj static tree
487838fd1498Szrj get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
487938fd1498Szrj {
488038fd1498Szrj   tree repl;
488138fd1498Szrj   if (!adj->new_ssa_base)
488238fd1498Szrj     {
488338fd1498Szrj       char *pretty_name = make_fancy_name (adj->base);
488438fd1498Szrj 
488538fd1498Szrj       repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
488638fd1498Szrj       DECL_NAME (repl) = get_identifier (pretty_name);
488738fd1498Szrj       DECL_NAMELESS (repl) = 1;
488838fd1498Szrj       obstack_free (&name_obstack, pretty_name);
488938fd1498Szrj 
489038fd1498Szrj       adj->new_ssa_base = repl;
489138fd1498Szrj     }
489238fd1498Szrj   else
489338fd1498Szrj     repl = adj->new_ssa_base;
489438fd1498Szrj   return repl;
489538fd1498Szrj }
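
/* Sketch of the intended effect (hypothetical example): for a removed
   parameter "foo", the first call for its adjustment creates one temporary
   register whose DECL_NAME is the pretty name built from "foo" but which is
   marked DECL_NAMELESS; later calls simply return that same temporary, so
   all re-mapped SSA names of the parameter share a single base variable.  */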
489638fd1498Szrj 
489738fd1498Szrj /* Find the first adjustment for a particular parameter BASE in a vector of
489838fd1498Szrj    ADJUSTMENTS which is not a plain copy (IPA_PARM_OP_COPY).  Return NULL if
489938fd1498Szrj    there is no such adjustment.  */
490038fd1498Szrj 
490138fd1498Szrj static struct ipa_parm_adjustment *
490238fd1498Szrj get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
490338fd1498Szrj {
490438fd1498Szrj   int i, len;
490538fd1498Szrj 
490638fd1498Szrj   len = adjustments.length ();
490738fd1498Szrj   for (i = 0; i < len; i++)
490838fd1498Szrj     {
490938fd1498Szrj       struct ipa_parm_adjustment *adj;
491038fd1498Szrj 
491138fd1498Szrj       adj = &adjustments[i];
491238fd1498Szrj       if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
491338fd1498Szrj 	return adj;
491438fd1498Szrj     }
491538fd1498Szrj 
491638fd1498Szrj   return NULL;
491738fd1498Szrj }
491838fd1498Szrj 
491938fd1498Szrj /* If OLD_NAME, which is being defined by statement STMT, is an SSA_NAME of a
492038fd1498Szrj    parameter which is to be removed because its value is not used, create a new
492138fd1498Szrj    SSA_NAME relating to a replacement VAR_DECL, replace all uses of the
492238fd1498Szrj    original with it and return it.  If there is no need to re-map, return NULL.
492338fd1498Szrj    ADJUSTMENTS is a pointer to a vector of IPA-SRA adjustments.  */
492438fd1498Szrj 
492538fd1498Szrj static tree
492638fd1498Szrj replace_removed_params_ssa_names (tree old_name, gimple *stmt,
492738fd1498Szrj 				  ipa_parm_adjustment_vec adjustments)
492838fd1498Szrj {
492938fd1498Szrj   struct ipa_parm_adjustment *adj;
493038fd1498Szrj   tree decl, repl, new_name;
493138fd1498Szrj 
493238fd1498Szrj   if (TREE_CODE (old_name) != SSA_NAME)
493338fd1498Szrj     return NULL;
493438fd1498Szrj 
493538fd1498Szrj   decl = SSA_NAME_VAR (old_name);
493638fd1498Szrj   if (decl == NULL_TREE
493738fd1498Szrj       || TREE_CODE (decl) != PARM_DECL)
493838fd1498Szrj     return NULL;
493938fd1498Szrj 
494038fd1498Szrj   adj = get_adjustment_for_base (adjustments, decl);
494138fd1498Szrj   if (!adj)
494238fd1498Szrj     return NULL;
494338fd1498Szrj 
494438fd1498Szrj   repl = get_replaced_param_substitute (adj);
494538fd1498Szrj   new_name = make_ssa_name (repl, stmt);
494638fd1498Szrj   SSA_NAME_OCCURS_IN_ABNORMAL_PHI (new_name)
494738fd1498Szrj     = SSA_NAME_OCCURS_IN_ABNORMAL_PHI (old_name);
494838fd1498Szrj 
494938fd1498Szrj   if (dump_file)
495038fd1498Szrj     {
495138fd1498Szrj       fprintf (dump_file, "replacing an SSA name of a removed param ");
495238fd1498Szrj       print_generic_expr (dump_file, old_name);
495338fd1498Szrj       fprintf (dump_file, " with ");
495438fd1498Szrj       print_generic_expr (dump_file, new_name);
495538fd1498Szrj       fprintf (dump_file, "\n");
495638fd1498Szrj     }
495738fd1498Szrj 
495838fd1498Szrj   replace_uses_by (old_name, new_name);
495938fd1498Szrj   return new_name;
496038fd1498Szrj }
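
/* Hypothetical GIMPLE sketch: if parameter P is being removed and the IL
   still contains a definition of one of its SSA names, e.g.

     p_5 = PHI <p_1(D), p_8>

   the defined name is re-mapped to a fresh SSA name of the substitute
   register and every use of p_5 is redirected to it, keeping the IL valid
   even though P itself disappears from the signature.  */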
496138fd1498Szrj 
496238fd1498Szrj /* If the statement STMT contains any expressions that need to be replaced, as
496338fd1498Szrj    noted by ADJUSTMENTS, do so.  Handle any potential type incompatibilities
496438fd1498Szrj    (GSI is used to accommodate conversion statements and must point to the
496538fd1498Szrj    statement).  Return true iff the statement was modified.  */
496638fd1498Szrj 
496738fd1498Szrj static bool
496838fd1498Szrj sra_ipa_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
496938fd1498Szrj 		       ipa_parm_adjustment_vec adjustments)
497038fd1498Szrj {
497138fd1498Szrj   tree *lhs_p, *rhs_p;
497238fd1498Szrj   bool any;
497338fd1498Szrj 
497438fd1498Szrj   if (!gimple_assign_single_p (stmt))
497538fd1498Szrj     return false;
497638fd1498Szrj 
497738fd1498Szrj   rhs_p = gimple_assign_rhs1_ptr (stmt);
497838fd1498Szrj   lhs_p = gimple_assign_lhs_ptr (stmt);
497938fd1498Szrj 
498038fd1498Szrj   any = ipa_modify_expr (rhs_p, false, adjustments);
498138fd1498Szrj   any |= ipa_modify_expr (lhs_p, false, adjustments);
498238fd1498Szrj   if (any)
498338fd1498Szrj     {
498438fd1498Szrj       tree new_rhs = NULL_TREE;
498538fd1498Szrj 
498638fd1498Szrj       if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
498738fd1498Szrj 	{
498838fd1498Szrj 	  if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
498938fd1498Szrj 	    {
499038fd1498Szrj 	      /* V_C_Es of constructors can cause trouble (PR 42714).  */
499138fd1498Szrj 	      if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
499238fd1498Szrj 		*rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
499338fd1498Szrj 	      else
499438fd1498Szrj 		*rhs_p = build_constructor (TREE_TYPE (*lhs_p),
499538fd1498Szrj 					    NULL);
499638fd1498Szrj 	    }
499738fd1498Szrj 	  else
499838fd1498Szrj 	    new_rhs = fold_build1_loc (gimple_location (stmt),
499938fd1498Szrj 				       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
500038fd1498Szrj 				       *rhs_p);
500138fd1498Szrj 	}
500238fd1498Szrj       else if (REFERENCE_CLASS_P (*rhs_p)
500338fd1498Szrj 	       && is_gimple_reg_type (TREE_TYPE (*lhs_p))
500438fd1498Szrj 	       && !is_gimple_reg (*lhs_p))
500538fd1498Szrj 	/* This can happen when an assignment between two single-field
500638fd1498Szrj 	   structures is turned into an assignment between two pointers to
500738fd1498Szrj 	   scalars (PR 42237).  */
500838fd1498Szrj 	new_rhs = *rhs_p;
500938fd1498Szrj 
501038fd1498Szrj       if (new_rhs)
501138fd1498Szrj 	{
501238fd1498Szrj 	  tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
501338fd1498Szrj 					       true, GSI_SAME_STMT);
501438fd1498Szrj 
501538fd1498Szrj 	  gimple_assign_set_rhs_from_tree (gsi, tmp);
501638fd1498Szrj 	}
501738fd1498Szrj 
501838fd1498Szrj       return true;
501938fd1498Szrj     }
502038fd1498Szrj 
502138fd1498Szrj   return false;
502238fd1498Szrj }
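
/* Rough example of the fix-ups above (hypothetical types): if an assignment
   between two one-field structures

     *dst = *src;   // both of type struct { float f; }

   has had one side rewritten to a plain "float", the mismatch is bridged
   with a VIEW_CONVERT_EXPR forced into its own statement, while a
   mismatched CONSTRUCTOR on the right-hand side is simply replaced by a
   zero constant or an empty constructor of the left-hand type.  */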
502338fd1498Szrj 
502438fd1498Szrj /* Traverse the function body and perform all modifications as described in
502538fd1498Szrj    ADJUSTMENTS.  Return true iff the CFG has been changed.  */
502638fd1498Szrj 
502738fd1498Szrj bool
502838fd1498Szrj ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
502938fd1498Szrj {
503038fd1498Szrj   bool cfg_changed = false;
503138fd1498Szrj   basic_block bb;
503238fd1498Szrj 
503338fd1498Szrj   FOR_EACH_BB_FN (bb, cfun)
503438fd1498Szrj     {
503538fd1498Szrj       gimple_stmt_iterator gsi;
503638fd1498Szrj 
503738fd1498Szrj       for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
503838fd1498Szrj 	{
503938fd1498Szrj 	  gphi *phi = as_a <gphi *> (gsi_stmt (gsi));
504038fd1498Szrj 	  tree new_lhs, old_lhs = gimple_phi_result (phi);
504138fd1498Szrj 	  new_lhs = replace_removed_params_ssa_names (old_lhs, phi, adjustments);
504238fd1498Szrj 	  if (new_lhs)
504338fd1498Szrj 	    {
504438fd1498Szrj 	      gimple_phi_set_result (phi, new_lhs);
504538fd1498Szrj 	      release_ssa_name (old_lhs);
504638fd1498Szrj 	    }
504738fd1498Szrj 	}
504838fd1498Szrj 
504938fd1498Szrj       gsi = gsi_start_bb (bb);
505038fd1498Szrj       while (!gsi_end_p (gsi))
505138fd1498Szrj 	{
505238fd1498Szrj 	  gimple *stmt = gsi_stmt (gsi);
505338fd1498Szrj 	  bool modified = false;
505438fd1498Szrj 	  tree *t;
505538fd1498Szrj 	  unsigned i;
505638fd1498Szrj 
505738fd1498Szrj 	  switch (gimple_code (stmt))
505838fd1498Szrj 	    {
505938fd1498Szrj 	    case GIMPLE_RETURN:
506038fd1498Szrj 	      t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
506138fd1498Szrj 	      if (*t != NULL_TREE)
506238fd1498Szrj 		modified |= ipa_modify_expr (t, true, adjustments);
506338fd1498Szrj 	      break;
506438fd1498Szrj 
506538fd1498Szrj 	    case GIMPLE_ASSIGN:
506638fd1498Szrj 	      modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
506738fd1498Szrj 	      break;
506838fd1498Szrj 
506938fd1498Szrj 	    case GIMPLE_CALL:
507038fd1498Szrj 	      /* Operands must be processed before the lhs.  */
507138fd1498Szrj 	      for (i = 0; i < gimple_call_num_args (stmt); i++)
507238fd1498Szrj 		{
507338fd1498Szrj 		  t = gimple_call_arg_ptr (stmt, i);
507438fd1498Szrj 		  modified |= ipa_modify_expr (t, true, adjustments);
507538fd1498Szrj 		}
507638fd1498Szrj 
507738fd1498Szrj 	      if (gimple_call_lhs (stmt))
507838fd1498Szrj 		{
507938fd1498Szrj 		  t = gimple_call_lhs_ptr (stmt);
508038fd1498Szrj 		  modified |= ipa_modify_expr (t, false, adjustments);
508138fd1498Szrj 		}
508238fd1498Szrj 	      break;
508338fd1498Szrj 
508438fd1498Szrj 	    case GIMPLE_ASM:
508538fd1498Szrj 	      {
508638fd1498Szrj 		gasm *asm_stmt = as_a <gasm *> (stmt);
508738fd1498Szrj 		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
508838fd1498Szrj 		  {
508938fd1498Szrj 		    t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
509038fd1498Szrj 		    modified |= ipa_modify_expr (t, true, adjustments);
509138fd1498Szrj 		  }
509238fd1498Szrj 		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
509338fd1498Szrj 		  {
509438fd1498Szrj 		    t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
509538fd1498Szrj 		    modified |= ipa_modify_expr (t, false, adjustments);
509638fd1498Szrj 		  }
509738fd1498Szrj 	      }
509838fd1498Szrj 	      break;
509938fd1498Szrj 
510038fd1498Szrj 	    default:
510138fd1498Szrj 	      break;
510238fd1498Szrj 	    }
510338fd1498Szrj 
510438fd1498Szrj 	  def_operand_p defp;
510538fd1498Szrj 	  ssa_op_iter iter;
510638fd1498Szrj 	  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
510738fd1498Szrj 	    {
510838fd1498Szrj 	      tree old_def = DEF_FROM_PTR (defp);
510938fd1498Szrj 	      if (tree new_def = replace_removed_params_ssa_names (old_def, stmt,
511038fd1498Szrj 								   adjustments))
511138fd1498Szrj 		{
511238fd1498Szrj 		  SET_DEF (defp, new_def);
511338fd1498Szrj 		  release_ssa_name (old_def);
511438fd1498Szrj 		  modified = true;
511538fd1498Szrj 		}
511638fd1498Szrj 	    }
511738fd1498Szrj 
511838fd1498Szrj 	  if (modified)
511938fd1498Szrj 	    {
512038fd1498Szrj 	      update_stmt (stmt);
512138fd1498Szrj 	      if (maybe_clean_eh_stmt (stmt)
512238fd1498Szrj 		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
512338fd1498Szrj 		cfg_changed = true;
512438fd1498Szrj 	    }
512538fd1498Szrj 	  gsi_next (&gsi);
512638fd1498Szrj 	}
512738fd1498Szrj     }
512838fd1498Szrj 
512938fd1498Szrj   return cfg_changed;
513038fd1498Szrj }
513138fd1498Szrj 
513238fd1498Szrj /* Call gimple_debug_bind_reset_value on all debug statements describing
513338fd1498Szrj    gimple register parameters that are being removed or replaced.  */
513438fd1498Szrj 
513538fd1498Szrj static void
513638fd1498Szrj sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
513738fd1498Szrj {
513838fd1498Szrj   int i, len;
513938fd1498Szrj   gimple_stmt_iterator *gsip = NULL, gsi;
514038fd1498Szrj 
514138fd1498Szrj   if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
514238fd1498Szrj     {
514338fd1498Szrj       gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
514438fd1498Szrj       gsip = &gsi;
514538fd1498Szrj     }
514638fd1498Szrj   len = adjustments.length ();
514738fd1498Szrj   for (i = 0; i < len; i++)
514838fd1498Szrj     {
514938fd1498Szrj       struct ipa_parm_adjustment *adj;
515038fd1498Szrj       imm_use_iterator ui;
515138fd1498Szrj       gimple *stmt;
515238fd1498Szrj       gdebug *def_temp;
515338fd1498Szrj       tree name, vexpr, copy = NULL_TREE;
515438fd1498Szrj       use_operand_p use_p;
515538fd1498Szrj 
515638fd1498Szrj       adj = &adjustments[i];
515738fd1498Szrj       if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
515838fd1498Szrj 	continue;
515938fd1498Szrj       name = ssa_default_def (cfun, adj->base);
516038fd1498Szrj       vexpr = NULL;
516138fd1498Szrj       if (name)
516238fd1498Szrj 	FOR_EACH_IMM_USE_STMT (stmt, ui, name)
516338fd1498Szrj 	  {
516438fd1498Szrj 	    if (gimple_clobber_p (stmt))
516538fd1498Szrj 	      {
516638fd1498Szrj 		gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
516738fd1498Szrj 		unlink_stmt_vdef (stmt);
516838fd1498Szrj 		gsi_remove (&cgsi, true);
516938fd1498Szrj 		release_defs (stmt);
517038fd1498Szrj 		continue;
517138fd1498Szrj 	      }
517238fd1498Szrj 	    /* All other users must have been removed by
517338fd1498Szrj 	       ipa_sra_modify_function_body.  */
517438fd1498Szrj 	    gcc_assert (is_gimple_debug (stmt));
517538fd1498Szrj 	    if (vexpr == NULL && gsip != NULL)
517638fd1498Szrj 	      {
517738fd1498Szrj 		gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
517838fd1498Szrj 		vexpr = make_node (DEBUG_EXPR_DECL);
517938fd1498Szrj 		def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
518038fd1498Szrj 							   NULL);
518138fd1498Szrj 		DECL_ARTIFICIAL (vexpr) = 1;
518238fd1498Szrj 		TREE_TYPE (vexpr) = TREE_TYPE (name);
518338fd1498Szrj 		SET_DECL_MODE (vexpr, DECL_MODE (adj->base));
518438fd1498Szrj 		gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
518538fd1498Szrj 	      }
518638fd1498Szrj 	    if (vexpr)
518738fd1498Szrj 	      {
518838fd1498Szrj 		FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
518938fd1498Szrj 		  SET_USE (use_p, vexpr);
519038fd1498Szrj 	      }
519138fd1498Szrj 	    else
519238fd1498Szrj 	      gimple_debug_bind_reset_value (stmt);
519338fd1498Szrj 	    update_stmt (stmt);
519438fd1498Szrj 	  }
519538fd1498Szrj       /* Create a VAR_DECL for debug info purposes.  */
519638fd1498Szrj       if (!DECL_IGNORED_P (adj->base))
519738fd1498Szrj 	{
519838fd1498Szrj 	  copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
519938fd1498Szrj 			     VAR_DECL, DECL_NAME (adj->base),
520038fd1498Szrj 			     TREE_TYPE (adj->base));
520138fd1498Szrj 	  if (DECL_PT_UID_SET_P (adj->base))
520238fd1498Szrj 	    SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
520338fd1498Szrj 	  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
520438fd1498Szrj 	  TREE_READONLY (copy) = TREE_READONLY (adj->base);
520538fd1498Szrj 	  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
520638fd1498Szrj 	  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
520738fd1498Szrj 	  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
520838fd1498Szrj 	  DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
520938fd1498Szrj 	  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
521038fd1498Szrj 	  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
521138fd1498Szrj 	  SET_DECL_RTL (copy, 0);
521238fd1498Szrj 	  TREE_USED (copy) = 1;
521338fd1498Szrj 	  DECL_CONTEXT (copy) = current_function_decl;
521438fd1498Szrj 	  add_local_decl (cfun, copy);
521538fd1498Szrj 	  DECL_CHAIN (copy) =
521638fd1498Szrj 	    BLOCK_VARS (DECL_INITIAL (current_function_decl));
521738fd1498Szrj 	  BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
521838fd1498Szrj 	}
521938fd1498Szrj       if (gsip != NULL && copy && target_for_debug_bind (adj->base))
522038fd1498Szrj 	{
522138fd1498Szrj 	  gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
522238fd1498Szrj 	  if (vexpr)
522338fd1498Szrj 	    def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
522438fd1498Szrj 	  else
522538fd1498Szrj 	    def_temp = gimple_build_debug_source_bind (copy, adj->base,
522638fd1498Szrj 						       NULL);
522738fd1498Szrj 	  gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
522838fd1498Szrj 	}
522938fd1498Szrj     }
523038fd1498Szrj }
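
/* Debug-info sketch (hypothetical dump): for a removed register parameter
   "count", the code above may end up emitting something like

     # DEBUG D#1 s=> count
     # DEBUG count => D#1

   near the start of the function, so a debugger can still evaluate "count"
   even though the formal parameter no longer exists in the clone.  */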
523138fd1498Szrj 
523238fd1498Szrj /* Return false if all callers have at least as many actual arguments as there
523338fd1498Szrj    are formal parameters in the current function and their types match;
523438fd1498Szrj    return true otherwise.  */
523538fd1498Szrj 
523638fd1498Szrj static bool
523738fd1498Szrj some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
523838fd1498Szrj 					  void *data ATTRIBUTE_UNUSED)
523938fd1498Szrj {
524038fd1498Szrj   struct cgraph_edge *cs;
524138fd1498Szrj   for (cs = node->callers; cs; cs = cs->next_caller)
524238fd1498Szrj     if (!cs->call_stmt || !callsite_arguments_match_p (cs->call_stmt))
524338fd1498Szrj       return true;
524438fd1498Szrj 
524538fd1498Szrj   return false;
524638fd1498Szrj }
524738fd1498Szrj 
524838fd1498Szrj /* Return false if all callers have a VUSE attached to their call statements.  */
524938fd1498Szrj 
525038fd1498Szrj static bool
525138fd1498Szrj some_callers_have_no_vuse_p (struct cgraph_node *node,
525238fd1498Szrj 			     void *data ATTRIBUTE_UNUSED)
525338fd1498Szrj {
525438fd1498Szrj   struct cgraph_edge *cs;
525538fd1498Szrj   for (cs = node->callers; cs; cs = cs->next_caller)
525638fd1498Szrj     if (!cs->call_stmt || !gimple_vuse (cs->call_stmt))
525738fd1498Szrj       return true;
525838fd1498Szrj 
525938fd1498Szrj   return false;
526038fd1498Szrj }
526138fd1498Szrj 
526238fd1498Szrj /* Convert all callers of NODE; DATA points to the adjustment vector to use.  */
526338fd1498Szrj 
526438fd1498Szrj static bool
526538fd1498Szrj convert_callers_for_node (struct cgraph_node *node,
526638fd1498Szrj 		          void *data)
526738fd1498Szrj {
526838fd1498Szrj   ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
526938fd1498Szrj   bitmap recomputed_callers = BITMAP_ALLOC (NULL);
527038fd1498Szrj   struct cgraph_edge *cs;
527138fd1498Szrj 
527238fd1498Szrj   for (cs = node->callers; cs; cs = cs->next_caller)
527338fd1498Szrj     {
527438fd1498Szrj       push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));
527538fd1498Szrj 
527638fd1498Szrj       if (dump_file)
527738fd1498Szrj 	fprintf (dump_file, "Adjusting call %s -> %s\n",
527838fd1498Szrj 		 cs->caller->dump_name (), cs->callee->dump_name ());
527938fd1498Szrj 
528038fd1498Szrj       ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);
528138fd1498Szrj 
528238fd1498Szrj       pop_cfun ();
528338fd1498Szrj     }
528438fd1498Szrj 
528538fd1498Szrj   for (cs = node->callers; cs; cs = cs->next_caller)
528638fd1498Szrj     if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
528738fd1498Szrj 	&& gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
528838fd1498Szrj       compute_fn_summary (cs->caller, true);
528938fd1498Szrj   BITMAP_FREE (recomputed_callers);
529038fd1498Szrj 
529138fd1498Szrj   return true;
529238fd1498Szrj }
529338fd1498Szrj 
529438fd1498Szrj /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */
529538fd1498Szrj 
529638fd1498Szrj static void
529738fd1498Szrj convert_callers (struct cgraph_node *node, tree old_decl,
529838fd1498Szrj 		 ipa_parm_adjustment_vec adjustments)
529938fd1498Szrj {
530038fd1498Szrj   basic_block this_block;
530138fd1498Szrj 
530238fd1498Szrj   node->call_for_symbol_and_aliases (convert_callers_for_node,
530338fd1498Szrj 				     &adjustments, false);
530438fd1498Szrj 
530538fd1498Szrj   if (!encountered_recursive_call)
530638fd1498Szrj     return;
530738fd1498Szrj 
530838fd1498Szrj   FOR_EACH_BB_FN (this_block, cfun)
530938fd1498Szrj     {
531038fd1498Szrj       gimple_stmt_iterator gsi;
531138fd1498Szrj 
531238fd1498Szrj       for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
531338fd1498Szrj         {
531438fd1498Szrj 	  gcall *stmt;
531538fd1498Szrj 	  tree call_fndecl;
531638fd1498Szrj 	  stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
531738fd1498Szrj 	  if (!stmt)
531838fd1498Szrj 	    continue;
531938fd1498Szrj 	  call_fndecl = gimple_call_fndecl (stmt);
532038fd1498Szrj 	  if (call_fndecl == old_decl)
532138fd1498Szrj 	    {
532238fd1498Szrj 	      if (dump_file)
532338fd1498Szrj 		fprintf (dump_file, "Adjusting recursive call");
532438fd1498Szrj 	      gimple_call_set_fndecl (stmt, node->decl);
532538fd1498Szrj 	      ipa_modify_call_arguments (NULL, stmt, adjustments);
532638fd1498Szrj 	    }
532738fd1498Szrj 	}
532838fd1498Szrj     }
532938fd1498Szrj 
533038fd1498Szrj   return;
533138fd1498Szrj }
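
/* Recursion sketch: after the regular callers have been rewritten, the loop
   above scans the current function body itself and, for a hypothetical
   self-call such as

     foo (x, p);

   whose callee is still OLD_DECL, switches the callee to NODE->decl and
   adjusts its argument list in place.  */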
533238fd1498Szrj 
533338fd1498Szrj /* Perform all the modifications required in IPA-SRA for NODE to have its
533438fd1498Szrj    parameters as given in ADJUSTMENTS.  Return true iff the CFG has been changed.  */
533538fd1498Szrj 
533638fd1498Szrj static bool
533738fd1498Szrj modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
533838fd1498Szrj {
533938fd1498Szrj   struct cgraph_node *new_node;
534038fd1498Szrj   bool cfg_changed;
534138fd1498Szrj 
534238fd1498Szrj   cgraph_edge::rebuild_edges ();
534338fd1498Szrj   free_dominance_info (CDI_DOMINATORS);
534438fd1498Szrj   pop_cfun ();
534538fd1498Szrj 
534638fd1498Szrj   /* This must be done after rebuilding cgraph edges for node above.
534738fd1498Szrj      Otherwise any recursive calls to node that are recorded in
534838fd1498Szrj      redirect_callers will be corrupted.  */
534938fd1498Szrj   vec<cgraph_edge *> redirect_callers = node->collect_callers ();
535038fd1498Szrj   new_node = node->create_version_clone_with_body (redirect_callers, NULL,
535138fd1498Szrj 						   NULL, false, NULL, NULL,
535238fd1498Szrj 						   "isra");
535338fd1498Szrj   redirect_callers.release ();
535438fd1498Szrj 
535538fd1498Szrj   push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
535638fd1498Szrj   ipa_modify_formal_parameters (current_function_decl, adjustments);
535738fd1498Szrj   cfg_changed = ipa_sra_modify_function_body (adjustments);
535838fd1498Szrj   sra_ipa_reset_debug_stmts (adjustments);
535938fd1498Szrj   convert_callers (new_node, node->decl, adjustments);
536038fd1498Szrj   new_node->make_local ();
536138fd1498Szrj   return cfg_changed;
536238fd1498Szrj }
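
/* Overall shape of the transformation (clone name illustrative): the
   original node is kept, a clone such as "foo.isra.0" is materialized with
   the reduced parameter list, its body is rewritten and its debug
   statements reset, all recorded callers are redirected to it, and the
   clone is finally made local to the unit.  */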
536338fd1498Szrj 
536438fd1498Szrj /* Means of communication between ipa_sra_check_caller and
536538fd1498Szrj    ipa_sra_preliminary_function_checks.  */
536638fd1498Szrj 
536738fd1498Szrj struct ipa_sra_check_caller_data
536838fd1498Szrj {
536938fd1498Szrj   bool has_callers;
537038fd1498Szrj   bool bad_arg_alignment;
537138fd1498Szrj   bool has_thunk;
537238fd1498Szrj };
537338fd1498Szrj 
537438fd1498Szrj /* If NODE has a caller, mark that fact in DATA, which is a pointer to
537538fd1498Szrj    ipa_sra_check_caller_data.  Also check whether all aggregate arguments in
537638fd1498Szrj    all known calls are unit aligned and, if not, set the appropriate flag in
537738fd1498Szrj    DATA too.  */
537838fd1498Szrj 
537938fd1498Szrj static bool
538038fd1498Szrj ipa_sra_check_caller (struct cgraph_node *node, void *data)
538138fd1498Szrj {
538238fd1498Szrj   if (!node->callers)
538338fd1498Szrj     return false;
538438fd1498Szrj 
538538fd1498Szrj   struct ipa_sra_check_caller_data *iscc;
538638fd1498Szrj   iscc = (struct ipa_sra_check_caller_data *) data;
538738fd1498Szrj   iscc->has_callers = true;
538838fd1498Szrj 
538938fd1498Szrj   for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
539038fd1498Szrj     {
539138fd1498Szrj       if (cs->caller->thunk.thunk_p)
539238fd1498Szrj 	{
539338fd1498Szrj 	  iscc->has_thunk = true;
539438fd1498Szrj 	  return true;
539538fd1498Szrj 	}
539638fd1498Szrj       gimple *call_stmt = cs->call_stmt;
539738fd1498Szrj       unsigned count = gimple_call_num_args (call_stmt);
539838fd1498Szrj       for (unsigned i = 0; i < count; i++)
539938fd1498Szrj 	{
540038fd1498Szrj 	  tree arg = gimple_call_arg (call_stmt, i);
540138fd1498Szrj 	  if (is_gimple_reg (arg))
540238fd1498Szrj 	      continue;
540338fd1498Szrj 
540438fd1498Szrj 	  tree offset;
540538fd1498Szrj 	  poly_int64 bitsize, bitpos;
540638fd1498Szrj 	  machine_mode mode;
540738fd1498Szrj 	  int unsignedp, reversep, volatilep = 0;
540838fd1498Szrj 	  get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
540938fd1498Szrj 			       &unsignedp, &reversep, &volatilep);
541038fd1498Szrj 	  if (!multiple_p (bitpos, BITS_PER_UNIT))
541138fd1498Szrj 	    {
541238fd1498Szrj 	      iscc->bad_arg_alignment = true;
541338fd1498Szrj 	      return true;
541438fd1498Szrj 	    }
541538fd1498Szrj 	}
541638fd1498Szrj     }
541738fd1498Szrj 
541838fd1498Szrj   return false;
541938fd1498Szrj }
542038fd1498Szrj 
542138fd1498Szrj /* Return false if the function is apparently unsuitable for IPA-SRA based on
542238fd1498Szrj    its attributes, return true otherwise.  NODE is the cgraph node of the
542338fd1498Szrj    current function.  */
542438fd1498Szrj 
542538fd1498Szrj static bool
542638fd1498Szrj ipa_sra_preliminary_function_checks (struct cgraph_node *node)
542738fd1498Szrj {
542838fd1498Szrj   if (!node->can_be_local_p ())
542938fd1498Szrj     {
543038fd1498Szrj       if (dump_file)
543138fd1498Szrj 	fprintf (dump_file, "Function not local to this compilation unit.\n");
543238fd1498Szrj       return false;
543338fd1498Szrj     }
543438fd1498Szrj 
543538fd1498Szrj   if (!node->local.can_change_signature)
543638fd1498Szrj     {
543738fd1498Szrj       if (dump_file)
543838fd1498Szrj 	fprintf (dump_file, "Function can not change signature.\n");
543938fd1498Szrj       return false;
544038fd1498Szrj     }
544138fd1498Szrj 
544238fd1498Szrj   if (!tree_versionable_function_p (node->decl))
544338fd1498Szrj     {
544438fd1498Szrj       if (dump_file)
544538fd1498Szrj 	fprintf (dump_file, "Function is not versionable.\n");
544638fd1498Szrj       return false;
544738fd1498Szrj     }
544838fd1498Szrj 
544938fd1498Szrj   if (!opt_for_fn (node->decl, optimize)
545038fd1498Szrj       || !opt_for_fn (node->decl, flag_ipa_sra))
545138fd1498Szrj     {
545238fd1498Szrj       if (dump_file)
545338fd1498Szrj 	fprintf (dump_file, "Function not optimized.\n");
545438fd1498Szrj       return false;
545538fd1498Szrj     }
545638fd1498Szrj 
545738fd1498Szrj   if (DECL_VIRTUAL_P (current_function_decl))
545838fd1498Szrj     {
545938fd1498Szrj       if (dump_file)
546038fd1498Szrj 	fprintf (dump_file, "Function is a virtual method.\n");
546138fd1498Szrj       return false;
546238fd1498Szrj     }
546338fd1498Szrj 
546438fd1498Szrj   if ((DECL_ONE_ONLY (node->decl) || DECL_EXTERNAL (node->decl))
546538fd1498Szrj       && ipa_fn_summaries->get (node)->size >= MAX_INLINE_INSNS_AUTO)
546638fd1498Szrj     {
546738fd1498Szrj       if (dump_file)
546838fd1498Szrj 	fprintf (dump_file, "Function too big to be made truly local.\n");
546938fd1498Szrj       return false;
547038fd1498Szrj     }
547138fd1498Szrj 
547238fd1498Szrj   if (cfun->stdarg)
547338fd1498Szrj     {
547438fd1498Szrj       if (dump_file)
547538fd1498Szrj 	fprintf (dump_file, "Function uses stdarg.\n");
547638fd1498Szrj       return false;
547738fd1498Szrj     }
547838fd1498Szrj 
547938fd1498Szrj   if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
548038fd1498Szrj     return false;
548138fd1498Szrj 
548238fd1498Szrj   if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
548338fd1498Szrj     {
548438fd1498Szrj       if (dump_file)
548538fd1498Szrj 	fprintf (dump_file, "Always-inline function will be inlined "
548638fd1498Szrj 		 "anyway.\n");
548738fd1498Szrj       return false;
548838fd1498Szrj     }
548938fd1498Szrj 
549038fd1498Szrj   struct ipa_sra_check_caller_data iscc;
549138fd1498Szrj   memset (&iscc, 0, sizeof(iscc));
549238fd1498Szrj   node->call_for_symbol_and_aliases (ipa_sra_check_caller, &iscc, true);
549338fd1498Szrj   if (!iscc.has_callers)
549438fd1498Szrj     {
549538fd1498Szrj       if (dump_file)
549638fd1498Szrj 	fprintf (dump_file,
549738fd1498Szrj 		 "Function has no callers in this compilation unit.\n");
549838fd1498Szrj       return false;
549938fd1498Szrj     }
550038fd1498Szrj 
550138fd1498Szrj   if (iscc.bad_arg_alignment)
550238fd1498Szrj     {
550338fd1498Szrj       if (dump_file)
550438fd1498Szrj 	fprintf (dump_file,
550538fd1498Szrj 		 "A function call has an argument with non-unit alignment.\n");
550638fd1498Szrj       return false;
550738fd1498Szrj     }
550838fd1498Szrj 
550938fd1498Szrj   if (iscc.has_thunk)
551038fd1498Szrj     {
551138fd1498Szrj       if (dump_file)
551238fd1498Szrj 	fprintf (dump_file,
551338fd1498Szrj 		 "A caller is a thunk.\n");
551438fd1498Szrj       return false;
551538fd1498Szrj     }
551638fd1498Szrj 
551738fd1498Szrj   return true;
551838fd1498Szrj }
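
/* Informal example: a function such as

     static int sum (const struct pair *p) { return p->a + p->b; }

   that is called only from within this translation unit will typically pass
   all of the checks above, whereas variadic, virtual or always_inline
   functions, or ones whose callers include thunks or pass misaligned
   aggregate arguments, are rejected with one of the dump messages here.  */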
551938fd1498Szrj 
552038fd1498Szrj /* Perform early interprocedural SRA.  */
552138fd1498Szrj 
552238fd1498Szrj static unsigned int
552338fd1498Szrj ipa_early_sra (void)
552438fd1498Szrj {
552538fd1498Szrj   struct cgraph_node *node = cgraph_node::get (current_function_decl);
552638fd1498Szrj   ipa_parm_adjustment_vec adjustments;
552738fd1498Szrj   int ret = 0;
552838fd1498Szrj 
552938fd1498Szrj   if (!ipa_sra_preliminary_function_checks (node))
553038fd1498Szrj     return 0;
553138fd1498Szrj 
553238fd1498Szrj   sra_initialize ();
553338fd1498Szrj   sra_mode = SRA_MODE_EARLY_IPA;
553438fd1498Szrj 
553538fd1498Szrj   if (!find_param_candidates ())
553638fd1498Szrj     {
553738fd1498Szrj       if (dump_file)
553838fd1498Szrj 	fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
553938fd1498Szrj       goto simple_out;
554038fd1498Szrj     }
554138fd1498Szrj 
554238fd1498Szrj   if (node->call_for_symbol_and_aliases
554338fd1498Szrj        (some_callers_have_mismatched_arguments_p, NULL, true))
554438fd1498Szrj     {
554538fd1498Szrj       if (dump_file)
554638fd1498Szrj 	fprintf (dump_file, "There are callers with insufficient number of "
554738fd1498Szrj 		 "arguments or arguments with type mismatches.\n");
554838fd1498Szrj       goto simple_out;
554938fd1498Szrj     }
555038fd1498Szrj 
555138fd1498Szrj   if (node->call_for_symbol_and_aliases
555238fd1498Szrj        (some_callers_have_no_vuse_p, NULL, true))
555338fd1498Szrj     {
555438fd1498Szrj       if (dump_file)
555538fd1498Szrj 	fprintf (dump_file, "There are callers with no VUSE attached "
555638fd1498Szrj 		 "to a call stmt.\n");
555738fd1498Szrj       goto simple_out;
555838fd1498Szrj     }
555938fd1498Szrj 
556038fd1498Szrj   bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
556138fd1498Szrj 				 func_param_count
556238fd1498Szrj 				 * last_basic_block_for_fn (cfun));
556338fd1498Szrj   final_bbs = BITMAP_ALLOC (NULL);
556438fd1498Szrj 
556538fd1498Szrj   scan_function ();
556638fd1498Szrj   if (encountered_apply_args)
556738fd1498Szrj     {
556838fd1498Szrj       if (dump_file)
556938fd1498Szrj 	fprintf (dump_file, "Function calls __builtin_apply_args().\n");
557038fd1498Szrj       goto out;
557138fd1498Szrj     }
557238fd1498Szrj 
557338fd1498Szrj   if (encountered_unchangable_recursive_call)
557438fd1498Szrj     {
557538fd1498Szrj       if (dump_file)
557638fd1498Szrj 	fprintf (dump_file, "Function calls itself with insufficient "
557738fd1498Szrj 		 "number of arguments.\n");
557838fd1498Szrj       goto out;
557938fd1498Szrj     }
558038fd1498Szrj 
558138fd1498Szrj   adjustments = analyze_all_param_acesses ();
558238fd1498Szrj   if (!adjustments.exists ())
558338fd1498Szrj     goto out;
558438fd1498Szrj   if (dump_file)
558538fd1498Szrj     ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);
558638fd1498Szrj 
558738fd1498Szrj   if (modify_function (node, adjustments))
558838fd1498Szrj     ret = TODO_update_ssa | TODO_cleanup_cfg;
558938fd1498Szrj   else
559038fd1498Szrj     ret = TODO_update_ssa;
559138fd1498Szrj   adjustments.release ();
559238fd1498Szrj 
559338fd1498Szrj   statistics_counter_event (cfun, "Unused parameters deleted",
559438fd1498Szrj 			    sra_stats.deleted_unused_parameters);
559538fd1498Szrj   statistics_counter_event (cfun, "Scalar parameters converted to by-value",
559638fd1498Szrj 			    sra_stats.scalar_by_ref_to_by_val);
559738fd1498Szrj   statistics_counter_event (cfun, "Aggregate parameters broken up",
559838fd1498Szrj 			    sra_stats.aggregate_params_reduced);
559938fd1498Szrj   statistics_counter_event (cfun, "Aggregate parameter components created",
560038fd1498Szrj 			    sra_stats.param_reductions_created);
560138fd1498Szrj 
560238fd1498Szrj  out:
560338fd1498Szrj   BITMAP_FREE (final_bbs);
560438fd1498Szrj   free (bb_dereferences);
560538fd1498Szrj  simple_out:
560638fd1498Szrj   sra_deinitialize ();
560738fd1498Szrj   return ret;
560838fd1498Szrj }
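
/* End-to-end sketch on a hypothetical candidate:

     static int sum (const struct pair *p) { return p->a + p->b; }

   assuming every caller is local and passes a pointer that is always
   dereferenced and never written through, the pass would typically produce
   a clone taking the two fields directly, roughly

     static int sum.isra.0 (int a, int b) { return a + b; }

   and rewrite the callers to load p->a and p->b and pass them by value,
   which is what the statistics counters above record.  */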
560938fd1498Szrj 
561038fd1498Szrj namespace {
561138fd1498Szrj 
561238fd1498Szrj const pass_data pass_data_early_ipa_sra =
561338fd1498Szrj {
561438fd1498Szrj   GIMPLE_PASS, /* type */
561538fd1498Szrj   "eipa_sra", /* name */
561638fd1498Szrj   OPTGROUP_NONE, /* optinfo_flags */
561738fd1498Szrj   TV_IPA_SRA, /* tv_id */
561838fd1498Szrj   0, /* properties_required */
561938fd1498Szrj   0, /* properties_provided */
562038fd1498Szrj   0, /* properties_destroyed */
562138fd1498Szrj   0, /* todo_flags_start */
562238fd1498Szrj   TODO_dump_symtab, /* todo_flags_finish */
562338fd1498Szrj };
562438fd1498Szrj 
562538fd1498Szrj class pass_early_ipa_sra : public gimple_opt_pass
562638fd1498Szrj {
562738fd1498Szrj public:
562838fd1498Szrj   pass_early_ipa_sra (gcc::context *ctxt)
562938fd1498Szrj     : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
563038fd1498Szrj   {}
563138fd1498Szrj 
563238fd1498Szrj   /* opt_pass methods: */
563338fd1498Szrj   virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
563438fd1498Szrj   virtual unsigned int execute (function *) { return ipa_early_sra (); }
563538fd1498Szrj 
563638fd1498Szrj }; // class pass_early_ipa_sra
563738fd1498Szrj 
563838fd1498Szrj } // anon namespace
563938fd1498Szrj 
564038fd1498Szrj gimple_opt_pass *
564138fd1498Szrj make_pass_early_ipa_sra (gcc::context *ctxt)
564238fd1498Szrj {
564338fd1498Szrj   return new pass_early_ipa_sra (ctxt);
564438fd1498Szrj }