/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
   that directly map to addressing modes of the target.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "memmodel.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "recog.h"
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "tree-ssa-loop-ivopts.h"
#include "expr.h"
#include "tree-dfa.h"
#include "dumpfile.h"
#include "tree-affine.h"
#include "gimplify.h"

/* FIXME: We compute address costs using RTL.  */
#include "tree-ssa-address.h"

/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, e.g. local statics
     (3) !binds_local_p, e.g. global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3).  And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause incorrect results from the
   back end, because the expander in expr.c validizes the address.  However,
   it would be nice to improve the handling here in order to produce more
   precise results.  */

/* A "template" for memory address, used to determine whether the address is
   valid for mode.  */

struct GTY (()) mem_addr_template {
  rtx ref;                      /* The template.  */
  rtx * GTY ((skip)) step_p;    /* The point in template where the step should be
                                   filled in.  */
  rtx * GTY ((skip)) off_p;     /* The point in template where the offset should
                                   be filled in.  */
};


/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  */

static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;

#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
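
/* For instance, an address in the generic address space (AS == 0) that has
   a base and an offset but no symbol, index or step would use template
   index (0 << 5) | (0 << 4) | (1 << 3) | (0 << 2) | (0 << 1) | 1 == 9;
   the point of TEMPL_IDX is simply to pack the address-space number above
   the five presence bits.  */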

/* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
   STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores pointers
   to where step is placed to *STEP_P and offset to *OFFSET_P.  */

static void
gen_addr_rtx (machine_mode address_mode,
              rtx symbol, rtx base, rtx index, rtx step, rtx offset,
              rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  if (index && index != const0_rtx)
    {
      act_elem = index;
      if (step)
        {
          act_elem = gen_rtx_MULT (address_mode, act_elem, step);

          if (step_p)
            *step_p = &XEXP (act_elem, 1);
        }

      *addr = act_elem;
    }

  if (base && base != const0_rtx)
    {
      if (*addr)
        *addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
        *addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
        {
          act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

          if (offset_p)
            *offset_p = &XEXP (act_elem, 1);

          if (GET_CODE (symbol) == SYMBOL_REF
              || GET_CODE (symbol) == LABEL_REF
              || GET_CODE (symbol) == CONST)
            act_elem = gen_rtx_CONST (address_mode, act_elem);
        }

      if (*addr)
        *addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
        *addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
        {
          *addr = gen_rtx_PLUS (address_mode, *addr, offset);
          if (offset_p)
            *offset_p = &XEXP (*addr, 1);
        }
      else
        {
          *addr = offset;
          if (offset_p)
            *offset_p = addr;
        }
    }

  if (!*addr)
    *addr = const0_rtx;
}
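
/* As an illustration, when all of SYMBOL, BASE, INDEX, STEP and OFFSET are
   present, the RTL built above has roughly the shape

     (plus (plus BASE (mult INDEX STEP)) (const (plus SYMBOL OFFSET)))

   modulo any reassociation done by simplify_gen_binary, with *STEP_P and
   *OFFSET_P left pointing at the STEP and OFFSET operands so that cached
   templates can later be patched in place.  */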

/* Returns address for TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
                  bool really_expand)
{
  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
  scalar_int_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  if (addr->step && !integer_onep (addr->step))
    st = immed_wide_int_const (wi::to_wide (addr->step), pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    {
      poly_offset_int dc
        = poly_offset_int::from (wi::to_poly_wide (addr->offset), SIGNED);
      off = immed_wide_int_const (dc, pointer_mode);
    }
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
        = TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
        vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
        {
          sym = (addr->symbol ?
                 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
                 : NULL_RTX);
          bse = (addr->base ?
                 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
                 : NULL_RTX);
          idx = (addr->index ?
                 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
                 : NULL_RTX);

          gen_addr_rtx (pointer_mode, sym, bse, idx,
                        st ? const0_rtx : NULL_RTX,
                        off ? const0_rtx : NULL_RTX,
                        &templ->ref,
                        &templ->step_p,
                        &templ->off_p);
        }

      if (st)
        *templ->step_p = st;
      if (off)
        *templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
         ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
         : NULL_RTX);
  bse = (addr->base
         ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
         : NULL_RTX);
  idx = (addr->index
         ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
         : NULL_RTX);

  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}
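
/* Note that when REALLY_EXPAND is false the returned RTL is only good for
   querying validity: the placeholders are a shared "test_symbol" SYMBOL_REF
   and raw pseudo registers just past LAST_VIRTUAL_REGISTER, and a later call
   with the same combination of components overwrites the step and offset
   slots of the same shared template.  */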

/* implement addr_for_mem_ref() directly from a tree, which avoids exporting
   the mem_address structure.  */

rtx
addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
{
  struct mem_address addr;
  get_address_description (exp, &addr);
  return addr_for_mem_ref (&addr, as, really_expand);
}

/* Returns address of MEM_REF in TYPE.  */

tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
        act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
                                act_elem, step);
      addr_off = act_elem;
    }

  act_elem = TMR_INDEX2 (mem_ref);
  if (act_elem)
    {
      if (addr_off)
        addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
                                addr_off, act_elem);
      else
        addr_off = act_elem;
    }

  if (offset && !integer_zerop (offset))
    {
      if (addr_off)
        addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
                                fold_convert (TREE_TYPE (addr_off), offset));
      else
        addr_off = offset;
    }

  if (addr_off)
    addr = fold_build_pointer_plus (addr_base, addr_off);
  else
    addr = addr_base;

  return addr;
}

/* Returns true if a memory reference in MODE and with parameters given by
   ADDR is valid on the current target.  */

bool
valid_mem_ref_p (machine_mode mode, addr_space_t as,
                 struct mem_address *addr)
{
  rtx address;

  address = addr_for_mem_ref (addr, as, false);
  if (!address)
    return false;

  return memory_address_addr_space_p (mode, address, as);
}

/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false omit the verification step.  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
                    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
           && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (build_pointer_type (type), 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ??? As IVOPTs does not follow restrictions on where the base
     pointer may point to, create a MEM_REF only if we know that the
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
                 base, addr->offset, addr->index, addr->step, index2);
}
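
/* For reference, the five operands of the TARGET_MEM_REF built above are
   laid out as (base, offset, index, step, index2); when a symbol is present
   it occupies the base slot (as an ADDR_EXPR) and the original base, if any,
   is demoted to index2.  */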

/* Returns true if OBJ is an object whose address is a link time constant.  */

static bool
fixed_address_object_p (tree obj)
{
  return (VAR_P (obj)
          && (TREE_STATIC (obj) || DECL_EXTERNAL (obj))
          && ! DECL_DLLIMPORT_P (obj));
}

/* If ADDR contains an address of object that is a link time constant,
   move it to PARTS->symbol.  */

void
move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
        continue;

      val = addr->elts[i].val;
      if (TREE_CODE (val) == ADDR_EXPR
          && fixed_address_object_p (TREE_OPERAND (val, 0)))
        break;
    }

  if (i == addr->n)
    return;

  parts->symbol = val;
  aff_combination_remove_elt (addr, i);
}

/* If ADDR contains an instance of BASE_HINT, move it to PARTS->base and
   return true; otherwise return false.  */

static bool
move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
                   aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;
  int qual;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
        continue;

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))
        break;
    }

  if (i == addr->n)
    return false;

  /* Cast value to appropriate pointer type.  We cannot use a pointer
     to TYPE directly, as the back-end will assume registers of pointer
     type are aligned, and just the base itself may not actually be.
     We use void pointer to the type's address space instead.  */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
  return true;
}

/* If ADDR contains an address of a dereferenced pointer, move it to
   PARTS->base.  */

static void
move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
        continue;

      val = addr->elts[i].val;
      if (POINTER_TYPE_P (TREE_TYPE (val)))
        break;
    }

  if (i == addr->n)
    return;

  parts->base = val;
  aff_combination_remove_elt (addr, i);
}

/* Moves the loop variant part V in linear address ADDR to be the index
   of PARTS.  */

static void
move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
{
  unsigned i;
  tree val = NULL_TREE;

  gcc_assert (!parts->index);
  for (i = 0; i < addr->n; i++)
    {
      val = addr->elts[i].val;
      if (operand_equal_p (val, v, 0))
        break;
    }

  if (i == addr->n)
    return;

  parts->index = fold_convert (sizetype, val);
  parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
  aff_combination_remove_elt (addr, i);
}

/* Adds ELT to PARTS.  */

static void
add_to_parts (struct mem_address *parts, tree elt)
{
  tree type;

  if (!parts->index)
    {
      parts->index = fold_convert (sizetype, elt);
      return;
    }

  if (!parts->base)
    {
      parts->base = elt;
      return;
    }

  /* Add ELT to base.  */
  type = TREE_TYPE (parts->base);
  if (POINTER_TYPE_P (type))
    parts->base = fold_build_pointer_plus (parts->base, elt);
  else
    parts->base = fold_build2 (PLUS_EXPR, type, parts->base, elt);
}
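
/* For example, distributing three leftover elements E1, E2 and E3 into an
   otherwise empty PARTS ends up with index = E1, base = E2 and then E2 + E3
   folded into base; the index and base slots are simply filled on a
   first-come basis before anything is accumulated into base.  */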

/* Returns true if multiplying by RATIO is allowed in an address.  Test the
   validity for a memory reference accessing memory of mode MODE in address
   space AS.  */

static bool
multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, machine_mode mode,
                                 addr_space_t as)
{
#define MAX_RATIO 128
  unsigned int data_index = (int) as * MAX_MACHINE_MODE + (int) mode;
  static vec<sbitmap> valid_mult_list;
  sbitmap valid_mult;

  if (data_index >= valid_mult_list.length ())
    valid_mult_list.safe_grow_cleared (data_index + 1);

  valid_mult = valid_mult_list[data_index];
  if (!valid_mult)
    {
      machine_mode address_mode = targetm.addr_space.address_mode (as);
      rtx reg1 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1);
      rtx reg2 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 2);
      rtx addr, scaled;
      HOST_WIDE_INT i;

      valid_mult = sbitmap_alloc (2 * MAX_RATIO + 1);
      bitmap_clear (valid_mult);
      scaled = gen_rtx_fmt_ee (MULT, address_mode, reg1, NULL_RTX);
      addr = gen_rtx_fmt_ee (PLUS, address_mode, scaled, reg2);
      for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
        {
          XEXP (scaled, 1) = gen_int_mode (i, address_mode);
          if (memory_address_addr_space_p (mode, addr, as)
              || memory_address_addr_space_p (mode, scaled, as))
            bitmap_set_bit (valid_mult, i + MAX_RATIO);
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, " allowed multipliers:");
          for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
            if (bitmap_bit_p (valid_mult, i + MAX_RATIO))
              fprintf (dump_file, " %d", (int) i);
          fprintf (dump_file, "\n");
          fprintf (dump_file, "\n");
        }

      valid_mult_list[data_index] = valid_mult;
    }

  if (ratio > MAX_RATIO || ratio < -MAX_RATIO)
    return false;

  return bitmap_bit_p (valid_mult, ratio + MAX_RATIO);
}
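
/* The answer is cached per (address space, machine mode) pair in a bitmap
   indexed by RATIO + MAX_RATIO, so only ratios in [-128, 128] are ever
   considered; e.g. on a typical x86-like target the bits for 1, 2, 4 and 8
   would be among those set for ordinary integer modes.  */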

/* Finds the most expensive multiplication in ADDR that can be expressed
   in an addressing mode and moves the corresponding element(s) to PARTS.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
                              aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  offset_int best_mult = 0;
  for (i = 0; i < addr->n; i++)
    {
      if (!wi::fits_shwi_p (addr->elts[i].coef))
        continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
          || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
        continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
        {
          best_mult_cost = acost;
          best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
        }
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  */
  for (i = j = 0; i < addr->n; i++)
    {
      offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
      offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));

      if (amult == best_mult)
        op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
        op_code = MINUS_EXPR;
      else
        {
          addr->elts[j] = addr->elts[i];
          j++;
          continue;
        }

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
        mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
        mult_elt = elt;
      else
        mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  parts->index = mult_elt;
  parts->step = wide_int_to_tree (sizetype, best_mult);
}
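
/* As a hypothetical illustration, for ADDR describing p + 4*i - 4*j + 16
   with 4 being the most expensive allowed multiplier, the loops above
   collect index = i - j and step = 4, leaving p + 16 behind in ADDR.  */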

/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as the base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.  Store true in *VAR_IN_BASE if the variant
   part of the address is split into PARTS.base.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but a single
   register in an address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand, tree base_hint,
               struct mem_address *parts, bool *var_in_base, bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  if (maybe_ne (addr->offset, 0))
    parts->offset = wide_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* Since at the moment there is no reliable way to know how to
     distinguish between a pointer and its offset, we decide whether
     the variant part is the pointer based on a guess.  */
  *var_in_base = (base_hint != NULL && parts->symbol == NULL);
  if (*var_in_base)
    *var_in_base = move_hint_to_base (type, parts, base_hint, addr);
  else
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Move pointer into base.  */
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (addr->elts[i].coef != 1)
        part = fold_build2 (MULT_EXPR, sizetype, part,
                            wide_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
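
/* A hypothetical decomposition: for an access to a[i] where `a' is a static
   or global array of 4-byte elements, `i' is the iv candidate and the affine
   form of the address is &a + 4*i + 8, the parts would come out as
   symbol = &a, index = i, step = 4, offset = 8 and no base.  */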

/* Force the PARTS to register.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
                                              is_gimple_mem_ref_addr, NULL_TREE,
                                              true, GSI_SAME_STMT);
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
                                             true, NULL_TREE,
                                             true, GSI_SAME_STMT);
}

/* Return true if the OFFSET in PARTS is the only thing that is making
   it an invalid address for type TYPE.  */

static bool
mem_ref_valid_without_offset_p (tree type, mem_address parts)
{
  if (!parts.base)
    parts.base = parts.offset;
  parts.offset = NULL_TREE;
  return valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), &parts);
}

/* Fold PARTS->offset into PARTS->base, so that there is no longer
   a separate offset.  Emit any new instructions before GSI.  */

static void
add_offset_to_base (gimple_stmt_iterator *gsi, mem_address *parts)
{
  tree tmp = parts->offset;
  if (parts->base)
    {
      tmp = fold_build_pointer_plus (parts->base, tmp);
      tmp = force_gimple_operand_gsi_1 (gsi, tmp, is_gimple_mem_ref_addr,
                                        NULL_TREE, true, GSI_SAME_STMT);
    }
  parts->base = tmp;
  parts->offset = NULL_TREE;
}

/* Creates and returns a TARGET_MEM_REF for address ADDR.  Any necessary
   computations are emitted in front of GSI.  TYPE is the type of the
   created memory reference.  IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non-NULL if IV_CAND comes from a base address
   object.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
                tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  bool var_in_base;
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, &var_in_base, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  /* Merge symbol into other parts.  */
  if (parts.symbol)
    {
      tmp = parts.symbol;
      parts.symbol = NULL_TREE;
      gcc_assert (is_gimple_val (tmp));

      if (parts.base)
        {
          gcc_assert (useless_type_conversion_p (sizetype,
                                                 TREE_TYPE (parts.base)));

          if (parts.index)
            {
              /* Add the symbol to base, forcing it into a register
                 if necessary.  */
              tmp = fold_build_pointer_plus (tmp, parts.base);
              tmp = force_gimple_operand_gsi_1 (gsi, tmp,
                                                is_gimple_mem_ref_addr,
                                                NULL_TREE, true,
                                                GSI_SAME_STMT);
            }
          else
            {
              /* Move base to index, then move the symbol to base.  */
              parts.index = parts.base;
            }
          parts.base = tmp;
        }
      else
        parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  /* Move multiplication to index by transforming address expression:
       [... + index << step + ...]
     into:
       index' = index << step;
       [... + index' + ...].  */
  if (parts.step && !integer_onep (parts.step))
    {
      gcc_assert (parts.index);
      if (parts.offset && mem_ref_valid_without_offset_p (type, parts))
        {
          add_offset_to_base (gsi, &parts);
          mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
          gcc_assert (mem_ref);
          return mem_ref;
        }

      parts.index = force_gimple_operand_gsi (gsi,
                                              fold_build2 (MULT_EXPR, sizetype,
                                                           parts.index, parts.step),
                                              true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  /* Add offset to invariant part by transforming address expression:
       [base + index + offset]
     into:
       base' = base + offset;
       [base' + index]
     or:
       index' = index + offset;
       [base + index']
     depending on which one is invariant.  */
  if (parts.offset && !integer_zerop (parts.offset))
    {
      tree old_base = unshare_expr (parts.base);
      tree old_index = unshare_expr (parts.index);
      tree old_offset = unshare_expr (parts.offset);

      tmp = parts.offset;
      parts.offset = NULL_TREE;
      /* Add offset to invariant part.  */
      if (!var_in_base)
        {
          if (parts.base)
            {
              tmp = fold_build_pointer_plus (parts.base, tmp);
              tmp = force_gimple_operand_gsi_1 (gsi, tmp,
                                                is_gimple_mem_ref_addr,
                                                NULL_TREE, true,
                                                GSI_SAME_STMT);
            }
          parts.base = tmp;
        }
      else
        {
          if (parts.index)
            {
              tmp = fold_build_pointer_plus (parts.index, tmp);
              tmp = force_gimple_operand_gsi_1 (gsi, tmp,
                                                is_gimple_mem_ref_addr,
                                                NULL_TREE, true,
                                                GSI_SAME_STMT);
            }
          parts.index = tmp;
        }

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;

      /* Restore parts.base, index and offset so that we can check if
         [base + offset] addressing mode is supported in the next step.
         This is necessary for targets that only support [base + offset],
         but not [base + index], addressing modes.  */
      parts.base = old_base;
      parts.index = old_index;
      parts.offset = old_offset;
    }

  /* Transform [base + index + ...] into:
       base' = base + index;
       [base' + ...].  */
  if (parts.index)
    {
      tmp = parts.index;
      parts.index = NULL_TREE;
      /* Add index to base.  */
      if (parts.base)
        {
          tmp = fold_build_pointer_plus (parts.base, tmp);
          tmp = force_gimple_operand_gsi_1 (gsi, tmp,
                                            is_gimple_mem_ref_addr,
                                            NULL_TREE, true, GSI_SAME_STMT);
        }
      parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  /* Transform [base + offset] into:
       base' = base + offset;
       [base'].  */
  if (parts.offset && !integer_zerop (parts.offset))
    {
      add_offset_to_base (gsi, &parts);
      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
        return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
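
/* In short, when the initial PARTS are not directly valid, the code above
   progressively lowers the address: fold the symbol into the base, materialize
   index * step as a separate statement, fold the offset into whichever of base
   or index is invariant, then collapse index into base and finally offset into
   base, retrying create_mem_ref_raw after each step.  */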

/* Copies components of the address from OP to ADDR.  */

void
get_address_description (tree op, struct mem_address *addr)
{
  if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
    {
      addr->symbol = TMR_BASE (op);
      addr->base = TMR_INDEX2 (op);
    }
  else
    {
      addr->symbol = NULL_TREE;
      if (TMR_INDEX2 (op))
        {
          gcc_assert (integer_zerop (TMR_BASE (op)));
          addr->base = TMR_INDEX2 (op);
        }
      else
        addr->base = TMR_BASE (op);
    }
  addr->index = TMR_INDEX (op);
  addr->step = TMR_STEP (op);
  addr->offset = TMR_OFFSET (op);
}

/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
              || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
        ;
      else if ((TREE_CODE (base) == MEM_REF
                || TREE_CODE (base) == TARGET_MEM_REF)
               && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
               && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
        {
          struct ptr_info_def *new_pi;
          unsigned int align, misalign;

          duplicate_ssa_name_ptr_info
            (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
          new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
          /* We have to be careful about transferring alignment information.  */
          if (get_ptr_info_alignment (new_pi, &align, &misalign)
              && TREE_CODE (old_ref) == MEM_REF
              && !(TREE_CODE (new_ref) == TARGET_MEM_REF
                   && (TMR_INDEX2 (new_ref)
                       /* TODO: The conditions below can be relaxed if
                          TMR_INDEX is an induction variable and its
                          initial value and step are aligned.  */
                       || (TMR_INDEX (new_ref) && !TMR_STEP (new_ref))
                       || (TMR_STEP (new_ref)
                           && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
                               < align)))))
            {
              poly_uint64 inc = (mem_ref_offset (old_ref)
                                 - mem_ref_offset (new_ref)).force_uhwi ();
              adjust_ptr_info_misalignment (new_pi, inc);
            }
          else
            mark_ptr_info_alignment_unknown (new_pi);
        }
      else if (VAR_P (base)
               || TREE_CODE (base) == PARM_DECL
               || TREE_CODE (base) == RESULT_DECL)
        {
          struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
          pt_solution_set_var (&pi->pt, base);
        }
    }
}

/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
                                             TREE_TYPE (addr.offset),
                                             addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
                      (PLUS_EXPR, TREE_TYPE (addr.offset),
                       addr.offset,
                       TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
           && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      poly_int64 offset;
      addr.symbol = build_fold_addr_expr
                      (get_addr_base_and_unit_offset
                         (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
                                     addr.offset, size_int (offset));
      changed = true;
    }

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
        {
          off = fold_binary_to_constant (MULT_EXPR, sizetype,
                                         off, addr.step);
          addr.step = NULL_TREE;
        }

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
                                             TREE_TYPE (addr.offset),
                                             addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target - the propagation
     result wouldn't be anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
                                TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
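
/* For instance, a TARGET_MEM_REF whose index has been propagated to the
   constant 3 with a step of 4 is rewritten above with the index and step
   dropped and 12 added to its constant offset.  */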

/* Dump PARTS to FILE.  */

extern void dump_mem_address (FILE *, struct mem_address *);
void
dump_mem_address (FILE *file, struct mem_address *parts)
{
  if (parts->symbol)
    {
      fprintf (file, "symbol: ");
      print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->base)
    {
      fprintf (file, "base: ");
      print_generic_expr (file, parts->base, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->index)
    {
      fprintf (file, "index: ");
      print_generic_expr (file, parts->index, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->step)
    {
      fprintf (file, "step: ");
      print_generic_expr (file, parts->step, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->offset)
    {
      fprintf (file, "offset: ");
      print_generic_expr (file, parts->offset, TDF_SLIM);
      fprintf (file, "\n");
    }
}

#include "gt-tree-ssa-address.h"