xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/tree-ssa-address.c (revision cef8759bd76c1b621f8eab8faa6f208faabc2e15)
1 /* Memory address lowering and addressing mode selection.
2    Copyright (C) 2004-2017 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any
9 later version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 /* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
21    that directly map to addressing modes of the target.  */
22 
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "stringpool.h"
32 #include "tree-vrp.h"
33 #include "tree-ssanames.h"
34 #include "expmed.h"
35 #include "insn-config.h"
36 #include "recog.h"
37 #include "tree-pretty-print.h"
38 #include "fold-const.h"
39 #include "stor-layout.h"
40 #include "gimple-iterator.h"
41 #include "gimplify-me.h"
42 #include "tree-ssa-loop-ivopts.h"
43 #include "expr.h"
44 #include "tree-dfa.h"
45 #include "dumpfile.h"
46 #include "tree-affine.h"
47 
48 /* FIXME: We compute address costs using RTL.  */
49 #include "tree-ssa-address.h"
50 
51 /* TODO -- handling of symbols (according to Richard Henderson's
52    comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):
53 
54    There are at least 5 different kinds of symbols that we can run up against:
55 
56      (1) binds_local_p, small data area.
57      (2) binds_local_p, eg local statics
58      (3) !binds_local_p, eg global variables
59      (4) thread local, local_exec
60      (5) thread local, !local_exec
61 
62    Now, (1) won't appear often in an array context, but it certainly can.
63    All you have to do is set -GN high enough, or explicitly mark any
64    random object __attribute__((section (".sdata"))).
65 
66    All of these affect whether or not a symbol is in fact a valid address.
67    The only one tested here is (3).  And that result may very well
68    be incorrect for (4) or (5).
69 
70    An incorrect result here does not cause incorrect results out the
71 back end, because the expander in expr.c validates the address.  However
72    it would be nice to improve the handling here in order to produce more
73    precise results.  */
74 
/* A "template" for memory address, used to determine whether the address is
   valid for mode.  The REF rtx is shared and reused by addr_for_mem_ref;
   the step and offset slots it points into are patched in place for each
   query, which is why they must not be traced by the GC.  */

struct GTY (()) mem_addr_template {
  rtx ref;			/* The template.  */
  rtx * GTY ((skip)) step_p;	/* The point in template where the step should be
				   filled in.  */
  rtx * GTY ((skip)) off_p;	/* The point in template where the offset should
				   be filled in.  */
};
85 
86 
/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  The vector is grown lazily by
   addr_for_mem_ref and is rooted for the garbage collector.  */

static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;
92 
/* Computes the index into mem_addr_template_list for an address in address
   space AS whose SYMBOL, BASE, INDEX, STEP and OFFSET components are
   present iff the corresponding argument is nonzero.  The low five bits
   encode component presence; the higher bits encode the address space.
   Each argument is fully parenthesized so that arbitrary expressions
   (e.g. containing `|' or `?:') bind correctly against `!='.  */

#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | (((SYMBOL) != 0) << 4) \
   | (((BASE) != 0) << 3) \
   | (((INDEX) != 0) << 2) \
   | (((STEP) != 0) << 1) \
   | ((OFFSET) != 0))
100 
/* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
   STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores pointers
   to where step is placed to *STEP_P and offset to *OFFSET_P.  Any of the
   component arguments, as well as STEP_P and OFFSET_P, may be NULL.  The
   produced address has the shape SYMBOL + BASE + INDEX * STEP + OFFSET,
   with absent components omitted.  */

static void
gen_addr_rtx (machine_mode address_mode,
	      rtx symbol, rtx base, rtx index, rtx step, rtx offset,
	      rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  /* INDEX * STEP forms the innermost part of the address.  */
  if (index && index != const0_rtx)
    {
      act_elem = index;
      if (step)
	{
	  act_elem = gen_rtx_MULT (address_mode, act_elem, step);

	  /* Record where the step lives so a template user can patch it.  */
	  if (step_p)
	    *step_p = &XEXP (act_elem, 1);
	}

      *addr = act_elem;
    }

  /* Add BASE on top of the index part, if any.  */
  if (base && base != const0_rtx)
    {
      if (*addr)
	*addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
	*addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
	{
	  act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

	  if (offset_p)
	    *offset_p = &XEXP (act_elem, 1);

	  /* Wrap SYMBOL + OFFSET in CONST when the symbol part is itself
	     constant, marking the sum as a compile-time constant.  */
	  if (GET_CODE (symbol) == SYMBOL_REF
	      || GET_CODE (symbol) == LABEL_REF
	      || GET_CODE (symbol) == CONST)
	    act_elem = gen_rtx_CONST (address_mode, act_elem);
	}

      if (*addr)
	*addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
	*addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
	{
	  *addr = gen_rtx_PLUS (address_mode, *addr, offset);
	  if (offset_p)
	    *offset_p = &XEXP (*addr, 1);
	}
      else
	{
	  /* The whole address is just the offset; the patch point then is
	     ADDR itself.  */
	  *addr = offset;
	  if (offset_p)
	    *offset_p = addr;
	}
    }

  /* An entirely empty address means address zero.  */
  if (!*addr)
    *addr = const0_rtx;
}
180 
/* Description of a memory address.  The address it describes is
   SYMBOL + BASE + INDEX * STEP + OFFSET; any component may be NULL_TREE
   when absent.  */

struct mem_address
{
  tree symbol, base, index, step, offset;
};
187 
/* Returns address for TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  The returned template rtx is shared;
   callers must not hold onto it across further calls.  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)
{
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  /* A step of one is implicit; no MULT is needed for it.  */
  if (addr->step && !integer_onep (addr->step))
    st = immed_wide_int_const (addr->step, pointer_mode);
  else
    st = NULL_RTX;

  /* Likewise a zero offset is simply dropped.  */
  if (addr->offset && !integer_zerop (addr->offset))
    {
      offset_int dc = offset_int::from (addr->offset, SIGNED);
      off = immed_wide_int_const (dc, pointer_mode);
    }
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
	vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
	{
	  /* Build the template lazily, using placeholder pseudo registers
	     and a dummy symbol; the step and offset slots are patched in
	     below for each query.  */
	  sym = (addr->symbol ?
		 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
		 : NULL_RTX);
	  bse = (addr->base ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
		 : NULL_RTX);
	  idx = (addr->index ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
		 : NULL_RTX);

	  gen_addr_rtx (pointer_mode, sym, bse, idx,
			st? const0_rtx : NULL_RTX,
			off? const0_rtx : NULL_RTX,
			&templ->ref,
			&templ->step_p,
			&templ->off_p);
	}

      /* Patch the current step and offset into the shared template.  */
      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  /* Expansion is done in pointer mode; convert to the address mode of
     the address space when the two differ.  */
  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}
270 
271 /* implement addr_for_mem_ref() directly from a tree, which avoids exporting
272    the mem_address structure.  */
273 
274 rtx
275 addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
276 {
277   struct mem_address addr;
278   get_address_description (exp, &addr);
279   return addr_for_mem_ref (&addr, as, really_expand);
280 }
281 
/* Returns address of MEM_REF in TYPE, i.e. the value
   TMR_BASE + TMR_INDEX * TMR_STEP + TMR_INDEX2 + TMR_OFFSET
   rebuilt as a tree expression of type TYPE.  */

tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  /* Accumulate INDEX * STEP ...  */
  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
	act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
				act_elem, step);
      addr_off = act_elem;
    }

  /* ... plus INDEX2 ...  */
  act_elem = TMR_INDEX2 (mem_ref);
  if (act_elem)
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
				addr_off, act_elem);
      else
	addr_off = act_elem;
    }

  /* ... plus a nonzero OFFSET ...  */
  if (offset && !integer_zerop (offset))
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
				fold_convert (TREE_TYPE (addr_off), offset));
      else
	addr_off = offset;
    }

  /* ... and finally add the whole to the base pointer.  */
  if (addr_off)
    addr = fold_build_pointer_plus (addr_base, addr_off);
  else
    addr = addr_base;

  return addr;
}
329 
330 /* Returns true if a memory reference in MODE and with parameters given by
331    ADDR is valid on the current target.  */
332 
333 static bool
334 valid_mem_ref_p (machine_mode mode, addr_space_t as,
335 		 struct mem_address *addr)
336 {
337   rtx address;
338 
339   address = addr_for_mem_ref (addr, as, false);
340   if (!address)
341     return false;
342 
343   return memory_address_addr_space_p (mode, address, as);
344 }
345 
/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false omit the verification step.  Returns
   NULL_TREE when verification fails.  As a side effect canonicalizes ADDR:
   a unit step is dropped and the offset is converted to ALIAS_PTR_TYPE.  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  /* A step of one is implicit.  */
  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  /* TMR_OFFSET carries the alias pointer type and must always be present.  */
  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  /* Distribute symbol and base over the TMR_BASE and TMR_INDEX2
     operands.  A symbol always goes to TMR_BASE.  */
  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      /* No pointer-typed base available; use a null pointer constant as
	 TMR_BASE and keep the base in TMR_INDEX2.  */
      base = build_int_cst (build_pointer_type (type), 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ???  As IVOPTs does not follow restrictions to where the base
     pointer may point to create a MEM_REF only if we know that
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}
397 
398 /* Returns true if OBJ is an object whose address is a link time constant.  */
399 
400 static bool
401 fixed_address_object_p (tree obj)
402 {
403   return (VAR_P (obj)
404 	  && (TREE_STATIC (obj) || DECL_EXTERNAL (obj))
405 	  && ! DECL_DLLIMPORT_P (obj));
406 }
407 
408 /* If ADDR contains an address of object that is a link time constant,
409    move it to PARTS->symbol.  */
410 
411 static void
412 move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
413 {
414   unsigned i;
415   tree val = NULL_TREE;
416 
417   for (i = 0; i < addr->n; i++)
418     {
419       if (addr->elts[i].coef != 1)
420 	continue;
421 
422       val = addr->elts[i].val;
423       if (TREE_CODE (val) == ADDR_EXPR
424 	  && fixed_address_object_p (TREE_OPERAND (val, 0)))
425 	break;
426     }
427 
428   if (i == addr->n)
429     return;
430 
431   parts->symbol = val;
432   aff_combination_remove_elt (addr, i);
433 }
434 
435 /* If ADDR contains an instance of BASE_HINT, move it to PARTS->base.  */
436 
437 static void
438 move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
439 		   aff_tree *addr)
440 {
441   unsigned i;
442   tree val = NULL_TREE;
443   int qual;
444 
445   for (i = 0; i < addr->n; i++)
446     {
447       if (addr->elts[i].coef != 1)
448 	continue;
449 
450       val = addr->elts[i].val;
451       if (operand_equal_p (val, base_hint, 0))
452 	break;
453     }
454 
455   if (i == addr->n)
456     return;
457 
458   /* Cast value to appropriate pointer type.  We cannot use a pointer
459      to TYPE directly, as the back-end will assume registers of pointer
460      type are aligned, and just the base itself may not actually be.
461      We use void pointer to the type's address space instead.  */
462   qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
463   type = build_qualified_type (void_type_node, qual);
464   parts->base = fold_convert (build_pointer_type (type), val);
465   aff_combination_remove_elt (addr, i);
466 }
467 
468 /* If ADDR contains an address of a dereferenced pointer, move it to
469    PARTS->base.  */
470 
471 static void
472 move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
473 {
474   unsigned i;
475   tree val = NULL_TREE;
476 
477   for (i = 0; i < addr->n; i++)
478     {
479       if (addr->elts[i].coef != 1)
480 	continue;
481 
482       val = addr->elts[i].val;
483       if (POINTER_TYPE_P (TREE_TYPE (val)))
484 	break;
485     }
486 
487   if (i == addr->n)
488     return;
489 
490   parts->base = val;
491   aff_combination_remove_elt (addr, i);
492 }
493 
494 /* Moves the loop variant part V in linear address ADDR to be the index
495    of PARTS.  */
496 
497 static void
498 move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
499 {
500   unsigned i;
501   tree val = NULL_TREE;
502 
503   gcc_assert (!parts->index);
504   for (i = 0; i < addr->n; i++)
505     {
506       val = addr->elts[i].val;
507       if (operand_equal_p (val, v, 0))
508 	break;
509     }
510 
511   if (i == addr->n)
512     return;
513 
514   parts->index = fold_convert (sizetype, val);
515   parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
516   aff_combination_remove_elt (addr, i);
517 }
518 
519 /* Adds ELT to PARTS.  */
520 
521 static void
522 add_to_parts (struct mem_address *parts, tree elt)
523 {
524   tree type;
525 
526   if (!parts->index)
527     {
528       parts->index = fold_convert (sizetype, elt);
529       return;
530     }
531 
532   if (!parts->base)
533     {
534       parts->base = elt;
535       return;
536     }
537 
538   /* Add ELT to base.  */
539   type = TREE_TYPE (parts->base);
540   if (POINTER_TYPE_P (type))
541     parts->base = fold_build_pointer_plus (parts->base, elt);
542   else
543     parts->base = fold_build2 (PLUS_EXPR, type,
544 			       parts->base, elt);
545 }
546 
/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and move the corresponding
   element(s) to PARTS.  TYPE is the type of the memory access and SPEED
   says whether we optimize for speed or size.  Elements whose coefficient
   is the negation of the chosen multiplier are folded in with
   MINUS_EXPR/NEGATE_EXPR.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
			      aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  /* First pass: find the coefficient with the highest multiplication
     cost among those the target can encode in an address.  */
  offset_int best_mult = 0;
  for (i = 0; i < addr->n; i++)
    {
      if (!wi::fits_shwi_p (addr->elts[i].coef))
	continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
	  || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
	continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
	{
	  best_mult_cost = acost;
	  best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
	}
    }

  /* Nothing to do if no multiplication is both allowed and worthwhile.  */
  if (!best_mult_cost)
    return;

  /* Second pass: collect elements multiplied by best_mult; elements with
     the negated coefficient are subtracted, all others are compacted back
     into ADDR in place.  */
  for (i = j = 0; i < addr->n; i++)
    {
      offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
      offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));

      if (amult == best_mult)
	op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
	op_code = MINUS_EXPR;
      else
	{
	  /* Keep this element in ADDR.  */
	  addr->elts[j] = addr->elts[i];
	  j++;
	  continue;
	}

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
	mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
	mult_elt = elt;
      else
	mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  parts->index = mult_elt;
  parts->step = wide_int_to_tree (sizetype, best_mult);
}
616 
/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.  SPEED says whether we optimize for speed
   or size.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but single
   register in address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
	       tree base_hint, struct mem_address *parts,
               bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  /* The constant part of the affine combination becomes the offset.  */
  if (addr->offset != 0)
    parts->offset = wide_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* No need to do address parts reassociation if the number of parts
     is <= 2 -- in that case, no loop invariant code motion can be
     exposed.  */

  if (!base_hint && (addr->n > 2))
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication
     to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Try to find a base of the reference.  Since at the moment
     there is no reliable way how to distinguish between pointer and its
     offset, this is just a guess.  */
  if (!parts->symbol && base_hint)
    move_hint_to_base (type, parts, base_hint, addr);
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (addr->elts[i].coef != 1)
	part = fold_build2 (MULT_EXPR, sizetype, part,
			    wide_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  /* ... including the non-affine rest of the combination, if any.  */
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
682 
/* Force the PARTS to register.  Only the base and index may need
   gimplification; the other parts (symbol, step, offset) are constants
   and need no forcing here.  Statements, if any, are inserted in front
   of GSI.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  /* The base must be a valid memory reference address.  */
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
					    is_gimple_mem_ref_addr, NULL_TREE,
					    true, GSI_SAME_STMT);
  /* The index only needs to be a simple gimple value.  */
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
					     true, NULL_TREE,
					     true, GSI_SAME_STMT);
}
697 
/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary
   computations are emitted in front of GSI.  TYPE is the type
   of the created memory reference.  IV_CAND is the selected iv candidate
   in ADDR, and BASE_HINT is non NULL if IV_CAND comes from a base address
   object.  When the initial split of ADDR is not valid for the target,
   the address is simplified step by step (folding step into index, then
   symbol, index and offset into base), re-checking validity each time.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  if (parts.step && !integer_onep (parts.step))
    {
      /* Move the multiplication to index, computing INDEX * STEP
	 explicitly in front of GSI.  */
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
				fold_build2 (MULT_EXPR, sizetype,
					     parts.index, parts.step),
				true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.symbol)
    {
      tmp = parts.symbol;
      gcc_assert (is_gimple_val (tmp));

      /* Add the symbol to base, possibly forcing it to register.  */
      if (parts.base)
	{
	  gcc_assert (useless_type_conversion_p
				(sizetype, TREE_TYPE (parts.base)));

	  if (parts.index)
	    {
	      parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (tmp, parts.base),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	    }
	  else
	    {
	      /* The index slot is free; keep the symbol as the base and
		 move the old base into the index.  */
	      parts.index = parts.base;
	      parts.base = tmp;
	    }
	}
      else
	parts.base = tmp;
      parts.symbol = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.index)
    {
      /* Add index to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.index),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.index;
      parts.index = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Try adding offset to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.offset),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.offset;

      parts.offset = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
812 
813 /* Copies components of the address from OP to ADDR.  */
814 
815 void
816 get_address_description (tree op, struct mem_address *addr)
817 {
818   if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
819     {
820       addr->symbol = TMR_BASE (op);
821       addr->base = TMR_INDEX2 (op);
822     }
823   else
824     {
825       addr->symbol = NULL_TREE;
826       if (TMR_INDEX2 (op))
827 	{
828 	  gcc_assert (integer_zerop (TMR_BASE (op)));
829 	  addr->base = TMR_INDEX2 (op);
830 	}
831       else
832 	addr->base = TMR_BASE (op);
833     }
834   addr->index = TMR_INDEX (op);
835   addr->step = TMR_STEP (op);
836   addr->offset = TMR_OFFSET (op);
837 }
838 
/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  This transfers
   the side-effect and volatility flags and, where possible, points-to
   and alignment information of the base pointer.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
	      || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
	;
      else if ((TREE_CODE (base) == MEM_REF
		|| TREE_CODE (base) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
	       && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
	{
	  struct ptr_info_def *new_pi;
	  unsigned int align, misalign;

	  duplicate_ssa_name_ptr_info
	    (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
	  new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
	  /* We have to be careful about transferring alignment information.  */
	  if (get_ptr_info_alignment (new_pi, &align, &misalign)
	      && TREE_CODE (old_ref) == MEM_REF
	      && !(TREE_CODE (new_ref) == TARGET_MEM_REF
		   && (TMR_INDEX2 (new_ref)
		       /* TODO: Below conditions can be relaxed if TMR_INDEX
			  is an induction variable and its initial value and
			  step are aligned.  */
		       || (TMR_INDEX (new_ref) && !TMR_STEP (new_ref))
		       || (TMR_STEP (new_ref)
			   && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
			       < align)))))
	    {
	      /* Adjust the recorded misalignment by the difference of the
		 constant offsets of the old and new references.  */
	      unsigned int inc = (mem_ref_offset (old_ref).to_short_addr ()
				  - mem_ref_offset (new_ref).to_short_addr ());
	      adjust_ptr_info_misalignment (new_pi, inc);
	    }
	  else
	    mark_ptr_info_alignment_unknown (new_pi);
	}
      else if (VAR_P (base)
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	{
	  /* For a decl base, the new pointer points exactly to it.  */
	  struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
	  pt_solution_set_var (&pi->pt, base);
	}
    }
}
904 
/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  /* Fold a constant nonzero base into the offset.  */
  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  /* A symbol of the form &MEM[p + c]: strip to symbol p and fold the
     constant c into the offset.  */
  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      /* A symbol that is a component reference with a constant position:
	 take the address of the underlying base object and fold the
	 component's unit offset into the offset.  */
      HOST_WIDE_INT offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
		         (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  /* Fold a constant index, scaled by the step if present, into the
     offset.  */
  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target - the propagation
     result wouldn't be anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
			        TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
980 
981 /* Dump PARTS to FILE.  */
982 
983 extern void dump_mem_address (FILE *, struct mem_address *);
984 void
985 dump_mem_address (FILE *file, struct mem_address *parts)
986 {
987   if (parts->symbol)
988     {
989       fprintf (file, "symbol: ");
990       print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
991       fprintf (file, "\n");
992     }
993   if (parts->base)
994     {
995       fprintf (file, "base: ");
996       print_generic_expr (file, parts->base, TDF_SLIM);
997       fprintf (file, "\n");
998     }
999   if (parts->index)
1000     {
1001       fprintf (file, "index: ");
1002       print_generic_expr (file, parts->index, TDF_SLIM);
1003       fprintf (file, "\n");
1004     }
1005   if (parts->step)
1006     {
1007       fprintf (file, "step: ");
1008       print_generic_expr (file, parts->step, TDF_SLIM);
1009       fprintf (file, "\n");
1010     }
1011   if (parts->offset)
1012     {
1013       fprintf (file, "offset: ");
1014       print_generic_expr (file, parts->offset, TDF_SLIM);
1015       fprintf (file, "\n");
1016     }
1017 }
1018 
1019 #include "gt-tree-ssa-address.h"
1020