xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/expr.c (revision 7330f729ccf0bd976a06f95fad452fe774fc7fd1)
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2    Copyright (C) 1988-2017 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "predict.h"
29 #include "memmodel.h"
30 #include "tm_p.h"
31 #include "ssa.h"
32 #include "expmed.h"
33 #include "optabs.h"
34 #include "regs.h"
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "cgraph.h"
38 #include "diagnostic.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "stor-layout.h"
42 #include "attribs.h"
43 #include "varasm.h"
44 #include "except.h"
45 #include "insn-attr.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "stmt.h"
50 /* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
51 #include "expr.h"
52 #include "optabs-tree.h"
53 #include "libfuncs.h"
54 #include "reload.h"
55 #include "langhooks.h"
56 #include "common/common-target.h"
57 #include "tree-ssa-live.h"
58 #include "tree-outof-ssa.h"
59 #include "tree-ssa-address.h"
60 #include "builtins.h"
61 #include "tree-chkp.h"
62 #include "rtl-chkp.h"
63 #include "ccmp.h"
64 
65 
66 /* If this is nonzero, we do not bother generating VOLATILE
67    around volatile memory references, and we are willing to
68    output indirect addresses.  If cse is to follow, we reject
69    indirect addresses so a useful potential cse is generated;
70    if it is used only once, instruction combination will produce
71    the same indirect address eventually.  */
72 int cse_not_expected;
73 
74 static bool block_move_libcall_safe_for_call_parm (void);
75 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
76 					unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
77 					unsigned HOST_WIDE_INT);
78 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
79 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
80 static rtx_insn *compress_float_constant (rtx, rtx);
81 static rtx get_subtarget (rtx);
82 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
83 				     HOST_WIDE_INT, unsigned HOST_WIDE_INT,
84 				     unsigned HOST_WIDE_INT, machine_mode,
85 				     tree, int, alias_set_type, bool);
86 static void store_constructor (tree, rtx, int, HOST_WIDE_INT, bool);
87 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
88 			unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
89 			machine_mode, tree, alias_set_type, bool, bool);
90 
91 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
92 
93 static int is_aligning_offset (const_tree, const_tree);
94 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
95 static rtx do_store_flag (sepops, rtx, machine_mode);
96 #ifdef PUSH_ROUNDING
97 static void emit_single_push_insn (machine_mode, rtx, tree);
98 #endif
99 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
100 static rtx const_vector_from_tree (tree);
101 static rtx const_scalar_mask_from_tree (tree);
102 static tree tree_expr_size (const_tree);
103 static HOST_WIDE_INT int_expr_size (tree);
104 
105 
106 /* This is run to set up which modes can be used
107    directly in memory and to initialize the block move optab.  It is run
108    at the beginning of compilation and when the target is reinitialized.  */
109 
110 void
111 init_expr_target (void)
112 {
113   rtx pat;
114   machine_mode mode;
115   int num_clobbers;
116   rtx mem, mem1;
117   rtx reg;
118 
119   /* Try indexing by frame ptr and try by stack ptr.
120      It is known that on the Convex the stack ptr isn't a valid index.
121      With luck, one or the other is valid on any machine.  */
122   mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
123   mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);
124 
125   /* A scratch register we can modify in-place below to avoid
126      useless RTL allocations.  */
127   reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
128 
129   rtx_insn *insn = as_a<rtx_insn *> (rtx_alloc (INSN));
130   pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
131   PATTERN (insn) = pat;
132 
133   for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
134        mode = (machine_mode) ((int) mode + 1))
135     {
136       int regno;
137 
138       direct_load[(int) mode] = direct_store[(int) mode] = 0;
139       PUT_MODE (mem, mode);
140       PUT_MODE (mem1, mode);
141 
142       /* See if there is some register that can be used in this mode and
143 	 directly loaded or stored from memory.  */
144 
145       if (mode != VOIDmode && mode != BLKmode)
146 	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
147 	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
148 	     regno++)
149 	  {
150 	    if (! HARD_REGNO_MODE_OK (regno, mode))
151 	      continue;
152 
153 	    set_mode_and_regno (reg, mode, regno);
154 
155 	    SET_SRC (pat) = mem;
156 	    SET_DEST (pat) = reg;
157 	    if (recog (pat, insn, &num_clobbers) >= 0)
158 	      direct_load[(int) mode] = 1;
159 
160 	    SET_SRC (pat) = mem1;
161 	    SET_DEST (pat) = reg;
162 	    if (recog (pat, insn, &num_clobbers) >= 0)
163 	      direct_load[(int) mode] = 1;
164 
165 	    SET_SRC (pat) = reg;
166 	    SET_DEST (pat) = mem;
167 	    if (recog (pat, insn, &num_clobbers) >= 0)
168 	      direct_store[(int) mode] = 1;
169 
170 	    SET_SRC (pat) = reg;
171 	    SET_DEST (pat) = mem1;
172 	    if (recog (pat, insn, &num_clobbers) >= 0)
173 	      direct_store[(int) mode] = 1;
174 	  }
175     }
176 
177   mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));
178 
179   for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
180        mode = GET_MODE_WIDER_MODE (mode))
181     {
182       machine_mode srcmode;
183       for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
184 	   srcmode = GET_MODE_WIDER_MODE (srcmode))
185 	{
186 	  enum insn_code ic;
187 
188 	  ic = can_extend_p (mode, srcmode, 0);
189 	  if (ic == CODE_FOR_nothing)
190 	    continue;
191 
192 	  PUT_MODE (mem, srcmode);
193 
194 	  if (insn_operand_matches (ic, 1, mem))
195 	    float_extend_from_mem[mode][srcmode] = true;
196 	}
197     }
198 }
199 
200 /* This is run at the start of compiling a function.  */
201 
202 void
203 init_expr (void)
204 {
205   memset (&crtl->expr, 0, sizeof (crtl->expr));
206 }
207 
208 /* Copy data from FROM to TO, where the machine modes are not the same.
209    Both modes may be integer, or both may be floating, or both may be
210    fixed-point.
211    UNSIGNEDP should be nonzero if FROM is an unsigned type.
212    This causes zero-extension instead of sign-extension.  */
213 
214 void
215 convert_move (rtx to, rtx from, int unsignedp)
216 {
217   machine_mode to_mode = GET_MODE (to);
218   machine_mode from_mode = GET_MODE (from);
219   int to_real = SCALAR_FLOAT_MODE_P (to_mode);
220   int from_real = SCALAR_FLOAT_MODE_P (from_mode);
221   enum insn_code code;
222   rtx libcall;
223 
224   /* rtx code for making an equivalent value.  */
225   enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
226 			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
227 
228 
229   gcc_assert (to_real == from_real);
230   gcc_assert (to_mode != BLKmode);
231   gcc_assert (from_mode != BLKmode);
232 
233   /* If the source and destination are already the same, then there's
234      nothing to do.  */
235   if (to == from)
236     return;
237 
238   /* If FROM is a SUBREG that indicates that we have already done at least
239      the required extension, strip it.  We don't handle such SUBREGs as
240      TO here.  */
241 
242   if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
243       && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
244 	  >= GET_MODE_PRECISION (to_mode))
245       && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
246     from = gen_lowpart (to_mode, from), from_mode = to_mode;
247 
248   gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
249 
250   if (to_mode == from_mode
251       || (from_mode == VOIDmode && CONSTANT_P (from)))
252     {
253       emit_move_insn (to, from);
254       return;
255     }
256 
257   if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
258     {
259       gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
260 
261       if (VECTOR_MODE_P (to_mode))
262 	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
263       else
264 	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
265 
266       emit_move_insn (to, from);
267       return;
268     }
269 
270   if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
271     {
272       convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
273       convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
274       return;
275     }
276 
277   if (to_real)
278     {
279       rtx value;
280       rtx_insn *insns;
281       convert_optab tab;
282 
283       gcc_assert ((GET_MODE_PRECISION (from_mode)
284 		   != GET_MODE_PRECISION (to_mode))
285 		  || (DECIMAL_FLOAT_MODE_P (from_mode)
286 		      != DECIMAL_FLOAT_MODE_P (to_mode)));
287 
288       if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
289 	/* Conversion between decimal float and binary float, same size.  */
290 	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
291       else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
292 	tab = sext_optab;
293       else
294 	tab = trunc_optab;
295 
296       /* Try converting directly if the insn is supported.  */
297 
298       code = convert_optab_handler (tab, to_mode, from_mode);
299       if (code != CODE_FOR_nothing)
300 	{
301 	  emit_unop_insn (code, to, from,
302 			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
303 	  return;
304 	}
305 
306       /* Otherwise use a libcall.  */
307       libcall = convert_optab_libfunc (tab, to_mode, from_mode);
308 
309       /* Is this conversion implemented yet?  */
310       gcc_assert (libcall);
311 
312       start_sequence ();
313       value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
314 				       1, from, from_mode);
315       insns = get_insns ();
316       end_sequence ();
317       emit_libcall_block (insns, to, value,
318 			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
319 								       from)
320 			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
321       return;
322     }
323 
324   /* Handle pointer conversion.  */			/* SPEE 900220.  */
325   /* If the target has a converter from FROM_MODE to TO_MODE, use it.  */
326   {
327     convert_optab ctab;
328 
329     if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
330       ctab = trunc_optab;
331     else if (unsignedp)
332       ctab = zext_optab;
333     else
334       ctab = sext_optab;
335 
336     if (convert_optab_handler (ctab, to_mode, from_mode)
337 	!= CODE_FOR_nothing)
338       {
339 	emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
340 			to, from, UNKNOWN);
341 	return;
342       }
343   }
344 
345   /* Targets are expected to provide conversion insns between PxImode and
346      xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
347   if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
348     {
349       machine_mode full_mode
350 	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
351 
352       gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
353 		  != CODE_FOR_nothing);
354 
355       if (full_mode != from_mode)
356 	from = convert_to_mode (full_mode, from, unsignedp);
357       emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
358 		      to, from, UNKNOWN);
359       return;
360     }
361   if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
362     {
363       rtx new_from;
364       machine_mode full_mode
365 	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
366       convert_optab ctab = unsignedp ? zext_optab : sext_optab;
367       enum insn_code icode;
368 
369       icode = convert_optab_handler (ctab, full_mode, from_mode);
370       gcc_assert (icode != CODE_FOR_nothing);
371 
372       if (to_mode == full_mode)
373 	{
374 	  emit_unop_insn (icode, to, from, UNKNOWN);
375 	  return;
376 	}
377 
378       new_from = gen_reg_rtx (full_mode);
379       emit_unop_insn (icode, new_from, from, UNKNOWN);
380 
381       /* else proceed to integer conversions below.  */
382       from_mode = full_mode;
383       from = new_from;
384     }
385 
386    /* Make sure both are fixed-point modes or both are not.  */
387    gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
388 	       ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
389    if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
390     {
391       /* If we widen from_mode to to_mode and they are in the same class,
392 	 we won't saturate the result.
393 	 Otherwise, always saturate the result to play safe.  */
394       if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
395 	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
396 	expand_fixed_convert (to, from, 0, 0);
397       else
398 	expand_fixed_convert (to, from, 0, 1);
399       return;
400     }
401 
402   /* Now both modes are integers.  */
403 
404   /* Handle expanding beyond a word.  */
405   if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
406       && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
407     {
408       rtx_insn *insns;
409       rtx lowpart;
410       rtx fill_value;
411       rtx lowfrom;
412       int i;
413       machine_mode lowpart_mode;
414       int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
415 
416       /* Try converting directly if the insn is supported.  */
417       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
418 	  != CODE_FOR_nothing)
419 	{
420 	  /* If FROM is a SUBREG, put it into a register.  Do this
421 	     so that we always generate the same set of insns for
422 	     better cse'ing; if an intermediate assignment occurred,
423 	     we won't be doing the operation directly on the SUBREG.  */
424 	  if (optimize > 0 && GET_CODE (from) == SUBREG)
425 	    from = force_reg (from_mode, from);
426 	  emit_unop_insn (code, to, from, equiv_code);
427 	  return;
428 	}
429       /* Next, try converting via full word.  */
430       else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
431 	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
432 		   != CODE_FOR_nothing))
433 	{
434 	  rtx word_to = gen_reg_rtx (word_mode);
435 	  if (REG_P (to))
436 	    {
437 	      if (reg_overlap_mentioned_p (to, from))
438 		from = force_reg (from_mode, from);
439 	      emit_clobber (to);
440 	    }
441 	  convert_move (word_to, from, unsignedp);
442 	  emit_unop_insn (code, to, word_to, equiv_code);
443 	  return;
444 	}
445 
446       /* No special multiword conversion insn; do it by hand.  */
447       start_sequence ();
448 
449       /* Since we will turn this into a no conflict block, we must ensure
450          the source does not overlap the target so force it into an isolated
451          register when maybe so.  Likewise for any MEM input, since the
452          conversion sequence might require several references to it and we
453          must ensure we're getting the same value every time.  */
454 
455       if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
456 	from = force_reg (from_mode, from);
457 
458       /* Get a copy of FROM widened to a word, if necessary.  */
459       if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
460 	lowpart_mode = word_mode;
461       else
462 	lowpart_mode = from_mode;
463 
464       lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
465 
466       lowpart = gen_lowpart (lowpart_mode, to);
467       emit_move_insn (lowpart, lowfrom);
468 
469       /* Compute the value to put in each remaining word.  */
470       if (unsignedp)
471 	fill_value = const0_rtx;
472       else
473 	fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
474 					    LT, lowfrom, const0_rtx,
475 					    lowpart_mode, 0, -1);
476 
477       /* Fill the remaining words.  */
478       for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
479 	{
480 	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
481 	  rtx subword = operand_subword (to, index, 1, to_mode);
482 
483 	  gcc_assert (subword);
484 
485 	  if (fill_value != subword)
486 	    emit_move_insn (subword, fill_value);
487 	}
488 
489       insns = get_insns ();
490       end_sequence ();
491 
492       emit_insn (insns);
493       return;
494     }
495 
496   /* Truncating multi-word to a word or less.  */
497   if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
498       && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
499     {
500       if (!((MEM_P (from)
501 	     && ! MEM_VOLATILE_P (from)
502 	     && direct_load[(int) to_mode]
503 	     && ! mode_dependent_address_p (XEXP (from, 0),
504 					    MEM_ADDR_SPACE (from)))
505 	    || REG_P (from)
506 	    || GET_CODE (from) == SUBREG))
507 	from = force_reg (from_mode, from);
508       convert_move (to, gen_lowpart (word_mode, from), 0);
509       return;
510     }
511 
512   /* Now follow all the conversions between integers
513      no more than a word long.  */
514 
515   /* For truncation, usually we can just refer to FROM in a narrower mode.  */
516   if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
517       && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
518     {
519       if (!((MEM_P (from)
520 	     && ! MEM_VOLATILE_P (from)
521 	     && direct_load[(int) to_mode]
522 	     && ! mode_dependent_address_p (XEXP (from, 0),
523 					    MEM_ADDR_SPACE (from)))
524 	    || REG_P (from)
525 	    || GET_CODE (from) == SUBREG))
526 	from = force_reg (from_mode, from);
527       if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
528 	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
529 	from = copy_to_reg (from);
530       emit_move_insn (to, gen_lowpart (to_mode, from));
531       return;
532     }
533 
534   /* Handle extension.  */
535   if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
536     {
537       /* Convert directly if that works.  */
538       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
539 	  != CODE_FOR_nothing)
540 	{
541 	  emit_unop_insn (code, to, from, equiv_code);
542 	  return;
543 	}
544       else
545 	{
546 	  machine_mode intermediate;
547 	  rtx tmp;
548 	  int shift_amount;
549 
550 	  /* Search for a mode to convert via.  */
551 	  for (intermediate = from_mode; intermediate != VOIDmode;
552 	       intermediate = GET_MODE_WIDER_MODE (intermediate))
553 	    if (((can_extend_p (to_mode, intermediate, unsignedp)
554 		  != CODE_FOR_nothing)
555 		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
556 		     && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
557 		&& (can_extend_p (intermediate, from_mode, unsignedp)
558 		    != CODE_FOR_nothing))
559 	      {
560 		convert_move (to, convert_to_mode (intermediate, from,
561 						   unsignedp), unsignedp);
562 		return;
563 	      }
564 
565 	  /* No suitable intermediate mode.
566 	     Generate what we need with	shifts.  */
567 	  shift_amount = (GET_MODE_PRECISION (to_mode)
568 			  - GET_MODE_PRECISION (from_mode));
569 	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
570 	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
571 			      to, unsignedp);
572 	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
573 			      to, unsignedp);
574 	  if (tmp != to)
575 	    emit_move_insn (to, tmp);
576 	  return;
577 	}
578     }
579 
580   /* Support special truncate insns for certain modes.  */
581   if (convert_optab_handler (trunc_optab, to_mode,
582 			     from_mode) != CODE_FOR_nothing)
583     {
584       emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
585 		      to, from, UNKNOWN);
586       return;
587     }
588 
589   /* Handle truncation of volatile memrefs, and so on;
590      the things that couldn't be truncated directly,
591      and for which there was no special instruction.
592 
593      ??? Code above formerly short-circuited this, for most integer
594      mode pairs, with a force_reg in from_mode followed by a recursive
595      call to this routine.  Appears always to have been wrong.  */
596   if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
597     {
598       rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
599       emit_move_insn (to, temp);
600       return;
601     }
602 
603   /* Mode combination is not recognized.  */
604   gcc_unreachable ();
605 }
606 
607 /* Return an rtx for a value that would result
608    from converting X to mode MODE.
609    Both X and MODE may be floating, or both integer.
610    UNSIGNEDP is nonzero if X is an unsigned value.
611    This can be done by referring to a part of X in place
612    or by copying to a new temporary with conversion.  */
613 
614 rtx
615 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
616 {
617   return convert_modes (mode, VOIDmode, x, unsignedp);
618 }
619 
620 /* Return an rtx for a value that would result
621    from converting X from mode OLDMODE to mode MODE.
622    Both modes may be floating, or both integer.
623    UNSIGNEDP is nonzero if X is an unsigned value.
624 
625    This can be done by referring to a part of X in place
626    or by copying to a new temporary with conversion.
627 
628    You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
629 
630 rtx
631 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
632 {
633   rtx temp;
634 
635   /* If FROM is a SUBREG that indicates that we have already done at least
636      the required extension, strip it.  */
637 
638   if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
639       && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
640       && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
641     x = gen_lowpart (mode, SUBREG_REG (x));
642 
643   if (GET_MODE (x) != VOIDmode)
644     oldmode = GET_MODE (x);
645 
646   if (mode == oldmode)
647     return x;
648 
649   if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
650     {
651       /* If the caller did not tell us the old mode, then there is not
652 	 much to do with respect to canonicalization.  We have to
653 	 assume that all the bits are significant.  */
654       if (GET_MODE_CLASS (oldmode) != MODE_INT)
655 	oldmode = MAX_MODE_INT;
656       wide_int w = wide_int::from (rtx_mode_t (x, oldmode),
657 				   GET_MODE_PRECISION (mode),
658 				   unsignedp ? UNSIGNED : SIGNED);
659       return immed_wide_int_const (w, mode);
660     }
661 
662   /* We can do this with a gen_lowpart if both desired and current modes
663      are integer, and this is either a constant integer, a register, or a
664      non-volatile MEM. */
665   if (GET_MODE_CLASS (mode) == MODE_INT
666       && GET_MODE_CLASS (oldmode) == MODE_INT
667       && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
668       && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
669           || (REG_P (x)
670               && (!HARD_REGISTER_P (x)
671                   || HARD_REGNO_MODE_OK (REGNO (x), mode))
672               && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
673 
674    return gen_lowpart (mode, x);
675 
676   /* Converting from integer constant into mode is always equivalent to an
677      subreg operation.  */
678   if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
679     {
680       gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
681       return simplify_gen_subreg (mode, x, oldmode, 0);
682     }
683 
684   temp = gen_reg_rtx (mode);
685   convert_move (temp, x, unsignedp);
686   return temp;
687 }
688 
689 /* Return the largest alignment we can use for doing a move (or store)
690    of MAX_PIECES.  ALIGN is the largest alignment we could use.  */
691 
692 static unsigned int
693 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
694 {
695   machine_mode tmode;
696 
697   tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
698   if (align >= GET_MODE_ALIGNMENT (tmode))
699     align = GET_MODE_ALIGNMENT (tmode);
700   else
701     {
702       machine_mode tmode, xmode;
703 
704       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
705 	   tmode != VOIDmode;
706 	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
707 	if (GET_MODE_SIZE (tmode) > max_pieces
708 	    || SLOW_UNALIGNED_ACCESS (tmode, align))
709 	  break;
710 
711       align = MAX (align, GET_MODE_ALIGNMENT (xmode));
712     }
713 
714   return align;
715 }
716 
717 /* Return the widest integer mode no wider than SIZE.  If no such mode
718    can be found, return VOIDmode.  */
719 
720 static machine_mode
721 widest_int_mode_for_size (unsigned int size)
722 {
723   machine_mode tmode, mode = VOIDmode;
724 
725   for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
726        tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
727     if (GET_MODE_SIZE (tmode) < size)
728       mode = tmode;
729 
730   return mode;
731 }
732 
733 /* Determine whether an operation OP on LEN bytes with alignment ALIGN can
734    and should be performed piecewise.  */
735 
736 static bool
737 can_do_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align,
738 		  enum by_pieces_operation op)
739 {
740   return targetm.use_by_pieces_infrastructure_p (len, align, op,
741 						 optimize_insn_for_speed_p ());
742 }
743 
744 /* Determine whether the LEN bytes can be moved by using several move
745    instructions.  Return nonzero if a call to move_by_pieces should
746    succeed.  */
747 
748 bool
749 can_move_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align)
750 {
751   return can_do_by_pieces (len, align, MOVE_BY_PIECES);
752 }
753 
754 /* Return number of insns required to perform operation OP by pieces
755    for L bytes.  ALIGN (in bits) is maximum alignment we can assume.  */
756 
757 unsigned HOST_WIDE_INT
758 by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
759 		  unsigned int max_size, by_pieces_operation op)
760 {
761   unsigned HOST_WIDE_INT n_insns = 0;
762 
763   align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
764 
765   while (max_size > 1 && l > 0)
766     {
767       machine_mode mode;
768       enum insn_code icode;
769 
770       mode = widest_int_mode_for_size (max_size);
771 
772       if (mode == VOIDmode)
773 	break;
774       unsigned int modesize = GET_MODE_SIZE (mode);
775 
776       icode = optab_handler (mov_optab, mode);
777       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
778 	{
779 	  unsigned HOST_WIDE_INT n_pieces = l / modesize;
780 	  l %= modesize;
781 	  switch (op)
782 	    {
783 	    default:
784 	      n_insns += n_pieces;
785 	      break;
786 
787 	    case COMPARE_BY_PIECES:
788 	      int batch = targetm.compare_by_pieces_branch_ratio (mode);
789 	      int batch_ops = 4 * batch - 1;
790 	      unsigned HOST_WIDE_INT full = n_pieces / batch;
791 	      n_insns += full * batch_ops;
792 	      if (n_pieces % batch != 0)
793 		n_insns++;
794 	      break;
795 
796 	    }
797 	}
798       max_size = modesize;
799     }
800 
801   gcc_assert (!l);
802   return n_insns;
803 }
804 
805 /* Used when performing piecewise block operations, holds information
806    about one of the memory objects involved.  The member functions
807    can be used to generate code for loading from the object and
808    updating the address when iterating.  */
809 
810 class pieces_addr
811 {
812   /* The object being referenced, a MEM.  Can be NULL_RTX to indicate
813      stack pushes.  */
814   rtx m_obj;
815   /* The address of the object.  Can differ from that seen in the
816      MEM rtx if we copied the address to a register.  */
817   rtx m_addr;
818   /* Nonzero if the address on the object has an autoincrement already,
819      signifies whether that was an increment or decrement.  */
820   signed char m_addr_inc;
821   /* Nonzero if we intend to use autoinc without the address already
822      having autoinc form.  We will insert add insns around each memory
823      reference, expecting later passes to form autoinc addressing modes.
824      The only supported options are predecrement and postincrement.  */
825   signed char m_explicit_inc;
826   /* True if we have either of the two possible cases of using
827      autoincrement.  */
828   bool m_auto;
829   /* True if this is an address to be used for load operations rather
830      than stores.  */
831   bool m_is_load;
832 
833   /* Optionally, a function to obtain constants for any given offset into
834      the objects, and data associated with it.  */
835   by_pieces_constfn m_constfn;
836   void *m_cfndata;
837 public:
838   pieces_addr (rtx, bool, by_pieces_constfn, void *);
839   rtx adjust (machine_mode, HOST_WIDE_INT);
840   void increment_address (HOST_WIDE_INT);
841   void maybe_predec (HOST_WIDE_INT);
842   void maybe_postinc (HOST_WIDE_INT);
843   void decide_autoinc (machine_mode, bool, HOST_WIDE_INT);
844   int get_addr_inc ()
845   {
846     return m_addr_inc;
847   }
848 };
849 
850 /* Initialize a pieces_addr structure from an object OBJ.  IS_LOAD is
851    true if the operation to be performed on this object is a load
852    rather than a store.  For stores, OBJ can be NULL, in which case we
853    assume the operation is a stack push.  For loads, the optional
854    CONSTFN and its associated CFNDATA can be used in place of the
855    memory load.  */
856 
857 pieces_addr::pieces_addr (rtx obj, bool is_load, by_pieces_constfn constfn,
858 			  void *cfndata)
859   : m_obj (obj), m_is_load (is_load), m_constfn (constfn), m_cfndata (cfndata)
860 {
861   m_addr_inc = 0;
862   m_auto = false;
863   if (obj)
864     {
865       rtx addr = XEXP (obj, 0);
866       rtx_code code = GET_CODE (addr);
867       m_addr = addr;
868       bool dec = code == PRE_DEC || code == POST_DEC;
869       bool inc = code == PRE_INC || code == POST_INC;
870       m_auto = inc || dec;
871       if (m_auto)
872 	m_addr_inc = dec ? -1 : 1;
873 
874       /* While we have always looked for these codes here, the code
875 	 implementing the memory operation has never handled them.
876 	 Support could be added later if necessary or beneficial.  */
877       gcc_assert (code != PRE_INC && code != POST_DEC);
878     }
879   else
880     {
881       m_addr = NULL_RTX;
882       if (!is_load)
883 	{
884 	  m_auto = true;
885 	  if (STACK_GROWS_DOWNWARD)
886 	    m_addr_inc = -1;
887 	  else
888 	    m_addr_inc = 1;
889 	}
890       else
891 	gcc_assert (constfn != NULL);
892     }
893   m_explicit_inc = 0;
894   if (constfn)
895     gcc_assert (is_load);
896 }
897 
898 /* Decide whether to use autoinc for an address involved in a memory op.
899    MODE is the mode of the accesses, REVERSE is true if we've decided to
900    perform the operation starting from the end, and LEN is the length of
901    the operation.  Don't override an earlier decision to set m_auto.  */
902 
903 void
904 pieces_addr::decide_autoinc (machine_mode ARG_UNUSED (mode), bool reverse,
905 			     HOST_WIDE_INT len)
906 {
907   if (m_auto || m_obj == NULL_RTX)
908     return;
909 
910   bool use_predec = (m_is_load
911 		     ? USE_LOAD_PRE_DECREMENT (mode)
912 		     : USE_STORE_PRE_DECREMENT (mode));
913   bool use_postinc = (m_is_load
914 		      ? USE_LOAD_POST_INCREMENT (mode)
915 		      : USE_STORE_POST_INCREMENT (mode));
916   machine_mode addr_mode = get_address_mode (m_obj);
917 
918   if (use_predec && reverse)
919     {
920       m_addr = copy_to_mode_reg (addr_mode,
921 				 plus_constant (addr_mode,
922 						m_addr, len));
923       m_auto = true;
924       m_explicit_inc = -1;
925     }
926   else if (use_postinc && !reverse)
927     {
928       m_addr = copy_to_mode_reg (addr_mode, m_addr);
929       m_auto = true;
930       m_explicit_inc = 1;
931     }
932   else if (CONSTANT_P (m_addr))
933     m_addr = copy_to_mode_reg (addr_mode, m_addr);
934 }
935 
936 /* Adjust the address to refer to the data at OFFSET in MODE.  If we
937    are using autoincrement for this address, we don't add the offset,
938    but we still modify the MEM's properties.  */
939 
940 rtx
941 pieces_addr::adjust (machine_mode mode, HOST_WIDE_INT offset)
942 {
943   if (m_constfn)
944     return m_constfn (m_cfndata, offset, mode);
945   if (m_obj == NULL_RTX)
946     return NULL_RTX;
947   if (m_auto)
948     return adjust_automodify_address (m_obj, mode, m_addr, offset);
949   else
950     return adjust_address (m_obj, mode, offset);
951 }
952 
953 /* Emit an add instruction to increment the address by SIZE.  */
954 
955 void
956 pieces_addr::increment_address (HOST_WIDE_INT size)
957 {
958   rtx amount = gen_int_mode (size, GET_MODE (m_addr));
959   emit_insn (gen_add2_insn (m_addr, amount));
960 }
961 
962 /* If we are supposed to decrement the address after each access, emit code
963    to do so now.  Increment by SIZE (which has should have the correct sign
964    already).  */
965 
966 void
967 pieces_addr::maybe_predec (HOST_WIDE_INT size)
968 {
969   if (m_explicit_inc >= 0)
970     return;
971   gcc_assert (HAVE_PRE_DECREMENT);
972   increment_address (size);
973 }
974 
975 /* If we are supposed to decrement the address after each access, emit code
976    to do so now.  Increment by SIZE.  */
977 
978 void
979 pieces_addr::maybe_postinc (HOST_WIDE_INT size)
980 {
981   if (m_explicit_inc <= 0)
982     return;
983   gcc_assert (HAVE_POST_INCREMENT);
984   increment_address (size);
985 }
986 
987 /* This structure is used by do_op_by_pieces to describe the operation
988    to be performed.  */
989 
990 class op_by_pieces_d
991 {
992  protected:
993   pieces_addr m_to, m_from;
994   unsigned HOST_WIDE_INT m_len;
995   HOST_WIDE_INT m_offset;
996   unsigned int m_align;
997   unsigned int m_max_size;
998   bool m_reverse;
999 
1000   /* Virtual functions, overriden by derived classes for the specific
1001      operation.  */
1002   virtual void generate (rtx, rtx, machine_mode) = 0;
1003   virtual bool prepare_mode (machine_mode, unsigned int) = 0;
1004   virtual void finish_mode (machine_mode)
1005   {
1006   }
1007 
1008  public:
1009   op_by_pieces_d (rtx, bool, rtx, bool, by_pieces_constfn, void *,
1010 		  unsigned HOST_WIDE_INT, unsigned int);
1011   void run ();
1012 };
1013 
1014 /* The constructor for an op_by_pieces_d structure.  We require two
1015    objects named TO and FROM, which are identified as loads or stores
1016    by TO_LOAD and FROM_LOAD.  If FROM is a load, the optional FROM_CFN
1017    and its associated FROM_CFN_DATA can be used to replace loads with
1018    constant values.  LEN describes the length of the operation.  */
1019 
1020 op_by_pieces_d::op_by_pieces_d (rtx to, bool to_load,
1021 				rtx from, bool from_load,
1022 				by_pieces_constfn from_cfn,
1023 				void *from_cfn_data,
1024 				unsigned HOST_WIDE_INT len,
1025 				unsigned int align)
1026   : m_to (to, to_load, NULL, NULL),
1027     m_from (from, from_load, from_cfn, from_cfn_data),
1028     m_len (len), m_max_size (MOVE_MAX_PIECES + 1)
1029 {
1030   int toi = m_to.get_addr_inc ();
1031   int fromi = m_from.get_addr_inc ();
1032   if (toi >= 0 && fromi >= 0)
1033     m_reverse = false;
1034   else if (toi <= 0 && fromi <= 0)
1035     m_reverse = true;
1036   else
1037     gcc_unreachable ();
1038 
1039   m_offset = m_reverse ? len : 0;
1040   align = MIN (to ? MEM_ALIGN (to) : align,
1041 	       from ? MEM_ALIGN (from) : align);
1042 
1043   /* If copying requires more than two move insns,
1044      copy addresses to registers (to make displacements shorter)
1045      and use post-increment if available.  */
1046   if (by_pieces_ninsns (len, align, m_max_size, MOVE_BY_PIECES) > 2)
1047     {
1048       /* Find the mode of the largest comparison.  */
1049       machine_mode mode = widest_int_mode_for_size (m_max_size);
1050 
1051       m_from.decide_autoinc (mode, m_reverse, len);
1052       m_to.decide_autoinc (mode, m_reverse, len);
1053     }
1054 
1055   align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1056   m_align = align;
1057 }
1058 
1059 /* This function contains the main loop used for expanding a block
1060    operation.  First move what we can in the largest integer mode,
1061    then go to successively smaller modes.  For every access, call
1062    GENFUN with the two operands and the EXTRA_DATA.  */
1063 
1064 void
1065 op_by_pieces_d::run ()
1066 {
1067   while (m_max_size > 1 && m_len > 0)
1068     {
1069       machine_mode mode = widest_int_mode_for_size (m_max_size);
1070 
1071       if (mode == VOIDmode)
1072 	break;
1073 
1074       if (prepare_mode (mode, m_align))
1075 	{
1076 	  unsigned int size = GET_MODE_SIZE (mode);
1077 	  rtx to1 = NULL_RTX, from1;
1078 
1079 	  while (m_len >= size)
1080 	    {
1081 	      if (m_reverse)
1082 		m_offset -= size;
1083 
1084 	      to1 = m_to.adjust (mode, m_offset);
1085 	      from1 = m_from.adjust (mode, m_offset);
1086 
1087 	      m_to.maybe_predec (-(HOST_WIDE_INT)size);
1088 	      m_from.maybe_predec (-(HOST_WIDE_INT)size);
1089 
1090 	      generate (to1, from1, mode);
1091 
1092 	      m_to.maybe_postinc (size);
1093 	      m_from.maybe_postinc (size);
1094 
1095 	      if (!m_reverse)
1096 		m_offset += size;
1097 
1098 	      m_len -= size;
1099 	    }
1100 
1101 	  finish_mode (mode);
1102 	}
1103 
1104       m_max_size = GET_MODE_SIZE (mode);
1105     }
1106 
1107   /* The code above should have handled everything.  */
1108   gcc_assert (!m_len);
1109 }
1110 
1111 /* Derived class from op_by_pieces_d, providing support for block move
1112    operations.  */
1113 
1114 class move_by_pieces_d : public op_by_pieces_d
1115 {
1116   insn_gen_fn m_gen_fun;
1117   void generate (rtx, rtx, machine_mode);
1118   bool prepare_mode (machine_mode, unsigned int);
1119 
1120  public:
1121   move_by_pieces_d (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1122 		    unsigned int align)
1123     : op_by_pieces_d (to, false, from, true, NULL, NULL, len, align)
1124   {
1125   }
1126   rtx finish_endp (int);
1127 };
1128 
1129 /* Return true if MODE can be used for a set of copies, given an
1130    alignment ALIGN.  Prepare whatever data is necessary for later
1131    calls to generate.  */
1132 
1133 bool
1134 move_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
1135 {
1136   insn_code icode = optab_handler (mov_optab, mode);
1137   m_gen_fun = GEN_FCN (icode);
1138   return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
1139 }
1140 
1141 /* A callback used when iterating for a compare_by_pieces_operation.
1142    OP0 and OP1 are the values that have been loaded and should be
1143    compared in MODE.  If OP0 is NULL, this means we should generate a
1144    push; otherwise EXTRA_DATA holds a pointer to a pointer to the insn
1145    gen function that should be used to generate the mode.  */
1146 
1147 void
1148 move_by_pieces_d::generate (rtx op0, rtx op1,
1149 			    machine_mode mode ATTRIBUTE_UNUSED)
1150 {
1151 #ifdef PUSH_ROUNDING
1152   if (op0 == NULL_RTX)
1153     {
1154       emit_single_push_insn (mode, op1, NULL);
1155       return;
1156     }
1157 #endif
1158   emit_insn (m_gen_fun (op0, op1));
1159 }
1160 
1161 /* Perform the final adjustment at the end of a string to obtain the
1162    correct return value for the block operation.  If ENDP is 1 return
1163    memory at the end ala mempcpy, and if ENDP is 2 return memory the
1164    end minus one byte ala stpcpy.  */
1165 
1166 rtx
1167 move_by_pieces_d::finish_endp (int endp)
1168 {
1169   gcc_assert (!m_reverse);
1170   if (endp == 2)
1171     {
1172       m_to.maybe_postinc (-1);
1173       --m_offset;
1174     }
1175   return m_to.adjust (QImode, m_offset);
1176 }
1177 
1178 /* Generate several move instructions to copy LEN bytes from block FROM to
1179    block TO.  (These are MEM rtx's with BLKmode).
1180 
1181    If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1182    used to push FROM to the stack.
1183 
1184    ALIGN is maximum stack alignment we can assume.
1185 
1186    If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1187    mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1188    stpcpy.  */
1189 
1190 rtx
1191 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1192 		unsigned int align, int endp)
1193 {
1194 #ifndef PUSH_ROUNDING
1195   if (to == NULL)
1196     gcc_unreachable ();
1197 #endif
1198 
1199   move_by_pieces_d data (to, from, len, align);
1200 
1201   data.run ();
1202 
1203   if (endp)
1204     return data.finish_endp (endp);
1205   else
1206     return to;
1207 }
1208 
1209 /* Derived class from op_by_pieces_d, providing support for block move
1210    operations.  */
1211 
1212 class store_by_pieces_d : public op_by_pieces_d
1213 {
1214   insn_gen_fn m_gen_fun;
1215   void generate (rtx, rtx, machine_mode);
1216   bool prepare_mode (machine_mode, unsigned int);
1217 
1218  public:
1219   store_by_pieces_d (rtx to, by_pieces_constfn cfn, void *cfn_data,
1220 		     unsigned HOST_WIDE_INT len, unsigned int align)
1221     : op_by_pieces_d (to, false, NULL_RTX, true, cfn, cfn_data, len, align)
1222   {
1223   }
1224   rtx finish_endp (int);
1225 };
1226 
1227 /* Return true if MODE can be used for a set of stores, given an
1228    alignment ALIGN.  Prepare whatever data is necessary for later
1229    calls to generate.  */
1230 
1231 bool
1232 store_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
1233 {
1234   insn_code icode = optab_handler (mov_optab, mode);
1235   m_gen_fun = GEN_FCN (icode);
1236   return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
1237 }
1238 
1239 /* A callback used when iterating for a store_by_pieces_operation.
1240    OP0 and OP1 are the values that have been loaded and should be
1241    compared in MODE.  If OP0 is NULL, this means we should generate a
1242    push; otherwise EXTRA_DATA holds a pointer to a pointer to the insn
1243    gen function that should be used to generate the mode.  */
1244 
1245 void
1246 store_by_pieces_d::generate (rtx op0, rtx op1, machine_mode)
1247 {
1248   emit_insn (m_gen_fun (op0, op1));
1249 }
1250 
1251 /* Perform the final adjustment at the end of a string to obtain the
1252    correct return value for the block operation.  If ENDP is 1 return
1253    memory at the end ala mempcpy, and if ENDP is 2 return memory the
1254    end minus one byte ala stpcpy.  */
1255 
1256 rtx
1257 store_by_pieces_d::finish_endp (int endp)
1258 {
1259   gcc_assert (!m_reverse);
1260   if (endp == 2)
1261     {
1262       m_to.maybe_postinc (-1);
1263       --m_offset;
1264     }
1265   return m_to.adjust (QImode, m_offset);
1266 }
1267 
1268 /* Determine whether the LEN bytes generated by CONSTFUN can be
1269    stored to memory using several move instructions.  CONSTFUNDATA is
1270    a pointer which will be passed as argument in every CONSTFUN call.
1271    ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
1272    a memset operation and false if it's a copy of a constant string.
1273    Return nonzero if a call to store_by_pieces should succeed.  */
1274 
1275 int
1276 can_store_by_pieces (unsigned HOST_WIDE_INT len,
1277 		     rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
1278 		     void *constfundata, unsigned int align, bool memsetp)
1279 {
1280   unsigned HOST_WIDE_INT l;
1281   unsigned int max_size;
1282   HOST_WIDE_INT offset = 0;
1283   machine_mode mode;
1284   enum insn_code icode;
1285   int reverse;
1286   /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
1287   rtx cst ATTRIBUTE_UNUSED;
1288 
1289   if (len == 0)
1290     return 1;
1291 
1292   if (!targetm.use_by_pieces_infrastructure_p (len, align,
1293 					       memsetp
1294 						 ? SET_BY_PIECES
1295 						 : STORE_BY_PIECES,
1296 					       optimize_insn_for_speed_p ()))
1297     return 0;
1298 
1299   align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
1300 
1301   /* We would first store what we can in the largest integer mode, then go to
1302      successively smaller modes.  */
1303 
1304   for (reverse = 0;
1305        reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
1306        reverse++)
1307     {
1308       l = len;
1309       max_size = STORE_MAX_PIECES + 1;
1310       while (max_size > 1 && l > 0)
1311 	{
1312 	  mode = widest_int_mode_for_size (max_size);
1313 
1314 	  if (mode == VOIDmode)
1315 	    break;
1316 
1317 	  icode = optab_handler (mov_optab, mode);
1318 	  if (icode != CODE_FOR_nothing
1319 	      && align >= GET_MODE_ALIGNMENT (mode))
1320 	    {
1321 	      unsigned int size = GET_MODE_SIZE (mode);
1322 
1323 	      while (l >= size)
1324 		{
1325 		  if (reverse)
1326 		    offset -= size;
1327 
1328 		  cst = (*constfun) (constfundata, offset, mode);
1329 		  if (!targetm.legitimate_constant_p (mode, cst))
1330 		    return 0;
1331 
1332 		  if (!reverse)
1333 		    offset += size;
1334 
1335 		  l -= size;
1336 		}
1337 	    }
1338 
1339 	  max_size = GET_MODE_SIZE (mode);
1340 	}
1341 
1342       /* The code above should have handled everything.  */
1343       gcc_assert (!l);
1344     }
1345 
1346   return 1;
1347 }
1348 
1349 /* Generate several move instructions to store LEN bytes generated by
1350    CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
1351    pointer which will be passed as argument in every CONSTFUN call.
1352    ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
1353    a memset operation and false if it's a copy of a constant string.
1354    If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1355    mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1356    stpcpy.  */
1357 
1358 rtx
1359 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
1360 		 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
1361 		 void *constfundata, unsigned int align, bool memsetp, int endp)
1362 {
1363   if (len == 0)
1364     {
1365       gcc_assert (endp != 2);
1366       return to;
1367     }
1368 
1369   gcc_assert (targetm.use_by_pieces_infrastructure_p
1370 		(len, align,
1371 		 memsetp ? SET_BY_PIECES : STORE_BY_PIECES,
1372 		 optimize_insn_for_speed_p ()));
1373 
1374   store_by_pieces_d data (to, constfun, constfundata, len, align);
1375   data.run ();
1376 
1377   if (endp)
1378     return data.finish_endp (endp);
1379   else
1380     return to;
1381 }
1382 
1383 /* Callback routine for clear_by_pieces.
1384    Return const0_rtx unconditionally.  */
1385 
1386 static rtx
1387 clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode)
1388 {
1389   return const0_rtx;
1390 }
1391 
1392 /* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
1393    rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
1394 
1395 static void
1396 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
1397 {
1398   if (len == 0)
1399     return;
1400 
1401   store_by_pieces_d data (to, clear_by_pieces_1, NULL, len, align);
1402   data.run ();
1403 }
1404 
1405 /* Context used by compare_by_pieces_genfn.  It stores the fail label
1406    to jump to in case of miscomparison, and for branch ratios greater than 1,
1407    it stores an accumulator and the current and maximum counts before
1408    emitting another branch.  */
1409 
1410 class compare_by_pieces_d : public op_by_pieces_d
1411 {
1412   rtx_code_label *m_fail_label;
1413   rtx m_accumulator;
1414   int m_count, m_batch;
1415 
1416   void generate (rtx, rtx, machine_mode);
1417   bool prepare_mode (machine_mode, unsigned int);
1418   void finish_mode (machine_mode);
1419  public:
1420   compare_by_pieces_d (rtx op0, rtx op1, by_pieces_constfn op1_cfn,
1421 		       void *op1_cfn_data, HOST_WIDE_INT len, int align,
1422 		       rtx_code_label *fail_label)
1423     : op_by_pieces_d (op0, true, op1, true, op1_cfn, op1_cfn_data, len, align)
1424   {
1425     m_fail_label = fail_label;
1426   }
1427 };
1428 
1429 /* A callback used when iterating for a compare_by_pieces_operation.
1430    OP0 and OP1 are the values that have been loaded and should be
1431    compared in MODE.  DATA holds a pointer to the compare_by_pieces_data
1432    context structure.  */
1433 
1434 void
1435 compare_by_pieces_d::generate (rtx op0, rtx op1, machine_mode mode)
1436 {
1437   if (m_batch > 1)
1438     {
1439       rtx temp = expand_binop (mode, sub_optab, op0, op1, NULL_RTX,
1440 			       true, OPTAB_LIB_WIDEN);
1441       if (m_count != 0)
1442 	temp = expand_binop (mode, ior_optab, m_accumulator, temp, temp,
1443 			     true, OPTAB_LIB_WIDEN);
1444       m_accumulator = temp;
1445 
1446       if (++m_count < m_batch)
1447 	return;
1448 
1449       m_count = 0;
1450       op0 = m_accumulator;
1451       op1 = const0_rtx;
1452       m_accumulator = NULL_RTX;
1453     }
1454   do_compare_rtx_and_jump (op0, op1, NE, true, mode, NULL_RTX, NULL,
1455 			   m_fail_label, -1);
1456 }
1457 
1458 /* Return true if MODE can be used for a set of moves and comparisons,
1459    given an alignment ALIGN.  Prepare whatever data is necessary for
1460    later calls to generate.  */
1461 
1462 bool
1463 compare_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
1464 {
1465   insn_code icode = optab_handler (mov_optab, mode);
1466   if (icode == CODE_FOR_nothing
1467       || align < GET_MODE_ALIGNMENT (mode)
1468       || !can_compare_p (EQ, mode, ccp_jump))
1469     return false;
1470   m_batch = targetm.compare_by_pieces_branch_ratio (mode);
1471   if (m_batch < 0)
1472     return false;
1473   m_accumulator = NULL_RTX;
1474   m_count = 0;
1475   return true;
1476 }
1477 
1478 /* Called after expanding a series of comparisons in MODE.  If we have
1479    accumulated results for which we haven't emitted a branch yet, do
1480    so now.  */
1481 
1482 void
1483 compare_by_pieces_d::finish_mode (machine_mode mode)
1484 {
1485   if (m_accumulator != NULL_RTX)
1486     do_compare_rtx_and_jump (m_accumulator, const0_rtx, NE, true, mode,
1487 			     NULL_RTX, NULL, m_fail_label, -1);
1488 }
1489 
1490 /* Generate several move instructions to compare LEN bytes from blocks
1491    ARG0 and ARG1.  (These are MEM rtx's with BLKmode).
1492 
1493    If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1494    used to push FROM to the stack.
1495 
1496    ALIGN is maximum stack alignment we can assume.
1497 
1498    Optionally, the caller can pass a constfn and associated data in A1_CFN
1499    and A1_CFN_DATA. describing that the second operand being compared is a
1500    known constant and how to obtain its data.  */
1501 
1502 static rtx
1503 compare_by_pieces (rtx arg0, rtx arg1, unsigned HOST_WIDE_INT len,
1504 		   rtx target, unsigned int align,
1505 		   by_pieces_constfn a1_cfn, void *a1_cfn_data)
1506 {
1507   rtx_code_label *fail_label = gen_label_rtx ();
1508   rtx_code_label *end_label = gen_label_rtx ();
1509 
1510   if (target == NULL_RTX
1511       || !REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
1512     target = gen_reg_rtx (TYPE_MODE (integer_type_node));
1513 
1514   compare_by_pieces_d data (arg0, arg1, a1_cfn, a1_cfn_data, len, align,
1515 			    fail_label);
1516 
1517   data.run ();
1518 
1519   emit_move_insn (target, const0_rtx);
1520   emit_jump (end_label);
1521   emit_barrier ();
1522   emit_label (fail_label);
1523   emit_move_insn (target, const1_rtx);
1524   emit_label (end_label);
1525 
1526   return target;
1527 }
1528 
1529 /* Emit code to move a block Y to a block X.  This may be done with
1530    string-move instructions, with multiple scalar move instructions,
1531    or with a library call.
1532 
1533    Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1534    SIZE is an rtx that says how long they are.
1535    ALIGN is the maximum alignment we can assume they have.
1536    METHOD describes what kind of copy this is, and what mechanisms may be used.
1537    MIN_SIZE is the minimal size of block to move
1538    MAX_SIZE is the maximal size of block to move, if it can not be represented
1539    in unsigned HOST_WIDE_INT, than it is mask of all ones.
1540 
1541    Return the address of the new block, if memcpy is called and returns it,
1542    0 otherwise.  */
1543 
1544 rtx
1545 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1546 		       unsigned int expected_align, HOST_WIDE_INT expected_size,
1547 		       unsigned HOST_WIDE_INT min_size,
1548 		       unsigned HOST_WIDE_INT max_size,
1549 		       unsigned HOST_WIDE_INT probable_max_size)
1550 {
1551   bool may_use_call;
1552   rtx retval = 0;
1553   unsigned int align;
1554 
1555   gcc_assert (size);
1556   if (CONST_INT_P (size) && INTVAL (size) == 0)
1557     return 0;
1558 
1559   switch (method)
1560     {
1561     case BLOCK_OP_NORMAL:
1562     case BLOCK_OP_TAILCALL:
1563       may_use_call = true;
1564       break;
1565 
1566     case BLOCK_OP_CALL_PARM:
1567       may_use_call = block_move_libcall_safe_for_call_parm ();
1568 
1569       /* Make inhibit_defer_pop nonzero around the library call
1570 	 to force it to pop the arguments right away.  */
1571       NO_DEFER_POP;
1572       break;
1573 
1574     case BLOCK_OP_NO_LIBCALL:
1575       may_use_call = false;
1576       break;
1577 
1578     default:
1579       gcc_unreachable ();
1580     }
1581 
1582   gcc_assert (MEM_P (x) && MEM_P (y));
1583   align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1584   gcc_assert (align >= BITS_PER_UNIT);
1585 
1586   /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1587      block copy is more efficient for other large modes, e.g. DCmode.  */
1588   x = adjust_address (x, BLKmode, 0);
1589   y = adjust_address (y, BLKmode, 0);
1590 
1591   /* Set MEM_SIZE as appropriate for this block copy.  The main place this
1592      can be incorrect is coming from __builtin_memcpy.  */
1593   if (CONST_INT_P (size))
1594     {
1595       x = shallow_copy_rtx (x);
1596       y = shallow_copy_rtx (y);
1597       set_mem_size (x, INTVAL (size));
1598       set_mem_size (y, INTVAL (size));
1599     }
1600 
1601   if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
1602     move_by_pieces (x, y, INTVAL (size), align, 0);
1603   else if (emit_block_move_via_movmem (x, y, size, align,
1604 				       expected_align, expected_size,
1605 				       min_size, max_size, probable_max_size))
1606     ;
1607   else if (may_use_call
1608 	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1609 	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1610     {
1611       /* Since x and y are passed to a libcall, mark the corresponding
1612 	 tree EXPR as addressable.  */
1613       tree y_expr = MEM_EXPR (y);
1614       tree x_expr = MEM_EXPR (x);
1615       if (y_expr)
1616 	mark_addressable (y_expr);
1617       if (x_expr)
1618 	mark_addressable (x_expr);
1619       retval = emit_block_copy_via_libcall (x, y, size,
1620 					    method == BLOCK_OP_TAILCALL);
1621     }
1622 
1623   else
1624     emit_block_move_via_loop (x, y, size, align);
1625 
1626   if (method == BLOCK_OP_CALL_PARM)
1627     OK_DEFER_POP;
1628 
1629   return retval;
1630 }
1631 
1632 rtx
1633 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1634 {
1635   unsigned HOST_WIDE_INT max, min = 0;
1636   if (GET_CODE (size) == CONST_INT)
1637     min = max = UINTVAL (size);
1638   else
1639     max = GET_MODE_MASK (GET_MODE (size));
1640   return emit_block_move_hints (x, y, size, method, 0, -1,
1641 				min, max, max);
1642 }
1643 
1644 /* A subroutine of emit_block_move.  Returns true if calling the
1645    block move libcall will not clobber any parameters which may have
1646    already been placed on the stack.  */
1647 
1648 static bool
1649 block_move_libcall_safe_for_call_parm (void)
1650 {
1651 #if defined (REG_PARM_STACK_SPACE)
1652   tree fn;
1653 #endif
1654 
1655   /* If arguments are pushed on the stack, then they're safe.  */
1656   if (PUSH_ARGS)
1657     return true;
1658 
1659   /* If registers go on the stack anyway, any argument is sure to clobber
1660      an outgoing argument.  */
1661 #if defined (REG_PARM_STACK_SPACE)
1662   fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1663   /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1664      depend on its argument.  */
1665   (void) fn;
1666   if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1667       && REG_PARM_STACK_SPACE (fn) != 0)
1668     return false;
1669 #endif
1670 
1671   /* If any argument goes in memory, then it might clobber an outgoing
1672      argument.  */
1673   {
1674     CUMULATIVE_ARGS args_so_far_v;
1675     cumulative_args_t args_so_far;
1676     tree fn, arg;
1677 
1678     fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1679     INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1680     args_so_far = pack_cumulative_args (&args_so_far_v);
1681 
1682     arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1683     for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1684       {
1685 	machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1686 	rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1687 					      NULL_TREE, true);
1688 	if (!tmp || !REG_P (tmp))
1689 	  return false;
1690 	if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1691 	  return false;
1692 	targetm.calls.function_arg_advance (args_so_far, mode,
1693 					    NULL_TREE, true);
1694       }
1695   }
1696   return true;
1697 }
1698 
1699 /* A subroutine of emit_block_move.  Expand a movmem pattern;
1700    return true if successful.  */
1701 
1702 static bool
1703 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1704 			    unsigned int expected_align, HOST_WIDE_INT expected_size,
1705 			    unsigned HOST_WIDE_INT min_size,
1706 			    unsigned HOST_WIDE_INT max_size,
1707 			    unsigned HOST_WIDE_INT probable_max_size)
1708 {
1709   int save_volatile_ok = volatile_ok;
1710   machine_mode mode;
1711 
1712   if (expected_align < align)
1713     expected_align = align;
1714   if (expected_size != -1)
1715     {
1716       if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1717 	expected_size = probable_max_size;
1718       if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1719 	expected_size = min_size;
1720     }
1721 
1722   /* Since this is a move insn, we don't care about volatility.  */
1723   volatile_ok = 1;
1724 
1725   /* Try the most limited insn first, because there's no point
1726      including more than one in the machine description unless
1727      the more limited one has some advantage.  */
1728 
1729   for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1730        mode = GET_MODE_WIDER_MODE (mode))
1731     {
1732       enum insn_code code = direct_optab_handler (movmem_optab, mode);
1733 
1734       if (code != CODE_FOR_nothing
1735 	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1736 	     here because if SIZE is less than the mode mask, as it is
1737 	     returned by the macro, it will definitely be less than the
1738 	     actual mode mask.  Since SIZE is within the Pmode address
1739 	     space, we limit MODE to Pmode.  */
1740 	  && ((CONST_INT_P (size)
1741 	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
1742 		   <= (GET_MODE_MASK (mode) >> 1)))
1743 	      || max_size <= (GET_MODE_MASK (mode) >> 1)
1744 	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1745 	{
1746 	  struct expand_operand ops[9];
1747 	  unsigned int nops;
1748 
1749 	  /* ??? When called via emit_block_move_for_call, it'd be
1750 	     nice if there were some way to inform the backend, so
1751 	     that it doesn't fail the expansion because it thinks
1752 	     emitting the libcall would be more efficient.  */
1753 	  nops = insn_data[(int) code].n_generator_args;
1754 	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1755 
1756 	  create_fixed_operand (&ops[0], x);
1757 	  create_fixed_operand (&ops[1], y);
1758 	  /* The check above guarantees that this size conversion is valid.  */
1759 	  create_convert_operand_to (&ops[2], size, mode, true);
1760 	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1761 	  if (nops >= 6)
1762 	    {
1763 	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1764 	      create_integer_operand (&ops[5], expected_size);
1765 	    }
1766 	  if (nops >= 8)
1767 	    {
1768 	      create_integer_operand (&ops[6], min_size);
1769 	      /* If we can not represent the maximal size,
1770 		 make parameter NULL.  */
1771 	      if ((HOST_WIDE_INT) max_size != -1)
1772 	        create_integer_operand (&ops[7], max_size);
1773 	      else
1774 		create_fixed_operand (&ops[7], NULL);
1775 	    }
1776 	  if (nops == 9)
1777 	    {
1778 	      /* If we can not represent the maximal size,
1779 		 make parameter NULL.  */
1780 	      if ((HOST_WIDE_INT) probable_max_size != -1)
1781 	        create_integer_operand (&ops[8], probable_max_size);
1782 	      else
1783 		create_fixed_operand (&ops[8], NULL);
1784 	    }
1785 	  if (maybe_expand_insn (code, nops, ops))
1786 	    {
1787 	      volatile_ok = save_volatile_ok;
1788 	      return true;
1789 	    }
1790 	}
1791     }
1792 
1793   volatile_ok = save_volatile_ok;
1794   return false;
1795 }
1796 
1797 /* A subroutine of emit_block_move.  Copy the data via an explicit
1798    loop.  This is used only when libcalls are forbidden.  */
1799 /* ??? It'd be nice to copy in hunks larger than QImode.  */
1800 
1801 static void
1802 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1803 			  unsigned int align ATTRIBUTE_UNUSED)
1804 {
1805   rtx_code_label *cmp_label, *top_label;
1806   rtx iter, x_addr, y_addr, tmp;
1807   machine_mode x_addr_mode = get_address_mode (x);
1808   machine_mode y_addr_mode = get_address_mode (y);
1809   machine_mode iter_mode;
1810 
1811   iter_mode = GET_MODE (size);
1812   if (iter_mode == VOIDmode)
1813     iter_mode = word_mode;
1814 
1815   top_label = gen_label_rtx ();
1816   cmp_label = gen_label_rtx ();
1817   iter = gen_reg_rtx (iter_mode);
1818 
1819   emit_move_insn (iter, const0_rtx);
1820 
1821   x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1822   y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1823   do_pending_stack_adjust ();
1824 
1825   emit_jump (cmp_label);
1826   emit_label (top_label);
1827 
1828   tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1829   x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1830 
1831   if (x_addr_mode != y_addr_mode)
1832     tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1833   y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1834 
1835   x = change_address (x, QImode, x_addr);
1836   y = change_address (y, QImode, y_addr);
1837 
1838   emit_move_insn (x, y);
1839 
1840   tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1841 			     true, OPTAB_LIB_WIDEN);
1842   if (tmp != iter)
1843     emit_move_insn (iter, tmp);
1844 
1845   emit_label (cmp_label);
1846 
1847   emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1848 			   true, top_label, REG_BR_PROB_BASE * 90 / 100);
1849 }
1850 
1851 /* Expand a call to memcpy or memmove or memcmp, and return the result.
1852    TAILCALL is true if this is a tail call.  */
1853 
1854 rtx
1855 emit_block_op_via_libcall (enum built_in_function fncode, rtx dst, rtx src,
1856 			   rtx size, bool tailcall)
1857 {
1858   rtx dst_addr, src_addr;
1859   tree call_expr, dst_tree, src_tree, size_tree;
1860   machine_mode size_mode;
1861 
1862   dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1863   dst_addr = convert_memory_address (ptr_mode, dst_addr);
1864   dst_tree = make_tree (ptr_type_node, dst_addr);
1865 
1866   src_addr = copy_addr_to_reg (XEXP (src, 0));
1867   src_addr = convert_memory_address (ptr_mode, src_addr);
1868   src_tree = make_tree (ptr_type_node, src_addr);
1869 
1870   size_mode = TYPE_MODE (sizetype);
1871   size = convert_to_mode (size_mode, size, 1);
1872   size = copy_to_mode_reg (size_mode, size);
1873   size_tree = make_tree (sizetype, size);
1874 
1875   /* It is incorrect to use the libcall calling conventions for calls to
1876      memcpy/memmove/memcmp because they can be provided by the user.  */
1877   tree fn = builtin_decl_implicit (fncode);
1878   call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1879   CALL_EXPR_TAILCALL (call_expr) = tailcall;
1880 
1881   return expand_call (call_expr, NULL_RTX, false);
1882 }
1883 
1884 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
1885    ARG3_TYPE is the type of ARG3_RTX.  Return the result rtx on success,
1886    otherwise return null.  */
1887 
1888 rtx
1889 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
1890 			  rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
1891 			  HOST_WIDE_INT align)
1892 {
1893   machine_mode insn_mode = insn_data[icode].operand[0].mode;
1894 
1895   if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
1896     target = NULL_RTX;
1897 
1898   struct expand_operand ops[5];
1899   create_output_operand (&ops[0], target, insn_mode);
1900   create_fixed_operand (&ops[1], arg1_rtx);
1901   create_fixed_operand (&ops[2], arg2_rtx);
1902   create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
1903 			       TYPE_UNSIGNED (arg3_type));
1904   create_integer_operand (&ops[4], align);
1905   if (maybe_expand_insn (icode, 5, ops))
1906     return ops[0].value;
1907   return NULL_RTX;
1908 }
1909 
1910 /* Expand a block compare between X and Y with length LEN using the
1911    cmpmem optab, placing the result in TARGET.  LEN_TYPE is the type
1912    of the expression that was used to calculate the length.  ALIGN
1913    gives the known minimum common alignment.  */
1914 
1915 static rtx
1916 emit_block_cmp_via_cmpmem (rtx x, rtx y, rtx len, tree len_type, rtx target,
1917 			   unsigned align)
1918 {
1919   /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
1920      implementing memcmp because it will stop if it encounters two
1921      zero bytes.  */
1922   insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
1923 
1924   if (icode == CODE_FOR_nothing)
1925     return NULL_RTX;
1926 
1927   return expand_cmpstrn_or_cmpmem (icode, target, x, y, len_type, len, align);
1928 }
1929 
1930 /* Emit code to compare a block Y to a block X.  This may be done with
1931    string-compare instructions, with multiple scalar instructions,
1932    or with a library call.
1933 
1934    Both X and Y must be MEM rtx's.  LEN is an rtx that says how long
1935    they are.  LEN_TYPE is the type of the expression that was used to
1936    calculate it.
1937 
1938    If EQUALITY_ONLY is true, it means we don't have to return the tri-state
1939    value of a normal memcmp call, instead we can just compare for equality.
1940    If FORCE_LIBCALL is true, we should emit a call to memcmp rather than
1941    returning NULL_RTX.
1942 
1943    Optionally, the caller can pass a constfn and associated data in Y_CFN
1944    and Y_CFN_DATA. describing that the second operand being compared is a
1945    known constant and how to obtain its data.
1946    Return the result of the comparison, or NULL_RTX if we failed to
1947    perform the operation.  */
1948 
1949 rtx
1950 emit_block_cmp_hints (rtx x, rtx y, rtx len, tree len_type, rtx target,
1951 		      bool equality_only, by_pieces_constfn y_cfn,
1952 		      void *y_cfndata)
1953 {
1954   rtx result = 0;
1955 
1956   if (CONST_INT_P (len) && INTVAL (len) == 0)
1957     return const0_rtx;
1958 
1959   gcc_assert (MEM_P (x) && MEM_P (y));
1960   unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1961   gcc_assert (align >= BITS_PER_UNIT);
1962 
1963   x = adjust_address (x, BLKmode, 0);
1964   y = adjust_address (y, BLKmode, 0);
1965 
1966   if (equality_only
1967       && CONST_INT_P (len)
1968       && can_do_by_pieces (INTVAL (len), align, COMPARE_BY_PIECES))
1969     result = compare_by_pieces (x, y, INTVAL (len), target, align,
1970 				y_cfn, y_cfndata);
1971   else
1972     result = emit_block_cmp_via_cmpmem (x, y, len, len_type, target, align);
1973 
1974   return result;
1975 }
1976 
1977 /* Copy all or part of a value X into registers starting at REGNO.
1978    The number of registers to be filled is NREGS.  */
1979 
1980 void
1981 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
1982 {
1983   if (nregs == 0)
1984     return;
1985 
1986   if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1987     x = validize_mem (force_const_mem (mode, x));
1988 
1989   /* See if the machine can do this with a load multiple insn.  */
1990   if (targetm.have_load_multiple ())
1991     {
1992       rtx_insn *last = get_last_insn ();
1993       rtx first = gen_rtx_REG (word_mode, regno);
1994       if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
1995 						     GEN_INT (nregs)))
1996 	{
1997 	  emit_insn (pat);
1998 	  return;
1999 	}
2000       else
2001 	delete_insns_since (last);
2002     }
2003 
2004   for (int i = 0; i < nregs; i++)
2005     emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2006 		    operand_subword_force (x, i, mode));
2007 }
2008 
2009 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2010    The number of registers to be filled is NREGS.  */
2011 
2012 void
2013 move_block_from_reg (int regno, rtx x, int nregs)
2014 {
2015   if (nregs == 0)
2016     return;
2017 
2018   /* See if the machine can do this with a store multiple insn.  */
2019   if (targetm.have_store_multiple ())
2020     {
2021       rtx_insn *last = get_last_insn ();
2022       rtx first = gen_rtx_REG (word_mode, regno);
2023       if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
2024 						      GEN_INT (nregs)))
2025 	{
2026 	  emit_insn (pat);
2027 	  return;
2028 	}
2029       else
2030 	delete_insns_since (last);
2031     }
2032 
2033   for (int i = 0; i < nregs; i++)
2034     {
2035       rtx tem = operand_subword (x, i, 1, BLKmode);
2036 
2037       gcc_assert (tem);
2038 
2039       emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2040     }
2041 }
2042 
2043 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2044    ORIG, where ORIG is a non-consecutive group of registers represented by
2045    a PARALLEL.  The clone is identical to the original except in that the
2046    original set of registers is replaced by a new set of pseudo registers.
2047    The new set has the same modes as the original set.  */
2048 
2049 rtx
2050 gen_group_rtx (rtx orig)
2051 {
2052   int i, length;
2053   rtx *tmps;
2054 
2055   gcc_assert (GET_CODE (orig) == PARALLEL);
2056 
2057   length = XVECLEN (orig, 0);
2058   tmps = XALLOCAVEC (rtx, length);
2059 
2060   /* Skip a NULL entry in first slot.  */
2061   i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2062 
2063   if (i)
2064     tmps[0] = 0;
2065 
2066   for (; i < length; i++)
2067     {
2068       machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2069       rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2070 
2071       tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2072     }
2073 
2074   return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2075 }
2076 
2077 /* A subroutine of emit_group_load.  Arguments as for emit_group_load,
2078    except that values are placed in TMPS[i], and must later be moved
2079    into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */
2080 
2081 static void
2082 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
2083 {
2084   rtx src;
2085   int start, i;
2086   machine_mode m = GET_MODE (orig_src);
2087 
2088   gcc_assert (GET_CODE (dst) == PARALLEL);
2089 
2090   if (m != VOIDmode
2091       && !SCALAR_INT_MODE_P (m)
2092       && !MEM_P (orig_src)
2093       && GET_CODE (orig_src) != CONCAT)
2094     {
2095       machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
2096       if (imode == BLKmode)
2097 	src = assign_stack_temp (GET_MODE (orig_src), ssize);
2098       else
2099 	src = gen_reg_rtx (imode);
2100       if (imode != BLKmode)
2101 	src = gen_lowpart (GET_MODE (orig_src), src);
2102       emit_move_insn (src, orig_src);
2103       /* ...and back again.  */
2104       if (imode != BLKmode)
2105 	src = gen_lowpart (imode, src);
2106       emit_group_load_1 (tmps, dst, src, type, ssize);
2107       return;
2108     }
2109 
2110   /* Check for a NULL entry, used to indicate that the parameter goes
2111      both on the stack and in registers.  */
2112   if (XEXP (XVECEXP (dst, 0, 0), 0))
2113     start = 0;
2114   else
2115     start = 1;
2116 
2117   /* Process the pieces.  */
2118   for (i = start; i < XVECLEN (dst, 0); i++)
2119     {
2120       machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2121       HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2122       unsigned int bytelen = GET_MODE_SIZE (mode);
2123       int shift = 0;
2124 
2125       /* Handle trailing fragments that run over the size of the struct.  */
2126       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2127 	{
2128 	  /* Arrange to shift the fragment to where it belongs.
2129 	     extract_bit_field loads to the lsb of the reg.  */
2130 	  if (
2131 #ifdef BLOCK_REG_PADDING
2132 	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
2133 	      == (BYTES_BIG_ENDIAN ? upward : downward)
2134 #else
2135 	      BYTES_BIG_ENDIAN
2136 #endif
2137 	      )
2138 	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2139 	  bytelen = ssize - bytepos;
2140 	  gcc_assert (bytelen > 0);
2141 	}
2142 
2143       /* If we won't be loading directly from memory, protect the real source
2144 	 from strange tricks we might play; but make sure that the source can
2145 	 be loaded directly into the destination.  */
2146       src = orig_src;
2147       if (!MEM_P (orig_src)
2148 	  && (!CONSTANT_P (orig_src)
2149 	      || (GET_MODE (orig_src) != mode
2150 		  && GET_MODE (orig_src) != VOIDmode)))
2151 	{
2152 	  if (GET_MODE (orig_src) == VOIDmode)
2153 	    src = gen_reg_rtx (mode);
2154 	  else
2155 	    src = gen_reg_rtx (GET_MODE (orig_src));
2156 
2157 	  emit_move_insn (src, orig_src);
2158 	}
2159 
2160       /* Optimize the access just a bit.  */
2161       if (MEM_P (src)
2162 	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
2163 	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
2164 	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2165 	  && bytelen == GET_MODE_SIZE (mode))
2166 	{
2167 	  tmps[i] = gen_reg_rtx (mode);
2168 	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2169 	}
2170       else if (COMPLEX_MODE_P (mode)
2171 	       && GET_MODE (src) == mode
2172 	       && bytelen == GET_MODE_SIZE (mode))
2173 	/* Let emit_move_complex do the bulk of the work.  */
2174 	tmps[i] = src;
2175       else if (GET_CODE (src) == CONCAT)
2176 	{
2177 	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2178 	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2179 	  unsigned int elt = bytepos / slen0;
2180 	  unsigned int subpos = bytepos % slen0;
2181 
2182 	  if (subpos + bytelen <= slen0)
2183 	    {
2184 	      /* The following assumes that the concatenated objects all
2185 		 have the same size.  In this case, a simple calculation
2186 		 can be used to determine the object and the bit field
2187 		 to be extracted.  */
2188 	      tmps[i] = XEXP (src, elt);
2189 	      if (subpos != 0
2190 		  || subpos + bytelen != slen0
2191 		  || (!CONSTANT_P (tmps[i])
2192 		      && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode)))
2193 		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2194 					     subpos * BITS_PER_UNIT,
2195 					     1, NULL_RTX, mode, mode, false);
2196 	    }
2197 	  else
2198 	    {
2199 	      rtx mem;
2200 
2201 	      gcc_assert (!bytepos);
2202 	      mem = assign_stack_temp (GET_MODE (src), slen);
2203 	      emit_move_insn (mem, src);
2204 	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
2205 					   0, 1, NULL_RTX, mode, mode, false);
2206 	    }
2207 	}
2208       /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2209 	 SIMD register, which is currently broken.  While we get GCC
2210 	 to emit proper RTL for these cases, let's dump to memory.  */
2211       else if (VECTOR_MODE_P (GET_MODE (dst))
2212 	       && REG_P (src))
2213 	{
2214 	  int slen = GET_MODE_SIZE (GET_MODE (src));
2215 	  rtx mem;
2216 
2217 	  mem = assign_stack_temp (GET_MODE (src), slen);
2218 	  emit_move_insn (mem, src);
2219 	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
2220 	}
2221       else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
2222                && XVECLEN (dst, 0) > 1)
2223         tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
2224       else if (CONSTANT_P (src))
2225 	{
2226 	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
2227 
2228 	  if (len == ssize)
2229 	    tmps[i] = src;
2230 	  else
2231 	    {
2232 	      rtx first, second;
2233 
2234 	      /* TODO: const_wide_int can have sizes other than this...  */
2235 	      gcc_assert (2 * len == ssize);
2236 	      split_double (src, &first, &second);
2237 	      if (i)
2238 		tmps[i] = second;
2239 	      else
2240 		tmps[i] = first;
2241 	    }
2242 	}
2243       else if (REG_P (src) && GET_MODE (src) == mode)
2244 	tmps[i] = src;
2245       else
2246 	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2247 				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2248 				     mode, mode, false);
2249 
2250       if (shift)
2251 	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
2252 				shift, tmps[i], 0);
2253     }
2254 }
2255 
2256 /* Emit code to move a block SRC of type TYPE to a block DST,
2257    where DST is non-consecutive registers represented by a PARALLEL.
2258    SSIZE represents the total size of block ORIG_SRC in bytes, or -1
2259    if not known.  */
2260 
2261 void
2262 emit_group_load (rtx dst, rtx src, tree type, int ssize)
2263 {
2264   rtx *tmps;
2265   int i;
2266 
2267   tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
2268   emit_group_load_1 (tmps, dst, src, type, ssize);
2269 
2270   /* Copy the extracted pieces into the proper (probable) hard regs.  */
2271   for (i = 0; i < XVECLEN (dst, 0); i++)
2272     {
2273       rtx d = XEXP (XVECEXP (dst, 0, i), 0);
2274       if (d == NULL)
2275 	continue;
2276       emit_move_insn (d, tmps[i]);
2277     }
2278 }
2279 
2280 /* Similar, but load SRC into new pseudos in a format that looks like
2281    PARALLEL.  This can later be fed to emit_group_move to get things
2282    in the right place.  */
2283 
2284 rtx
2285 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
2286 {
2287   rtvec vec;
2288   int i;
2289 
2290   vec = rtvec_alloc (XVECLEN (parallel, 0));
2291   emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
2292 
2293   /* Convert the vector to look just like the original PARALLEL, except
2294      with the computed values.  */
2295   for (i = 0; i < XVECLEN (parallel, 0); i++)
2296     {
2297       rtx e = XVECEXP (parallel, 0, i);
2298       rtx d = XEXP (e, 0);
2299 
2300       if (d)
2301 	{
2302 	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
2303 	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
2304 	}
2305       RTVEC_ELT (vec, i) = e;
2306     }
2307 
2308   return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
2309 }
2310 
2311 /* Emit code to move a block SRC to block DST, where SRC and DST are
2312    non-consecutive groups of registers, each represented by a PARALLEL.  */
2313 
2314 void
2315 emit_group_move (rtx dst, rtx src)
2316 {
2317   int i;
2318 
2319   gcc_assert (GET_CODE (src) == PARALLEL
2320 	      && GET_CODE (dst) == PARALLEL
2321 	      && XVECLEN (src, 0) == XVECLEN (dst, 0));
2322 
2323   /* Skip first entry if NULL.  */
2324   for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2325     emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2326 		    XEXP (XVECEXP (src, 0, i), 0));
2327 }
2328 
2329 /* Move a group of registers represented by a PARALLEL into pseudos.  */
2330 
2331 rtx
2332 emit_group_move_into_temps (rtx src)
2333 {
2334   rtvec vec = rtvec_alloc (XVECLEN (src, 0));
2335   int i;
2336 
2337   for (i = 0; i < XVECLEN (src, 0); i++)
2338     {
2339       rtx e = XVECEXP (src, 0, i);
2340       rtx d = XEXP (e, 0);
2341 
2342       if (d)
2343 	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
2344       RTVEC_ELT (vec, i) = e;
2345     }
2346 
2347   return gen_rtx_PARALLEL (GET_MODE (src), vec);
2348 }
2349 
2350 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
2351    where SRC is non-consecutive registers represented by a PARALLEL.
2352    SSIZE represents the total size of block ORIG_DST, or -1 if not
2353    known.  */
2354 
2355 void
2356 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
2357 {
2358   rtx *tmps, dst;
2359   int start, finish, i;
2360   machine_mode m = GET_MODE (orig_dst);
2361 
2362   gcc_assert (GET_CODE (src) == PARALLEL);
2363 
2364   if (!SCALAR_INT_MODE_P (m)
2365       && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
2366     {
2367       machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
2368       if (imode == BLKmode)
2369         dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
2370       else
2371         dst = gen_reg_rtx (imode);
2372       emit_group_store (dst, src, type, ssize);
2373       if (imode != BLKmode)
2374         dst = gen_lowpart (GET_MODE (orig_dst), dst);
2375       emit_move_insn (orig_dst, dst);
2376       return;
2377     }
2378 
2379   /* Check for a NULL entry, used to indicate that the parameter goes
2380      both on the stack and in registers.  */
2381   if (XEXP (XVECEXP (src, 0, 0), 0))
2382     start = 0;
2383   else
2384     start = 1;
2385   finish = XVECLEN (src, 0);
2386 
2387   tmps = XALLOCAVEC (rtx, finish);
2388 
2389   /* Copy the (probable) hard regs into pseudos.  */
2390   for (i = start; i < finish; i++)
2391     {
2392       rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2393       if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
2394 	{
2395 	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
2396 	  emit_move_insn (tmps[i], reg);
2397 	}
2398       else
2399 	tmps[i] = reg;
2400     }
2401 
2402   /* If we won't be storing directly into memory, protect the real destination
2403      from strange tricks we might play.  */
2404   dst = orig_dst;
2405   if (GET_CODE (dst) == PARALLEL)
2406     {
2407       rtx temp;
2408 
2409       /* We can get a PARALLEL dst if there is a conditional expression in
2410 	 a return statement.  In that case, the dst and src are the same,
2411 	 so no action is necessary.  */
2412       if (rtx_equal_p (dst, src))
2413 	return;
2414 
2415       /* It is unclear if we can ever reach here, but we may as well handle
2416 	 it.  Allocate a temporary, and split this into a store/load to/from
2417 	 the temporary.  */
2418       temp = assign_stack_temp (GET_MODE (dst), ssize);
2419       emit_group_store (temp, src, type, ssize);
2420       emit_group_load (dst, temp, type, ssize);
2421       return;
2422     }
2423   else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
2424     {
2425       machine_mode outer = GET_MODE (dst);
2426       machine_mode inner;
2427       HOST_WIDE_INT bytepos;
2428       bool done = false;
2429       rtx temp;
2430 
2431       if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
2432 	dst = gen_reg_rtx (outer);
2433 
2434       /* Make life a bit easier for combine.  */
2435       /* If the first element of the vector is the low part
2436 	 of the destination mode, use a paradoxical subreg to
2437 	 initialize the destination.  */
2438       if (start < finish)
2439 	{
2440 	  inner = GET_MODE (tmps[start]);
2441 	  bytepos = subreg_lowpart_offset (inner, outer);
2442 	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
2443 	    {
2444 	      temp = simplify_gen_subreg (outer, tmps[start],
2445 					  inner, 0);
2446 	      if (temp)
2447 		{
2448 		  emit_move_insn (dst, temp);
2449 		  done = true;
2450 		  start++;
2451 		}
2452 	    }
2453 	}
2454 
2455       /* If the first element wasn't the low part, try the last.  */
2456       if (!done
2457 	  && start < finish - 1)
2458 	{
2459 	  inner = GET_MODE (tmps[finish - 1]);
2460 	  bytepos = subreg_lowpart_offset (inner, outer);
2461 	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2462 	    {
2463 	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
2464 					  inner, 0);
2465 	      if (temp)
2466 		{
2467 		  emit_move_insn (dst, temp);
2468 		  done = true;
2469 		  finish--;
2470 		}
2471 	    }
2472 	}
2473 
2474       /* Otherwise, simply initialize the result to zero.  */
2475       if (!done)
2476         emit_move_insn (dst, CONST0_RTX (outer));
2477     }
2478 
2479   /* Process the pieces.  */
2480   for (i = start; i < finish; i++)
2481     {
2482       HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2483       machine_mode mode = GET_MODE (tmps[i]);
2484       unsigned int bytelen = GET_MODE_SIZE (mode);
2485       unsigned int adj_bytelen;
2486       rtx dest = dst;
2487 
2488       /* Handle trailing fragments that run over the size of the struct.  */
2489       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2490 	adj_bytelen = ssize - bytepos;
2491       else
2492 	adj_bytelen = bytelen;
2493 
2494       if (GET_CODE (dst) == CONCAT)
2495 	{
2496 	  if (bytepos + adj_bytelen
2497 	      <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2498 	    dest = XEXP (dst, 0);
2499 	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2500 	    {
2501 	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2502 	      dest = XEXP (dst, 1);
2503 	    }
2504 	  else
2505 	    {
2506 	      machine_mode dest_mode = GET_MODE (dest);
2507 	      machine_mode tmp_mode = GET_MODE (tmps[i]);
2508 
2509 	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2510 
2511 	      if (GET_MODE_ALIGNMENT (dest_mode)
2512 		  >= GET_MODE_ALIGNMENT (tmp_mode))
2513 		{
2514 		  dest = assign_stack_temp (dest_mode,
2515 					    GET_MODE_SIZE (dest_mode));
2516 		  emit_move_insn (adjust_address (dest,
2517 						  tmp_mode,
2518 						  bytepos),
2519 				  tmps[i]);
2520 		  dst = dest;
2521 		}
2522 	      else
2523 		{
2524 		  dest = assign_stack_temp (tmp_mode,
2525 					    GET_MODE_SIZE (tmp_mode));
2526 		  emit_move_insn (dest, tmps[i]);
2527 		  dst = adjust_address (dest, dest_mode, bytepos);
2528 		}
2529 	      break;
2530 	    }
2531 	}
2532 
2533       /* Handle trailing fragments that run over the size of the struct.  */
2534       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2535 	{
2536 	  /* store_bit_field always takes its value from the lsb.
2537 	     Move the fragment to the lsb if it's not already there.  */
2538 	  if (
2539 #ifdef BLOCK_REG_PADDING
2540 	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2541 	      == (BYTES_BIG_ENDIAN ? upward : downward)
2542 #else
2543 	      BYTES_BIG_ENDIAN
2544 #endif
2545 	      )
2546 	    {
2547 	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2548 	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2549 				      shift, tmps[i], 0);
2550 	    }
2551 
2552 	  /* Make sure not to write past the end of the struct.  */
2553 	  store_bit_field (dest,
2554 			   adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2555 			   bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2556 			   VOIDmode, tmps[i], false);
2557 	}
2558 
2559       /* Optimize the access just a bit.  */
2560       else if (MEM_P (dest)
2561 	       && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2562 		   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2563 	       && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2564 	       && bytelen == GET_MODE_SIZE (mode))
2565 	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2566 
2567       else
2568 	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2569 			 0, 0, mode, tmps[i], false);
2570     }
2571 
2572   /* Copy from the pseudo into the (probable) hard reg.  */
2573   if (orig_dst != dst)
2574     emit_move_insn (orig_dst, dst);
2575 }
2576 
2577 /* Return a form of X that does not use a PARALLEL.  TYPE is the type
2578    of the value stored in X.  */
2579 
2580 rtx
2581 maybe_emit_group_store (rtx x, tree type)
2582 {
2583   machine_mode mode = TYPE_MODE (type);
2584   gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2585   if (GET_CODE (x) == PARALLEL)
2586     {
2587       rtx result = gen_reg_rtx (mode);
2588       emit_group_store (result, x, type, int_size_in_bytes (type));
2589       return result;
2590     }
2591   return x;
2592 }
2593 
2594 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2595 
2596    This is used on targets that return BLKmode values in registers.  */
2597 
2598 static void
2599 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2600 {
2601   unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2602   rtx src = NULL, dst = NULL;
2603   unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2604   unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2605   machine_mode mode = GET_MODE (srcreg);
2606   machine_mode tmode = GET_MODE (target);
2607   machine_mode copy_mode;
2608 
2609   /* BLKmode registers created in the back-end shouldn't have survived.  */
2610   gcc_assert (mode != BLKmode);
2611 
2612   /* If the structure doesn't take up a whole number of words, see whether
2613      SRCREG is padded on the left or on the right.  If it's on the left,
2614      set PADDING_CORRECTION to the number of bits to skip.
2615 
2616      In most ABIs, the structure will be returned at the least end of
2617      the register, which translates to right padding on little-endian
2618      targets and left padding on big-endian targets.  The opposite
2619      holds if the structure is returned at the most significant
2620      end of the register.  */
2621   if (bytes % UNITS_PER_WORD != 0
2622       && (targetm.calls.return_in_msb (type)
2623 	  ? !BYTES_BIG_ENDIAN
2624 	  : BYTES_BIG_ENDIAN))
2625     padding_correction
2626       = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2627 
2628   /* We can use a single move if we have an exact mode for the size.  */
2629   else if (MEM_P (target)
2630 	   && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2631 	       || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2632 	   && bytes == GET_MODE_SIZE (mode))
2633   {
2634     emit_move_insn (adjust_address (target, mode, 0), srcreg);
2635     return;
2636   }
2637 
2638   /* And if we additionally have the same mode for a register.  */
2639   else if (REG_P (target)
2640 	   && GET_MODE (target) == mode
2641 	   && bytes == GET_MODE_SIZE (mode))
2642   {
2643     emit_move_insn (target, srcreg);
2644     return;
2645   }
2646 
2647   /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2648      into a new pseudo which is a full word.  */
2649   if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2650     {
2651       srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2652       mode = word_mode;
2653     }
2654 
2655   /* Copy the structure BITSIZE bits at a time.  If the target lives in
2656      memory, take care of not reading/writing past its end by selecting
2657      a copy mode suited to BITSIZE.  This should always be possible given
2658      how it is computed.
2659 
2660      If the target lives in register, make sure not to select a copy mode
2661      larger than the mode of the register.
2662 
2663      We could probably emit more efficient code for machines which do not use
2664      strict alignment, but it doesn't seem worth the effort at the current
2665      time.  */
2666 
2667   copy_mode = word_mode;
2668   if (MEM_P (target))
2669     {
2670       machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2671       if (mem_mode != BLKmode)
2672 	copy_mode = mem_mode;
2673     }
2674   else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2675     copy_mode = tmode;
2676 
2677   for (bitpos = 0, xbitpos = padding_correction;
2678        bitpos < bytes * BITS_PER_UNIT;
2679        bitpos += bitsize, xbitpos += bitsize)
2680     {
2681       /* We need a new source operand each time xbitpos is on a
2682 	 word boundary and when xbitpos == padding_correction
2683 	 (the first time through).  */
2684       if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2685 	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2686 
2687       /* We need a new destination operand each time bitpos is on
2688 	 a word boundary.  */
2689       if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2690 	dst = target;
2691       else if (bitpos % BITS_PER_WORD == 0)
2692 	dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2693 
2694       /* Use xbitpos for the source extraction (right justified) and
2695 	 bitpos for the destination store (left justified).  */
2696       store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2697 		       extract_bit_field (src, bitsize,
2698 					  xbitpos % BITS_PER_WORD, 1,
2699 					  NULL_RTX, copy_mode, copy_mode,
2700 					  false),
2701 		       false);
2702     }
2703 }
2704 
2705 /* Copy BLKmode value SRC into a register of mode MODE.  Return the
2706    register if it contains any data, otherwise return null.
2707 
2708    This is used on targets that return BLKmode values in registers.  */
2709 
2710 rtx
2711 copy_blkmode_to_reg (machine_mode mode, tree src)
2712 {
2713   int i, n_regs;
2714   unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2715   unsigned int bitsize;
2716   rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2717   machine_mode dst_mode;
2718 
2719   gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2720 
2721   x = expand_normal (src);
2722 
2723   bytes = int_size_in_bytes (TREE_TYPE (src));
2724   if (bytes == 0)
2725     return NULL_RTX;
2726 
2727   /* If the structure doesn't take up a whole number of words, see
2728      whether the register value should be padded on the left or on
2729      the right.  Set PADDING_CORRECTION to the number of padding
2730      bits needed on the left side.
2731 
2732      In most ABIs, the structure will be returned at the least end of
2733      the register, which translates to right padding on little-endian
2734      targets and left padding on big-endian targets.  The opposite
2735      holds if the structure is returned at the most significant
2736      end of the register.  */
2737   if (bytes % UNITS_PER_WORD != 0
2738       && (targetm.calls.return_in_msb (TREE_TYPE (src))
2739 	  ? !BYTES_BIG_ENDIAN
2740 	  : BYTES_BIG_ENDIAN))
2741     padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2742 					   * BITS_PER_UNIT));
2743 
2744   n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2745   dst_words = XALLOCAVEC (rtx, n_regs);
2746   bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2747 
2748   /* Copy the structure BITSIZE bits at a time.  */
2749   for (bitpos = 0, xbitpos = padding_correction;
2750        bitpos < bytes * BITS_PER_UNIT;
2751        bitpos += bitsize, xbitpos += bitsize)
2752     {
2753       /* We need a new destination pseudo each time xbitpos is
2754 	 on a word boundary and when xbitpos == padding_correction
2755 	 (the first time through).  */
2756       if (xbitpos % BITS_PER_WORD == 0
2757 	  || xbitpos == padding_correction)
2758 	{
2759 	  /* Generate an appropriate register.  */
2760 	  dst_word = gen_reg_rtx (word_mode);
2761 	  dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2762 
2763 	  /* Clear the destination before we move anything into it.  */
2764 	  emit_move_insn (dst_word, CONST0_RTX (word_mode));
2765 	}
2766 
2767       /* We need a new source operand each time bitpos is on a word
2768 	 boundary.  */
2769       if (bitpos % BITS_PER_WORD == 0)
2770 	src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2771 
2772       /* Use bitpos for the source extraction (left justified) and
2773 	 xbitpos for the destination store (right justified).  */
2774       store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2775 		       0, 0, word_mode,
2776 		       extract_bit_field (src_word, bitsize,
2777 					  bitpos % BITS_PER_WORD, 1,
2778 					  NULL_RTX, word_mode, word_mode,
2779 					  false),
2780 		       false);
2781     }
2782 
2783   if (mode == BLKmode)
2784     {
2785       /* Find the smallest integer mode large enough to hold the
2786 	 entire structure.  */
2787       for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2788 	   mode != VOIDmode;
2789 	   mode = GET_MODE_WIDER_MODE (mode))
2790 	/* Have we found a large enough mode?  */
2791 	if (GET_MODE_SIZE (mode) >= bytes)
2792 	  break;
2793 
2794       /* A suitable mode should have been found.  */
2795       gcc_assert (mode != VOIDmode);
2796     }
2797 
2798   if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2799     dst_mode = word_mode;
2800   else
2801     dst_mode = mode;
2802   dst = gen_reg_rtx (dst_mode);
2803 
2804   for (i = 0; i < n_regs; i++)
2805     emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2806 
2807   if (mode != dst_mode)
2808     dst = gen_lowpart (mode, dst);
2809 
2810   return dst;
2811 }
2812 
2813 /* Add a USE expression for REG to the (possibly empty) list pointed
2814    to by CALL_FUSAGE.  REG must denote a hard register.  */
2815 
2816 void
2817 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2818 {
2819   gcc_assert (REG_P (reg));
2820 
2821   if (!HARD_REGISTER_P (reg))
2822     return;
2823 
2824   *call_fusage
2825     = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2826 }
2827 
2828 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2829    to by CALL_FUSAGE.  REG must denote a hard register.  */
2830 
2831 void
2832 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2833 {
2834   gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2835 
2836   *call_fusage
2837     = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2838 }
2839 
2840 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2841    starting at REGNO.  All of these registers must be hard registers.  */
2842 
2843 void
2844 use_regs (rtx *call_fusage, int regno, int nregs)
2845 {
2846   int i;
2847 
2848   gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2849 
2850   for (i = 0; i < nregs; i++)
2851     use_reg (call_fusage, regno_reg_rtx[regno + i]);
2852 }
2853 
2854 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2855    PARALLEL REGS.  This is for calls that pass values in multiple
2856    non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2857 
2858 void
2859 use_group_regs (rtx *call_fusage, rtx regs)
2860 {
2861   int i;
2862 
2863   for (i = 0; i < XVECLEN (regs, 0); i++)
2864     {
2865       rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2866 
2867       /* A NULL entry means the parameter goes both on the stack and in
2868 	 registers.  This can also be a MEM for targets that pass values
2869 	 partially on the stack and partially in registers.  */
2870       if (reg != 0 && REG_P (reg))
2871 	use_reg (call_fusage, reg);
2872     }
2873 }
2874 
2875 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2876    assigment and the code of the expresion on the RHS is CODE.  Return
2877    NULL otherwise.  */
2878 
2879 static gimple *
2880 get_def_for_expr (tree name, enum tree_code code)
2881 {
2882   gimple *def_stmt;
2883 
2884   if (TREE_CODE (name) != SSA_NAME)
2885     return NULL;
2886 
2887   def_stmt = get_gimple_for_ssa_name (name);
2888   if (!def_stmt
2889       || gimple_assign_rhs_code (def_stmt) != code)
2890     return NULL;
2891 
2892   return def_stmt;
2893 }
2894 
2895 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2896    assigment and the class of the expresion on the RHS is CLASS.  Return
2897    NULL otherwise.  */
2898 
2899 static gimple *
2900 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2901 {
2902   gimple *def_stmt;
2903 
2904   if (TREE_CODE (name) != SSA_NAME)
2905     return NULL;
2906 
2907   def_stmt = get_gimple_for_ssa_name (name);
2908   if (!def_stmt
2909       || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2910     return NULL;
2911 
2912   return def_stmt;
2913 }
2914 
2915 /* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2916    its length in bytes.  */
2917 
2918 rtx
2919 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2920 		     unsigned int expected_align, HOST_WIDE_INT expected_size,
2921 		     unsigned HOST_WIDE_INT min_size,
2922 		     unsigned HOST_WIDE_INT max_size,
2923 		     unsigned HOST_WIDE_INT probable_max_size)
2924 {
2925   machine_mode mode = GET_MODE (object);
2926   unsigned int align;
2927 
2928   gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2929 
2930   /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2931      just move a zero.  Otherwise, do this a piece at a time.  */
2932   if (mode != BLKmode
2933       && CONST_INT_P (size)
2934       && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2935     {
2936       rtx zero = CONST0_RTX (mode);
2937       if (zero != NULL)
2938 	{
2939 	  emit_move_insn (object, zero);
2940 	  return NULL;
2941 	}
2942 
2943       if (COMPLEX_MODE_P (mode))
2944 	{
2945 	  zero = CONST0_RTX (GET_MODE_INNER (mode));
2946 	  if (zero != NULL)
2947 	    {
2948 	      write_complex_part (object, zero, 0);
2949 	      write_complex_part (object, zero, 1);
2950 	      return NULL;
2951 	    }
2952 	}
2953     }
2954 
2955   if (size == const0_rtx)
2956     return NULL;
2957 
2958   align = MEM_ALIGN (object);
2959 
2960   if (CONST_INT_P (size)
2961       && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2962 						 CLEAR_BY_PIECES,
2963 						 optimize_insn_for_speed_p ()))
2964     clear_by_pieces (object, INTVAL (size), align);
2965   else if (set_storage_via_setmem (object, size, const0_rtx, align,
2966 				   expected_align, expected_size,
2967 				   min_size, max_size, probable_max_size))
2968     ;
2969   else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2970     return set_storage_via_libcall (object, size, const0_rtx,
2971 				    method == BLOCK_OP_TAILCALL);
2972   else
2973     gcc_unreachable ();
2974 
2975   return NULL;
2976 }
2977 
2978 rtx
2979 clear_storage (rtx object, rtx size, enum block_op_methods method)
2980 {
2981   unsigned HOST_WIDE_INT max, min = 0;
2982   if (GET_CODE (size) == CONST_INT)
2983     min = max = UINTVAL (size);
2984   else
2985     max = GET_MODE_MASK (GET_MODE (size));
2986   return clear_storage_hints (object, size, method, 0, -1, min, max, max);
2987 }
2988 
2989 
2990 /* A subroutine of clear_storage.  Expand a call to memset.
2991    Return the return value of memset, 0 otherwise.  */
2992 
2993 rtx
2994 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2995 {
2996   tree call_expr, fn, object_tree, size_tree, val_tree;
2997   machine_mode size_mode;
2998 
2999   object = copy_addr_to_reg (XEXP (object, 0));
3000   object_tree = make_tree (ptr_type_node, object);
3001 
3002   if (!CONST_INT_P (val))
3003     val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
3004   val_tree = make_tree (integer_type_node, val);
3005 
3006   size_mode = TYPE_MODE (sizetype);
3007   size = convert_to_mode (size_mode, size, 1);
3008   size = copy_to_mode_reg (size_mode, size);
3009   size_tree = make_tree (sizetype, size);
3010 
3011   /* It is incorrect to use the libcall calling conventions for calls to
3012      memset because it can be provided by the user.  */
3013   fn = builtin_decl_implicit (BUILT_IN_MEMSET);
3014   call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
3015   CALL_EXPR_TAILCALL (call_expr) = tailcall;
3016 
3017   return expand_call (call_expr, NULL_RTX, false);
3018 }
3019 
3020 /* Expand a setmem pattern; return true if successful.  */
3021 
3022 bool
3023 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
3024 			unsigned int expected_align, HOST_WIDE_INT expected_size,
3025 			unsigned HOST_WIDE_INT min_size,
3026 			unsigned HOST_WIDE_INT max_size,
3027 			unsigned HOST_WIDE_INT probable_max_size)
3028 {
3029   /* Try the most limited insn first, because there's no point
3030      including more than one in the machine description unless
3031      the more limited one has some advantage.  */
3032 
3033   machine_mode mode;
3034 
3035   if (expected_align < align)
3036     expected_align = align;
3037   if (expected_size != -1)
3038     {
3039       if ((unsigned HOST_WIDE_INT)expected_size > max_size)
3040 	expected_size = max_size;
3041       if ((unsigned HOST_WIDE_INT)expected_size < min_size)
3042 	expected_size = min_size;
3043     }
3044 
3045   for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
3046        mode = GET_MODE_WIDER_MODE (mode))
3047     {
3048       enum insn_code code = direct_optab_handler (setmem_optab, mode);
3049 
3050       if (code != CODE_FOR_nothing
3051 	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
3052 	     here because if SIZE is less than the mode mask, as it is
3053 	     returned by the macro, it will definitely be less than the
3054 	     actual mode mask.  Since SIZE is within the Pmode address
3055 	     space, we limit MODE to Pmode.  */
3056 	  && ((CONST_INT_P (size)
3057 	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
3058 		   <= (GET_MODE_MASK (mode) >> 1)))
3059 	      || max_size <= (GET_MODE_MASK (mode) >> 1)
3060 	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
3061 	{
3062 	  struct expand_operand ops[9];
3063 	  unsigned int nops;
3064 
3065 	  nops = insn_data[(int) code].n_generator_args;
3066 	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
3067 
3068 	  create_fixed_operand (&ops[0], object);
3069 	  /* The check above guarantees that this size conversion is valid.  */
3070 	  create_convert_operand_to (&ops[1], size, mode, true);
3071 	  create_convert_operand_from (&ops[2], val, byte_mode, true);
3072 	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
3073 	  if (nops >= 6)
3074 	    {
3075 	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
3076 	      create_integer_operand (&ops[5], expected_size);
3077 	    }
3078 	  if (nops >= 8)
3079 	    {
3080 	      create_integer_operand (&ops[6], min_size);
3081 	      /* If we can not represent the maximal size,
3082 		 make parameter NULL.  */
3083 	      if ((HOST_WIDE_INT) max_size != -1)
3084 	        create_integer_operand (&ops[7], max_size);
3085 	      else
3086 		create_fixed_operand (&ops[7], NULL);
3087 	    }
3088 	  if (nops == 9)
3089 	    {
3090 	      /* If we can not represent the maximal size,
3091 		 make parameter NULL.  */
3092 	      if ((HOST_WIDE_INT) probable_max_size != -1)
3093 	        create_integer_operand (&ops[8], probable_max_size);
3094 	      else
3095 		create_fixed_operand (&ops[8], NULL);
3096 	    }
3097 	  if (maybe_expand_insn (code, nops, ops))
3098 	    return true;
3099 	}
3100     }
3101 
3102   return false;
3103 }
3104 
3105 
3106 /* Write to one of the components of the complex value CPLX.  Write VAL to
3107    the real part if IMAG_P is false, and the imaginary part if its true.  */
3108 
3109 void
3110 write_complex_part (rtx cplx, rtx val, bool imag_p)
3111 {
3112   machine_mode cmode;
3113   machine_mode imode;
3114   unsigned ibitsize;
3115 
3116   if (GET_CODE (cplx) == CONCAT)
3117     {
3118       emit_move_insn (XEXP (cplx, imag_p), val);
3119       return;
3120     }
3121 
3122   cmode = GET_MODE (cplx);
3123   imode = GET_MODE_INNER (cmode);
3124   ibitsize = GET_MODE_BITSIZE (imode);
3125 
3126   /* For MEMs simplify_gen_subreg may generate an invalid new address
3127      because, e.g., the original address is considered mode-dependent
3128      by the target, which restricts simplify_subreg from invoking
3129      adjust_address_nv.  Instead of preparing fallback support for an
3130      invalid address, we call adjust_address_nv directly.  */
3131   if (MEM_P (cplx))
3132     {
3133       emit_move_insn (adjust_address_nv (cplx, imode,
3134 					 imag_p ? GET_MODE_SIZE (imode) : 0),
3135 		      val);
3136       return;
3137     }
3138 
3139   /* If the sub-object is at least word sized, then we know that subregging
3140      will work.  This special case is important, since store_bit_field
3141      wants to operate on integer modes, and there's rarely an OImode to
3142      correspond to TCmode.  */
3143   if (ibitsize >= BITS_PER_WORD
3144       /* For hard regs we have exact predicates.  Assume we can split
3145 	 the original object if it spans an even number of hard regs.
3146 	 This special case is important for SCmode on 64-bit platforms
3147 	 where the natural size of floating-point regs is 32-bit.  */
3148       || (REG_P (cplx)
3149 	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3150 	  && REG_NREGS (cplx) % 2 == 0))
3151     {
3152       rtx part = simplify_gen_subreg (imode, cplx, cmode,
3153 				      imag_p ? GET_MODE_SIZE (imode) : 0);
3154       if (part)
3155         {
3156 	  emit_move_insn (part, val);
3157 	  return;
3158 	}
3159       else
3160 	/* simplify_gen_subreg may fail for sub-word MEMs.  */
3161 	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3162     }
3163 
3164   store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val,
3165 		   false);
3166 }
3167 
3168 /* Extract one of the components of the complex value CPLX.  Extract the
3169    real part if IMAG_P is false, and the imaginary part if it's true.  */
3170 
3171 rtx
3172 read_complex_part (rtx cplx, bool imag_p)
3173 {
3174   machine_mode cmode, imode;
3175   unsigned ibitsize;
3176 
3177   if (GET_CODE (cplx) == CONCAT)
3178     return XEXP (cplx, imag_p);
3179 
3180   cmode = GET_MODE (cplx);
3181   imode = GET_MODE_INNER (cmode);
3182   ibitsize = GET_MODE_BITSIZE (imode);
3183 
3184   /* Special case reads from complex constants that got spilled to memory.  */
3185   if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3186     {
3187       tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3188       if (decl && TREE_CODE (decl) == COMPLEX_CST)
3189 	{
3190 	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3191 	  if (CONSTANT_CLASS_P (part))
3192 	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3193 	}
3194     }
3195 
3196   /* For MEMs simplify_gen_subreg may generate an invalid new address
3197      because, e.g., the original address is considered mode-dependent
3198      by the target, which restricts simplify_subreg from invoking
3199      adjust_address_nv.  Instead of preparing fallback support for an
3200      invalid address, we call adjust_address_nv directly.  */
3201   if (MEM_P (cplx))
3202     return adjust_address_nv (cplx, imode,
3203 			      imag_p ? GET_MODE_SIZE (imode) : 0);
3204 
3205   /* If the sub-object is at least word sized, then we know that subregging
3206      will work.  This special case is important, since extract_bit_field
3207      wants to operate on integer modes, and there's rarely an OImode to
3208      correspond to TCmode.  */
3209   if (ibitsize >= BITS_PER_WORD
3210       /* For hard regs we have exact predicates.  Assume we can split
3211 	 the original object if it spans an even number of hard regs.
3212 	 This special case is important for SCmode on 64-bit platforms
3213 	 where the natural size of floating-point regs is 32-bit.  */
3214       || (REG_P (cplx)
3215 	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3216 	  && REG_NREGS (cplx) % 2 == 0))
3217     {
3218       rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3219 				     imag_p ? GET_MODE_SIZE (imode) : 0);
3220       if (ret)
3221         return ret;
3222       else
3223 	/* simplify_gen_subreg may fail for sub-word MEMs.  */
3224 	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3225     }
3226 
3227   return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3228 			    true, NULL_RTX, imode, imode, false);
3229 }
3230 
3231 /* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
3232    NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
3233    represented in NEW_MODE.  If FORCE is true, this will never happen, as
3234    we'll force-create a SUBREG if needed.  */
3235 
3236 static rtx
3237 emit_move_change_mode (machine_mode new_mode,
3238 		       machine_mode old_mode, rtx x, bool force)
3239 {
3240   rtx ret;
3241 
3242   if (push_operand (x, GET_MODE (x)))
3243     {
3244       ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3245       MEM_COPY_ATTRIBUTES (ret, x);
3246     }
3247   else if (MEM_P (x))
3248     {
3249       /* We don't have to worry about changing the address since the
3250 	 size in bytes is supposed to be the same.  */
3251       if (reload_in_progress)
3252 	{
3253 	  /* Copy the MEM to change the mode and move any
3254 	     substitutions from the old MEM to the new one.  */
3255 	  ret = adjust_address_nv (x, new_mode, 0);
3256 	  copy_replacements (x, ret);
3257 	}
3258       else
3259 	ret = adjust_address (x, new_mode, 0);
3260     }
3261   else
3262     {
3263       /* Note that we do want simplify_subreg's behavior of validating
3264 	 that the new mode is ok for a hard register.  If we were to use
3265 	 simplify_gen_subreg, we would create the subreg, but would
3266 	 probably run into the target not being able to implement it.  */
3267       /* Except, of course, when FORCE is true, when this is exactly what
3268 	 we want.  Which is needed for CCmodes on some targets.  */
3269       if (force)
3270 	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3271       else
3272 	ret = simplify_subreg (new_mode, x, old_mode, 0);
3273     }
3274 
3275   return ret;
3276 }
3277 
3278 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
3279    an integer mode of the same size as MODE.  Returns the instruction
3280    emitted, or NULL if such a move could not be generated.  */
3281 
3282 static rtx_insn *
3283 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3284 {
3285   machine_mode imode;
3286   enum insn_code code;
3287 
3288   /* There must exist a mode of the exact size we require.  */
3289   imode = int_mode_for_mode (mode);
3290   if (imode == BLKmode)
3291     return NULL;
3292 
3293   /* The target must support moves in this mode.  */
3294   code = optab_handler (mov_optab, imode);
3295   if (code == CODE_FOR_nothing)
3296     return NULL;
3297 
3298   x = emit_move_change_mode (imode, mode, x, force);
3299   if (x == NULL_RTX)
3300     return NULL;
3301   y = emit_move_change_mode (imode, mode, y, force);
3302   if (y == NULL_RTX)
3303     return NULL;
3304   return emit_insn (GEN_FCN (code) (x, y));
3305 }
3306 
3307 /* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
3308    Return an equivalent MEM that does not use an auto-increment.  */
3309 
3310 rtx
3311 emit_move_resolve_push (machine_mode mode, rtx x)
3312 {
3313   enum rtx_code code = GET_CODE (XEXP (x, 0));
3314   HOST_WIDE_INT adjust;
3315   rtx temp;
3316 
3317   adjust = GET_MODE_SIZE (mode);
3318 #ifdef PUSH_ROUNDING
3319   adjust = PUSH_ROUNDING (adjust);
3320 #endif
3321   if (code == PRE_DEC || code == POST_DEC)
3322     adjust = -adjust;
3323   else if (code == PRE_MODIFY || code == POST_MODIFY)
3324     {
3325       rtx expr = XEXP (XEXP (x, 0), 1);
3326       HOST_WIDE_INT val;
3327 
3328       gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3329       gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3330       val = INTVAL (XEXP (expr, 1));
3331       if (GET_CODE (expr) == MINUS)
3332 	val = -val;
3333       gcc_assert (adjust == val || adjust == -val);
3334       adjust = val;
3335     }
3336 
3337   /* Do not use anti_adjust_stack, since we don't want to update
3338      stack_pointer_delta.  */
3339   temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3340 			      gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3341 			      0, OPTAB_LIB_WIDEN);
3342   if (temp != stack_pointer_rtx)
3343     emit_move_insn (stack_pointer_rtx, temp);
3344 
3345   switch (code)
3346     {
3347     case PRE_INC:
3348     case PRE_DEC:
3349     case PRE_MODIFY:
3350       temp = stack_pointer_rtx;
3351       break;
3352     case POST_INC:
3353     case POST_DEC:
3354     case POST_MODIFY:
3355       temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3356       break;
3357     default:
3358       gcc_unreachable ();
3359     }
3360 
3361   return replace_equiv_address (x, temp);
3362 }
3363 
3364 /* A subroutine of emit_move_complex.  Generate a move from Y into X.
3365    X is known to satisfy push_operand, and MODE is known to be complex.
3366    Returns the last instruction emitted.  */
3367 
3368 rtx_insn *
3369 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3370 {
3371   machine_mode submode = GET_MODE_INNER (mode);
3372   bool imag_first;
3373 
3374 #ifdef PUSH_ROUNDING
3375   unsigned int submodesize = GET_MODE_SIZE (submode);
3376 
3377   /* In case we output to the stack, but the size is smaller than the
3378      machine can push exactly, we need to use move instructions.  */
3379   if (PUSH_ROUNDING (submodesize) != submodesize)
3380     {
3381       x = emit_move_resolve_push (mode, x);
3382       return emit_move_insn (x, y);
3383     }
3384 #endif
3385 
3386   /* Note that the real part always precedes the imag part in memory
3387      regardless of machine's endianness.  */
3388   switch (GET_CODE (XEXP (x, 0)))
3389     {
3390     case PRE_DEC:
3391     case POST_DEC:
3392       imag_first = true;
3393       break;
3394     case PRE_INC:
3395     case POST_INC:
3396       imag_first = false;
3397       break;
3398     default:
3399       gcc_unreachable ();
3400     }
3401 
3402   emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3403 		  read_complex_part (y, imag_first));
3404   return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3405 			 read_complex_part (y, !imag_first));
3406 }
3407 
3408 /* A subroutine of emit_move_complex.  Perform the move from Y to X
3409    via two moves of the parts.  Returns the last instruction emitted.  */
3410 
3411 rtx_insn *
3412 emit_move_complex_parts (rtx x, rtx y)
3413 {
3414   /* Show the output dies here.  This is necessary for SUBREGs
3415      of pseudos since we cannot track their lifetimes correctly;
3416      hard regs shouldn't appear here except as return values.  */
3417   if (!reload_completed && !reload_in_progress
3418       && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3419     emit_clobber (x);
3420 
3421   write_complex_part (x, read_complex_part (y, false), false);
3422   write_complex_part (x, read_complex_part (y, true), true);
3423 
3424   return get_last_insn ();
3425 }
3426 
3427 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3428    MODE is known to be complex.  Returns the last instruction emitted.  */
3429 
3430 static rtx_insn *
3431 emit_move_complex (machine_mode mode, rtx x, rtx y)
3432 {
3433   bool try_int;
3434 
3435   /* Need to take special care for pushes, to maintain proper ordering
3436      of the data, and possibly extra padding.  */
3437   if (push_operand (x, mode))
3438     return emit_move_complex_push (mode, x, y);
3439 
3440   /* See if we can coerce the target into moving both values at once, except
3441      for floating point where we favor moving as parts if this is easy.  */
3442   if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3443       && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3444       && !(REG_P (x)
3445 	   && HARD_REGISTER_P (x)
3446 	   && REG_NREGS (x) == 1)
3447       && !(REG_P (y)
3448 	   && HARD_REGISTER_P (y)
3449 	   && REG_NREGS (y) == 1))
3450     try_int = false;
3451   /* Not possible if the values are inherently not adjacent.  */
3452   else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3453     try_int = false;
3454   /* Is possible if both are registers (or subregs of registers).  */
3455   else if (register_operand (x, mode) && register_operand (y, mode))
3456     try_int = true;
3457   /* If one of the operands is a memory, and alignment constraints
3458      are friendly enough, we may be able to do combined memory operations.
3459      We do not attempt this if Y is a constant because that combination is
3460      usually better with the by-parts thing below.  */
3461   else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3462 	   && (!STRICT_ALIGNMENT
3463 	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3464     try_int = true;
3465   else
3466     try_int = false;
3467 
3468   if (try_int)
3469     {
3470       rtx_insn *ret;
3471 
3472       /* For memory to memory moves, optimal behavior can be had with the
3473 	 existing block move logic.  */
3474       if (MEM_P (x) && MEM_P (y))
3475 	{
3476 	  emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3477 			   BLOCK_OP_NO_LIBCALL);
3478 	  return get_last_insn ();
3479 	}
3480 
3481       ret = emit_move_via_integer (mode, x, y, true);
3482       if (ret)
3483 	return ret;
3484     }
3485 
3486   return emit_move_complex_parts (x, y);
3487 }
3488 
3489 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3490    MODE is known to be MODE_CC.  Returns the last instruction emitted.  */
3491 
3492 static rtx_insn *
3493 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3494 {
3495   rtx_insn *ret;
3496 
3497   /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
3498   if (mode != CCmode)
3499     {
3500       enum insn_code code = optab_handler (mov_optab, CCmode);
3501       if (code != CODE_FOR_nothing)
3502 	{
3503 	  x = emit_move_change_mode (CCmode, mode, x, true);
3504 	  y = emit_move_change_mode (CCmode, mode, y, true);
3505 	  return emit_insn (GEN_FCN (code) (x, y));
3506 	}
3507     }
3508 
3509   /* Otherwise, find the MODE_INT mode of the same width.  */
3510   ret = emit_move_via_integer (mode, x, y, false);
3511   gcc_assert (ret != NULL);
3512   return ret;
3513 }
3514 
3515 /* Return true if word I of OP lies entirely in the
3516    undefined bits of a paradoxical subreg.  */
3517 
3518 static bool
3519 undefined_operand_subword_p (const_rtx op, int i)
3520 {
3521   machine_mode innermode, innermostmode;
3522   int offset;
3523   if (GET_CODE (op) != SUBREG)
3524     return false;
3525   innermode = GET_MODE (op);
3526   innermostmode = GET_MODE (SUBREG_REG (op));
3527   offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3528   /* The SUBREG_BYTE represents offset, as if the value were stored in
3529      memory, except for a paradoxical subreg where we define
3530      SUBREG_BYTE to be 0; undo this exception as in
3531      simplify_subreg.  */
3532   if (SUBREG_BYTE (op) == 0
3533       && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3534     {
3535       int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3536       if (WORDS_BIG_ENDIAN)
3537 	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3538       if (BYTES_BIG_ENDIAN)
3539 	offset += difference % UNITS_PER_WORD;
3540     }
3541   if (offset >= GET_MODE_SIZE (innermostmode)
3542       || offset <= -GET_MODE_SIZE (word_mode))
3543     return true;
3544   return false;
3545 }
3546 
3547 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3548    MODE is any multi-word or full-word mode that lacks a move_insn
3549    pattern.  Note that you will get better code if you define such
3550    patterns, even if they must turn into multiple assembler instructions.  */
3551 
3552 static rtx_insn *
3553 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3554 {
3555   rtx_insn *last_insn = 0;
3556   rtx_insn *seq;
3557   rtx inner;
3558   bool need_clobber;
3559   int i;
3560 
3561   gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3562 
3563   /* If X is a push on the stack, do the push now and replace
3564      X with a reference to the stack pointer.  */
3565   if (push_operand (x, mode))
3566     x = emit_move_resolve_push (mode, x);
3567 
3568   /* If we are in reload, see if either operand is a MEM whose address
3569      is scheduled for replacement.  */
3570   if (reload_in_progress && MEM_P (x)
3571       && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3572     x = replace_equiv_address_nv (x, inner);
3573   if (reload_in_progress && MEM_P (y)
3574       && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3575     y = replace_equiv_address_nv (y, inner);
3576 
3577   start_sequence ();
3578 
3579   need_clobber = false;
3580   for (i = 0;
3581        i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3582        i++)
3583     {
3584       rtx xpart = operand_subword (x, i, 1, mode);
3585       rtx ypart;
3586 
3587       /* Do not generate code for a move if it would come entirely
3588 	 from the undefined bits of a paradoxical subreg.  */
3589       if (undefined_operand_subword_p (y, i))
3590 	continue;
3591 
3592       ypart = operand_subword (y, i, 1, mode);
3593 
3594       /* If we can't get a part of Y, put Y into memory if it is a
3595 	 constant.  Otherwise, force it into a register.  Then we must
3596 	 be able to get a part of Y.  */
3597       if (ypart == 0 && CONSTANT_P (y))
3598 	{
3599 	  y = use_anchored_address (force_const_mem (mode, y));
3600 	  ypart = operand_subword (y, i, 1, mode);
3601 	}
3602       else if (ypart == 0)
3603 	ypart = operand_subword_force (y, i, mode);
3604 
3605       gcc_assert (xpart && ypart);
3606 
3607       need_clobber |= (GET_CODE (xpart) == SUBREG);
3608 
3609       last_insn = emit_move_insn (xpart, ypart);
3610     }
3611 
3612   seq = get_insns ();
3613   end_sequence ();
3614 
3615   /* Show the output dies here.  This is necessary for SUBREGs
3616      of pseudos since we cannot track their lifetimes correctly;
3617      hard regs shouldn't appear here except as return values.
3618      We never want to emit such a clobber after reload.  */
3619   if (x != y
3620       && ! (reload_in_progress || reload_completed)
3621       && need_clobber != 0)
3622     emit_clobber (x);
3623 
3624   emit_insn (seq);
3625 
3626   return last_insn;
3627 }
3628 
3629 /* Low level part of emit_move_insn.
3630    Called just like emit_move_insn, but assumes X and Y
3631    are basically valid.  */
3632 
3633 rtx_insn *
3634 emit_move_insn_1 (rtx x, rtx y)
3635 {
3636   machine_mode mode = GET_MODE (x);
3637   enum insn_code code;
3638 
3639   gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3640 
3641   code = optab_handler (mov_optab, mode);
3642   if (code != CODE_FOR_nothing)
3643     return emit_insn (GEN_FCN (code) (x, y));
3644 
3645   /* Expand complex moves by moving real part and imag part.  */
3646   if (COMPLEX_MODE_P (mode))
3647     return emit_move_complex (mode, x, y);
3648 
3649   if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3650       || ALL_FIXED_POINT_MODE_P (mode))
3651     {
3652       rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3653 
3654       /* If we can't find an integer mode, use multi words.  */
3655       if (result)
3656 	return result;
3657       else
3658 	return emit_move_multi_word (mode, x, y);
3659     }
3660 
3661   if (GET_MODE_CLASS (mode) == MODE_CC)
3662     return emit_move_ccmode (mode, x, y);
3663 
3664   /* Try using a move pattern for the corresponding integer mode.  This is
3665      only safe when simplify_subreg can convert MODE constants into integer
3666      constants.  At present, it can only do this reliably if the value
3667      fits within a HOST_WIDE_INT.  */
3668   if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3669     {
3670       rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3671 
3672       if (ret)
3673 	{
3674 	  if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3675 	    return ret;
3676 	}
3677     }
3678 
3679   return emit_move_multi_word (mode, x, y);
3680 }
3681 
3682 /* Generate code to copy Y into X.
3683    Both Y and X must have the same mode, except that
3684    Y can be a constant with VOIDmode.
3685    This mode cannot be BLKmode; use emit_block_move for that.
3686 
3687    Return the last instruction emitted.  */
3688 
3689 rtx_insn *
3690 emit_move_insn (rtx x, rtx y)
3691 {
3692   machine_mode mode = GET_MODE (x);
3693   rtx y_cst = NULL_RTX;
3694   rtx_insn *last_insn;
3695   rtx set;
3696 
3697   gcc_assert (mode != BLKmode
3698 	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3699 
3700   if (CONSTANT_P (y))
3701     {
3702       if (optimize
3703 	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3704 	  && (last_insn = compress_float_constant (x, y)))
3705 	return last_insn;
3706 
3707       y_cst = y;
3708 
3709       if (!targetm.legitimate_constant_p (mode, y))
3710 	{
3711 	  y = force_const_mem (mode, y);
3712 
3713 	  /* If the target's cannot_force_const_mem prevented the spill,
3714 	     assume that the target's move expanders will also take care
3715 	     of the non-legitimate constant.  */
3716 	  if (!y)
3717 	    y = y_cst;
3718 	  else
3719 	    y = use_anchored_address (y);
3720 	}
3721     }
3722 
3723   /* If X or Y are memory references, verify that their addresses are valid
3724      for the machine.  */
3725   if (MEM_P (x)
3726       && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3727 					 MEM_ADDR_SPACE (x))
3728 	  && ! push_operand (x, GET_MODE (x))))
3729     x = validize_mem (x);
3730 
3731   if (MEM_P (y)
3732       && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3733 					MEM_ADDR_SPACE (y)))
3734     y = validize_mem (y);
3735 
3736   gcc_assert (mode != BLKmode);
3737 
3738   last_insn = emit_move_insn_1 (x, y);
3739 
3740   if (y_cst && REG_P (x)
3741       && (set = single_set (last_insn)) != NULL_RTX
3742       && SET_DEST (set) == x
3743       && ! rtx_equal_p (y_cst, SET_SRC (set)))
3744     set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3745 
3746   return last_insn;
3747 }
3748 
3749 /* Generate the body of an instruction to copy Y into X.
3750    It may be a list of insns, if one insn isn't enough.  */
3751 
3752 rtx_insn *
3753 gen_move_insn (rtx x, rtx y)
3754 {
3755   rtx_insn *seq;
3756 
3757   start_sequence ();
3758   emit_move_insn_1 (x, y);
3759   seq = get_insns ();
3760   end_sequence ();
3761   return seq;
3762 }
3763 
3764 /* If Y is representable exactly in a narrower mode, and the target can
3765    perform the extension directly from constant or memory, then emit the
3766    move as an extension.  */
3767 
3768 static rtx_insn *
3769 compress_float_constant (rtx x, rtx y)
3770 {
3771   machine_mode dstmode = GET_MODE (x);
3772   machine_mode orig_srcmode = GET_MODE (y);
3773   machine_mode srcmode;
3774   const REAL_VALUE_TYPE *r;
3775   int oldcost, newcost;
3776   bool speed = optimize_insn_for_speed_p ();
3777 
3778   r = CONST_DOUBLE_REAL_VALUE (y);
3779 
3780   if (targetm.legitimate_constant_p (dstmode, y))
3781     oldcost = set_src_cost (y, orig_srcmode, speed);
3782   else
3783     oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);
3784 
3785   for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3786        srcmode != orig_srcmode;
3787        srcmode = GET_MODE_WIDER_MODE (srcmode))
3788     {
3789       enum insn_code ic;
3790       rtx trunc_y;
3791       rtx_insn *last_insn;
3792 
3793       /* Skip if the target can't extend this way.  */
3794       ic = can_extend_p (dstmode, srcmode, 0);
3795       if (ic == CODE_FOR_nothing)
3796 	continue;
3797 
3798       /* Skip if the narrowed value isn't exact.  */
3799       if (! exact_real_truncate (srcmode, r))
3800 	continue;
3801 
3802       trunc_y = const_double_from_real_value (*r, srcmode);
3803 
3804       if (targetm.legitimate_constant_p (srcmode, trunc_y))
3805 	{
3806 	  /* Skip if the target needs extra instructions to perform
3807 	     the extension.  */
3808 	  if (!insn_operand_matches (ic, 1, trunc_y))
3809 	    continue;
3810 	  /* This is valid, but may not be cheaper than the original. */
3811 	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3812 				  dstmode, speed);
3813 	  if (oldcost < newcost)
3814 	    continue;
3815 	}
3816       else if (float_extend_from_mem[dstmode][srcmode])
3817 	{
3818 	  trunc_y = force_const_mem (srcmode, trunc_y);
3819 	  /* This is valid, but may not be cheaper than the original. */
3820 	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3821 				  dstmode, speed);
3822 	  if (oldcost < newcost)
3823 	    continue;
3824 	  trunc_y = validize_mem (trunc_y);
3825 	}
3826       else
3827 	continue;
3828 
3829       /* For CSE's benefit, force the compressed constant pool entry
3830 	 into a new pseudo.  This constant may be used in different modes,
3831 	 and if not, combine will put things back together for us.  */
3832       trunc_y = force_reg (srcmode, trunc_y);
3833 
3834       /* If x is a hard register, perform the extension into a pseudo,
3835 	 so that e.g. stack realignment code is aware of it.  */
3836       rtx target = x;
3837       if (REG_P (x) && HARD_REGISTER_P (x))
3838 	target = gen_reg_rtx (dstmode);
3839 
3840       emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3841       last_insn = get_last_insn ();
3842 
3843       if (REG_P (target))
3844 	set_unique_reg_note (last_insn, REG_EQUAL, y);
3845 
3846       if (target != x)
3847 	return emit_move_insn (x, target);
3848       return last_insn;
3849     }
3850 
3851   return NULL;
3852 }
3853 
3854 /* Pushing data onto the stack.  */
3855 
3856 /* Push a block of length SIZE (perhaps variable)
3857    and return an rtx to address the beginning of the block.
3858    The value may be virtual_outgoing_args_rtx.
3859 
3860    EXTRA is the number of bytes of padding to push in addition to SIZE.
3861    BELOW nonzero means this padding comes at low addresses;
3862    otherwise, the padding comes at high addresses.  */
3863 
3864 rtx
3865 push_block (rtx size, int extra, int below)
3866 {
3867   rtx temp;
3868 
3869   size = convert_modes (Pmode, ptr_mode, size, 1);
3870   if (CONSTANT_P (size))
3871     anti_adjust_stack (plus_constant (Pmode, size, extra));
3872   else if (REG_P (size) && extra == 0)
3873     anti_adjust_stack (size);
3874   else
3875     {
3876       temp = copy_to_mode_reg (Pmode, size);
3877       if (extra != 0)
3878 	temp = expand_binop (Pmode, add_optab, temp,
3879 			     gen_int_mode (extra, Pmode),
3880 			     temp, 0, OPTAB_LIB_WIDEN);
3881       anti_adjust_stack (temp);
3882     }
3883 
3884   if (STACK_GROWS_DOWNWARD)
3885     {
3886       temp = virtual_outgoing_args_rtx;
3887       if (extra != 0 && below)
3888 	temp = plus_constant (Pmode, temp, extra);
3889     }
3890   else
3891     {
3892       if (CONST_INT_P (size))
3893 	temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3894 			      -INTVAL (size) - (below ? 0 : extra));
3895       else if (extra != 0 && !below)
3896 	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3897 			     negate_rtx (Pmode, plus_constant (Pmode, size,
3898 							       extra)));
3899       else
3900 	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3901 			     negate_rtx (Pmode, size));
3902     }
3903 
3904   return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3905 }
3906 
3907 /* A utility routine that returns the base of an auto-inc memory, or NULL.  */
3908 
3909 static rtx
3910 mem_autoinc_base (rtx mem)
3911 {
3912   if (MEM_P (mem))
3913     {
3914       rtx addr = XEXP (mem, 0);
3915       if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3916 	return XEXP (addr, 0);
3917     }
3918   return NULL;
3919 }
3920 
3921 /* A utility routine used here, in reload, and in try_split.  The insns
3922    after PREV up to and including LAST are known to adjust the stack,
3923    with a final value of END_ARGS_SIZE.  Iterate backward from LAST
3924    placing notes as appropriate.  PREV may be NULL, indicating the
3925    entire insn sequence prior to LAST should be scanned.
3926 
3927    The set of allowed stack pointer modifications is small:
3928      (1) One or more auto-inc style memory references (aka pushes),
3929      (2) One or more addition/subtraction with the SP as destination,
3930      (3) A single move insn with the SP as destination,
3931      (4) A call_pop insn,
3932      (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3933 
3934    Insns in the sequence that do not modify the SP are ignored,
3935    except for noreturn calls.
3936 
3937    The return value is the amount of adjustment that can be trivially
3938    verified, via immediate operand or auto-inc.  If the adjustment
3939    cannot be trivially extracted, the return value is INT_MIN.  */
3940 
3941 HOST_WIDE_INT
3942 find_args_size_adjust (rtx_insn *insn)
3943 {
3944   rtx dest, set, pat;
3945   int i;
3946 
3947   pat = PATTERN (insn);
3948   set = NULL;
3949 
3950   /* Look for a call_pop pattern.  */
3951   if (CALL_P (insn))
3952     {
3953       /* We have to allow non-call_pop patterns for the case
3954 	 of emit_single_push_insn of a TLS address.  */
3955       if (GET_CODE (pat) != PARALLEL)
3956 	return 0;
3957 
3958       /* All call_pop have a stack pointer adjust in the parallel.
3959 	 The call itself is always first, and the stack adjust is
3960 	 usually last, so search from the end.  */
3961       for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3962 	{
3963 	  set = XVECEXP (pat, 0, i);
3964 	  if (GET_CODE (set) != SET)
3965 	    continue;
3966 	  dest = SET_DEST (set);
3967 	  if (dest == stack_pointer_rtx)
3968 	    break;
3969 	}
3970       /* We'd better have found the stack pointer adjust.  */
3971       if (i == 0)
3972 	return 0;
3973       /* Fall through to process the extracted SET and DEST
3974 	 as if it was a standalone insn.  */
3975     }
3976   else if (GET_CODE (pat) == SET)
3977     set = pat;
3978   else if ((set = single_set (insn)) != NULL)
3979     ;
3980   else if (GET_CODE (pat) == PARALLEL)
3981     {
3982       /* ??? Some older ports use a parallel with a stack adjust
3983 	 and a store for a PUSH_ROUNDING pattern, rather than a
3984 	 PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
3985       /* ??? See h8300 and m68k, pushqi1.  */
3986       for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3987 	{
3988 	  set = XVECEXP (pat, 0, i);
3989 	  if (GET_CODE (set) != SET)
3990 	    continue;
3991 	  dest = SET_DEST (set);
3992 	  if (dest == stack_pointer_rtx)
3993 	    break;
3994 
3995 	  /* We do not expect an auto-inc of the sp in the parallel.  */
3996 	  gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3997 	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3998 			       != stack_pointer_rtx);
3999 	}
4000       if (i < 0)
4001 	return 0;
4002     }
4003   else
4004     return 0;
4005 
4006   dest = SET_DEST (set);
4007 
4008   /* Look for direct modifications of the stack pointer.  */
4009   if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
4010     {
4011       /* Look for a trivial adjustment, otherwise assume nothing.  */
4012       /* Note that the SPU restore_stack_block pattern refers to
4013 	 the stack pointer in V4SImode.  Consider that non-trivial.  */
4014       if (SCALAR_INT_MODE_P (GET_MODE (dest))
4015 	  && GET_CODE (SET_SRC (set)) == PLUS
4016 	  && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
4017 	  && CONST_INT_P (XEXP (SET_SRC (set), 1)))
4018 	return INTVAL (XEXP (SET_SRC (set), 1));
4019       /* ??? Reload can generate no-op moves, which will be cleaned
4020 	 up later.  Recognize it and continue searching.  */
4021       else if (rtx_equal_p (dest, SET_SRC (set)))
4022 	return 0;
4023       else
4024 	return HOST_WIDE_INT_MIN;
4025     }
4026   else
4027     {
4028       rtx mem, addr;
4029 
4030       /* Otherwise only think about autoinc patterns.  */
4031       if (mem_autoinc_base (dest) == stack_pointer_rtx)
4032 	{
4033 	  mem = dest;
4034 	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
4035 			       != stack_pointer_rtx);
4036 	}
4037       else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
4038 	mem = SET_SRC (set);
4039       else
4040 	return 0;
4041 
4042       addr = XEXP (mem, 0);
4043       switch (GET_CODE (addr))
4044 	{
4045 	case PRE_INC:
4046 	case POST_INC:
4047 	  return GET_MODE_SIZE (GET_MODE (mem));
4048 	case PRE_DEC:
4049 	case POST_DEC:
4050 	  return -GET_MODE_SIZE (GET_MODE (mem));
4051 	case PRE_MODIFY:
4052 	case POST_MODIFY:
4053 	  addr = XEXP (addr, 1);
4054 	  gcc_assert (GET_CODE (addr) == PLUS);
4055 	  gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
4056 	  gcc_assert (CONST_INT_P (XEXP (addr, 1)));
4057 	  return INTVAL (XEXP (addr, 1));
4058 	default:
4059 	  gcc_unreachable ();
4060 	}
4061     }
4062 }
4063 
4064 int
4065 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
4066 {
4067   int args_size = end_args_size;
4068   bool saw_unknown = false;
4069   rtx_insn *insn;
4070 
4071   for (insn = last; insn != prev; insn = PREV_INSN (insn))
4072     {
4073       HOST_WIDE_INT this_delta;
4074 
4075       if (!NONDEBUG_INSN_P (insn))
4076 	continue;
4077 
4078       this_delta = find_args_size_adjust (insn);
4079       if (this_delta == 0)
4080 	{
4081 	  if (!CALL_P (insn)
4082 	      || ACCUMULATE_OUTGOING_ARGS
4083 	      || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
4084 	    continue;
4085 	}
4086 
4087       gcc_assert (!saw_unknown);
4088       if (this_delta == HOST_WIDE_INT_MIN)
4089 	saw_unknown = true;
4090 
4091       add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
4092       if (STACK_GROWS_DOWNWARD)
4093 	this_delta = -(unsigned HOST_WIDE_INT) this_delta;
4094 
4095       args_size -= this_delta;
4096     }
4097 
4098   return saw_unknown ? INT_MIN : args_size;
4099 }
4100 
4101 #ifdef PUSH_ROUNDING
4102 /* Emit single push insn.  */
4103 
4104 static void
4105 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
4106 {
4107   rtx dest_addr;
4108   unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
4109   rtx dest;
4110   enum insn_code icode;
4111 
4112   stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4113   /* If there is push pattern, use it.  Otherwise try old way of throwing
4114      MEM representing push operation to move expander.  */
4115   icode = optab_handler (push_optab, mode);
4116   if (icode != CODE_FOR_nothing)
4117     {
4118       struct expand_operand ops[1];
4119 
4120       create_input_operand (&ops[0], x, mode);
4121       if (maybe_expand_insn (icode, 1, ops))
4122 	return;
4123     }
4124   if (GET_MODE_SIZE (mode) == rounded_size)
4125     dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4126   /* If we are to pad downward, adjust the stack pointer first and
4127      then store X into the stack location using an offset.  This is
4128      because emit_move_insn does not know how to pad; it does not have
4129      access to type.  */
4130   else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4131     {
4132       unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4133       HOST_WIDE_INT offset;
4134 
4135       emit_move_insn (stack_pointer_rtx,
4136 		      expand_binop (Pmode,
4137 				    STACK_GROWS_DOWNWARD ? sub_optab
4138 				    : add_optab,
4139 				    stack_pointer_rtx,
4140 				    gen_int_mode (rounded_size, Pmode),
4141 				    NULL_RTX, 0, OPTAB_LIB_WIDEN));
4142 
4143       offset = (HOST_WIDE_INT) padding_size;
4144       if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
4145 	/* We have already decremented the stack pointer, so get the
4146 	   previous value.  */
4147 	offset += (HOST_WIDE_INT) rounded_size;
4148 
4149       if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
4150 	/* We have already incremented the stack pointer, so get the
4151 	   previous value.  */
4152 	offset -= (HOST_WIDE_INT) rounded_size;
4153 
4154       dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4155 				gen_int_mode (offset, Pmode));
4156     }
4157   else
4158     {
4159       if (STACK_GROWS_DOWNWARD)
4160 	/* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
4161 	dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4162 				  gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4163 						Pmode));
4164       else
4165 	/* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
4166 	dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4167 				  gen_int_mode (rounded_size, Pmode));
4168 
4169       dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4170     }
4171 
4172   dest = gen_rtx_MEM (mode, dest_addr);
4173 
4174   if (type != 0)
4175     {
4176       set_mem_attributes (dest, type, 1);
4177 
4178       if (cfun->tail_call_marked)
4179 	/* Function incoming arguments may overlap with sibling call
4180 	   outgoing arguments and we cannot allow reordering of reads
4181 	   from function arguments with stores to outgoing arguments
4182 	   of sibling calls.  */
4183 	set_mem_alias_set (dest, 0);
4184     }
4185   emit_move_insn (dest, x);
4186 }
4187 
4188 /* Emit and annotate a single push insn.  */
4189 
4190 static void
4191 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4192 {
4193   int delta, old_delta = stack_pointer_delta;
4194   rtx_insn *prev = get_last_insn ();
4195   rtx_insn *last;
4196 
4197   emit_single_push_insn_1 (mode, x, type);
4198 
4199   last = get_last_insn ();
4200 
4201   /* Notice the common case where we emitted exactly one insn.  */
4202   if (PREV_INSN (last) == prev)
4203     {
4204       add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4205       return;
4206     }
4207 
4208   delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4209   gcc_assert (delta == INT_MIN || delta == old_delta);
4210 }
4211 #endif
4212 
4213 /* If reading SIZE bytes from X will end up reading from
4214    Y return the number of bytes that overlap.  Return -1
4215    if there is no overlap or -2 if we can't determine
4216    (for example when X and Y have different base registers).  */
4217 
4218 static int
4219 memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
4220 {
4221   rtx tmp = plus_constant (Pmode, x, size);
4222   rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);
4223 
4224   if (!CONST_INT_P (sub))
4225     return -2;
4226 
4227   HOST_WIDE_INT val = INTVAL (sub);
4228 
4229   return IN_RANGE (val, 1, size) ? val : -1;
4230 }
4231 
4232 /* Generate code to push X onto the stack, assuming it has mode MODE and
4233    type TYPE.
4234    MODE is redundant except when X is a CONST_INT (since they don't
4235    carry mode info).
4236    SIZE is an rtx for the size of data to be copied (in bytes),
4237    needed only if X is BLKmode.
4238    Return true if successful.  May return false if asked to push a
4239    partial argument during a sibcall optimization (as specified by
4240    SIBCALL_P) and the incoming and outgoing pointers cannot be shown
4241    to not overlap.
4242 
4243    ALIGN (in bits) is maximum alignment we can assume.
4244 
4245    If PARTIAL and REG are both nonzero, then copy that many of the first
4246    bytes of X into registers starting with REG, and push the rest of X.
4247    The amount of space pushed is decreased by PARTIAL bytes.
4248    REG must be a hard register in this case.
4249    If REG is zero but PARTIAL is not, take any all others actions for an
4250    argument partially in registers, but do not actually load any
4251    registers.
4252 
4253    EXTRA is the amount in bytes of extra space to leave next to this arg.
4254    This is ignored if an argument block has already been allocated.
4255 
4256    On a machine that lacks real push insns, ARGS_ADDR is the address of
4257    the bottom of the argument block for this call.  We use indexing off there
4258    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when a
4259    argument block has not been preallocated.
4260 
4261    ARGS_SO_FAR is the size of args previously pushed for this call.
4262 
4263    REG_PARM_STACK_SPACE is nonzero if functions require stack space
4264    for arguments passed in registers.  If nonzero, it will be the number
4265    of bytes required.  */
4266 
4267 bool
4268 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4269 		unsigned int align, int partial, rtx reg, int extra,
4270 		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4271 		rtx alignment_pad, bool sibcall_p)
4272 {
4273   rtx xinner;
4274   enum direction stack_direction = STACK_GROWS_DOWNWARD ? downward : upward;
4275 
4276   /* Decide where to pad the argument: `downward' for below,
4277      `upward' for above, or `none' for don't pad it.
4278      Default is below for small data on big-endian machines; else above.  */
4279   enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4280 
4281   /* Invert direction if stack is post-decrement.
4282      FIXME: why?  */
4283   if (STACK_PUSH_CODE == POST_DEC)
4284     if (where_pad != none)
4285       where_pad = (where_pad == downward ? upward : downward);
4286 
4287   xinner = x;
4288 
4289   int nregs = partial / UNITS_PER_WORD;
4290   rtx *tmp_regs = NULL;
4291   int overlapping = 0;
4292 
4293   if (mode == BLKmode
4294       || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)
4295 	  && type != NULL_TREE))
4296     {
4297       /* Copy a block into the stack, entirely or partially.  */
4298 
4299       rtx temp;
4300       int used;
4301       int offset;
4302       int skip;
4303 
4304       offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4305       used = partial - offset;
4306 
4307       if (mode != BLKmode)
4308 	{
4309 	  /* A value is to be stored in an insufficiently aligned
4310 	     stack slot; copy via a suitably aligned slot if
4311 	     necessary.  */
4312 	  size = GEN_INT (GET_MODE_SIZE (mode));
4313 	  if (!MEM_P (xinner))
4314 	    {
4315 	      temp = assign_temp (type, 1, 1);
4316 	      emit_move_insn (temp, xinner);
4317 	      xinner = temp;
4318 	    }
4319 	}
4320 
4321       gcc_assert (size);
4322 
4323       /* USED is now the # of bytes we need not copy to the stack
4324 	 because registers will take care of them.  */
4325 
4326       if (partial != 0)
4327 	xinner = adjust_address (xinner, BLKmode, used);
4328 
4329       /* If the partial register-part of the arg counts in its stack size,
4330 	 skip the part of stack space corresponding to the registers.
4331 	 Otherwise, start copying to the beginning of the stack space,
4332 	 by setting SKIP to 0.  */
4333       skip = (reg_parm_stack_space == 0) ? 0 : used;
4334 
4335 #ifdef PUSH_ROUNDING
4336       /* Do it with several push insns if that doesn't take lots of insns
4337 	 and if there is no difficulty with push insns that skip bytes
4338 	 on the stack for alignment purposes.  */
4339       if (args_addr == 0
4340 	  && PUSH_ARGS
4341 	  && CONST_INT_P (size)
4342 	  && skip == 0
4343 	  && MEM_ALIGN (xinner) >= align
4344 	  && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4345 	  /* Here we avoid the case of a structure whose weak alignment
4346 	     forces many pushes of a small amount of data,
4347 	     and such small pushes do rounding that causes trouble.  */
4348 	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4349 	      || align >= BIGGEST_ALIGNMENT
4350 	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4351 		  == (align / BITS_PER_UNIT)))
4352 	  && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4353 	{
4354 	  /* Push padding now if padding above and stack grows down,
4355 	     or if padding below and stack grows up.
4356 	     But if space already allocated, this has already been done.  */
4357 	  if (extra && args_addr == 0
4358 	      && where_pad != none && where_pad != stack_direction)
4359 	    anti_adjust_stack (GEN_INT (extra));
4360 
4361 	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4362 	}
4363       else
4364 #endif /* PUSH_ROUNDING  */
4365 	{
4366 	  rtx target;
4367 
4368 	  /* Otherwise make space on the stack and copy the data
4369 	     to the address of that space.  */
4370 
4371 	  /* Deduct words put into registers from the size we must copy.  */
4372 	  if (partial != 0)
4373 	    {
4374 	      if (CONST_INT_P (size))
4375 		size = GEN_INT (INTVAL (size) - used);
4376 	      else
4377 		size = expand_binop (GET_MODE (size), sub_optab, size,
4378 				     gen_int_mode (used, GET_MODE (size)),
4379 				     NULL_RTX, 0, OPTAB_LIB_WIDEN);
4380 	    }
4381 
4382 	  /* Get the address of the stack space.
4383 	     In this case, we do not deal with EXTRA separately.
4384 	     A single stack adjust will do.  */
4385 	  if (! args_addr)
4386 	    {
4387 	      temp = push_block (size, extra, where_pad == downward);
4388 	      extra = 0;
4389 	    }
4390 	  else if (CONST_INT_P (args_so_far))
4391 	    temp = memory_address (BLKmode,
4392 				   plus_constant (Pmode, args_addr,
4393 						  skip + INTVAL (args_so_far)));
4394 	  else
4395 	    temp = memory_address (BLKmode,
4396 				   plus_constant (Pmode,
4397 						  gen_rtx_PLUS (Pmode,
4398 								args_addr,
4399 								args_so_far),
4400 						  skip));
4401 
4402 	  if (!ACCUMULATE_OUTGOING_ARGS)
4403 	    {
4404 	      /* If the source is referenced relative to the stack pointer,
4405 		 copy it to another register to stabilize it.  We do not need
4406 		 to do this if we know that we won't be changing sp.  */
4407 
4408 	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4409 		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4410 		temp = copy_to_reg (temp);
4411 	    }
4412 
4413 	  target = gen_rtx_MEM (BLKmode, temp);
4414 
4415 	  /* We do *not* set_mem_attributes here, because incoming arguments
4416 	     may overlap with sibling call outgoing arguments and we cannot
4417 	     allow reordering of reads from function arguments with stores
4418 	     to outgoing arguments of sibling calls.  We do, however, want
4419 	     to record the alignment of the stack slot.  */
4420 	  /* ALIGN may well be better aligned than TYPE, e.g. due to
4421 	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
4422 	  set_mem_align (target, align);
4423 
4424 	  /* If part should go in registers and pushing to that part would
4425 	     overwrite some of the values that need to go into regs, load the
4426 	     overlapping values into temporary pseudos to be moved into the hard
4427 	     regs at the end after the stack pushing has completed.
4428 	     We cannot load them directly into the hard regs here because
4429 	     they can be clobbered by the block move expansions.
4430 	     See PR 65358.  */
4431 
4432 	  if (partial > 0 && reg != 0 && mode == BLKmode
4433 	      && GET_CODE (reg) != PARALLEL)
4434 	    {
4435 	      overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
4436 	      if (overlapping > 0)
4437 	        {
4438 		  gcc_assert (overlapping % UNITS_PER_WORD == 0);
4439 		  overlapping /= UNITS_PER_WORD;
4440 
4441 		  tmp_regs = XALLOCAVEC (rtx, overlapping);
4442 
4443 		  for (int i = 0; i < overlapping; i++)
4444 		    tmp_regs[i] = gen_reg_rtx (word_mode);
4445 
4446 		  for (int i = 0; i < overlapping; i++)
4447 		    emit_move_insn (tmp_regs[i],
4448 				    operand_subword_force (target, i, mode));
4449 	        }
4450 	      else if (overlapping == -1)
4451 		overlapping = 0;
4452 	      /* Could not determine whether there is overlap.
4453 	         Fail the sibcall.  */
4454 	      else
4455 		{
4456 		  overlapping = 0;
4457 		  if (sibcall_p)
4458 		    return false;
4459 		}
4460 	    }
4461 	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4462 	}
4463     }
4464   else if (partial > 0)
4465     {
4466       /* Scalar partly in registers.  */
4467 
4468       int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4469       int i;
4470       int not_stack;
4471       /* # bytes of start of argument
4472 	 that we must make space for but need not store.  */
4473       int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4474       int args_offset = INTVAL (args_so_far);
4475       int skip;
4476 
4477       /* Push padding now if padding above and stack grows down,
4478 	 or if padding below and stack grows up.
4479 	 But if space already allocated, this has already been done.  */
4480       if (extra && args_addr == 0
4481 	  && where_pad != none && where_pad != stack_direction)
4482 	anti_adjust_stack (GEN_INT (extra));
4483 
4484       /* If we make space by pushing it, we might as well push
4485 	 the real data.  Otherwise, we can leave OFFSET nonzero
4486 	 and leave the space uninitialized.  */
4487       if (args_addr == 0)
4488 	offset = 0;
4489 
4490       /* Now NOT_STACK gets the number of words that we don't need to
4491 	 allocate on the stack.  Convert OFFSET to words too.  */
4492       not_stack = (partial - offset) / UNITS_PER_WORD;
4493       offset /= UNITS_PER_WORD;
4494 
4495       /* If the partial register-part of the arg counts in its stack size,
4496 	 skip the part of stack space corresponding to the registers.
4497 	 Otherwise, start copying to the beginning of the stack space,
4498 	 by setting SKIP to 0.  */
4499       skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4500 
4501       if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4502 	x = validize_mem (force_const_mem (mode, x));
4503 
4504       /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4505 	 SUBREGs of such registers are not allowed.  */
4506       if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4507 	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4508 	x = copy_to_reg (x);
4509 
4510       /* Loop over all the words allocated on the stack for this arg.  */
4511       /* We can do it by words, because any scalar bigger than a word
4512 	 has a size a multiple of a word.  */
4513       for (i = size - 1; i >= not_stack; i--)
4514 	if (i >= not_stack + offset)
4515 	  if (!emit_push_insn (operand_subword_force (x, i, mode),
4516 			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4517 			  0, args_addr,
4518 			  GEN_INT (args_offset + ((i - not_stack + skip)
4519 						  * UNITS_PER_WORD)),
4520 			  reg_parm_stack_space, alignment_pad, sibcall_p))
4521 	    return false;
4522     }
4523   else
4524     {
4525       rtx addr;
4526       rtx dest;
4527 
4528       /* Push padding now if padding above and stack grows down,
4529 	 or if padding below and stack grows up.
4530 	 But if space already allocated, this has already been done.  */
4531       if (extra && args_addr == 0
4532 	  && where_pad != none && where_pad != stack_direction)
4533 	anti_adjust_stack (GEN_INT (extra));
4534 
4535 #ifdef PUSH_ROUNDING
4536       if (args_addr == 0 && PUSH_ARGS)
4537 	emit_single_push_insn (mode, x, type);
4538       else
4539 #endif
4540 	{
4541 	  if (CONST_INT_P (args_so_far))
4542 	    addr
4543 	      = memory_address (mode,
4544 				plus_constant (Pmode, args_addr,
4545 					       INTVAL (args_so_far)));
4546 	  else
4547 	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4548 						       args_so_far));
4549 	  dest = gen_rtx_MEM (mode, addr);
4550 
4551 	  /* We do *not* set_mem_attributes here, because incoming arguments
4552 	     may overlap with sibling call outgoing arguments and we cannot
4553 	     allow reordering of reads from function arguments with stores
4554 	     to outgoing arguments of sibling calls.  We do, however, want
4555 	     to record the alignment of the stack slot.  */
4556 	  /* ALIGN may well be better aligned than TYPE, e.g. due to
4557 	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
4558 	  set_mem_align (dest, align);
4559 
4560 	  emit_move_insn (dest, x);
4561 	}
4562     }
4563 
4564   /* Move the partial arguments into the registers and any overlapping
4565      values that we moved into the pseudos in tmp_regs.  */
4566   if (partial > 0 && reg != 0)
4567     {
4568       /* Handle calls that pass values in multiple non-contiguous locations.
4569 	 The Irix 6 ABI has examples of this.  */
4570       if (GET_CODE (reg) == PARALLEL)
4571 	emit_group_load (reg, x, type, -1);
4572       else
4573         {
4574 	  gcc_assert (partial % UNITS_PER_WORD == 0);
4575 	  move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);
4576 
4577 	  for (int i = 0; i < overlapping; i++)
4578 	    emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
4579 						    + nregs - overlapping + i),
4580 			    tmp_regs[i]);
4581 
4582 	}
4583     }
4584 
4585   if (extra && args_addr == 0 && where_pad == stack_direction)
4586     anti_adjust_stack (GEN_INT (extra));
4587 
4588   if (alignment_pad && args_addr == 0)
4589     anti_adjust_stack (alignment_pad);
4590 
4591   return true;
4592 }
4593 
4594 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4595    operations.  */
4596 
4597 static rtx
4598 get_subtarget (rtx x)
4599 {
4600   return (optimize
4601           || x == 0
4602 	   /* Only registers can be subtargets.  */
4603 	   || !REG_P (x)
4604 	   /* Don't use hard regs to avoid extending their life.  */
4605 	   || REGNO (x) < FIRST_PSEUDO_REGISTER
4606 	  ? 0 : x);
4607 }
4608 
4609 /* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
4610    FIELD is a bitfield.  Returns true if the optimization was successful,
4611    and there's nothing else to do.  */
4612 
4613 static bool
4614 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4615 				 unsigned HOST_WIDE_INT bitpos,
4616 				 unsigned HOST_WIDE_INT bitregion_start,
4617 				 unsigned HOST_WIDE_INT bitregion_end,
4618 				 machine_mode mode1, rtx str_rtx,
4619 				 tree to, tree src, bool reverse)
4620 {
4621   machine_mode str_mode = GET_MODE (str_rtx);
4622   unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4623   tree op0, op1;
4624   rtx value, result;
4625   optab binop;
4626   gimple *srcstmt;
4627   enum tree_code code;
4628 
4629   if (mode1 != VOIDmode
4630       || bitsize >= BITS_PER_WORD
4631       || str_bitsize > BITS_PER_WORD
4632       || TREE_SIDE_EFFECTS (to)
4633       || TREE_THIS_VOLATILE (to))
4634     return false;
4635 
4636   STRIP_NOPS (src);
4637   if (TREE_CODE (src) != SSA_NAME)
4638     return false;
4639   if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4640     return false;
4641 
4642   srcstmt = get_gimple_for_ssa_name (src);
4643   if (!srcstmt
4644       || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4645     return false;
4646 
4647   code = gimple_assign_rhs_code (srcstmt);
4648 
4649   op0 = gimple_assign_rhs1 (srcstmt);
4650 
4651   /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4652      to find its initialization.  Hopefully the initialization will
4653      be from a bitfield load.  */
4654   if (TREE_CODE (op0) == SSA_NAME)
4655     {
4656       gimple *op0stmt = get_gimple_for_ssa_name (op0);
4657 
4658       /* We want to eventually have OP0 be the same as TO, which
4659 	 should be a bitfield.  */
4660       if (!op0stmt
4661 	  || !is_gimple_assign (op0stmt)
4662 	  || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4663 	return false;
4664       op0 = gimple_assign_rhs1 (op0stmt);
4665     }
4666 
4667   op1 = gimple_assign_rhs2 (srcstmt);
4668 
4669   if (!operand_equal_p (to, op0, 0))
4670     return false;
4671 
4672   if (MEM_P (str_rtx))
4673     {
4674       unsigned HOST_WIDE_INT offset1;
4675 
4676       if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4677 	str_mode = word_mode;
4678       str_mode = get_best_mode (bitsize, bitpos,
4679 				bitregion_start, bitregion_end,
4680 				MEM_ALIGN (str_rtx), str_mode, 0);
4681       if (str_mode == VOIDmode)
4682 	return false;
4683       str_bitsize = GET_MODE_BITSIZE (str_mode);
4684 
4685       offset1 = bitpos;
4686       bitpos %= str_bitsize;
4687       offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4688       str_rtx = adjust_address (str_rtx, str_mode, offset1);
4689     }
4690   else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4691     return false;
4692   else
4693     gcc_assert (!reverse);
4694 
4695   /* If the bit field covers the whole REG/MEM, store_field
4696      will likely generate better code.  */
4697   if (bitsize >= str_bitsize)
4698     return false;
4699 
4700   /* We can't handle fields split across multiple entities.  */
4701   if (bitpos + bitsize > str_bitsize)
4702     return false;
4703 
4704   if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4705     bitpos = str_bitsize - bitpos - bitsize;
4706 
4707   switch (code)
4708     {
4709     case PLUS_EXPR:
4710     case MINUS_EXPR:
4711       /* For now, just optimize the case of the topmost bitfield
4712 	 where we don't need to do any masking and also
4713 	 1 bit bitfields where xor can be used.
4714 	 We might win by one instruction for the other bitfields
4715 	 too if insv/extv instructions aren't used, so that
4716 	 can be added later.  */
4717       if ((reverse || bitpos + bitsize != str_bitsize)
4718 	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4719 	break;
4720 
4721       value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4722       value = convert_modes (str_mode,
4723 			     TYPE_MODE (TREE_TYPE (op1)), value,
4724 			     TYPE_UNSIGNED (TREE_TYPE (op1)));
4725 
4726       /* We may be accessing data outside the field, which means
4727 	 we can alias adjacent data.  */
4728       if (MEM_P (str_rtx))
4729 	{
4730 	  str_rtx = shallow_copy_rtx (str_rtx);
4731 	  set_mem_alias_set (str_rtx, 0);
4732 	  set_mem_expr (str_rtx, 0);
4733 	}
4734 
4735       if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
4736 	{
4737 	  value = expand_and (str_mode, value, const1_rtx, NULL);
4738 	  binop = xor_optab;
4739 	}
4740       else
4741 	binop = code == PLUS_EXPR ? add_optab : sub_optab;
4742 
4743       value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4744       if (reverse)
4745 	value = flip_storage_order (str_mode, value);
4746       result = expand_binop (str_mode, binop, str_rtx,
4747 			     value, str_rtx, 1, OPTAB_WIDEN);
4748       if (result != str_rtx)
4749 	emit_move_insn (str_rtx, result);
4750       return true;
4751 
4752     case BIT_IOR_EXPR:
4753     case BIT_XOR_EXPR:
4754       if (TREE_CODE (op1) != INTEGER_CST)
4755 	break;
4756       value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4757       value = convert_modes (str_mode,
4758 			     TYPE_MODE (TREE_TYPE (op1)), value,
4759 			     TYPE_UNSIGNED (TREE_TYPE (op1)));
4760 
4761       /* We may be accessing data outside the field, which means
4762 	 we can alias adjacent data.  */
4763       if (MEM_P (str_rtx))
4764 	{
4765 	  str_rtx = shallow_copy_rtx (str_rtx);
4766 	  set_mem_alias_set (str_rtx, 0);
4767 	  set_mem_expr (str_rtx, 0);
4768 	}
4769 
4770       binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4771       if (bitpos + bitsize != str_bitsize)
4772 	{
4773 	  rtx mask = gen_int_mode ((HOST_WIDE_INT_1U << bitsize) - 1,
4774 				   str_mode);
4775 	  value = expand_and (str_mode, value, mask, NULL_RTX);
4776 	}
4777       value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4778       if (reverse)
4779 	value = flip_storage_order (str_mode, value);
4780       result = expand_binop (str_mode, binop, str_rtx,
4781 			     value, str_rtx, 1, OPTAB_WIDEN);
4782       if (result != str_rtx)
4783 	emit_move_insn (str_rtx, result);
4784       return true;
4785 
4786     default:
4787       break;
4788     }
4789 
4790   return false;
4791 }
4792 
4793 /* In the C++ memory model, consecutive bit fields in a structure are
4794    considered one memory location.
4795 
4796    Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4797    returns the bit range of consecutive bits in which this COMPONENT_REF
4798    belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
4799    and *OFFSET may be adjusted in the process.
4800 
4801    If the access does not need to be restricted, 0 is returned in both
4802    *BITSTART and *BITEND.  */
4803 
4804 void
4805 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4806 	       unsigned HOST_WIDE_INT *bitend,
4807 	       tree exp,
4808 	       HOST_WIDE_INT *bitpos,
4809 	       tree *offset)
4810 {
4811   HOST_WIDE_INT bitoffset;
4812   tree field, repr;
4813 
4814   gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4815 
4816   field = TREE_OPERAND (exp, 1);
4817   repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4818   /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4819      need to limit the range we can access.  */
4820   if (!repr)
4821     {
4822       *bitstart = *bitend = 0;
4823       return;
4824     }
4825 
4826   /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4827      part of a larger bit field, then the representative does not serve any
4828      useful purpose.  This can occur in Ada.  */
4829   if (handled_component_p (TREE_OPERAND (exp, 0)))
4830     {
4831       machine_mode rmode;
4832       HOST_WIDE_INT rbitsize, rbitpos;
4833       tree roffset;
4834       int unsignedp, reversep, volatilep = 0;
4835       get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4836 			   &roffset, &rmode, &unsignedp, &reversep,
4837 			   &volatilep);
4838       if ((rbitpos % BITS_PER_UNIT) != 0)
4839 	{
4840 	  *bitstart = *bitend = 0;
4841 	  return;
4842 	}
4843     }
4844 
4845   /* Compute the adjustment to bitpos from the offset of the field
4846      relative to the representative.  DECL_FIELD_OFFSET of field and
4847      repr are the same by construction if they are not constants,
4848      see finish_bitfield_layout.  */
4849   if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4850       && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4851     bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4852 		 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4853   else
4854     bitoffset = 0;
4855   bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4856 		- tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4857 
4858   /* If the adjustment is larger than bitpos, we would have a negative bit
4859      position for the lower bound and this may wreak havoc later.  Adjust
4860      offset and bitpos to make the lower bound non-negative in that case.  */
4861   if (bitoffset > *bitpos)
4862     {
4863       HOST_WIDE_INT adjust = bitoffset - *bitpos;
4864       gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4865 
4866       *bitpos += adjust;
4867       if (*offset == NULL_TREE)
4868 	*offset = size_int (-adjust / BITS_PER_UNIT);
4869       else
4870 	*offset
4871 	  = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4872       *bitstart = 0;
4873     }
4874   else
4875     *bitstart = *bitpos - bitoffset;
4876 
4877   *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4878 }
4879 
4880 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4881    in memory and has non-BLKmode.  DECL_RTL must not be a MEM; if
4882    DECL_RTL was not set yet, return NORTL.  */
4883 
4884 static inline bool
4885 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4886 {
4887   if (TREE_CODE (addr) != ADDR_EXPR)
4888     return false;
4889 
4890   tree base = TREE_OPERAND (addr, 0);
4891 
4892   if (!DECL_P (base)
4893       || TREE_ADDRESSABLE (base)
4894       || DECL_MODE (base) == BLKmode)
4895     return false;
4896 
4897   if (!DECL_RTL_SET_P (base))
4898     return nortl;
4899 
4900   return (!MEM_P (DECL_RTL (base)));
4901 }
4902 
4903 /* Returns true if the MEM_REF REF refers to an object that does not
4904    reside in memory and has non-BLKmode.  */
4905 
4906 static inline bool
4907 mem_ref_refers_to_non_mem_p (tree ref)
4908 {
4909   tree base = TREE_OPERAND (ref, 0);
4910   return addr_expr_of_non_mem_decl_p_1 (base, false);
4911 }
4912 
4913 /* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
4914    is true, try generating a nontemporal store.  */
4915 
4916 void
4917 expand_assignment (tree to, tree from, bool nontemporal)
4918 {
4919   rtx to_rtx = 0;
4920   rtx result;
4921   machine_mode mode;
4922   unsigned int align;
4923   enum insn_code icode;
4924 
4925   /* Don't crash if the lhs of the assignment was erroneous.  */
4926   if (TREE_CODE (to) == ERROR_MARK)
4927     {
4928       expand_normal (from);
4929       return;
4930     }
4931 
4932   /* Optimize away no-op moves without side-effects.  */
4933   if (operand_equal_p (to, from, 0))
4934     return;
4935 
4936   /* Handle misaligned stores.  */
4937   mode = TYPE_MODE (TREE_TYPE (to));
4938   if ((TREE_CODE (to) == MEM_REF
4939        || TREE_CODE (to) == TARGET_MEM_REF)
4940       && mode != BLKmode
4941       && !mem_ref_refers_to_non_mem_p (to)
4942       && ((align = get_object_alignment (to))
4943 	  < GET_MODE_ALIGNMENT (mode))
4944       && (((icode = optab_handler (movmisalign_optab, mode))
4945 	   != CODE_FOR_nothing)
4946 	  || SLOW_UNALIGNED_ACCESS (mode, align)))
4947     {
4948       rtx reg, mem;
4949 
4950       reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4951       reg = force_not_mem (reg);
4952       mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4953       if (TREE_CODE (to) == MEM_REF && REF_REVERSE_STORAGE_ORDER (to))
4954 	reg = flip_storage_order (mode, reg);
4955 
4956       if (icode != CODE_FOR_nothing)
4957 	{
4958 	  struct expand_operand ops[2];
4959 
4960 	  create_fixed_operand (&ops[0], mem);
4961 	  create_input_operand (&ops[1], reg, mode);
4962 	  /* The movmisalign<mode> pattern cannot fail, else the assignment
4963 	     would silently be omitted.  */
4964 	  expand_insn (icode, 2, ops);
4965 	}
4966       else
4967 	store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg,
4968 			 false);
4969       return;
4970     }
4971 
4972   /* Assignment of a structure component needs special treatment
4973      if the structure component's rtx is not simply a MEM.
4974      Assignment of an array element at a constant index, and assignment of
4975      an array element in an unaligned packed structure field, has the same
4976      problem.  Same for (partially) storing into a non-memory object.  */
4977   if (handled_component_p (to)
4978       || (TREE_CODE (to) == MEM_REF
4979 	  && (REF_REVERSE_STORAGE_ORDER (to)
4980 	      || mem_ref_refers_to_non_mem_p (to)))
4981       || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4982     {
4983       machine_mode mode1;
4984       HOST_WIDE_INT bitsize, bitpos;
4985       unsigned HOST_WIDE_INT bitregion_start = 0;
4986       unsigned HOST_WIDE_INT bitregion_end = 0;
4987       tree offset;
4988       int unsignedp, reversep, volatilep = 0;
4989       tree tem;
4990 
4991       push_temp_slots ();
4992       tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4993 				 &unsignedp, &reversep, &volatilep);
4994 
4995       /* Make sure bitpos is not negative, it can wreak havoc later.  */
4996       if (bitpos < 0)
4997 	{
4998 	  gcc_assert (offset == NULL_TREE);
4999 	  offset = size_int (bitpos >> LOG2_BITS_PER_UNIT);
5000 	  bitpos &= BITS_PER_UNIT - 1;
5001 	}
5002 
5003       if (TREE_CODE (to) == COMPONENT_REF
5004 	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
5005 	get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
5006       /* The C++ memory model naturally applies to byte-aligned fields.
5007 	 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
5008 	 BITSIZE are not byte-aligned, there is no need to limit the range
5009 	 we can access.  This can occur with packed structures in Ada.  */
5010       else if (bitsize > 0
5011 	       && bitsize % BITS_PER_UNIT == 0
5012 	       && bitpos % BITS_PER_UNIT == 0)
5013 	{
5014 	  bitregion_start = bitpos;
5015 	  bitregion_end = bitpos + bitsize - 1;
5016 	}
5017 
5018       to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
5019 
5020       /* If the field has a mode, we want to access it in the
5021 	 field's mode, not the computed mode.
5022 	 If a MEM has VOIDmode (external with incomplete type),
5023 	 use BLKmode for it instead.  */
5024       if (MEM_P (to_rtx))
5025 	{
5026 	  if (mode1 != VOIDmode)
5027 	    to_rtx = adjust_address (to_rtx, mode1, 0);
5028 	  else if (GET_MODE (to_rtx) == VOIDmode)
5029 	    to_rtx = adjust_address (to_rtx, BLKmode, 0);
5030 	}
5031 
5032       if (offset != 0)
5033 	{
5034 	  machine_mode address_mode;
5035 	  rtx offset_rtx;
5036 
5037 	  if (!MEM_P (to_rtx))
5038 	    {
5039 	      /* We can get constant negative offsets into arrays with broken
5040 		 user code.  Translate this to a trap instead of ICEing.  */
5041 	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
5042 	      expand_builtin_trap ();
5043 	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
5044 	    }
5045 
5046 	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
5047 	  address_mode = get_address_mode (to_rtx);
5048 	  if (GET_MODE (offset_rtx) != address_mode)
5049 	    {
5050 		/* We cannot be sure that the RTL in offset_rtx is valid outside
5051 		   of a memory address context, so force it into a register
5052 		   before attempting to convert it to the desired mode.  */
5053 	      offset_rtx = force_operand (offset_rtx, NULL_RTX);
5054 	      offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5055 	    }
5056 
5057 	  /* If we have an expression in OFFSET_RTX and a non-zero
5058 	     byte offset in BITPOS, adding the byte offset before the
5059 	     OFFSET_RTX results in better intermediate code, which makes
5060 	     later rtl optimization passes perform better.
5061 
5062 	     We prefer intermediate code like this:
5063 
5064 	     r124:DI=r123:DI+0x18
5065 	     [r124:DI]=r121:DI
5066 
5067 	     ... instead of ...
5068 
5069 	     r124:DI=r123:DI+0x10
5070 	     [r124:DI+0x8]=r121:DI
5071 
5072 	     This is only done for aligned data values, as these can
5073 	     be expected to result in single move instructions.  */
5074 	  if (mode1 != VOIDmode
5075 	      && bitpos != 0
5076 	      && bitsize > 0
5077 	      && (bitpos % bitsize) == 0
5078 	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
5079 	      && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
5080 	    {
5081 	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
5082 	      bitregion_start = 0;
5083 	      if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
5084 		bitregion_end -= bitpos;
5085 	      bitpos = 0;
5086 	    }
5087 
5088 	  to_rtx = offset_address (to_rtx, offset_rtx,
5089 				   highest_pow2_factor_for_target (to,
5090 				   				   offset));
5091 	}
5092 
5093       /* No action is needed if the target is not a memory and the field
5094 	 lies completely outside that target.  This can occur if the source
5095 	 code contains an out-of-bounds access to a small array.  */
5096       if (!MEM_P (to_rtx)
5097 	  && GET_MODE (to_rtx) != BLKmode
5098 	  && (unsigned HOST_WIDE_INT) bitpos
5099 	     >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
5100 	{
5101 	  expand_normal (from);
5102 	  result = NULL;
5103 	}
5104       /* Handle expand_expr of a complex value returning a CONCAT.  */
5105       else if (GET_CODE (to_rtx) == CONCAT)
5106 	{
5107 	  unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
5108 	  if (TYPE_MODE (TREE_TYPE (from)) == GET_MODE (to_rtx)
5109 	      && COMPLEX_MODE_P (GET_MODE (to_rtx))
5110 	      && bitpos == 0
5111 	      && bitsize == mode_bitsize)
5112 	    result = store_expr (from, to_rtx, false, nontemporal, reversep);
5113 	  else if (COMPLEX_MODE_P (GET_MODE (to_rtx))
5114 		   && (TYPE_MODE (TREE_TYPE (from))
5115 		       == GET_MODE_INNER (GET_MODE (to_rtx)))
5116 		   && bitsize == mode_bitsize / 2
5117 		   && (bitpos == 0 || bitpos == mode_bitsize / 2))
5118 	    result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
5119 				 nontemporal, reversep);
5120 	  else if (bitpos + bitsize <= mode_bitsize / 2)
5121 	    result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
5122 				  bitregion_start, bitregion_end,
5123 				  mode1, from, get_alias_set (to),
5124 				  nontemporal, reversep);
5125 	  else if (bitpos >= mode_bitsize / 2)
5126 	    result = store_field (XEXP (to_rtx, 1), bitsize,
5127 				  bitpos - mode_bitsize / 2,
5128 				  bitregion_start, bitregion_end,
5129 				  mode1, from, get_alias_set (to),
5130 				  nontemporal, reversep);
5131 	  else if (bitpos == 0 && bitsize == mode_bitsize)
5132 	    {
5133 	      result = expand_normal (from);
5134 	      if (GET_CODE (result) == CONCAT)
5135 		{
5136 		  machine_mode to_mode = GET_MODE_INNER (GET_MODE (to_rtx));
5137 		  machine_mode from_mode = GET_MODE_INNER (GET_MODE (result));
5138 		  rtx from_real
5139 		    = simplify_gen_subreg (to_mode, XEXP (result, 0),
5140 					   from_mode, 0);
5141 		  rtx from_imag
5142 		    = simplify_gen_subreg (to_mode, XEXP (result, 1),
5143 					   from_mode, 0);
5144 		  if (!from_real || !from_imag)
5145 		    goto concat_store_slow;
5146 		  emit_move_insn (XEXP (to_rtx, 0), from_real);
5147 		  emit_move_insn (XEXP (to_rtx, 1), from_imag);
5148 		}
5149 	      else
5150 		{
5151 		  rtx from_rtx
5152 		    = simplify_gen_subreg (GET_MODE (to_rtx), result,
5153 					   TYPE_MODE (TREE_TYPE (from)), 0);
5154 		  if (from_rtx)
5155 		    {
5156 		      emit_move_insn (XEXP (to_rtx, 0),
5157 				      read_complex_part (from_rtx, false));
5158 		      emit_move_insn (XEXP (to_rtx, 1),
5159 				      read_complex_part (from_rtx, true));
5160 		    }
5161 		  else
5162 		    {
5163 		      machine_mode to_mode
5164 			= GET_MODE_INNER (GET_MODE (to_rtx));
5165 		      rtx from_real
5166 			= simplify_gen_subreg (to_mode, result,
5167 					       TYPE_MODE (TREE_TYPE (from)),
5168 					       0);
5169 		      rtx from_imag
5170 			= simplify_gen_subreg (to_mode, result,
5171 					       TYPE_MODE (TREE_TYPE (from)),
5172 					       GET_MODE_SIZE (to_mode));
5173 		      if (!from_real || !from_imag)
5174 			goto concat_store_slow;
5175 		      emit_move_insn (XEXP (to_rtx, 0), from_real);
5176 		      emit_move_insn (XEXP (to_rtx, 1), from_imag);
5177 		    }
5178 		}
5179 	    }
5180 	  else
5181 	    {
5182 	    concat_store_slow:;
5183 	      rtx temp = assign_stack_temp (GET_MODE (to_rtx),
5184 					    GET_MODE_SIZE (GET_MODE (to_rtx)));
5185 	      write_complex_part (temp, XEXP (to_rtx, 0), false);
5186 	      write_complex_part (temp, XEXP (to_rtx, 1), true);
5187 	      result = store_field (temp, bitsize, bitpos,
5188 				    bitregion_start, bitregion_end,
5189 				    mode1, from, get_alias_set (to),
5190 				    nontemporal, reversep);
5191 	      emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
5192 	      emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
5193 	    }
5194 	}
5195       else
5196 	{
5197 	  if (MEM_P (to_rtx))
5198 	    {
5199 	      /* If the field is at offset zero, we could have been given the
5200 		 DECL_RTX of the parent struct.  Don't munge it.  */
5201 	      to_rtx = shallow_copy_rtx (to_rtx);
5202 	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
5203 	      if (volatilep)
5204 		MEM_VOLATILE_P (to_rtx) = 1;
5205 	    }
5206 
5207 	  if (optimize_bitfield_assignment_op (bitsize, bitpos,
5208 					       bitregion_start, bitregion_end,
5209 					       mode1, to_rtx, to, from,
5210 					       reversep))
5211 	    result = NULL;
5212 	  else
5213 	    result = store_field (to_rtx, bitsize, bitpos,
5214 				  bitregion_start, bitregion_end,
5215 				  mode1, from, get_alias_set (to),
5216 				  nontemporal, reversep);
5217 	}
5218 
5219       if (result)
5220 	preserve_temp_slots (result);
5221       pop_temp_slots ();
5222       return;
5223     }
5224 
5225   /* If the rhs is a function call and its value is not an aggregate,
5226      call the function before we start to compute the lhs.
5227      This is needed for correct code for cases such as
5228      val = setjmp (buf) on machines where reference to val
5229      requires loading up part of an address in a separate insn.
5230 
5231      Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5232      since it might be a promoted variable where the zero- or sign- extension
5233      needs to be done.  Handling this in the normal way is safe because no
5234      computation is done before the call.  The same is true for SSA names.  */
5235   if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5236       && COMPLETE_TYPE_P (TREE_TYPE (from))
5237       && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5238       && ! (((VAR_P (to)
5239 	      || TREE_CODE (to) == PARM_DECL
5240 	      || TREE_CODE (to) == RESULT_DECL)
5241 	     && REG_P (DECL_RTL (to)))
5242 	    || TREE_CODE (to) == SSA_NAME))
5243     {
5244       rtx value;
5245       rtx bounds;
5246 
5247       push_temp_slots ();
5248       value = expand_normal (from);
5249 
5250       /* Split value and bounds to store them separately.  */
5251       chkp_split_slot (value, &value, &bounds);
5252 
5253       if (to_rtx == 0)
5254 	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5255 
5256       /* Handle calls that return values in multiple non-contiguous locations.
5257 	 The Irix 6 ABI has examples of this.  */
5258       if (GET_CODE (to_rtx) == PARALLEL)
5259 	{
5260 	  if (GET_CODE (value) == PARALLEL)
5261 	    emit_group_move (to_rtx, value);
5262 	  else
5263 	    emit_group_load (to_rtx, value, TREE_TYPE (from),
5264 			     int_size_in_bytes (TREE_TYPE (from)));
5265 	}
5266       else if (GET_CODE (value) == PARALLEL)
5267 	emit_group_store (to_rtx, value, TREE_TYPE (from),
5268 			  int_size_in_bytes (TREE_TYPE (from)));
5269       else if (GET_MODE (to_rtx) == BLKmode)
5270 	{
5271 	  /* Handle calls that return BLKmode values in registers.  */
5272 	  if (REG_P (value))
5273 	    copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5274 	  else
5275 	    emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5276 	}
5277       else
5278 	{
5279 	  if (POINTER_TYPE_P (TREE_TYPE (to)))
5280 	    value = convert_memory_address_addr_space
5281 		      (GET_MODE (to_rtx), value,
5282 		       TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5283 
5284 	  emit_move_insn (to_rtx, value);
5285 	}
5286 
5287       /* Store bounds if required.  */
5288       if (bounds
5289 	  && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5290 	{
5291 	  gcc_assert (MEM_P (to_rtx));
5292 	  chkp_emit_bounds_store (bounds, value, to_rtx);
5293 	}
5294 
5295       preserve_temp_slots (to_rtx);
5296       pop_temp_slots ();
5297       return;
5298     }
5299 
5300   /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.  */
5301   to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5302 
5303   /* Don't move directly into a return register.  */
5304   if (TREE_CODE (to) == RESULT_DECL
5305       && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5306     {
5307       rtx temp;
5308 
5309       push_temp_slots ();
5310 
5311       /* If the source is itself a return value, it still is in a pseudo at
5312 	 this point so we can move it back to the return register directly.  */
5313       if (REG_P (to_rtx)
5314 	  && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5315 	  && TREE_CODE (from) != CALL_EXPR)
5316 	temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5317       else
5318 	temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5319 
5320       /* Handle calls that return values in multiple non-contiguous locations.
5321 	 The Irix 6 ABI has examples of this.  */
5322       if (GET_CODE (to_rtx) == PARALLEL)
5323 	{
5324 	  if (GET_CODE (temp) == PARALLEL)
5325 	    emit_group_move (to_rtx, temp);
5326 	  else
5327 	    emit_group_load (to_rtx, temp, TREE_TYPE (from),
5328 			     int_size_in_bytes (TREE_TYPE (from)));
5329 	}
5330       else if (temp)
5331 	emit_move_insn (to_rtx, temp);
5332 
5333       preserve_temp_slots (to_rtx);
5334       pop_temp_slots ();
5335       return;
5336     }
5337 
5338   /* In case we are returning the contents of an object which overlaps
5339      the place the value is being stored, use a safe function when copying
5340      a value through a pointer into a structure value return block.  */
5341   if (TREE_CODE (to) == RESULT_DECL
5342       && TREE_CODE (from) == INDIRECT_REF
5343       && ADDR_SPACE_GENERIC_P
5344 	   (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5345       && refs_may_alias_p (to, from)
5346       && cfun->returns_struct
5347       && !cfun->returns_pcc_struct)
5348     {
5349       rtx from_rtx, size;
5350 
5351       push_temp_slots ();
5352       size = expr_size (from);
5353       from_rtx = expand_normal (from);
5354 
5355       emit_block_move_via_libcall (XEXP (to_rtx, 0), XEXP (from_rtx, 0), size);
5356 
5357       preserve_temp_slots (to_rtx);
5358       pop_temp_slots ();
5359       return;
5360     }
5361 
5362   /* Compute FROM and store the value in the rtx we got.  */
5363 
5364   push_temp_slots ();
5365   result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, false, to);
5366   preserve_temp_slots (result);
5367   pop_temp_slots ();
5368   return;
5369 }
5370 
5371 /* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
5372    succeeded, false otherwise.  */
5373 
5374 bool
5375 emit_storent_insn (rtx to, rtx from)
5376 {
5377   struct expand_operand ops[2];
5378   machine_mode mode = GET_MODE (to);
5379   enum insn_code code = optab_handler (storent_optab, mode);
5380 
5381   if (code == CODE_FOR_nothing)
5382     return false;
5383 
5384   create_fixed_operand (&ops[0], to);
5385   create_input_operand (&ops[1], from, mode);
5386   return maybe_expand_insn (code, 2, ops);
5387 }
5388 
5389 /* Generate code for computing expression EXP,
5390    and storing the value into TARGET.
5391 
5392    If the mode is BLKmode then we may return TARGET itself.
5393    It turns out that in BLKmode it doesn't cause a problem.
5394    because C has no operators that could combine two different
5395    assignments into the same BLKmode object with different values
5396    with no sequence point.  Will other languages need this to
5397    be more thorough?
5398 
5399    If CALL_PARAM_P is nonzero, this is a store into a call param on the
5400    stack, and block moves may need to be treated specially.
5401 
5402    If NONTEMPORAL is true, try using a nontemporal store instruction.
5403 
5404    If REVERSE is true, the store is to be done in reverse order.
5405 
5406    If BTARGET is not NULL then computed bounds of EXP are
5407    associated with BTARGET.  */
5408 
5409 rtx
5410 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5411 			bool nontemporal, bool reverse, tree btarget)
5412 {
5413   rtx temp;
5414   rtx alt_rtl = NULL_RTX;
5415   location_t loc = curr_insn_location ();
5416 
5417   if (VOID_TYPE_P (TREE_TYPE (exp)))
5418     {
5419       /* C++ can generate ?: expressions with a throw expression in one
5420 	 branch and an rvalue in the other. Here, we resolve attempts to
5421 	 store the throw expression's nonexistent result.  */
5422       gcc_assert (!call_param_p);
5423       expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5424       return NULL_RTX;
5425     }
5426   if (TREE_CODE (exp) == COMPOUND_EXPR)
5427     {
5428       /* Perform first part of compound expression, then assign from second
5429 	 part.  */
5430       expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5431 		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5432       return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5433 				     call_param_p, nontemporal, reverse,
5434 				     btarget);
5435     }
5436   else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5437     {
5438       /* For conditional expression, get safe form of the target.  Then
5439 	 test the condition, doing the appropriate assignment on either
5440 	 side.  This avoids the creation of unnecessary temporaries.
5441 	 For non-BLKmode, it is more efficient not to do this.  */
5442 
5443       rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5444 
5445       do_pending_stack_adjust ();
5446       NO_DEFER_POP;
5447       jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5448       store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5449 			      nontemporal, reverse, btarget);
5450       emit_jump_insn (targetm.gen_jump (lab2));
5451       emit_barrier ();
5452       emit_label (lab1);
5453       store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5454 			      nontemporal, reverse, btarget);
5455       emit_label (lab2);
5456       OK_DEFER_POP;
5457 
5458       return NULL_RTX;
5459     }
5460   else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5461     /* If this is a scalar in a register that is stored in a wider mode
5462        than the declared mode, compute the result into its declared mode
5463        and then convert to the wider mode.  Our value is the computed
5464        expression.  */
5465     {
5466       rtx inner_target = 0;
5467 
5468       /* We can do the conversion inside EXP, which will often result
5469 	 in some optimizations.  Do the conversion in two steps: first
5470 	 change the signedness, if needed, then the extend.  But don't
5471 	 do this if the type of EXP is a subtype of something else
5472 	 since then the conversion might involve more than just
5473 	 converting modes.  */
5474       if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5475 	  && TREE_TYPE (TREE_TYPE (exp)) == 0
5476 	  && GET_MODE_PRECISION (GET_MODE (target))
5477 	     == TYPE_PRECISION (TREE_TYPE (exp)))
5478 	{
5479 	  if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5480 					  TYPE_UNSIGNED (TREE_TYPE (exp))))
5481 	    {
5482 	      /* Some types, e.g. Fortran's logical*4, won't have a signed
5483 		 version, so use the mode instead.  */
5484 	      tree ntype
5485 		= (signed_or_unsigned_type_for
5486 		   (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5487 	      if (ntype == NULL)
5488 		ntype = lang_hooks.types.type_for_mode
5489 		  (TYPE_MODE (TREE_TYPE (exp)),
5490 		   SUBREG_PROMOTED_SIGN (target));
5491 
5492 	      exp = fold_convert_loc (loc, ntype, exp);
5493 	    }
5494 
5495 	  exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5496 				  (GET_MODE (SUBREG_REG (target)),
5497 				   SUBREG_PROMOTED_SIGN (target)),
5498 				  exp);
5499 
5500 	  inner_target = SUBREG_REG (target);
5501 	}
5502 
5503       temp = expand_expr (exp, inner_target, VOIDmode,
5504 			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5505 
5506       /* Handle bounds returned by call.  */
5507       if (TREE_CODE (exp) == CALL_EXPR)
5508 	{
5509 	  rtx bounds;
5510 	  chkp_split_slot (temp, &temp, &bounds);
5511 	  if (bounds && btarget)
5512 	    {
5513 	      gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5514 	      rtx tmp = targetm.calls.load_returned_bounds (bounds);
5515 	      chkp_set_rtl_bounds (btarget, tmp);
5516 	    }
5517 	}
5518 
5519       /* If TEMP is a VOIDmode constant, use convert_modes to make
5520 	 sure that we properly convert it.  */
5521       if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5522 	{
5523 	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5524 				temp, SUBREG_PROMOTED_SIGN (target));
5525 	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5526 			        GET_MODE (target), temp,
5527 				SUBREG_PROMOTED_SIGN (target));
5528 	}
5529 
5530       convert_move (SUBREG_REG (target), temp,
5531 		    SUBREG_PROMOTED_SIGN (target));
5532 
5533       return NULL_RTX;
5534     }
5535   else if ((TREE_CODE (exp) == STRING_CST
5536 	    || (TREE_CODE (exp) == MEM_REF
5537 		&& TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5538 		&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5539 		   == STRING_CST
5540 		&& integer_zerop (TREE_OPERAND (exp, 1))))
5541 	   && !nontemporal && !call_param_p
5542 	   && MEM_P (target))
5543     {
5544       /* Optimize initialization of an array with a STRING_CST.  */
5545       HOST_WIDE_INT exp_len, str_copy_len;
5546       rtx dest_mem;
5547       tree str = TREE_CODE (exp) == STRING_CST
5548 		 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5549 
5550       exp_len = int_expr_size (exp);
5551       if (exp_len <= 0)
5552 	goto normal_expr;
5553 
5554       if (TREE_STRING_LENGTH (str) <= 0)
5555 	goto normal_expr;
5556 
5557       str_copy_len = strlen (TREE_STRING_POINTER (str));
5558       if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5559 	goto normal_expr;
5560 
5561       str_copy_len = TREE_STRING_LENGTH (str);
5562       if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5563 	  && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5564 	{
5565 	  str_copy_len += STORE_MAX_PIECES - 1;
5566 	  str_copy_len &= ~(STORE_MAX_PIECES - 1);
5567 	}
5568       str_copy_len = MIN (str_copy_len, exp_len);
5569       if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5570 				CONST_CAST (char *, TREE_STRING_POINTER (str)),
5571 				MEM_ALIGN (target), false))
5572 	goto normal_expr;
5573 
5574       dest_mem = target;
5575 
5576       dest_mem = store_by_pieces (dest_mem,
5577 				  str_copy_len, builtin_strncpy_read_str,
5578 				  CONST_CAST (char *,
5579 					      TREE_STRING_POINTER (str)),
5580 				  MEM_ALIGN (target), false,
5581 				  exp_len > str_copy_len ? 1 : 0);
5582       if (exp_len > str_copy_len)
5583 	clear_storage (adjust_address (dest_mem, BLKmode, 0),
5584 		       GEN_INT (exp_len - str_copy_len),
5585 		       BLOCK_OP_NORMAL);
5586       return NULL_RTX;
5587     }
5588   else
5589     {
5590       rtx tmp_target;
5591 
5592   normal_expr:
5593       /* If we want to use a nontemporal or a reverse order store, force the
5594 	 value into a register first.  */
5595       tmp_target = nontemporal || reverse ? NULL_RTX : target;
5596       temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5597 			       (call_param_p
5598 				? EXPAND_STACK_PARM : EXPAND_NORMAL),
5599 			       &alt_rtl, false);
5600 
5601       /* Handle bounds returned by call.  */
5602       if (TREE_CODE (exp) == CALL_EXPR)
5603 	{
5604 	  rtx bounds;
5605 	  chkp_split_slot (temp, &temp, &bounds);
5606 	  if (bounds && btarget)
5607 	    {
5608 	      gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5609 	      rtx tmp = targetm.calls.load_returned_bounds (bounds);
5610 	      chkp_set_rtl_bounds (btarget, tmp);
5611 	    }
5612 	}
5613     }
5614 
5615   /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5616      the same as that of TARGET, adjust the constant.  This is needed, for
5617      example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5618      only a word-sized value.  */
5619   if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5620       && TREE_CODE (exp) != ERROR_MARK
5621       && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5622     {
5623       if (GET_MODE_CLASS (GET_MODE (target))
5624 	  != GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp)))
5625 	  && GET_MODE_BITSIZE (GET_MODE (target))
5626 	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp))))
5627 	{
5628 	  rtx t = simplify_gen_subreg (GET_MODE (target), temp,
5629 				       TYPE_MODE (TREE_TYPE (exp)), 0);
5630 	  if (t)
5631 	    temp = t;
5632 	}
5633       if (GET_MODE (temp) == VOIDmode)
5634 	temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5635 			      temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5636     }
5637 
5638   /* If value was not generated in the target, store it there.
5639      Convert the value to TARGET's type first if necessary and emit the
5640      pending incrementations that have been queued when expanding EXP.
5641      Note that we cannot emit the whole queue blindly because this will
5642      effectively disable the POST_INC optimization later.
5643 
5644      If TEMP and TARGET compare equal according to rtx_equal_p, but
5645      one or both of them are volatile memory refs, we have to distinguish
5646      two cases:
5647      - expand_expr has used TARGET.  In this case, we must not generate
5648        another copy.  This can be detected by TARGET being equal according
5649        to == .
5650      - expand_expr has not used TARGET - that means that the source just
5651        happens to have the same RTX form.  Since temp will have been created
5652        by expand_expr, it will compare unequal according to == .
5653        We must generate a copy in this case, to reach the correct number
5654        of volatile memory references.  */
5655 
5656   if ((! rtx_equal_p (temp, target)
5657        || (temp != target && (side_effects_p (temp)
5658 			      || side_effects_p (target))))
5659       && TREE_CODE (exp) != ERROR_MARK
5660       /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5661 	 but TARGET is not valid memory reference, TEMP will differ
5662 	 from TARGET although it is really the same location.  */
5663       && !(alt_rtl
5664 	   && rtx_equal_p (alt_rtl, target)
5665 	   && !side_effects_p (alt_rtl)
5666 	   && !side_effects_p (target))
5667       /* If there's nothing to copy, don't bother.  Don't call
5668 	 expr_size unless necessary, because some front-ends (C++)
5669 	 expr_size-hook must not be given objects that are not
5670 	 supposed to be bit-copied or bit-initialized.  */
5671       && expr_size (exp) != const0_rtx)
5672     {
5673       if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5674 	{
5675 	  if (GET_MODE (target) == BLKmode)
5676 	    {
5677 	      /* Handle calls that return BLKmode values in registers.  */
5678 	      if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5679 		copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5680 	      else
5681 		store_bit_field (target,
5682 				 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5683 				 0, 0, 0, GET_MODE (temp), temp, reverse);
5684 	    }
5685 	  else
5686 	    convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5687 	}
5688 
5689       else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5690 	{
5691 	  /* Handle copying a string constant into an array.  The string
5692 	     constant may be shorter than the array.  So copy just the string's
5693 	     actual length, and clear the rest.  First get the size of the data
5694 	     type of the string, which is actually the size of the target.  */
5695 	  rtx size = expr_size (exp);
5696 
5697 	  if (CONST_INT_P (size)
5698 	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
5699 	    emit_block_move (target, temp, size,
5700 			     (call_param_p
5701 			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5702 	  else
5703 	    {
5704 	      machine_mode pointer_mode
5705 		= targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5706 	      machine_mode address_mode = get_address_mode (target);
5707 
5708 	      /* Compute the size of the data to copy from the string.  */
5709 	      tree copy_size
5710 		= size_binop_loc (loc, MIN_EXPR,
5711 				  make_tree (sizetype, size),
5712 				  size_int (TREE_STRING_LENGTH (exp)));
5713 	      rtx copy_size_rtx
5714 		= expand_expr (copy_size, NULL_RTX, VOIDmode,
5715 			       (call_param_p
5716 				? EXPAND_STACK_PARM : EXPAND_NORMAL));
5717 	      rtx_code_label *label = 0;
5718 
5719 	      /* Copy that much.  */
5720 	      copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5721 					       TYPE_UNSIGNED (sizetype));
5722 	      emit_block_move (target, temp, copy_size_rtx,
5723 			       (call_param_p
5724 				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5725 
5726 	      /* Figure out how much is left in TARGET that we have to clear.
5727 		 Do all calculations in pointer_mode.  */
5728 	      if (CONST_INT_P (copy_size_rtx))
5729 		{
5730 		  size = plus_constant (address_mode, size,
5731 					-INTVAL (copy_size_rtx));
5732 		  target = adjust_address (target, BLKmode,
5733 					   INTVAL (copy_size_rtx));
5734 		}
5735 	      else
5736 		{
5737 		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5738 				       copy_size_rtx, NULL_RTX, 0,
5739 				       OPTAB_LIB_WIDEN);
5740 
5741 		  if (GET_MODE (copy_size_rtx) != address_mode)
5742 		    copy_size_rtx = convert_to_mode (address_mode,
5743 						     copy_size_rtx,
5744 						     TYPE_UNSIGNED (sizetype));
5745 
5746 		  target = offset_address (target, copy_size_rtx,
5747 					   highest_pow2_factor (copy_size));
5748 		  label = gen_label_rtx ();
5749 		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5750 					   GET_MODE (size), 0, label);
5751 		}
5752 
5753 	      if (size != const0_rtx)
5754 		clear_storage (target, size, BLOCK_OP_NORMAL);
5755 
5756 	      if (label)
5757 		emit_label (label);
5758 	    }
5759 	}
5760       /* Handle calls that return values in multiple non-contiguous locations.
5761 	 The Irix 6 ABI has examples of this.  */
5762       else if (GET_CODE (target) == PARALLEL)
5763 	{
5764 	  if (GET_CODE (temp) == PARALLEL)
5765 	    emit_group_move (target, temp);
5766 	  else
5767 	    emit_group_load (target, temp, TREE_TYPE (exp),
5768 			     int_size_in_bytes (TREE_TYPE (exp)));
5769 	}
5770       else if (GET_CODE (temp) == PARALLEL)
5771 	emit_group_store (target, temp, TREE_TYPE (exp),
5772 			  int_size_in_bytes (TREE_TYPE (exp)));
5773       else if (GET_MODE (temp) == BLKmode)
5774 	emit_block_move (target, temp, expr_size (exp),
5775 			 (call_param_p
5776 			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5777       /* If we emit a nontemporal store, there is nothing else to do.  */
5778       else if (nontemporal && emit_storent_insn (target, temp))
5779 	;
5780       else
5781 	{
5782 	  if (reverse)
5783 	    temp = flip_storage_order (GET_MODE (target), temp);
5784 	  temp = force_operand (temp, target);
5785 	  if (temp != target)
5786 	    emit_move_insn (target, temp);
5787 	}
5788     }
5789 
5790   return NULL_RTX;
5791 }
5792 
5793 /* Same as store_expr_with_bounds but ignoring bounds of EXP.  */
5794 rtx
5795 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal,
5796 	    bool reverse)
5797 {
5798   return store_expr_with_bounds (exp, target, call_param_p, nontemporal,
5799 				 reverse, NULL);
5800 }
5801 
5802 /* Return true if field F of structure TYPE is a flexible array.  */
5803 
5804 static bool
5805 flexible_array_member_p (const_tree f, const_tree type)
5806 {
5807   const_tree tf;
5808 
5809   tf = TREE_TYPE (f);
5810   return (DECL_CHAIN (f) == NULL
5811 	  && TREE_CODE (tf) == ARRAY_TYPE
5812 	  && TYPE_DOMAIN (tf)
5813 	  && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5814 	  && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5815 	  && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5816 	  && int_size_in_bytes (type) >= 0);
5817 }
5818 
5819 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5820    must have in order for it to completely initialize a value of type TYPE.
5821    Return -1 if the number isn't known.
5822 
5823    If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */
5824 
5825 static HOST_WIDE_INT
5826 count_type_elements (const_tree type, bool for_ctor_p)
5827 {
5828   switch (TREE_CODE (type))
5829     {
5830     case ARRAY_TYPE:
5831       {
5832 	tree nelts;
5833 
5834 	nelts = array_type_nelts (type);
5835 	if (nelts && tree_fits_uhwi_p (nelts))
5836 	  {
5837 	    unsigned HOST_WIDE_INT n;
5838 
5839 	    n = tree_to_uhwi (nelts) + 1;
5840 	    if (n == 0 || for_ctor_p)
5841 	      return n;
5842 	    else
5843 	      return n * count_type_elements (TREE_TYPE (type), false);
5844 	  }
5845 	return for_ctor_p ? -1 : 1;
5846       }
5847 
5848     case RECORD_TYPE:
5849       {
5850 	unsigned HOST_WIDE_INT n;
5851 	tree f;
5852 
5853 	n = 0;
5854 	for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5855 	  if (TREE_CODE (f) == FIELD_DECL)
5856 	    {
5857 	      if (!for_ctor_p)
5858 		n += count_type_elements (TREE_TYPE (f), false);
5859 	      else if (!flexible_array_member_p (f, type))
5860 		/* Don't count flexible arrays, which are not supposed
5861 		   to be initialized.  */
5862 		n += 1;
5863 	    }
5864 
5865 	return n;
5866       }
5867 
5868     case UNION_TYPE:
5869     case QUAL_UNION_TYPE:
5870       {
5871 	tree f;
5872 	HOST_WIDE_INT n, m;
5873 
5874 	gcc_assert (!for_ctor_p);
5875 	/* Estimate the number of scalars in each field and pick the
5876 	   maximum.  Other estimates would do instead; the idea is simply
5877 	   to make sure that the estimate is not sensitive to the ordering
5878 	   of the fields.  */
5879 	n = 1;
5880 	for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5881 	  if (TREE_CODE (f) == FIELD_DECL)
5882 	    {
5883 	      m = count_type_elements (TREE_TYPE (f), false);
5884 	      /* If the field doesn't span the whole union, add an extra
5885 		 scalar for the rest.  */
5886 	      if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5887 				    TYPE_SIZE (type)) != 1)
5888 		m++;
5889 	      if (n < m)
5890 		n = m;
5891 	    }
5892 	return n;
5893       }
5894 
5895     case COMPLEX_TYPE:
5896       return 2;
5897 
5898     case VECTOR_TYPE:
5899       return TYPE_VECTOR_SUBPARTS (type);
5900 
5901     case INTEGER_TYPE:
5902     case REAL_TYPE:
5903     case FIXED_POINT_TYPE:
5904     case ENUMERAL_TYPE:
5905     case BOOLEAN_TYPE:
5906     case POINTER_TYPE:
5907     case OFFSET_TYPE:
5908     case REFERENCE_TYPE:
5909     case NULLPTR_TYPE:
5910       return 1;
5911 
5912     case ERROR_MARK:
5913       return 0;
5914 
5915     case VOID_TYPE:
5916     case METHOD_TYPE:
5917     case FUNCTION_TYPE:
5918     case LANG_TYPE:
5919     default:
5920       gcc_unreachable ();
5921     }
5922 }
5923 
5924 /* Helper for categorize_ctor_elements.  Identical interface.  */
5925 
5926 static bool
5927 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5928 			    HOST_WIDE_INT *p_init_elts, bool *p_complete)
5929 {
5930   unsigned HOST_WIDE_INT idx;
5931   HOST_WIDE_INT nz_elts, init_elts, num_fields;
5932   tree value, purpose, elt_type;
5933 
5934   /* Whether CTOR is a valid constant initializer, in accordance with what
5935      initializer_constant_valid_p does.  If inferred from the constructor
5936      elements, true until proven otherwise.  */
5937   bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5938   bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5939 
5940   nz_elts = 0;
5941   init_elts = 0;
5942   num_fields = 0;
5943   elt_type = NULL_TREE;
5944 
5945   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5946     {
5947       HOST_WIDE_INT mult = 1;
5948 
5949       if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5950 	{
5951 	  tree lo_index = TREE_OPERAND (purpose, 0);
5952 	  tree hi_index = TREE_OPERAND (purpose, 1);
5953 
5954 	  if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5955 	    mult = (tree_to_uhwi (hi_index)
5956 		    - tree_to_uhwi (lo_index) + 1);
5957 	}
5958       num_fields += mult;
5959       elt_type = TREE_TYPE (value);
5960 
5961       switch (TREE_CODE (value))
5962 	{
5963 	case CONSTRUCTOR:
5964 	  {
5965 	    HOST_WIDE_INT nz = 0, ic = 0;
5966 
5967 	    bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5968 							   p_complete);
5969 
5970 	    nz_elts += mult * nz;
5971  	    init_elts += mult * ic;
5972 
5973 	    if (const_from_elts_p && const_p)
5974 	      const_p = const_elt_p;
5975 	  }
5976 	  break;
5977 
5978 	case INTEGER_CST:
5979 	case REAL_CST:
5980 	case FIXED_CST:
5981 	  if (!initializer_zerop (value))
5982 	    nz_elts += mult;
5983 	  init_elts += mult;
5984 	  break;
5985 
5986 	case STRING_CST:
5987 	  nz_elts += mult * TREE_STRING_LENGTH (value);
5988 	  init_elts += mult * TREE_STRING_LENGTH (value);
5989 	  break;
5990 
5991 	case COMPLEX_CST:
5992 	  if (!initializer_zerop (TREE_REALPART (value)))
5993 	    nz_elts += mult;
5994 	  if (!initializer_zerop (TREE_IMAGPART (value)))
5995 	    nz_elts += mult;
5996 	  init_elts += mult;
5997 	  break;
5998 
5999 	case VECTOR_CST:
6000 	  {
6001 	    unsigned i;
6002 	    for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
6003 	      {
6004 		tree v = VECTOR_CST_ELT (value, i);
6005 		if (!initializer_zerop (v))
6006 		  nz_elts += mult;
6007 		init_elts += mult;
6008 	      }
6009 	  }
6010 	  break;
6011 
6012 	default:
6013 	  {
6014 	    HOST_WIDE_INT tc = count_type_elements (elt_type, false);
6015 	    nz_elts += mult * tc;
6016 	    init_elts += mult * tc;
6017 
6018 	    if (const_from_elts_p && const_p)
6019 	      const_p
6020 		= initializer_constant_valid_p (value,
6021 						elt_type,
6022 						TYPE_REVERSE_STORAGE_ORDER
6023 						(TREE_TYPE (ctor)))
6024 		  != NULL_TREE;
6025 	  }
6026 	  break;
6027 	}
6028     }
6029 
6030   if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
6031 						num_fields, elt_type))
6032     *p_complete = false;
6033 
6034   *p_nz_elts += nz_elts;
6035   *p_init_elts += init_elts;
6036 
6037   return const_p;
6038 }
6039 
6040 /* Examine CTOR to discover:
6041    * how many scalar fields are set to nonzero values,
6042      and place it in *P_NZ_ELTS;
6043    * how many scalar fields in total are in CTOR,
6044      and place it in *P_ELT_COUNT.
6045    * whether the constructor is complete -- in the sense that every
6046      meaningful byte is explicitly given a value --
6047      and place it in *P_COMPLETE.
6048 
6049    Return whether or not CTOR is a valid static constant initializer, the same
6050    as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */
6051 
6052 bool
6053 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
6054 			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
6055 {
6056   *p_nz_elts = 0;
6057   *p_init_elts = 0;
6058   *p_complete = true;
6059 
6060   return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
6061 }
6062 
6063 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
6064    of which had type LAST_TYPE.  Each element was itself a complete
6065    initializer, in the sense that every meaningful byte was explicitly
6066    given a value.  Return true if the same is true for the constructor
6067    as a whole.  */
6068 
6069 bool
6070 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
6071 			  const_tree last_type)
6072 {
6073   if (TREE_CODE (type) == UNION_TYPE
6074       || TREE_CODE (type) == QUAL_UNION_TYPE)
6075     {
6076       if (num_elts == 0)
6077 	return false;
6078 
6079       gcc_assert (num_elts == 1 && last_type);
6080 
6081       /* ??? We could look at each element of the union, and find the
6082 	 largest element.  Which would avoid comparing the size of the
6083 	 initialized element against any tail padding in the union.
6084 	 Doesn't seem worth the effort...  */
6085       return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
6086     }
6087 
6088   return count_type_elements (type, true) == num_elts;
6089 }
6090 
6091 /* Return 1 if EXP contains mostly (3/4)  zeros.  */
6092 
6093 static int
6094 mostly_zeros_p (const_tree exp)
6095 {
6096   if (TREE_CODE (exp) == CONSTRUCTOR)
6097     {
6098       HOST_WIDE_INT nz_elts, init_elts;
6099       bool complete_p;
6100 
6101       categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
6102       return !complete_p || nz_elts < init_elts / 4;
6103     }
6104 
6105   return initializer_zerop (exp);
6106 }
6107 
6108 /* Return 1 if EXP contains all zeros.  */
6109 
6110 static int
6111 all_zeros_p (const_tree exp)
6112 {
6113   if (TREE_CODE (exp) == CONSTRUCTOR)
6114     {
6115       HOST_WIDE_INT nz_elts, init_elts;
6116       bool complete_p;
6117 
6118       categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
6119       return nz_elts == 0;
6120     }
6121 
6122   return initializer_zerop (exp);
6123 }
6124 
6125 /* Helper function for store_constructor.
6126    TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
6127    CLEARED is as for store_constructor.
6128    ALIAS_SET is the alias set to use for any stores.
6129    If REVERSE is true, the store is to be done in reverse order.
6130 
6131    This provides a recursive shortcut back to store_constructor when it isn't
6132    necessary to go through store_field.  This is so that we can pass through
6133    the cleared field to let store_constructor know that we may not have to
6134    clear a substructure if the outer structure has already been cleared.  */
6135 
6136 static void
6137 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
6138 			 HOST_WIDE_INT bitpos,
6139 			 unsigned HOST_WIDE_INT bitregion_start,
6140 			 unsigned HOST_WIDE_INT bitregion_end,
6141 			 machine_mode mode,
6142 			 tree exp, int cleared,
6143 			 alias_set_type alias_set, bool reverse)
6144 {
6145   if (TREE_CODE (exp) == CONSTRUCTOR
6146       /* We can only call store_constructor recursively if the size and
6147 	 bit position are on a byte boundary.  */
6148       && bitpos % BITS_PER_UNIT == 0
6149       && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
6150       /* If we have a nonzero bitpos for a register target, then we just
6151 	 let store_field do the bitfield handling.  This is unlikely to
6152 	 generate unnecessary clear instructions anyways.  */
6153       && (bitpos == 0 || MEM_P (target)))
6154     {
6155       if (MEM_P (target))
6156 	target
6157 	  = adjust_address (target,
6158 			    GET_MODE (target) == BLKmode
6159 			    || 0 != (bitpos
6160 				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
6161 			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
6162 
6163 
6164       /* Update the alias set, if required.  */
6165       if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
6166 	  && MEM_ALIAS_SET (target) != 0)
6167 	{
6168 	  target = copy_rtx (target);
6169 	  set_mem_alias_set (target, alias_set);
6170 	}
6171 
6172       store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT,
6173 			 reverse);
6174     }
6175   else
6176     store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode,
6177 		 exp, alias_set, false, reverse);
6178 }
6179 
6180 
6181 /* Returns the number of FIELD_DECLs in TYPE.  */
6182 
6183 static int
6184 fields_length (const_tree type)
6185 {
6186   tree t = TYPE_FIELDS (type);
6187   int count = 0;
6188 
6189   for (; t; t = DECL_CHAIN (t))
6190     if (TREE_CODE (t) == FIELD_DECL)
6191       ++count;
6192 
6193   return count;
6194 }
6195 
6196 
6197 /* Store the value of constructor EXP into the rtx TARGET.
6198    TARGET is either a REG or a MEM; we know it cannot conflict, since
6199    safe_from_p has been called.
6200    CLEARED is true if TARGET is known to have been zero'd.
6201    SIZE is the number of bytes of TARGET we are allowed to modify: this
6202    may not be the same as the size of EXP if we are assigning to a field
6203    which has been packed to exclude padding bits.
6204    If REVERSE is true, the store is to be done in reverse order.  */
6205 
6206 static void
6207 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size,
6208 		   bool reverse)
6209 {
6210   tree type = TREE_TYPE (exp);
6211   HOST_WIDE_INT exp_size = int_size_in_bytes (type);
6212   HOST_WIDE_INT bitregion_end = size > 0 ? size * BITS_PER_UNIT - 1 : 0;
6213 
6214   switch (TREE_CODE (type))
6215     {
6216     case RECORD_TYPE:
6217     case UNION_TYPE:
6218     case QUAL_UNION_TYPE:
6219       {
6220 	unsigned HOST_WIDE_INT idx;
6221 	tree field, value;
6222 
6223 	/* The storage order is specified for every aggregate type.  */
6224 	reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6225 
6226 	/* If size is zero or the target is already cleared, do nothing.  */
6227 	if (size == 0 || cleared)
6228 	  cleared = 1;
6229 	/* We either clear the aggregate or indicate the value is dead.  */
6230 	else if ((TREE_CODE (type) == UNION_TYPE
6231 		  || TREE_CODE (type) == QUAL_UNION_TYPE)
6232 		 && ! CONSTRUCTOR_ELTS (exp))
6233 	  /* If the constructor is empty, clear the union.  */
6234 	  {
6235 	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6236 	    cleared = 1;
6237 	  }
6238 
6239 	/* If we are building a static constructor into a register,
6240 	   set the initial value as zero so we can fold the value into
6241 	   a constant.  But if more than one register is involved,
6242 	   this probably loses.  */
6243 	else if (REG_P (target) && TREE_STATIC (exp)
6244 		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
6245 	  {
6246 	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6247 	    cleared = 1;
6248 	  }
6249 
6250         /* If the constructor has fewer fields than the structure or
6251 	   if we are initializing the structure to mostly zeros, clear
6252 	   the whole structure first.  Don't do this if TARGET is a
6253 	   register whose mode size isn't equal to SIZE since
6254 	   clear_storage can't handle this case.  */
6255 	else if (size > 0
6256 		 && (((int) CONSTRUCTOR_NELTS (exp) != fields_length (type))
6257 		     || mostly_zeros_p (exp))
6258 		 && (!REG_P (target)
6259 		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
6260 			 == size)))
6261 	  {
6262 	    clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6263 	    cleared = 1;
6264 	  }
6265 
6266 	if (REG_P (target) && !cleared)
6267 	  emit_clobber (target);
6268 
6269 	/* Store each element of the constructor into the
6270 	   corresponding field of TARGET.  */
6271 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6272 	  {
6273 	    machine_mode mode;
6274 	    HOST_WIDE_INT bitsize;
6275 	    HOST_WIDE_INT bitpos = 0;
6276 	    tree offset;
6277 	    rtx to_rtx = target;
6278 
6279 	    /* Just ignore missing fields.  We cleared the whole
6280 	       structure, above, if any fields are missing.  */
6281 	    if (field == 0)
6282 	      continue;
6283 
6284 	    if (cleared && initializer_zerop (value))
6285 	      continue;
6286 
6287 	    if (tree_fits_uhwi_p (DECL_SIZE (field)))
6288 	      bitsize = tree_to_uhwi (DECL_SIZE (field));
6289 	    else
6290 	      gcc_unreachable ();
6291 
6292 	    mode = DECL_MODE (field);
6293 	    if (DECL_BIT_FIELD (field))
6294 	      mode = VOIDmode;
6295 
6296 	    offset = DECL_FIELD_OFFSET (field);
6297 	    if (tree_fits_shwi_p (offset)
6298 		&& tree_fits_shwi_p (bit_position (field)))
6299 	      {
6300 		bitpos = int_bit_position (field);
6301 		offset = NULL_TREE;
6302 	      }
6303 	    else
6304 	      gcc_unreachable ();
6305 
6306 	    /* If this initializes a field that is smaller than a
6307 	       word, at the start of a word, try to widen it to a full
6308 	       word.  This special case allows us to output C++ member
6309 	       function initializations in a form that the optimizers
6310 	       can understand.  */
6311 	    if (WORD_REGISTER_OPERATIONS
6312 		&& REG_P (target)
6313 		&& bitsize < BITS_PER_WORD
6314 		&& bitpos % BITS_PER_WORD == 0
6315 		&& GET_MODE_CLASS (mode) == MODE_INT
6316 		&& TREE_CODE (value) == INTEGER_CST
6317 		&& exp_size >= 0
6318 		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6319 	      {
6320 		tree type = TREE_TYPE (value);
6321 
6322 		if (TYPE_PRECISION (type) < BITS_PER_WORD)
6323 		  {
6324 		    type = lang_hooks.types.type_for_mode
6325 		      (word_mode, TYPE_UNSIGNED (type));
6326 		    value = fold_convert (type, value);
6327 		    /* Make sure the bits beyond the original bitsize are zero
6328 		       so that we can correctly avoid extra zeroing stores in
6329 		       later constructor elements.  */
6330 		    tree bitsize_mask
6331 		      = wide_int_to_tree (type, wi::mask (bitsize, false,
6332 							   BITS_PER_WORD));
6333 		    value = fold_build2 (BIT_AND_EXPR, type, value, bitsize_mask);
6334 		  }
6335 
6336 		if (BYTES_BIG_ENDIAN)
6337 		  value
6338 		   = fold_build2 (LSHIFT_EXPR, type, value,
6339 				   build_int_cst (type,
6340 						  BITS_PER_WORD - bitsize));
6341 		bitsize = BITS_PER_WORD;
6342 		mode = word_mode;
6343 	      }
6344 
6345 	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6346 		&& DECL_NONADDRESSABLE_P (field))
6347 	      {
6348 		to_rtx = copy_rtx (to_rtx);
6349 		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6350 	      }
6351 
6352 	    store_constructor_field (to_rtx, bitsize, bitpos,
6353 				     0, bitregion_end, mode,
6354 				     value, cleared,
6355 				     get_alias_set (TREE_TYPE (field)),
6356 				     reverse);
6357 	  }
6358 	break;
6359       }
6360     case ARRAY_TYPE:
6361       {
6362 	tree value, index;
6363 	unsigned HOST_WIDE_INT i;
6364 	int need_to_clear;
6365 	tree domain;
6366 	tree elttype = TREE_TYPE (type);
6367 	int const_bounds_p;
6368 	HOST_WIDE_INT minelt = 0;
6369 	HOST_WIDE_INT maxelt = 0;
6370 
6371 	/* The storage order is specified for every aggregate type.  */
6372 	reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6373 
6374 	domain = TYPE_DOMAIN (type);
6375 	const_bounds_p = (TYPE_MIN_VALUE (domain)
6376 			  && TYPE_MAX_VALUE (domain)
6377 			  && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6378 			  && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6379 
6380 	/* If we have constant bounds for the range of the type, get them.  */
6381 	if (const_bounds_p)
6382 	  {
6383 	    minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6384 	    maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6385 	  }
6386 
6387 	/* If the constructor has fewer elements than the array, clear
6388            the whole array first.  Similarly if this is static
6389            constructor of a non-BLKmode object.  */
6390 	if (cleared)
6391 	  need_to_clear = 0;
6392 	else if (REG_P (target) && TREE_STATIC (exp))
6393 	  need_to_clear = 1;
6394 	else
6395 	  {
6396 	    unsigned HOST_WIDE_INT idx;
6397 	    tree index, value;
6398 	    HOST_WIDE_INT count = 0, zero_count = 0;
6399 	    need_to_clear = ! const_bounds_p;
6400 
6401 	    /* This loop is a more accurate version of the loop in
6402 	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
6403 	       is also needed to check for missing elements.  */
6404 	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6405 	      {
6406 		HOST_WIDE_INT this_node_count;
6407 
6408 		if (need_to_clear)
6409 		  break;
6410 
6411 		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6412 		  {
6413 		    tree lo_index = TREE_OPERAND (index, 0);
6414 		    tree hi_index = TREE_OPERAND (index, 1);
6415 
6416 		    if (! tree_fits_uhwi_p (lo_index)
6417 			|| ! tree_fits_uhwi_p (hi_index))
6418 		      {
6419 			need_to_clear = 1;
6420 			break;
6421 		      }
6422 
6423 		    this_node_count = (tree_to_uhwi (hi_index)
6424 				       - tree_to_uhwi (lo_index) + 1);
6425 		  }
6426 		else
6427 		  this_node_count = 1;
6428 
6429 		count += this_node_count;
6430 		if (mostly_zeros_p (value))
6431 		  zero_count += this_node_count;
6432 	      }
6433 
6434 	    /* Clear the entire array first if there are any missing
6435 	       elements, or if the incidence of zero elements is >=
6436 	       75%.  */
6437 	    if (! need_to_clear
6438 		&& (count < maxelt - minelt + 1
6439 		    || 4 * zero_count >= 3 * count))
6440 	      need_to_clear = 1;
6441 	  }
6442 
6443 	if (need_to_clear && size > 0)
6444 	  {
6445 	    if (REG_P (target))
6446 	      emit_move_insn (target,  CONST0_RTX (GET_MODE (target)));
6447 	    else
6448 	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6449 	    cleared = 1;
6450 	  }
6451 
6452 	if (!cleared && REG_P (target))
6453 	  /* Inform later passes that the old value is dead.  */
6454 	  emit_clobber (target);
6455 
6456 	/* Store each element of the constructor into the
6457 	   corresponding element of TARGET, determined by counting the
6458 	   elements.  */
6459 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6460 	  {
6461 	    machine_mode mode;
6462 	    HOST_WIDE_INT bitsize;
6463 	    HOST_WIDE_INT bitpos;
6464 	    rtx xtarget = target;
6465 
6466 	    if (cleared && initializer_zerop (value))
6467 	      continue;
6468 
6469 	    mode = TYPE_MODE (elttype);
6470 	    if (mode == BLKmode)
6471 	      bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6472 			 ? tree_to_uhwi (TYPE_SIZE (elttype))
6473 			 : -1);
6474 	    else
6475 	      bitsize = GET_MODE_BITSIZE (mode);
6476 
6477 	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6478 	      {
6479 		tree lo_index = TREE_OPERAND (index, 0);
6480 		tree hi_index = TREE_OPERAND (index, 1);
6481 		rtx index_r, pos_rtx;
6482 		HOST_WIDE_INT lo, hi, count;
6483 		tree position;
6484 
6485 		/* If the range is constant and "small", unroll the loop.  */
6486 		if (const_bounds_p
6487 		    && tree_fits_shwi_p (lo_index)
6488 		    && tree_fits_shwi_p (hi_index)
6489 		    && (lo = tree_to_shwi (lo_index),
6490 			hi = tree_to_shwi (hi_index),
6491 			count = hi - lo + 1,
6492 			(!MEM_P (target)
6493 			 || count <= 2
6494 			 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6495 			     && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6496 				 <= 40 * 8)))))
6497 		  {
6498 		    lo -= minelt;  hi -= minelt;
6499 		    for (; lo <= hi; lo++)
6500 		      {
6501 			bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6502 
6503 			if (MEM_P (target)
6504 			    && !MEM_KEEP_ALIAS_SET_P (target)
6505 			    && TREE_CODE (type) == ARRAY_TYPE
6506 			    && TYPE_NONALIASED_COMPONENT (type))
6507 			  {
6508 			    target = copy_rtx (target);
6509 			    MEM_KEEP_ALIAS_SET_P (target) = 1;
6510 			  }
6511 
6512 			store_constructor_field
6513 			  (target, bitsize, bitpos, 0, bitregion_end,
6514 			   mode, value, cleared,
6515 			   get_alias_set (elttype), reverse);
6516 		      }
6517 		  }
6518 		else
6519 		  {
6520 		    rtx_code_label *loop_start = gen_label_rtx ();
6521 		    rtx_code_label *loop_end = gen_label_rtx ();
6522 		    tree exit_cond;
6523 
6524 		    expand_normal (hi_index);
6525 
6526 		    index = build_decl (EXPR_LOCATION (exp),
6527 					VAR_DECL, NULL_TREE, domain);
6528 		    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6529 		    SET_DECL_RTL (index, index_r);
6530 		    store_expr (lo_index, index_r, 0, false, reverse);
6531 
6532 		    /* Build the head of the loop.  */
6533 		    do_pending_stack_adjust ();
6534 		    emit_label (loop_start);
6535 
6536 		    /* Assign value to element index.  */
6537 		    position =
6538 		      fold_convert (ssizetype,
6539 				    fold_build2 (MINUS_EXPR,
6540 						 TREE_TYPE (index),
6541 						 index,
6542 						 TYPE_MIN_VALUE (domain)));
6543 
6544 		    position =
6545 			size_binop (MULT_EXPR, position,
6546 				    fold_convert (ssizetype,
6547 						  TYPE_SIZE_UNIT (elttype)));
6548 
6549 		    pos_rtx = expand_normal (position);
6550 		    xtarget = offset_address (target, pos_rtx,
6551 					      highest_pow2_factor (position));
6552 		    xtarget = adjust_address (xtarget, mode, 0);
6553 		    if (TREE_CODE (value) == CONSTRUCTOR)
6554 		      store_constructor (value, xtarget, cleared,
6555 					 bitsize / BITS_PER_UNIT, reverse);
6556 		    else
6557 		      store_expr (value, xtarget, 0, false, reverse);
6558 
6559 		    /* Generate a conditional jump to exit the loop.  */
6560 		    exit_cond = build2 (LT_EXPR, integer_type_node,
6561 					index, hi_index);
6562 		    jumpif (exit_cond, loop_end, -1);
6563 
6564 		    /* Update the loop counter, and jump to the head of
6565 		       the loop.  */
6566 		    expand_assignment (index,
6567 				       build2 (PLUS_EXPR, TREE_TYPE (index),
6568 					       index, integer_one_node),
6569 				       false);
6570 
6571 		    emit_jump (loop_start);
6572 
6573 		    /* Build the end of the loop.  */
6574 		    emit_label (loop_end);
6575 		  }
6576 	      }
6577 	    else if ((index != 0 && ! tree_fits_shwi_p (index))
6578 		     || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6579 	      {
6580 		tree position;
6581 
6582 		if (index == 0)
6583 		  index = ssize_int (1);
6584 
6585 		if (minelt)
6586 		  index = fold_convert (ssizetype,
6587 					fold_build2 (MINUS_EXPR,
6588 						     TREE_TYPE (index),
6589 						     index,
6590 						     TYPE_MIN_VALUE (domain)));
6591 
6592 		position =
6593 		  size_binop (MULT_EXPR, index,
6594 			      fold_convert (ssizetype,
6595 					    TYPE_SIZE_UNIT (elttype)));
6596 		xtarget = offset_address (target,
6597 					  expand_normal (position),
6598 					  highest_pow2_factor (position));
6599 		xtarget = adjust_address (xtarget, mode, 0);
6600 		store_expr (value, xtarget, 0, false, reverse);
6601 	      }
6602 	    else
6603 	      {
6604 		if (index != 0)
6605 		  bitpos = ((tree_to_shwi (index) - minelt)
6606 			    * tree_to_uhwi (TYPE_SIZE (elttype)));
6607 		else
6608 		  bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6609 
6610 		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6611 		    && TREE_CODE (type) == ARRAY_TYPE
6612 		    && TYPE_NONALIASED_COMPONENT (type))
6613 		  {
6614 		    target = copy_rtx (target);
6615 		    MEM_KEEP_ALIAS_SET_P (target) = 1;
6616 		  }
6617 		store_constructor_field (target, bitsize, bitpos, 0,
6618 					 bitregion_end, mode, value,
6619 					 cleared, get_alias_set (elttype),
6620 					 reverse);
6621 	      }
6622 	  }
6623 	break;
6624       }
6625 
6626     case VECTOR_TYPE:
6627       {
6628 	unsigned HOST_WIDE_INT idx;
6629 	constructor_elt *ce;
6630 	int i;
6631 	int need_to_clear;
6632 	int icode = CODE_FOR_nothing;
6633 	tree elttype = TREE_TYPE (type);
6634 	int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6635 	machine_mode eltmode = TYPE_MODE (elttype);
6636 	HOST_WIDE_INT bitsize;
6637 	HOST_WIDE_INT bitpos;
6638 	rtvec vector = NULL;
6639 	unsigned n_elts;
6640 	alias_set_type alias;
6641 
6642 	gcc_assert (eltmode != BLKmode);
6643 
6644 	n_elts = TYPE_VECTOR_SUBPARTS (type);
6645 	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6646 	  {
6647 	    machine_mode mode = GET_MODE (target);
6648 
6649 	    icode = (int) optab_handler (vec_init_optab, mode);
6650 	    /* Don't use vec_init<mode> if some elements have VECTOR_TYPE.  */
6651 	    if (icode != CODE_FOR_nothing)
6652 	      {
6653 		tree value;
6654 
6655 		FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6656 		  if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6657 		    {
6658 		      icode = CODE_FOR_nothing;
6659 		      break;
6660 		    }
6661 	      }
6662 	    if (icode != CODE_FOR_nothing)
6663 	      {
6664 		unsigned int i;
6665 
6666 		vector = rtvec_alloc (n_elts);
6667 		for (i = 0; i < n_elts; i++)
6668 		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6669 	      }
6670 	  }
6671 
6672 	/* If the constructor has fewer elements than the vector,
6673 	   clear the whole array first.  Similarly if this is static
6674 	   constructor of a non-BLKmode object.  */
6675 	if (cleared)
6676 	  need_to_clear = 0;
6677 	else if (REG_P (target) && TREE_STATIC (exp))
6678 	  need_to_clear = 1;
6679 	else
6680 	  {
6681 	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6682 	    tree value;
6683 
6684 	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6685 	      {
6686 		int n_elts_here = tree_to_uhwi
6687 		  (int_const_binop (TRUNC_DIV_EXPR,
6688 				    TYPE_SIZE (TREE_TYPE (value)),
6689 				    TYPE_SIZE (elttype)));
6690 
6691 		count += n_elts_here;
6692 		if (mostly_zeros_p (value))
6693 		  zero_count += n_elts_here;
6694 	      }
6695 
6696 	    /* Clear the entire vector first if there are any missing elements,
6697 	       or if the incidence of zero elements is >= 75%.  */
6698 	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6699 	  }
6700 
6701 	if (need_to_clear && size > 0 && !vector)
6702 	  {
6703 	    if (REG_P (target))
6704 	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6705 	    else
6706 	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6707 	    cleared = 1;
6708 	  }
6709 
6710 	/* Inform later passes that the old value is dead.  */
6711 	if (!cleared && !vector && REG_P (target))
6712 	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6713 
6714         if (MEM_P (target))
6715 	  alias = MEM_ALIAS_SET (target);
6716 	else
6717 	  alias = get_alias_set (elttype);
6718 
6719         /* Store each element of the constructor into the corresponding
6720 	   element of TARGET, determined by counting the elements.  */
6721 	for (idx = 0, i = 0;
6722 	     vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6723 	     idx++, i += bitsize / elt_size)
6724 	  {
6725 	    HOST_WIDE_INT eltpos;
6726 	    tree value = ce->value;
6727 
6728 	    bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6729 	    if (cleared && initializer_zerop (value))
6730 	      continue;
6731 
6732 	    if (ce->index)
6733 	      eltpos = tree_to_uhwi (ce->index);
6734 	    else
6735 	      eltpos = i;
6736 
6737 	    if (vector)
6738 	      {
6739 		/* vec_init<mode> should not be used if there are VECTOR_TYPE
6740 		   elements.  */
6741 		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6742 		RTVEC_ELT (vector, eltpos)
6743 		  = expand_normal (value);
6744 	      }
6745 	    else
6746 	      {
6747 		machine_mode value_mode =
6748 		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6749 		  ? TYPE_MODE (TREE_TYPE (value))
6750 		  : eltmode;
6751 		bitpos = eltpos * elt_size;
6752 		store_constructor_field (target, bitsize, bitpos, 0,
6753 					 bitregion_end, value_mode,
6754 					 value, cleared, alias, reverse);
6755 	      }
6756 	  }
6757 
6758 	if (vector)
6759 	  emit_insn (GEN_FCN (icode)
6760 		     (target,
6761 		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
6762 	break;
6763       }
6764 
6765     default:
6766       gcc_unreachable ();
6767     }
6768 }
6769 
6770 /* Store the value of EXP (an expression tree)
6771    into a subfield of TARGET which has mode MODE and occupies
6772    BITSIZE bits, starting BITPOS bits from the start of TARGET.
6773    If MODE is VOIDmode, it means that we are storing into a bit-field.
6774 
6775    BITREGION_START is bitpos of the first bitfield in this region.
6776    BITREGION_END is the bitpos of the ending bitfield in this region.
6777    These two fields are 0, if the C++ memory model does not apply,
6778    or we are not interested in keeping track of bitfield regions.
6779 
6780    Always return const0_rtx unless we have something particular to
6781    return.
6782 
6783    ALIAS_SET is the alias set for the destination.  This value will
6784    (in general) be different from that for TARGET, since TARGET is a
6785    reference to the containing structure.
6786 
6787    If NONTEMPORAL is true, try generating a nontemporal store.
6788 
6789    If REVERSE is true, the store is to be done in reverse order.  */
6790 
6791 static rtx
6792 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6793 	     unsigned HOST_WIDE_INT bitregion_start,
6794 	     unsigned HOST_WIDE_INT bitregion_end,
6795 	     machine_mode mode, tree exp,
6796 	     alias_set_type alias_set, bool nontemporal,  bool reverse)
6797 {
6798   if (TREE_CODE (exp) == ERROR_MARK)
6799     return const0_rtx;
6800 
6801   /* If we have nothing to store, do nothing unless the expression has
6802      side-effects.  Don't do that for zero sized addressable lhs of
6803      calls.  */
6804   if (bitsize == 0
6805       && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
6806 	  || TREE_CODE (exp) != CALL_EXPR))
6807     return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6808 
6809   if (GET_CODE (target) == CONCAT)
6810     {
6811       /* We're storing into a struct containing a single __complex.  */
6812 
6813       gcc_assert (!bitpos);
6814       return store_expr (exp, target, 0, nontemporal, reverse);
6815     }
6816 
6817   /* If the structure is in a register or if the component
6818      is a bit field, we cannot use addressing to access it.
6819      Use bit-field techniques or SUBREG to store in it.  */
6820 
6821   if (mode == VOIDmode
6822       || (mode != BLKmode && ! direct_store[(int) mode]
6823 	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6824 	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6825       || REG_P (target)
6826       || GET_CODE (target) == SUBREG
6827       /* If the field isn't aligned enough to store as an ordinary memref,
6828 	 store it as a bit field.  */
6829       || (mode != BLKmode
6830 	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6831 		|| bitpos % GET_MODE_ALIGNMENT (mode))
6832 	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6833 	      || (bitpos % BITS_PER_UNIT != 0)))
6834       || (bitsize >= 0 && mode != BLKmode
6835 	  && GET_MODE_BITSIZE (mode) > bitsize)
6836       /* If the RHS and field are a constant size and the size of the
6837 	 RHS isn't the same size as the bitfield, we must use bitfield
6838 	 operations.  */
6839       || (bitsize >= 0
6840 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6841 	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0
6842 	  /* Except for initialization of full bytes from a CONSTRUCTOR, which
6843 	     we will handle specially below.  */
6844 	  && !(TREE_CODE (exp) == CONSTRUCTOR
6845 	       && bitsize % BITS_PER_UNIT == 0)
6846 	  /* And except for bitwise copying of TREE_ADDRESSABLE types,
6847 	     where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp)
6848 	     includes some extra padding.  store_expr / expand_expr will in
6849 	     that case call get_inner_reference that will have the bitsize
6850 	     we check here and thus the block move will not clobber the
6851 	     padding that shouldn't be clobbered.  In the future we could
6852 	     replace the TREE_ADDRESSABLE check with a check that
6853 	     get_base_address needs to live in memory.  */
6854 	  && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
6855 	      || TREE_CODE (exp) != COMPONENT_REF
6856 	      || TREE_CODE (DECL_SIZE (TREE_OPERAND (exp, 1))) != INTEGER_CST
6857 	      || (bitsize % BITS_PER_UNIT != 0)
6858 	      || (bitpos % BITS_PER_UNIT != 0)
6859 	      || (compare_tree_int (DECL_SIZE (TREE_OPERAND (exp, 1)), bitsize)
6860 		  != 0)))
6861       /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6862          decl we must use bitfield operations.  */
6863       || (bitsize >= 0
6864 	  && TREE_CODE (exp) == MEM_REF
6865 	  && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6866 	  && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6867 	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6868 	  && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6869     {
6870       rtx temp;
6871       gimple *nop_def;
6872 
6873       /* If EXP is a NOP_EXPR of precision less than its mode, then that
6874 	 implies a mask operation.  If the precision is the same size as
6875 	 the field we're storing into, that mask is redundant.  This is
6876 	 particularly common with bit field assignments generated by the
6877 	 C front end.  */
6878       nop_def = get_def_for_expr (exp, NOP_EXPR);
6879       if (nop_def)
6880 	{
6881 	  tree type = TREE_TYPE (exp);
6882 	  if (INTEGRAL_TYPE_P (type)
6883 	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6884 	      && bitsize == TYPE_PRECISION (type))
6885 	    {
6886 	      tree op = gimple_assign_rhs1 (nop_def);
6887 	      type = TREE_TYPE (op);
6888 	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6889 		exp = op;
6890 	    }
6891 	}
6892 
6893       temp = expand_normal (exp);
6894 
6895       /* Handle calls that return values in multiple non-contiguous locations.
6896 	 The Irix 6 ABI has examples of this.  */
6897       if (GET_CODE (temp) == PARALLEL)
6898 	{
6899 	  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6900 	  machine_mode temp_mode = GET_MODE (temp);
6901 	  if (temp_mode == BLKmode || temp_mode == VOIDmode)
6902 	    temp_mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6903 	  rtx temp_target = gen_reg_rtx (temp_mode);
6904 	  emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6905 	  temp = temp_target;
6906 	}
6907 
6908       /* Handle calls that return BLKmode values in registers.  */
6909       else if (mode == BLKmode && REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6910 	{
6911 	  rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6912 	  copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6913 	  temp = temp_target;
6914 	}
6915 
6916       /* If the value has aggregate type and an integral mode then, if BITSIZE
6917 	 is narrower than this mode and this is for big-endian data, we first
6918 	 need to put the value into the low-order bits for store_bit_field,
6919 	 except when MODE is BLKmode and BITSIZE larger than the word size
6920 	 (see the handling of fields larger than a word in store_bit_field).
6921 	 Moreover, the field may be not aligned on a byte boundary; in this
6922 	 case, if it has reverse storage order, it needs to be accessed as a
6923 	 scalar field with reverse storage order and we must first put the
6924 	 value into target order.  */
6925       if (AGGREGATE_TYPE_P (TREE_TYPE (exp))
6926 	  && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT)
6927 	{
6928 	  HOST_WIDE_INT size = GET_MODE_BITSIZE (GET_MODE (temp));
6929 
6930 	  reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp));
6931 
6932 	  if (reverse)
6933 	    temp = flip_storage_order (GET_MODE (temp), temp);
6934 
6935 	  if (bitsize < size
6936 	      && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN
6937 	      && !(mode == BLKmode && bitsize > BITS_PER_WORD))
6938 	    temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6939 				 size - bitsize, NULL_RTX, 1);
6940 	}
6941 
6942       /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
6943       if (mode != VOIDmode && mode != BLKmode
6944 	  && mode != TYPE_MODE (TREE_TYPE (exp)))
6945 	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6946 
6947       /* If the mode of TEMP and TARGET is BLKmode, both must be in memory
6948 	 and BITPOS must be aligned on a byte boundary.  If so, we simply do
6949 	 a block copy.  Likewise for a BLKmode-like TARGET.  */
6950       if (GET_MODE (temp) == BLKmode
6951 	  && (GET_MODE (target) == BLKmode
6952 	      || (MEM_P (target)
6953 		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6954 		  && (bitpos % BITS_PER_UNIT) == 0
6955 		  && (bitsize % BITS_PER_UNIT) == 0)))
6956 	{
6957 	  gcc_assert (MEM_P (target) && MEM_P (temp)
6958 		      && (bitpos % BITS_PER_UNIT) == 0);
6959 
6960 	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6961 	  emit_block_move (target, temp,
6962 			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6963 				    / BITS_PER_UNIT),
6964 			   BLOCK_OP_NORMAL);
6965 
6966 	  return const0_rtx;
6967 	}
6968 
6969       /* If the mode of TEMP is still BLKmode and BITSIZE not larger than the
6970 	 word size, we need to load the value (see again store_bit_field).  */
6971       if (GET_MODE (temp) == BLKmode && bitsize <= BITS_PER_WORD)
6972 	{
6973 	  machine_mode temp_mode = smallest_mode_for_size (bitsize, MODE_INT);
6974 	  temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode,
6975 				    temp_mode, false);
6976 	}
6977 
6978       /* Store the value in the bitfield.  */
6979       store_bit_field (target, bitsize, bitpos,
6980 		       bitregion_start, bitregion_end,
6981 		       mode, temp, reverse);
6982 
6983       return const0_rtx;
6984     }
6985   else
6986     {
6987       /* Now build a reference to just the desired component.  */
6988       rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6989 
6990       if (to_rtx == target)
6991 	to_rtx = copy_rtx (to_rtx);
6992 
6993       if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6994 	set_mem_alias_set (to_rtx, alias_set);
6995 
6996       /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
6997 	 into a target smaller than its type; handle that case now.  */
6998       if (TREE_CODE (exp) == CONSTRUCTOR && bitsize >= 0)
6999 	{
7000 	  gcc_assert (bitsize % BITS_PER_UNIT == 0);
7001 	  store_constructor (exp, to_rtx, 0, bitsize / BITS_PER_UNIT, reverse);
7002 	  return to_rtx;
7003 	}
7004 
7005       return store_expr (exp, to_rtx, 0, nontemporal, reverse);
7006     }
7007 }
7008 
7009 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
7010    an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
7011    codes and find the ultimate containing object, which we return.
7012 
7013    We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
7014    bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
7015    storage order of the field.
7016    If the position of the field is variable, we store a tree
7017    giving the variable offset (in units) in *POFFSET.
7018    This offset is in addition to the bit position.
7019    If the position is not variable, we store 0 in *POFFSET.
7020 
7021    If any of the extraction expressions is volatile,
7022    we store 1 in *PVOLATILEP.  Otherwise we don't change that.
7023 
7024    If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
7025    Otherwise, it is a mode that can be used to access the field.
7026 
7027    If the field describes a variable-sized object, *PMODE is set to
7028    BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
7029    this case, but the address of the object can be found.  */
7030 
7031 tree
7032 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
7033 		     HOST_WIDE_INT *pbitpos, tree *poffset,
7034 		     machine_mode *pmode, int *punsignedp,
7035 		     int *preversep, int *pvolatilep)
7036 {
7037   tree size_tree = 0;
7038   machine_mode mode = VOIDmode;
7039   bool blkmode_bitfield = false;
7040   tree offset = size_zero_node;
7041   offset_int bit_offset = 0;
7042 
7043   /* First get the mode, signedness, storage order and size.  We do this from
7044      just the outermost expression.  */
7045   *pbitsize = -1;
7046   if (TREE_CODE (exp) == COMPONENT_REF)
7047     {
7048       tree field = TREE_OPERAND (exp, 1);
7049       size_tree = DECL_SIZE (field);
7050       if (flag_strict_volatile_bitfields > 0
7051 	  && TREE_THIS_VOLATILE (exp)
7052 	  && DECL_BIT_FIELD_TYPE (field)
7053 	  && DECL_MODE (field) != BLKmode)
7054 	/* Volatile bitfields should be accessed in the mode of the
7055 	     field's type, not the mode computed based on the bit
7056 	     size.  */
7057 	mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
7058       else if (!DECL_BIT_FIELD (field))
7059 	{
7060 	  mode = DECL_MODE (field);
7061 	  /* For vector fields re-check the target flags, as DECL_MODE
7062 	     could have been set with different target flags than
7063 	     the current function has.  */
7064 	  if (mode == BLKmode
7065 	      && VECTOR_TYPE_P (TREE_TYPE (field))
7066 	      && VECTOR_MODE_P (TYPE_MODE_RAW (TREE_TYPE (field))))
7067 	    mode = TYPE_MODE (TREE_TYPE (field));
7068 	}
7069       else if (DECL_MODE (field) == BLKmode)
7070 	blkmode_bitfield = true;
7071 
7072       *punsignedp = DECL_UNSIGNED (field);
7073     }
7074   else if (TREE_CODE (exp) == BIT_FIELD_REF)
7075     {
7076       size_tree = TREE_OPERAND (exp, 1);
7077       *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
7078 		     || TYPE_UNSIGNED (TREE_TYPE (exp)));
7079 
7080       /* For vector types, with the correct size of access, use the mode of
7081 	 inner type.  */
7082       if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
7083 	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
7084 	  && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
7085         mode = TYPE_MODE (TREE_TYPE (exp));
7086     }
7087   else
7088     {
7089       mode = TYPE_MODE (TREE_TYPE (exp));
7090       *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
7091 
7092       if (mode == BLKmode)
7093 	size_tree = TYPE_SIZE (TREE_TYPE (exp));
7094       else
7095 	*pbitsize = GET_MODE_BITSIZE (mode);
7096     }
7097 
7098   if (size_tree != 0)
7099     {
7100       if (! tree_fits_uhwi_p (size_tree))
7101 	mode = BLKmode, *pbitsize = -1;
7102       else
7103 	*pbitsize = tree_to_uhwi (size_tree);
7104     }
7105 
7106   *preversep = reverse_storage_order_for_component_p (exp);
7107 
7108   /* Compute cumulative bit-offset for nested component-refs and array-refs,
7109      and find the ultimate containing object.  */
7110   while (1)
7111     {
7112       switch (TREE_CODE (exp))
7113 	{
7114 	case BIT_FIELD_REF:
7115 	  bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
7116 	  break;
7117 
7118 	case COMPONENT_REF:
7119 	  {
7120 	    tree field = TREE_OPERAND (exp, 1);
7121 	    tree this_offset = component_ref_field_offset (exp);
7122 
7123 	    /* If this field hasn't been filled in yet, don't go past it.
7124 	       This should only happen when folding expressions made during
7125 	       type construction.  */
7126 	    if (this_offset == 0)
7127 	      break;
7128 
7129 	    offset = size_binop (PLUS_EXPR, offset, this_offset);
7130 	    bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
7131 
7132 	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
7133 	  }
7134 	  break;
7135 
7136 	case ARRAY_REF:
7137 	case ARRAY_RANGE_REF:
7138 	  {
7139 	    tree index = TREE_OPERAND (exp, 1);
7140 	    tree low_bound = array_ref_low_bound (exp);
7141 	    tree unit_size = array_ref_element_size (exp);
7142 
7143 	    /* We assume all arrays have sizes that are a multiple of a byte.
7144 	       First subtract the lower bound, if any, in the type of the
7145 	       index, then convert to sizetype and multiply by the size of
7146 	       the array element.  */
7147 	    if (! integer_zerop (low_bound))
7148 	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
7149 				   index, low_bound);
7150 
7151 	    offset = size_binop (PLUS_EXPR, offset,
7152 			         size_binop (MULT_EXPR,
7153 					     fold_convert (sizetype, index),
7154 					     unit_size));
7155 	  }
7156 	  break;
7157 
7158 	case REALPART_EXPR:
7159 	  break;
7160 
7161 	case IMAGPART_EXPR:
7162 	  bit_offset += *pbitsize;
7163 	  break;
7164 
7165 	case VIEW_CONVERT_EXPR:
7166 	  break;
7167 
7168 	case MEM_REF:
7169 	  /* Hand back the decl for MEM[&decl, off].  */
7170 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
7171 	    {
7172 	      tree off = TREE_OPERAND (exp, 1);
7173 	      if (!integer_zerop (off))
7174 		{
7175 		  offset_int boff, coff = mem_ref_offset (exp);
7176 		  boff = coff << LOG2_BITS_PER_UNIT;
7177 		  bit_offset += boff;
7178 		}
7179 	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7180 	    }
7181 	  goto done;
7182 
7183 	default:
7184 	  goto done;
7185 	}
7186 
7187       /* If any reference in the chain is volatile, the effect is volatile.  */
7188       if (TREE_THIS_VOLATILE (exp))
7189 	*pvolatilep = 1;
7190 
7191       exp = TREE_OPERAND (exp, 0);
7192     }
7193  done:
7194 
7195   /* If OFFSET is constant, see if we can return the whole thing as a
7196      constant bit position.  Make sure to handle overflow during
7197      this conversion.  */
7198   if (TREE_CODE (offset) == INTEGER_CST)
7199     {
7200       offset_int tem = wi::sext (wi::to_offset (offset),
7201 				 TYPE_PRECISION (sizetype));
7202       tem <<= LOG2_BITS_PER_UNIT;
7203       tem += bit_offset;
7204       if (wi::fits_shwi_p (tem))
7205 	{
7206 	  *pbitpos = tem.to_shwi ();
7207 	  *poffset = offset = NULL_TREE;
7208 	}
7209     }
7210 
7211   /* Otherwise, split it up.  */
7212   if (offset)
7213     {
7214       /* Avoid returning a negative bitpos as this may wreak havoc later.  */
7215       if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
7216         {
7217 	  offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
7218 	  offset_int tem = bit_offset.and_not (mask);
7219 	  /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
7220 	     Subtract it to BIT_OFFSET and add it (scaled) to OFFSET.  */
7221 	  bit_offset -= tem;
7222 	  tem >>= LOG2_BITS_PER_UNIT;
7223 	  offset = size_binop (PLUS_EXPR, offset,
7224 			       wide_int_to_tree (sizetype, tem));
7225 	}
7226 
7227       *pbitpos = bit_offset.to_shwi ();
7228       *poffset = offset;
7229     }
7230 
7231   /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
7232   if (mode == VOIDmode
7233       && blkmode_bitfield
7234       && (*pbitpos % BITS_PER_UNIT) == 0
7235       && (*pbitsize % BITS_PER_UNIT) == 0)
7236     *pmode = BLKmode;
7237   else
7238     *pmode = mode;
7239 
7240   return exp;
7241 }
7242 
7243 /* Alignment in bits the TARGET of an assignment may be assumed to have.  */
7244 
7245 static unsigned HOST_WIDE_INT
7246 target_align (const_tree target)
7247 {
7248   /* We might have a chain of nested references with intermediate misaligning
7249      bitfields components, so need to recurse to find out.  */
7250 
7251   unsigned HOST_WIDE_INT this_align, outer_align;
7252 
7253   switch (TREE_CODE (target))
7254     {
7255     case BIT_FIELD_REF:
7256       return 1;
7257 
7258     case COMPONENT_REF:
7259       this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7260       outer_align = target_align (TREE_OPERAND (target, 0));
7261       return MIN (this_align, outer_align);
7262 
7263     case ARRAY_REF:
7264     case ARRAY_RANGE_REF:
7265       this_align = TYPE_ALIGN (TREE_TYPE (target));
7266       outer_align = target_align (TREE_OPERAND (target, 0));
7267       return MIN (this_align, outer_align);
7268 
7269     CASE_CONVERT:
7270     case NON_LVALUE_EXPR:
7271     case VIEW_CONVERT_EXPR:
7272       this_align = TYPE_ALIGN (TREE_TYPE (target));
7273       outer_align = target_align (TREE_OPERAND (target, 0));
7274       return MAX (this_align, outer_align);
7275 
7276     default:
7277       return TYPE_ALIGN (TREE_TYPE (target));
7278     }
7279 }
7280 
7281 
7282 /* Given an rtx VALUE that may contain additions and multiplications, return
7283    an equivalent value that just refers to a register, memory, or constant.
7284    This is done by generating instructions to perform the arithmetic and
7285    returning a pseudo-register containing the value.
7286 
7287    The returned value may be a REG, SUBREG, MEM or constant.  */
7288 
7289 rtx
7290 force_operand (rtx value, rtx target)
7291 {
7292   rtx op1, op2;
7293   /* Use subtarget as the target for operand 0 of a binary operation.  */
7294   rtx subtarget = get_subtarget (target);
7295   enum rtx_code code = GET_CODE (value);
7296 
7297   /* Check for subreg applied to an expression produced by loop optimizer.  */
7298   if (code == SUBREG
7299       && !REG_P (SUBREG_REG (value))
7300       && !MEM_P (SUBREG_REG (value)))
7301     {
7302       value
7303 	= simplify_gen_subreg (GET_MODE (value),
7304 			       force_reg (GET_MODE (SUBREG_REG (value)),
7305 					  force_operand (SUBREG_REG (value),
7306 							 NULL_RTX)),
7307 			       GET_MODE (SUBREG_REG (value)),
7308 			       SUBREG_BYTE (value));
7309       code = GET_CODE (value);
7310     }
7311 
7312   /* Check for a PIC address load.  */
7313   if ((code == PLUS || code == MINUS)
7314       && XEXP (value, 0) == pic_offset_table_rtx
7315       && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7316 	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
7317 	  || GET_CODE (XEXP (value, 1)) == CONST))
7318     {
7319       if (!subtarget)
7320 	subtarget = gen_reg_rtx (GET_MODE (value));
7321       emit_move_insn (subtarget, value);
7322       return subtarget;
7323     }
7324 
7325   if (ARITHMETIC_P (value))
7326     {
7327       op2 = XEXP (value, 1);
7328       if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7329 	subtarget = 0;
7330       if (code == MINUS && CONST_INT_P (op2))
7331 	{
7332 	  code = PLUS;
7333 	  op2 = negate_rtx (GET_MODE (value), op2);
7334 	}
7335 
7336       /* Check for an addition with OP2 a constant integer and our first
7337          operand a PLUS of a virtual register and something else.  In that
7338          case, we want to emit the sum of the virtual register and the
7339          constant first and then add the other value.  This allows virtual
7340          register instantiation to simply modify the constant rather than
7341          creating another one around this addition.  */
7342       if (code == PLUS && CONST_INT_P (op2)
7343 	  && GET_CODE (XEXP (value, 0)) == PLUS
7344 	  && REG_P (XEXP (XEXP (value, 0), 0))
7345 	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7346 	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7347 	{
7348 	  rtx temp = expand_simple_binop (GET_MODE (value), code,
7349 					  XEXP (XEXP (value, 0), 0), op2,
7350 					  subtarget, 0, OPTAB_LIB_WIDEN);
7351 	  return expand_simple_binop (GET_MODE (value), code, temp,
7352 				      force_operand (XEXP (XEXP (value,
7353 								 0), 1), 0),
7354 				      target, 0, OPTAB_LIB_WIDEN);
7355 	}
7356 
7357       op1 = force_operand (XEXP (value, 0), subtarget);
7358       op2 = force_operand (op2, NULL_RTX);
7359       switch (code)
7360 	{
7361 	case MULT:
7362 	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
7363 	case DIV:
7364 	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
7365 	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
7366 					target, 1, OPTAB_LIB_WIDEN);
7367 	  else
7368 	    return expand_divmod (0,
7369 				  FLOAT_MODE_P (GET_MODE (value))
7370 				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
7371 				  GET_MODE (value), op1, op2, target, 0);
7372 	case MOD:
7373 	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7374 				target, 0);
7375 	case UDIV:
7376 	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7377 				target, 1);
7378 	case UMOD:
7379 	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7380 				target, 1);
7381 	case ASHIFTRT:
7382 	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
7383 				      target, 0, OPTAB_LIB_WIDEN);
7384 	default:
7385 	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
7386 				      target, 1, OPTAB_LIB_WIDEN);
7387 	}
7388     }
7389   if (UNARY_P (value))
7390     {
7391       if (!target)
7392 	target = gen_reg_rtx (GET_MODE (value));
7393       op1 = force_operand (XEXP (value, 0), NULL_RTX);
7394       switch (code)
7395 	{
7396 	case ZERO_EXTEND:
7397 	case SIGN_EXTEND:
7398 	case TRUNCATE:
7399 	case FLOAT_EXTEND:
7400 	case FLOAT_TRUNCATE:
7401 	  convert_move (target, op1, code == ZERO_EXTEND);
7402 	  return target;
7403 
7404 	case FIX:
7405 	case UNSIGNED_FIX:
7406 	  expand_fix (target, op1, code == UNSIGNED_FIX);
7407 	  return target;
7408 
7409 	case FLOAT:
7410 	case UNSIGNED_FLOAT:
7411 	  expand_float (target, op1, code == UNSIGNED_FLOAT);
7412 	  return target;
7413 
7414 	default:
7415 	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7416 	}
7417     }
7418 
7419 #ifdef INSN_SCHEDULING
7420   /* On machines that have insn scheduling, we want all memory reference to be
7421      explicit, so we need to deal with such paradoxical SUBREGs.  */
7422   if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7423     value
7424       = simplify_gen_subreg (GET_MODE (value),
7425 			     force_reg (GET_MODE (SUBREG_REG (value)),
7426 					force_operand (SUBREG_REG (value),
7427 						       NULL_RTX)),
7428 			     GET_MODE (SUBREG_REG (value)),
7429 			     SUBREG_BYTE (value));
7430 #endif
7431 
7432   return value;
7433 }
7434 
7435 /* Subroutine of expand_expr: return nonzero iff there is no way that
7436    EXP can reference X, which is being modified.  TOP_P is nonzero if this
7437    call is going to be used to determine whether we need a temporary
7438    for EXP, as opposed to a recursive call to this function.
7439 
7440    It is always safe for this routine to return zero since it merely
7441    searches for optimization opportunities.  */
7442 
7443 int
7444 safe_from_p (const_rtx x, tree exp, int top_p)
7445 {
7446   rtx exp_rtl = 0;
7447   int i, nops;
7448 
7449   if (x == 0
7450       /* If EXP has varying size, we MUST use a target since we currently
7451 	 have no way of allocating temporaries of variable size
7452 	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7453 	 So we assume here that something at a higher level has prevented a
7454 	 clash.  This is somewhat bogus, but the best we can do.  Only
7455 	 do this when X is BLKmode and when we are at the top level.  */
7456       || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7457 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7458 	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7459 	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7460 	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7461 	      != INTEGER_CST)
7462 	  && GET_MODE (x) == BLKmode)
7463       /* If X is in the outgoing argument area, it is always safe.  */
7464       || (MEM_P (x)
7465 	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
7466 	      || (GET_CODE (XEXP (x, 0)) == PLUS
7467 		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7468     return 1;
7469 
7470   /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7471      find the underlying pseudo.  */
7472   if (GET_CODE (x) == SUBREG)
7473     {
7474       x = SUBREG_REG (x);
7475       if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7476 	return 0;
7477     }
7478 
7479   /* Now look at our tree code and possibly recurse.  */
7480   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7481     {
7482     case tcc_declaration:
7483       exp_rtl = DECL_RTL_IF_SET (exp);
7484       break;
7485 
7486     case tcc_constant:
7487       return 1;
7488 
7489     case tcc_exceptional:
7490       if (TREE_CODE (exp) == TREE_LIST)
7491 	{
7492 	  while (1)
7493 	    {
7494 	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7495 		return 0;
7496 	      exp = TREE_CHAIN (exp);
7497 	      if (!exp)
7498 		return 1;
7499 	      if (TREE_CODE (exp) != TREE_LIST)
7500 		return safe_from_p (x, exp, 0);
7501 	    }
7502 	}
7503       else if (TREE_CODE (exp) == CONSTRUCTOR)
7504 	{
7505 	  constructor_elt *ce;
7506 	  unsigned HOST_WIDE_INT idx;
7507 
7508 	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7509 	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7510 		|| !safe_from_p (x, ce->value, 0))
7511 	      return 0;
7512 	  return 1;
7513 	}
7514       else if (TREE_CODE (exp) == ERROR_MARK)
7515 	return 1;	/* An already-visited SAVE_EXPR? */
7516       else
7517 	return 0;
7518 
7519     case tcc_statement:
7520       /* The only case we look at here is the DECL_INITIAL inside a
7521 	 DECL_EXPR.  */
7522       return (TREE_CODE (exp) != DECL_EXPR
7523 	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7524 	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7525 	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7526 
7527     case tcc_binary:
7528     case tcc_comparison:
7529       if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7530 	return 0;
7531       /* Fall through.  */
7532 
7533     case tcc_unary:
7534       return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7535 
7536     case tcc_expression:
7537     case tcc_reference:
7538     case tcc_vl_exp:
7539       /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
7540 	 the expression.  If it is set, we conflict iff we are that rtx or
7541 	 both are in memory.  Otherwise, we check all operands of the
7542 	 expression recursively.  */
7543 
7544       switch (TREE_CODE (exp))
7545 	{
7546 	case ADDR_EXPR:
7547 	  /* If the operand is static or we are static, we can't conflict.
7548 	     Likewise if we don't conflict with the operand at all.  */
7549 	  if (staticp (TREE_OPERAND (exp, 0))
7550 	      || TREE_STATIC (exp)
7551 	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7552 	    return 1;
7553 
7554 	  /* Otherwise, the only way this can conflict is if we are taking
7555 	     the address of a DECL a that address if part of X, which is
7556 	     very rare.  */
7557 	  exp = TREE_OPERAND (exp, 0);
7558 	  if (DECL_P (exp))
7559 	    {
7560 	      if (!DECL_RTL_SET_P (exp)
7561 		  || !MEM_P (DECL_RTL (exp)))
7562 		return 0;
7563 	      else
7564 		exp_rtl = XEXP (DECL_RTL (exp), 0);
7565 	    }
7566 	  break;
7567 
7568 	case MEM_REF:
7569 	  if (MEM_P (x)
7570 	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7571 					get_alias_set (exp)))
7572 	    return 0;
7573 	  break;
7574 
7575 	case CALL_EXPR:
7576 	  /* Assume that the call will clobber all hard registers and
7577 	     all of memory.  */
7578 	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7579 	      || MEM_P (x))
7580 	    return 0;
7581 	  break;
7582 
7583 	case WITH_CLEANUP_EXPR:
7584 	case CLEANUP_POINT_EXPR:
7585 	  /* Lowered by gimplify.c.  */
7586 	  gcc_unreachable ();
7587 
7588 	case SAVE_EXPR:
7589 	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7590 
7591 	default:
7592 	  break;
7593 	}
7594 
7595       /* If we have an rtx, we do not need to scan our operands.  */
7596       if (exp_rtl)
7597 	break;
7598 
7599       nops = TREE_OPERAND_LENGTH (exp);
7600       for (i = 0; i < nops; i++)
7601 	if (TREE_OPERAND (exp, i) != 0
7602 	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7603 	  return 0;
7604 
7605       break;
7606 
7607     case tcc_type:
7608       /* Should never get a type here.  */
7609       gcc_unreachable ();
7610     }
7611 
7612   /* If we have an rtl, find any enclosed object.  Then see if we conflict
7613      with it.  */
7614   if (exp_rtl)
7615     {
7616       if (GET_CODE (exp_rtl) == SUBREG)
7617 	{
7618 	  exp_rtl = SUBREG_REG (exp_rtl);
7619 	  if (REG_P (exp_rtl)
7620 	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7621 	    return 0;
7622 	}
7623 
7624       /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
7625 	 are memory and they conflict.  */
7626       return ! (rtx_equal_p (x, exp_rtl)
7627 		|| (MEM_P (x) && MEM_P (exp_rtl)
7628 		    && true_dependence (exp_rtl, VOIDmode, x)));
7629     }
7630 
7631   /* If we reach here, it is safe.  */
7632   return 1;
7633 }
7634 
7635 
7636 /* Return the highest power of two that EXP is known to be a multiple of.
7637    This is used in updating alignment of MEMs in array references.  */
7638 
7639 unsigned HOST_WIDE_INT
7640 highest_pow2_factor (const_tree exp)
7641 {
7642   unsigned HOST_WIDE_INT ret;
7643   int trailing_zeros = tree_ctz (exp);
7644   if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7645     return BIGGEST_ALIGNMENT;
7646   ret = HOST_WIDE_INT_1U << trailing_zeros;
7647   if (ret > BIGGEST_ALIGNMENT)
7648     return BIGGEST_ALIGNMENT;
7649   return ret;
7650 }
7651 
7652 /* Similar, except that the alignment requirements of TARGET are
7653    taken into account.  Assume it is at least as aligned as its
7654    type, unless it is a COMPONENT_REF in which case the layout of
7655    the structure gives the alignment.  */
7656 
7657 static unsigned HOST_WIDE_INT
7658 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7659 {
7660   unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7661   unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7662 
7663   return MAX (factor, talign);
7664 }
7665 
7666 /* Convert the tree comparison code TCODE to the rtl one where the
7667    signedness is UNSIGNEDP.  */
7668 
7669 static enum rtx_code
7670 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7671 {
7672   enum rtx_code code;
7673   switch (tcode)
7674     {
7675     case EQ_EXPR:
7676       code = EQ;
7677       break;
7678     case NE_EXPR:
7679       code = NE;
7680       break;
7681     case LT_EXPR:
7682       code = unsignedp ? LTU : LT;
7683       break;
7684     case LE_EXPR:
7685       code = unsignedp ? LEU : LE;
7686       break;
7687     case GT_EXPR:
7688       code = unsignedp ? GTU : GT;
7689       break;
7690     case GE_EXPR:
7691       code = unsignedp ? GEU : GE;
7692       break;
7693     case UNORDERED_EXPR:
7694       code = UNORDERED;
7695       break;
7696     case ORDERED_EXPR:
7697       code = ORDERED;
7698       break;
7699     case UNLT_EXPR:
7700       code = UNLT;
7701       break;
7702     case UNLE_EXPR:
7703       code = UNLE;
7704       break;
7705     case UNGT_EXPR:
7706       code = UNGT;
7707       break;
7708     case UNGE_EXPR:
7709       code = UNGE;
7710       break;
7711     case UNEQ_EXPR:
7712       code = UNEQ;
7713       break;
7714     case LTGT_EXPR:
7715       code = LTGT;
7716       break;
7717 
7718     default:
7719       gcc_unreachable ();
7720     }
7721   return code;
7722 }
7723 
7724 /* Subroutine of expand_expr.  Expand the two operands of a binary
7725    expression EXP0 and EXP1 placing the results in OP0 and OP1.
7726    The value may be stored in TARGET if TARGET is nonzero.  The
7727    MODIFIER argument is as documented by expand_expr.  */
7728 
7729 void
7730 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7731 		 enum expand_modifier modifier)
7732 {
7733   if (! safe_from_p (target, exp1, 1))
7734     target = 0;
7735   if (operand_equal_p (exp0, exp1, 0))
7736     {
7737       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7738       *op1 = copy_rtx (*op0);
7739     }
7740   else
7741     {
7742       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7743       *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7744     }
7745 }
7746 
7747 
7748 /* Return a MEM that contains constant EXP.  DEFER is as for
7749    output_constant_def and MODIFIER is as for expand_expr.  */
7750 
7751 static rtx
7752 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7753 {
7754   rtx mem;
7755 
7756   mem = output_constant_def (exp, defer);
7757   if (modifier != EXPAND_INITIALIZER)
7758     mem = use_anchored_address (mem);
7759   return mem;
7760 }
7761 
7762 /* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
7763    The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
7764 
7765 static rtx
7766 expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7767 		         enum expand_modifier modifier, addr_space_t as)
7768 {
7769   rtx result, subtarget;
7770   tree inner, offset;
7771   HOST_WIDE_INT bitsize, bitpos;
7772   int unsignedp, reversep, volatilep = 0;
7773   machine_mode mode1;
7774 
7775   /* If we are taking the address of a constant and are at the top level,
7776      we have to use output_constant_def since we can't call force_const_mem
7777      at top level.  */
7778   /* ??? This should be considered a front-end bug.  We should not be
7779      generating ADDR_EXPR of something that isn't an LVALUE.  The only
7780      exception here is STRING_CST.  */
7781   if (CONSTANT_CLASS_P (exp))
7782     {
7783       result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7784       if (modifier < EXPAND_SUM)
7785 	result = force_operand (result, target);
7786       return result;
7787     }
7788 
7789   /* Everything must be something allowed by is_gimple_addressable.  */
7790   switch (TREE_CODE (exp))
7791     {
7792     case INDIRECT_REF:
7793       /* This case will happen via recursion for &a->b.  */
7794       return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7795 
7796     case MEM_REF:
7797       {
7798 	tree tem = TREE_OPERAND (exp, 0);
7799 	if (!integer_zerop (TREE_OPERAND (exp, 1)))
7800 	  tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7801 	return expand_expr (tem, target, tmode, modifier);
7802       }
7803 
7804     case CONST_DECL:
7805       /* Expand the initializer like constants above.  */
7806       result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7807 					   0, modifier), 0);
7808       if (modifier < EXPAND_SUM)
7809 	result = force_operand (result, target);
7810       return result;
7811 
7812     case REALPART_EXPR:
7813       /* The real part of the complex number is always first, therefore
7814 	 the address is the same as the address of the parent object.  */
7815       offset = 0;
7816       bitpos = 0;
7817       inner = TREE_OPERAND (exp, 0);
7818       break;
7819 
7820     case IMAGPART_EXPR:
7821       /* The imaginary part of the complex number is always second.
7822 	 The expression is therefore always offset by the size of the
7823 	 scalar type.  */
7824       offset = 0;
7825       bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7826       inner = TREE_OPERAND (exp, 0);
7827       break;
7828 
7829     case COMPOUND_LITERAL_EXPR:
7830       /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7831 	 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7832 	 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7833 	 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7834 	 the initializers aren't gimplified.  */
7835       if (COMPOUND_LITERAL_EXPR_DECL (exp)
7836 	  && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7837 	return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7838 					target, tmode, modifier, as);
7839       /* FALLTHRU */
7840     default:
7841       /* If the object is a DECL, then expand it for its rtl.  Don't bypass
7842 	 expand_expr, as that can have various side effects; LABEL_DECLs for
7843 	 example, may not have their DECL_RTL set yet.  Expand the rtl of
7844 	 CONSTRUCTORs too, which should yield a memory reference for the
7845 	 constructor's contents.  Assume language specific tree nodes can
7846 	 be expanded in some interesting way.  */
7847       gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7848       if (DECL_P (exp)
7849 	  || TREE_CODE (exp) == CONSTRUCTOR
7850 	  || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7851 	{
7852 	  result = expand_expr (exp, target, tmode,
7853 				modifier == EXPAND_INITIALIZER
7854 				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7855 
7856 	  /* If the DECL isn't in memory, then the DECL wasn't properly
7857 	     marked TREE_ADDRESSABLE, which will be either a front-end
7858 	     or a tree optimizer bug.  */
7859 
7860 	  gcc_assert (MEM_P (result));
7861 	  result = XEXP (result, 0);
7862 
7863 	  /* ??? Is this needed anymore?  */
7864 	  if (DECL_P (exp))
7865 	    TREE_USED (exp) = 1;
7866 
7867 	  if (modifier != EXPAND_INITIALIZER
7868 	      && modifier != EXPAND_CONST_ADDRESS
7869 	      && modifier != EXPAND_SUM)
7870 	    result = force_operand (result, target);
7871 	  return result;
7872 	}
7873 
7874       /* Pass FALSE as the last argument to get_inner_reference although
7875 	 we are expanding to RTL.  The rationale is that we know how to
7876 	 handle "aligning nodes" here: we can just bypass them because
7877 	 they won't change the final object whose address will be returned
7878 	 (they actually exist only for that purpose).  */
7879       inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
7880 				   &unsignedp, &reversep, &volatilep);
7881       break;
7882     }
7883 
7884   /* We must have made progress.  */
7885   gcc_assert (inner != exp);
7886 
7887   subtarget = offset || bitpos ? NULL_RTX : target;
7888   /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7889      inner alignment, force the inner to be sufficiently aligned.  */
7890   if (CONSTANT_CLASS_P (inner)
7891       && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7892     {
7893       inner = copy_node (inner);
7894       TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7895       SET_TYPE_ALIGN (TREE_TYPE (inner), TYPE_ALIGN (TREE_TYPE (exp)));
7896       TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7897     }
7898   result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7899 
7900   if (offset)
7901     {
7902       rtx tmp;
7903 
7904       if (modifier != EXPAND_NORMAL)
7905 	result = force_operand (result, NULL);
7906       tmp = expand_expr (offset, NULL_RTX, tmode,
7907 			 modifier == EXPAND_INITIALIZER
7908 			  ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7909 
7910       /* expand_expr is allowed to return an object in a mode other
7911 	 than TMODE.  If it did, we need to convert.  */
7912       if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7913 	tmp = convert_modes (tmode, GET_MODE (tmp),
7914 			     tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7915       result = convert_memory_address_addr_space (tmode, result, as);
7916       tmp = convert_memory_address_addr_space (tmode, tmp, as);
7917 
7918       if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7919 	result = simplify_gen_binary (PLUS, tmode, result, tmp);
7920       else
7921 	{
7922 	  subtarget = bitpos ? NULL_RTX : target;
7923 	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7924 					1, OPTAB_LIB_WIDEN);
7925 	}
7926     }
7927 
7928   if (bitpos)
7929     {
7930       /* Someone beforehand should have rejected taking the address
7931 	 of such an object.  */
7932       gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7933 
7934       result = convert_memory_address_addr_space (tmode, result, as);
7935       result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7936       if (modifier < EXPAND_SUM)
7937 	result = force_operand (result, target);
7938     }
7939 
7940   return result;
7941 }
7942 
7943 /* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
7944    The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
7945 
7946 static rtx
7947 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7948 		       enum expand_modifier modifier)
7949 {
7950   addr_space_t as = ADDR_SPACE_GENERIC;
7951   machine_mode address_mode = Pmode;
7952   machine_mode pointer_mode = ptr_mode;
7953   machine_mode rmode;
7954   rtx result;
7955 
7956   /* Target mode of VOIDmode says "whatever's natural".  */
7957   if (tmode == VOIDmode)
7958     tmode = TYPE_MODE (TREE_TYPE (exp));
7959 
7960   if (POINTER_TYPE_P (TREE_TYPE (exp)))
7961     {
7962       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7963       address_mode = targetm.addr_space.address_mode (as);
7964       pointer_mode = targetm.addr_space.pointer_mode (as);
7965     }
7966 
7967   /* We can get called with some Weird Things if the user does silliness
7968      like "(short) &a".  In that case, convert_memory_address won't do
7969      the right thing, so ignore the given target mode.  */
7970   if (tmode != address_mode && tmode != pointer_mode)
7971     tmode = address_mode;
7972 
7973   result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7974 				    tmode, modifier, as);
7975 
7976   /* Despite expand_expr claims concerning ignoring TMODE when not
7977      strictly convenient, stuff breaks if we don't honor it.  Note
7978      that combined with the above, we only do this for pointer modes.  */
7979   rmode = GET_MODE (result);
7980   if (rmode == VOIDmode)
7981     rmode = tmode;
7982   if (rmode != tmode)
7983     result = convert_memory_address_addr_space (tmode, result, as);
7984 
7985   return result;
7986 }
7987 
7988 /* Generate code for computing CONSTRUCTOR EXP.
7989    An rtx for the computed value is returned.  If AVOID_TEMP_MEM
7990    is TRUE, instead of creating a temporary variable in memory
7991    NULL is returned and the caller needs to handle it differently.  */
7992 
7993 static rtx
7994 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7995 		    bool avoid_temp_mem)
7996 {
7997   tree type = TREE_TYPE (exp);
7998   machine_mode mode = TYPE_MODE (type);
7999 
8000   /* Try to avoid creating a temporary at all.  This is possible
8001      if all of the initializer is zero.
8002      FIXME: try to handle all [0..255] initializers we can handle
8003      with memset.  */
8004   if (TREE_STATIC (exp)
8005       && !TREE_ADDRESSABLE (exp)
8006       && target != 0 && mode == BLKmode
8007       && all_zeros_p (exp))
8008     {
8009       clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
8010       return target;
8011     }
8012 
8013   /* All elts simple constants => refer to a constant in memory.  But
8014      if this is a non-BLKmode mode, let it store a field at a time
8015      since that should make a CONST_INT, CONST_WIDE_INT or
8016      CONST_DOUBLE when we fold.  Likewise, if we have a target we can
8017      use, it is best to store directly into the target unless the type
8018      is large enough that memcpy will be used.  If we are making an
8019      initializer and all operands are constant, put it in memory as
8020      well.
8021 
8022      FIXME: Avoid trying to fill vector constructors piece-meal.
8023      Output them with output_constant_def below unless we're sure
8024      they're zeros.  This should go away when vector initializers
8025      are treated like VECTOR_CST instead of arrays.  */
8026   if ((TREE_STATIC (exp)
8027        && ((mode == BLKmode
8028 	    && ! (target != 0 && safe_from_p (target, exp, 1)))
8029 		  || TREE_ADDRESSABLE (exp)
8030 		  || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
8031 		      && (! can_move_by_pieces
8032 				     (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
8033 				      TYPE_ALIGN (type)))
8034 		      && ! mostly_zeros_p (exp))))
8035       || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
8036 	  && TREE_CONSTANT (exp)))
8037     {
8038       rtx constructor;
8039 
8040       if (avoid_temp_mem)
8041 	return NULL_RTX;
8042 
8043       constructor = expand_expr_constant (exp, 1, modifier);
8044 
8045       if (modifier != EXPAND_CONST_ADDRESS
8046 	  && modifier != EXPAND_INITIALIZER
8047 	  && modifier != EXPAND_SUM)
8048 	constructor = validize_mem (constructor);
8049 
8050       return constructor;
8051     }
8052 
8053   /* Handle calls that pass values in multiple non-contiguous
8054      locations.  The Irix 6 ABI has examples of this.  */
8055   if (target == 0 || ! safe_from_p (target, exp, 1)
8056       || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
8057     {
8058       if (avoid_temp_mem)
8059 	return NULL_RTX;
8060 
8061       target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
8062     }
8063 
8064   store_constructor (exp, target, 0, int_expr_size (exp), false);
8065   return target;
8066 }
8067 
8068 
8069 /* expand_expr: generate code for computing expression EXP.
8070    An rtx for the computed value is returned.  The value is never null.
8071    In the case of a void EXP, const0_rtx is returned.
8072 
8073    The value may be stored in TARGET if TARGET is nonzero.
8074    TARGET is just a suggestion; callers must assume that
8075    the rtx returned may not be the same as TARGET.
8076 
8077    If TARGET is CONST0_RTX, it means that the value will be ignored.
8078 
8079    If TMODE is not VOIDmode, it suggests generating the
8080    result in mode TMODE.  But this is done only when convenient.
8081    Otherwise, TMODE is ignored and the value generated in its natural mode.
8082    TMODE is just a suggestion; callers must assume that
8083    the rtx returned may not have mode TMODE.
8084 
8085    Note that TARGET may have neither TMODE nor MODE.  In that case, it
8086    probably will not be used.
8087 
8088    If MODIFIER is EXPAND_SUM then when EXP is an addition
8089    we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
8090    or a nest of (PLUS ...) and (MINUS ...) where the terms are
8091    products as above, or REG or MEM, or constant.
8092    Ordinarily in such cases we would output mul or add instructions
8093    and then return a pseudo reg containing the sum.
8094 
8095    EXPAND_INITIALIZER is much like EXPAND_SUM except that
8096    it also marks a label as absolutely required (it can't be dead).
8097    It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
8098    This is used for outputting expressions used in initializers.
8099 
8100    EXPAND_CONST_ADDRESS says that it is okay to return a MEM
8101    with a constant address even if that address is not normally legitimate.
8102    EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
8103 
8104    EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
8105    a call parameter.  Such targets require special care as we haven't yet
8106    marked TARGET so that it's safe from being trashed by libcalls.  We
8107    don't want to use TARGET for anything but the final result;
8108    Intermediate values must go elsewhere.   Additionally, calls to
8109    emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
8110 
8111    If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
8112    address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
8113    DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
8114    COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
8115    recursively.
8116 
8117    If INNER_REFERENCE_P is true, we are expanding an inner reference.
8118    In this case, we don't adjust a returned MEM rtx that wouldn't be
8119    sufficiently aligned for its mode; instead, it's up to the caller
8120    to deal with it afterwards.  This is used to make sure that unaligned
8121    base objects for which out-of-bounds accesses are supported, for
8122    example record types with trailing arrays, aren't realigned behind
8123    the back of the caller.
8124    The normal operating mode is to pass FALSE for this parameter.  */
8125 
8126 rtx
8127 expand_expr_real (tree exp, rtx target, machine_mode tmode,
8128 		  enum expand_modifier modifier, rtx *alt_rtl,
8129 		  bool inner_reference_p)
8130 {
8131   rtx ret;
8132 
8133   /* Handle ERROR_MARK before anybody tries to access its type.  */
8134   if (TREE_CODE (exp) == ERROR_MARK
8135       || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
8136     {
8137       ret = CONST0_RTX (tmode);
8138       return ret ? ret : const0_rtx;
8139     }
8140 
8141   ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
8142 			    inner_reference_p);
8143   return ret;
8144 }
8145 
8146 /* Try to expand the conditional expression which is represented by
8147    TREEOP0 ? TREEOP1 : TREEOP2 using conditonal moves.  If it succeeds
8148    return the rtl reg which represents the result.  Otherwise return
8149    NULL_RTX.  */
8150 
8151 static rtx
8152 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
8153 			      tree treeop1 ATTRIBUTE_UNUSED,
8154 			      tree treeop2 ATTRIBUTE_UNUSED)
8155 {
8156   rtx insn;
8157   rtx op00, op01, op1, op2;
8158   enum rtx_code comparison_code;
8159   machine_mode comparison_mode;
8160   gimple *srcstmt;
8161   rtx temp;
8162   tree type = TREE_TYPE (treeop1);
8163   int unsignedp = TYPE_UNSIGNED (type);
8164   machine_mode mode = TYPE_MODE (type);
8165   machine_mode orig_mode = mode;
8166   static bool expanding_cond_expr_using_cmove = false;
8167 
8168   /* Conditional move expansion can end up TERing two operands which,
8169      when recursively hitting conditional expressions can result in
8170      exponential behavior if the cmove expansion ultimatively fails.
8171      It's hardly profitable to TER a cmove into a cmove so avoid doing
8172      that by failing early if we end up recursing.  */
8173   if (expanding_cond_expr_using_cmove)
8174     return NULL_RTX;
8175 
8176   /* If we cannot do a conditional move on the mode, try doing it
8177      with the promoted mode. */
8178   if (!can_conditionally_move_p (mode))
8179     {
8180       mode = promote_mode (type, mode, &unsignedp);
8181       if (!can_conditionally_move_p (mode))
8182 	return NULL_RTX;
8183       temp = assign_temp (type, 0, 0); /* Use promoted mode for temp.  */
8184     }
8185   else
8186     temp = assign_temp (type, 0, 1);
8187 
8188   expanding_cond_expr_using_cmove = true;
8189   start_sequence ();
8190   expand_operands (treeop1, treeop2,
8191 		   temp, &op1, &op2, EXPAND_NORMAL);
8192 
8193   if (TREE_CODE (treeop0) == SSA_NAME
8194       && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
8195     {
8196       tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
8197       enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
8198       op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
8199       op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
8200       comparison_mode = TYPE_MODE (type);
8201       unsignedp = TYPE_UNSIGNED (type);
8202       comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8203     }
8204   else if (COMPARISON_CLASS_P (treeop0))
8205     {
8206       tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8207       enum tree_code cmpcode = TREE_CODE (treeop0);
8208       op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8209       op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8210       unsignedp = TYPE_UNSIGNED (type);
8211       comparison_mode = TYPE_MODE (type);
8212       comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8213     }
8214   else
8215     {
8216       op00 = expand_normal (treeop0);
8217       op01 = const0_rtx;
8218       comparison_code = NE;
8219       comparison_mode = GET_MODE (op00);
8220       if (comparison_mode == VOIDmode)
8221 	comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8222     }
8223   expanding_cond_expr_using_cmove = false;
8224 
8225   if (GET_MODE (op1) != mode)
8226     op1 = gen_lowpart (mode, op1);
8227 
8228   if (GET_MODE (op2) != mode)
8229     op2 = gen_lowpart (mode, op2);
8230 
8231   /* Try to emit the conditional move.  */
8232   insn = emit_conditional_move (temp, comparison_code,
8233 				op00, op01, comparison_mode,
8234 				op1, op2, mode,
8235 				unsignedp);
8236 
8237   /* If we could do the conditional move, emit the sequence,
8238      and return.  */
8239   if (insn)
8240     {
8241       rtx_insn *seq = get_insns ();
8242       end_sequence ();
8243       emit_insn (seq);
8244       return convert_modes (orig_mode, mode, temp, 0);
8245     }
8246 
8247   /* Otherwise discard the sequence and fall back to code with
8248      branches.  */
8249   end_sequence ();
8250   return NULL_RTX;
8251 }
8252 
8253 rtx
8254 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8255 		    enum expand_modifier modifier)
8256 {
8257   rtx op0, op1, op2, temp;
8258   rtx_code_label *lab;
8259   tree type;
8260   int unsignedp;
8261   machine_mode mode;
8262   enum tree_code code = ops->code;
8263   optab this_optab;
8264   rtx subtarget, original_target;
8265   int ignore;
8266   bool reduce_bit_field;
8267   location_t loc = ops->location;
8268   tree treeop0, treeop1, treeop2;
8269 #define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			  \
8270 				 ? reduce_to_bit_field_precision ((expr), \
8271 								  target, \
8272 								  type)	  \
8273 				 : (expr))
8274 
8275   type = ops->type;
8276   mode = TYPE_MODE (type);
8277   unsignedp = TYPE_UNSIGNED (type);
8278 
8279   treeop0 = ops->op0;
8280   treeop1 = ops->op1;
8281   treeop2 = ops->op2;
8282 
8283   /* We should be called only on simple (binary or unary) expressions,
8284      exactly those that are valid in gimple expressions that aren't
8285      GIMPLE_SINGLE_RHS (or invalid).  */
8286   gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8287 	      || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8288 	      || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8289 
8290   ignore = (target == const0_rtx
8291 	    || ((CONVERT_EXPR_CODE_P (code)
8292 		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8293 		&& TREE_CODE (type) == VOID_TYPE));
8294 
8295   /* We should be called only if we need the result.  */
8296   gcc_assert (!ignore);
8297 
8298   /* An operation in what may be a bit-field type needs the
8299      result to be reduced to the precision of the bit-field type,
8300      which is narrower than that of the type's mode.  */
8301   reduce_bit_field = (INTEGRAL_TYPE_P (type)
8302 		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8303 
8304   if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8305     target = 0;
8306 
8307   /* Use subtarget as the target for operand 0 of a binary operation.  */
8308   subtarget = get_subtarget (target);
8309   original_target = target;
8310 
8311   switch (code)
8312     {
8313     case NON_LVALUE_EXPR:
8314     case PAREN_EXPR:
8315     CASE_CONVERT:
8316       if (treeop0 == error_mark_node)
8317 	return const0_rtx;
8318 
8319       if (TREE_CODE (type) == UNION_TYPE)
8320 	{
8321 	  tree valtype = TREE_TYPE (treeop0);
8322 
8323 	  /* If both input and output are BLKmode, this conversion isn't doing
8324 	     anything except possibly changing memory attribute.  */
8325 	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8326 	    {
8327 	      rtx result = expand_expr (treeop0, target, tmode,
8328 					modifier);
8329 
8330 	      result = copy_rtx (result);
8331 	      set_mem_attributes (result, type, 0);
8332 	      return result;
8333 	    }
8334 
8335 	  if (target == 0)
8336 	    {
8337 	      if (TYPE_MODE (type) != BLKmode)
8338 		target = gen_reg_rtx (TYPE_MODE (type));
8339 	      else
8340 		target = assign_temp (type, 1, 1);
8341 	    }
8342 
8343 	  if (MEM_P (target))
8344 	    /* Store data into beginning of memory target.  */
8345 	    store_expr (treeop0,
8346 			adjust_address (target, TYPE_MODE (valtype), 0),
8347 			modifier == EXPAND_STACK_PARM,
8348 			false, TYPE_REVERSE_STORAGE_ORDER (type));
8349 
8350 	  else
8351 	    {
8352 	      gcc_assert (REG_P (target)
8353 			  && !TYPE_REVERSE_STORAGE_ORDER (type));
8354 
8355 	      /* Store this field into a union of the proper type.  */
8356 	      store_field (target,
8357 			   MIN ((int_size_in_bytes (TREE_TYPE
8358 						    (treeop0))
8359 				 * BITS_PER_UNIT),
8360 				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8361 			   0, 0, 0, TYPE_MODE (valtype), treeop0, 0,
8362 			   false, false);
8363 	    }
8364 
8365 	  /* Return the entire union.  */
8366 	  return target;
8367 	}
8368 
8369       if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8370 	{
8371 	  op0 = expand_expr (treeop0, target, VOIDmode,
8372 			     modifier);
8373 
8374 	  /* If the signedness of the conversion differs and OP0 is
8375 	     a promoted SUBREG, clear that indication since we now
8376 	     have to do the proper extension.  */
8377 	  if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8378 	      && GET_CODE (op0) == SUBREG)
8379 	    SUBREG_PROMOTED_VAR_P (op0) = 0;
8380 
8381 	  return REDUCE_BIT_FIELD (op0);
8382 	}
8383 
8384       op0 = expand_expr (treeop0, NULL_RTX, mode,
8385 			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8386       if (GET_MODE (op0) == mode)
8387 	;
8388 
8389       /* If OP0 is a constant, just convert it into the proper mode.  */
8390       else if (CONSTANT_P (op0))
8391 	{
8392 	  tree inner_type = TREE_TYPE (treeop0);
8393 	  machine_mode inner_mode = GET_MODE (op0);
8394 
8395 	  if (inner_mode == VOIDmode)
8396 	    inner_mode = TYPE_MODE (inner_type);
8397 
8398 	  if (modifier == EXPAND_INITIALIZER)
8399 	    op0 = lowpart_subreg (mode, op0, inner_mode);
8400 	  else
8401 	    op0=  convert_modes (mode, inner_mode, op0,
8402 				 TYPE_UNSIGNED (inner_type));
8403 	}
8404 
8405       else if (modifier == EXPAND_INITIALIZER)
8406 	op0 = gen_rtx_fmt_e (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8407 			     ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8408 
8409       else if (target == 0)
8410 	op0 = convert_to_mode (mode, op0,
8411 			       TYPE_UNSIGNED (TREE_TYPE
8412 					      (treeop0)));
8413       else
8414 	{
8415 	  convert_move (target, op0,
8416 			TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8417 	  op0 = target;
8418 	}
8419 
8420       return REDUCE_BIT_FIELD (op0);
8421 
8422     case ADDR_SPACE_CONVERT_EXPR:
8423       {
8424 	tree treeop0_type = TREE_TYPE (treeop0);
8425 
8426 	gcc_assert (POINTER_TYPE_P (type));
8427 	gcc_assert (POINTER_TYPE_P (treeop0_type));
8428 
8429 	addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8430 	addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8431 
8432         /* Conversions between pointers to the same address space should
8433 	   have been implemented via CONVERT_EXPR / NOP_EXPR.  */
8434 	gcc_assert (as_to != as_from);
8435 
8436 	op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8437 
8438         /* Ask target code to handle conversion between pointers
8439 	   to overlapping address spaces.  */
8440 	if (targetm.addr_space.subset_p (as_to, as_from)
8441 	    || targetm.addr_space.subset_p (as_from, as_to))
8442 	  {
8443 	    op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8444 	  }
8445         else
8446           {
8447 	    /* For disjoint address spaces, converting anything but a null
8448 	       pointer invokes undefined behavior.  We truncate or extend the
8449 	       value as if we'd converted via integers, which handles 0 as
8450 	       required, and all others as the programmer likely expects.  */
8451 #ifndef POINTERS_EXTEND_UNSIGNED
8452 	    const int POINTERS_EXTEND_UNSIGNED = 1;
8453 #endif
8454 	    op0 = convert_modes (mode, TYPE_MODE (treeop0_type),
8455 				 op0, POINTERS_EXTEND_UNSIGNED);
8456 	  }
8457 	gcc_assert (op0);
8458 	return op0;
8459       }
8460 
8461     case POINTER_PLUS_EXPR:
8462       /* Even though the sizetype mode and the pointer's mode can be different
8463          expand is able to handle this correctly and get the correct result out
8464          of the PLUS_EXPR code.  */
8465       /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8466          if sizetype precision is smaller than pointer precision.  */
8467       if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8468 	treeop1 = fold_convert_loc (loc, type,
8469 				    fold_convert_loc (loc, ssizetype,
8470 						      treeop1));
8471       /* If sizetype precision is larger than pointer precision, truncate the
8472 	 offset to have matching modes.  */
8473       else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8474 	treeop1 = fold_convert_loc (loc, type, treeop1);
8475       /* FALLTHRU */
8476 
8477     case PLUS_EXPR:
8478       /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8479 	 something else, make sure we add the register to the constant and
8480 	 then to the other thing.  This case can occur during strength
8481 	 reduction and doing it this way will produce better code if the
8482 	 frame pointer or argument pointer is eliminated.
8483 
8484 	 fold-const.c will ensure that the constant is always in the inner
8485 	 PLUS_EXPR, so the only case we need to do anything about is if
8486 	 sp, ap, or fp is our second argument, in which case we must swap
8487 	 the innermost first argument and our second argument.  */
8488 
8489       if (TREE_CODE (treeop0) == PLUS_EXPR
8490 	  && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8491 	  && VAR_P (treeop1)
8492 	  && (DECL_RTL (treeop1) == frame_pointer_rtx
8493 	      || DECL_RTL (treeop1) == stack_pointer_rtx
8494 	      || DECL_RTL (treeop1) == arg_pointer_rtx))
8495 	{
8496 	  gcc_unreachable ();
8497 	}
8498 
8499       /* If the result is to be ptr_mode and we are adding an integer to
8500 	 something, we might be forming a constant.  So try to use
8501 	 plus_constant.  If it produces a sum and we can't accept it,
8502 	 use force_operand.  This allows P = &ARR[const] to generate
8503 	 efficient code on machines where a SYMBOL_REF is not a valid
8504 	 address.
8505 
8506 	 If this is an EXPAND_SUM call, always return the sum.  */
8507       if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8508 	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8509 	{
8510 	  if (modifier == EXPAND_STACK_PARM)
8511 	    target = 0;
8512 	  if (TREE_CODE (treeop0) == INTEGER_CST
8513 	      && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8514 	      && TREE_CONSTANT (treeop1))
8515 	    {
8516 	      rtx constant_part;
8517 	      HOST_WIDE_INT wc;
8518 	      machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8519 
8520 	      op1 = expand_expr (treeop1, subtarget, VOIDmode,
8521 				 EXPAND_SUM);
8522 	      /* Use wi::shwi to ensure that the constant is
8523 		 truncated according to the mode of OP1, then sign extended
8524 		 to a HOST_WIDE_INT.  Using the constant directly can result
8525 		 in non-canonical RTL in a 64x32 cross compile.  */
8526 	      wc = TREE_INT_CST_LOW (treeop0);
8527 	      constant_part =
8528 		immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8529 	      op1 = plus_constant (mode, op1, INTVAL (constant_part));
8530 	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8531 		op1 = force_operand (op1, target);
8532 	      return REDUCE_BIT_FIELD (op1);
8533 	    }
8534 
8535 	  else if (TREE_CODE (treeop1) == INTEGER_CST
8536 		   && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8537 		   && TREE_CONSTANT (treeop0))
8538 	    {
8539 	      rtx constant_part;
8540 	      HOST_WIDE_INT wc;
8541 	      machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8542 
8543 	      op0 = expand_expr (treeop0, subtarget, VOIDmode,
8544 				 (modifier == EXPAND_INITIALIZER
8545 				 ? EXPAND_INITIALIZER : EXPAND_SUM));
8546 	      if (! CONSTANT_P (op0))
8547 		{
8548 		  op1 = expand_expr (treeop1, NULL_RTX,
8549 				     VOIDmode, modifier);
8550 		  /* Return a PLUS if modifier says it's OK.  */
8551 		  if (modifier == EXPAND_SUM
8552 		      || modifier == EXPAND_INITIALIZER)
8553 		    return simplify_gen_binary (PLUS, mode, op0, op1);
8554 		  goto binop2;
8555 		}
8556 	      /* Use wi::shwi to ensure that the constant is
8557 		 truncated according to the mode of OP1, then sign extended
8558 		 to a HOST_WIDE_INT.  Using the constant directly can result
8559 		 in non-canonical RTL in a 64x32 cross compile.  */
8560 	      wc = TREE_INT_CST_LOW (treeop1);
8561 	      constant_part
8562 		= immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8563 	      op0 = plus_constant (mode, op0, INTVAL (constant_part));
8564 	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8565 		op0 = force_operand (op0, target);
8566 	      return REDUCE_BIT_FIELD (op0);
8567 	    }
8568 	}
8569 
8570       /* Use TER to expand pointer addition of a negated value
8571 	 as pointer subtraction.  */
8572       if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8573 	   || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8574 	       && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8575 	  && TREE_CODE (treeop1) == SSA_NAME
8576 	  && TYPE_MODE (TREE_TYPE (treeop0))
8577 	     == TYPE_MODE (TREE_TYPE (treeop1)))
8578 	{
8579 	  gimple *def = get_def_for_expr (treeop1, NEGATE_EXPR);
8580 	  if (def)
8581 	    {
8582 	      treeop1 = gimple_assign_rhs1 (def);
8583 	      code = MINUS_EXPR;
8584 	      goto do_minus;
8585 	    }
8586 	}
8587 
8588       /* No sense saving up arithmetic to be done
8589 	 if it's all in the wrong mode to form part of an address.
8590 	 And force_operand won't know whether to sign-extend or
8591 	 zero-extend.  */
8592       if (modifier != EXPAND_INITIALIZER
8593 	  && (modifier != EXPAND_SUM || mode != ptr_mode))
8594 	{
8595 	  expand_operands (treeop0, treeop1,
8596 			   subtarget, &op0, &op1, modifier);
8597 	  if (op0 == const0_rtx)
8598 	    return op1;
8599 	  if (op1 == const0_rtx)
8600 	    return op0;
8601 	  goto binop2;
8602 	}
8603 
8604       expand_operands (treeop0, treeop1,
8605 		       subtarget, &op0, &op1, modifier);
8606       return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8607 
8608     case MINUS_EXPR:
8609     do_minus:
8610       /* For initializers, we are allowed to return a MINUS of two
8611 	 symbolic constants.  Here we handle all cases when both operands
8612 	 are constant.  */
8613       /* Handle difference of two symbolic constants,
8614 	 for the sake of an initializer.  */
8615       if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8616 	  && really_constant_p (treeop0)
8617 	  && really_constant_p (treeop1))
8618 	{
8619 	  expand_operands (treeop0, treeop1,
8620 			   NULL_RTX, &op0, &op1, modifier);
8621 
8622 	  /* If the last operand is a CONST_INT, use plus_constant of
8623 	     the negated constant.  Else make the MINUS.  */
8624 	  if (CONST_INT_P (op1))
8625 	    return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8626 						    -INTVAL (op1)));
8627 	  else
8628 	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8629 	}
8630 
8631       /* No sense saving up arithmetic to be done
8632 	 if it's all in the wrong mode to form part of an address.
8633 	 And force_operand won't know whether to sign-extend or
8634 	 zero-extend.  */
8635       if (modifier != EXPAND_INITIALIZER
8636 	  && (modifier != EXPAND_SUM || mode != ptr_mode))
8637 	goto binop;
8638 
8639       expand_operands (treeop0, treeop1,
8640 		       subtarget, &op0, &op1, modifier);
8641 
8642       /* Convert A - const to A + (-const).  */
8643       if (CONST_INT_P (op1))
8644 	{
8645 	  op1 = negate_rtx (mode, op1);
8646 	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8647 	}
8648 
8649       goto binop2;
8650 
8651     case WIDEN_MULT_PLUS_EXPR:
8652     case WIDEN_MULT_MINUS_EXPR:
8653       expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8654       op2 = expand_normal (treeop2);
8655       target = expand_widen_pattern_expr (ops, op0, op1, op2,
8656 					  target, unsignedp);
8657       return target;
8658 
8659     case WIDEN_MULT_EXPR:
8660       /* If first operand is constant, swap them.
8661 	 Thus the following special case checks need only
8662 	 check the second operand.  */
8663       if (TREE_CODE (treeop0) == INTEGER_CST)
8664 	std::swap (treeop0, treeop1);
8665 
8666       /* First, check if we have a multiplication of one signed and one
8667 	 unsigned operand.  */
8668       if (TREE_CODE (treeop1) != INTEGER_CST
8669 	  && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8670 	      != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8671 	{
8672 	  machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8673 	  this_optab = usmul_widen_optab;
8674 	  if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8675 		!= CODE_FOR_nothing)
8676 	    {
8677 	      if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8678 		expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8679 				 EXPAND_NORMAL);
8680 	      else
8681 		expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8682 				 EXPAND_NORMAL);
8683 	      /* op0 and op1 might still be constant, despite the above
8684 		 != INTEGER_CST check.  Handle it.  */
8685 	      if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8686 		{
8687 		  op0 = convert_modes (innermode, mode, op0, true);
8688 		  op1 = convert_modes (innermode, mode, op1, false);
8689 		  return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8690 							target, unsignedp));
8691 		}
8692 	      goto binop3;
8693 	    }
8694 	}
8695       /* Check for a multiplication with matching signedness.  */
8696       else if ((TREE_CODE (treeop1) == INTEGER_CST
8697 		&& int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8698 	       || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8699 		   == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8700 	{
8701 	  tree op0type = TREE_TYPE (treeop0);
8702 	  machine_mode innermode = TYPE_MODE (op0type);
8703 	  bool zextend_p = TYPE_UNSIGNED (op0type);
8704 	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8705 	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8706 
8707 	  if (TREE_CODE (treeop0) != INTEGER_CST)
8708 	    {
8709 	      if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8710 		    != CODE_FOR_nothing)
8711 		{
8712 		  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8713 				   EXPAND_NORMAL);
8714 		  /* op0 and op1 might still be constant, despite the above
8715 		     != INTEGER_CST check.  Handle it.  */
8716 		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8717 		    {
8718 		     widen_mult_const:
8719 		      op0 = convert_modes (innermode, mode, op0, zextend_p);
8720 		      op1
8721 			= convert_modes (innermode, mode, op1,
8722 					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8723 		      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8724 							    target,
8725 							    unsignedp));
8726 		    }
8727 		  temp = expand_widening_mult (mode, op0, op1, target,
8728 					       unsignedp, this_optab);
8729 		  return REDUCE_BIT_FIELD (temp);
8730 		}
8731 	      if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8732 		    != CODE_FOR_nothing
8733 		  && innermode == word_mode)
8734 		{
8735 		  rtx htem, hipart;
8736 		  op0 = expand_normal (treeop0);
8737 		  if (TREE_CODE (treeop1) == INTEGER_CST)
8738 		    op1 = convert_modes (innermode, mode,
8739 					 expand_normal (treeop1),
8740 					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8741 		  else
8742 		    op1 = expand_normal (treeop1);
8743 		  /* op0 and op1 might still be constant, despite the above
8744 		     != INTEGER_CST check.  Handle it.  */
8745 		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8746 		    goto widen_mult_const;
8747 		  temp = expand_binop (mode, other_optab, op0, op1, target,
8748 				       unsignedp, OPTAB_LIB_WIDEN);
8749 		  hipart = gen_highpart (innermode, temp);
8750 		  htem = expand_mult_highpart_adjust (innermode, hipart,
8751 						      op0, op1, hipart,
8752 						      zextend_p);
8753 		  if (htem != hipart)
8754 		    emit_move_insn (hipart, htem);
8755 		  return REDUCE_BIT_FIELD (temp);
8756 		}
8757 	    }
8758 	}
8759       treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8760       treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8761       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8762       return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8763 
8764     case FMA_EXPR:
8765       {
8766 	optab opt = fma_optab;
8767 	gimple *def0, *def2;
8768 
8769 	/* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8770 	   call.  */
8771 	if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8772 	  {
8773 	    tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8774 	    tree call_expr;
8775 
8776 	    gcc_assert (fn != NULL_TREE);
8777 	    call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8778 	    return expand_builtin (call_expr, target, subtarget, mode, false);
8779 	  }
8780 
8781 	def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8782 	/* The multiplication is commutative - look at its 2nd operand
8783 	   if the first isn't fed by a negate.  */
8784 	if (!def0)
8785 	  {
8786 	    def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8787 	    /* Swap operands if the 2nd operand is fed by a negate.  */
8788 	    if (def0)
8789 	      std::swap (treeop0, treeop1);
8790 	  }
8791 	def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8792 
8793 	op0 = op2 = NULL;
8794 
8795 	if (def0 && def2
8796 	    && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8797 	  {
8798 	    opt = fnms_optab;
8799 	    op0 = expand_normal (gimple_assign_rhs1 (def0));
8800 	    op2 = expand_normal (gimple_assign_rhs1 (def2));
8801 	  }
8802 	else if (def0
8803 		 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8804 	  {
8805 	    opt = fnma_optab;
8806 	    op0 = expand_normal (gimple_assign_rhs1 (def0));
8807 	  }
8808 	else if (def2
8809 		 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8810 	  {
8811 	    opt = fms_optab;
8812 	    op2 = expand_normal (gimple_assign_rhs1 (def2));
8813 	  }
8814 
8815 	if (op0 == NULL)
8816 	  op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8817 	if (op2 == NULL)
8818 	  op2 = expand_normal (treeop2);
8819 	op1 = expand_normal (treeop1);
8820 
8821 	return expand_ternary_op (TYPE_MODE (type), opt,
8822 				  op0, op1, op2, target, 0);
8823       }
8824 
8825     case MULT_EXPR:
8826       /* If this is a fixed-point operation, then we cannot use the code
8827 	 below because "expand_mult" doesn't support sat/no-sat fixed-point
8828          multiplications.   */
8829       if (ALL_FIXED_POINT_MODE_P (mode))
8830 	goto binop;
8831 
8832       /* If first operand is constant, swap them.
8833 	 Thus the following special case checks need only
8834 	 check the second operand.  */
8835       if (TREE_CODE (treeop0) == INTEGER_CST)
8836 	std::swap (treeop0, treeop1);
8837 
8838       /* Attempt to return something suitable for generating an
8839 	 indexed address, for machines that support that.  */
8840 
8841       if (modifier == EXPAND_SUM && mode == ptr_mode
8842 	  && tree_fits_shwi_p (treeop1))
8843 	{
8844 	  tree exp1 = treeop1;
8845 
8846 	  op0 = expand_expr (treeop0, subtarget, VOIDmode,
8847 			     EXPAND_SUM);
8848 
8849 	  if (!REG_P (op0))
8850 	    op0 = force_operand (op0, NULL_RTX);
8851 	  if (!REG_P (op0))
8852 	    op0 = copy_to_mode_reg (mode, op0);
8853 
8854 	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8855 			       gen_int_mode (tree_to_shwi (exp1),
8856 					     TYPE_MODE (TREE_TYPE (exp1)))));
8857 	}
8858 
8859       if (modifier == EXPAND_STACK_PARM)
8860 	target = 0;
8861 
8862       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8863       return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8864 
8865     case TRUNC_MOD_EXPR:
8866     case FLOOR_MOD_EXPR:
8867     case CEIL_MOD_EXPR:
8868     case ROUND_MOD_EXPR:
8869 
8870     case TRUNC_DIV_EXPR:
8871     case FLOOR_DIV_EXPR:
8872     case CEIL_DIV_EXPR:
8873     case ROUND_DIV_EXPR:
8874     case EXACT_DIV_EXPR:
8875      {
8876        /* If this is a fixed-point operation, then we cannot use the code
8877 	  below because "expand_divmod" doesn't support sat/no-sat fixed-point
8878 	  divisions.   */
8879        if (ALL_FIXED_POINT_MODE_P (mode))
8880 	 goto binop;
8881 
8882        if (modifier == EXPAND_STACK_PARM)
8883 	 target = 0;
8884        /* Possible optimization: compute the dividend with EXPAND_SUM
8885 	  then if the divisor is constant can optimize the case
8886 	  where some terms of the dividend have coeffs divisible by it.  */
8887        expand_operands (treeop0, treeop1,
8888 			subtarget, &op0, &op1, EXPAND_NORMAL);
8889        bool mod_p = code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR
8890 		    || code == CEIL_MOD_EXPR || code == ROUND_MOD_EXPR;
8891        if (SCALAR_INT_MODE_P (mode)
8892 	   && optimize >= 2
8893 	   && get_range_pos_neg (treeop0) == 1
8894 	   && get_range_pos_neg (treeop1) == 1)
8895 	 {
8896 	   /* If both arguments are known to be positive when interpreted
8897 	      as signed, we can expand it as both signed and unsigned
8898 	      division or modulo.  Choose the cheaper sequence in that case.  */
8899 	   bool speed_p = optimize_insn_for_speed_p ();
8900 	   do_pending_stack_adjust ();
8901 	   start_sequence ();
8902 	   rtx uns_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 1);
8903 	   rtx_insn *uns_insns = get_insns ();
8904 	   end_sequence ();
8905 	   start_sequence ();
8906 	   rtx sgn_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 0);
8907 	   rtx_insn *sgn_insns = get_insns ();
8908 	   end_sequence ();
8909 	   unsigned uns_cost = seq_cost (uns_insns, speed_p);
8910 	   unsigned sgn_cost = seq_cost (sgn_insns, speed_p);
8911 	   if (uns_cost < sgn_cost || (uns_cost == sgn_cost && unsignedp))
8912 	     {
8913 	       emit_insn (uns_insns);
8914 	       return uns_ret;
8915 	     }
8916 	   emit_insn (sgn_insns);
8917 	   return sgn_ret;
8918 	 }
8919        return expand_divmod (mod_p, code, mode, op0, op1, target, unsignedp);
8920      }
8921     case RDIV_EXPR:
8922       goto binop;
8923 
8924     case MULT_HIGHPART_EXPR:
8925       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8926       temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8927       gcc_assert (temp);
8928       return temp;
8929 
8930     case FIXED_CONVERT_EXPR:
8931       op0 = expand_normal (treeop0);
8932       if (target == 0 || modifier == EXPAND_STACK_PARM)
8933 	target = gen_reg_rtx (mode);
8934 
8935       if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8936 	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8937           || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8938 	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8939       else
8940 	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8941       return target;
8942 
8943     case FIX_TRUNC_EXPR:
8944       op0 = expand_normal (treeop0);
8945       if (target == 0 || modifier == EXPAND_STACK_PARM)
8946 	target = gen_reg_rtx (mode);
8947       expand_fix (target, op0, unsignedp);
8948       return target;
8949 
8950     case FLOAT_EXPR:
8951       op0 = expand_normal (treeop0);
8952       if (target == 0 || modifier == EXPAND_STACK_PARM)
8953 	target = gen_reg_rtx (mode);
8954       /* expand_float can't figure out what to do if FROM has VOIDmode.
8955 	 So give it the correct mode.  With -O, cse will optimize this.  */
8956       if (GET_MODE (op0) == VOIDmode)
8957 	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8958 				op0);
8959       expand_float (target, op0,
8960 		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8961       return target;
8962 
8963     case NEGATE_EXPR:
8964       op0 = expand_expr (treeop0, subtarget,
8965 			 VOIDmode, EXPAND_NORMAL);
8966       if (modifier == EXPAND_STACK_PARM)
8967 	target = 0;
8968       temp = expand_unop (mode,
8969       			  optab_for_tree_code (NEGATE_EXPR, type,
8970 					       optab_default),
8971 			  op0, target, 0);
8972       gcc_assert (temp);
8973       return REDUCE_BIT_FIELD (temp);
8974 
8975     case ABS_EXPR:
8976       op0 = expand_expr (treeop0, subtarget,
8977 			 VOIDmode, EXPAND_NORMAL);
8978       if (modifier == EXPAND_STACK_PARM)
8979 	target = 0;
8980 
8981       /* ABS_EXPR is not valid for complex arguments.  */
8982       gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8983 		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8984 
8985       /* Unsigned abs is simply the operand.  Testing here means we don't
8986 	 risk generating incorrect code below.  */
8987       if (TYPE_UNSIGNED (type))
8988 	return op0;
8989 
8990       return expand_abs (mode, op0, target, unsignedp,
8991 			 safe_from_p (target, treeop0, 1));
8992 
8993     case MAX_EXPR:
8994     case MIN_EXPR:
8995       target = original_target;
8996       if (target == 0
8997 	  || modifier == EXPAND_STACK_PARM
8998 	  || (MEM_P (target) && MEM_VOLATILE_P (target))
8999 	  || GET_MODE (target) != mode
9000 	  || (REG_P (target)
9001 	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
9002 	target = gen_reg_rtx (mode);
9003       expand_operands (treeop0, treeop1,
9004 		       target, &op0, &op1, EXPAND_NORMAL);
9005 
9006       /* First try to do it with a special MIN or MAX instruction.
9007 	 If that does not win, use a conditional jump to select the proper
9008 	 value.  */
9009       this_optab = optab_for_tree_code (code, type, optab_default);
9010       temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9011 			   OPTAB_WIDEN);
9012       if (temp != 0)
9013 	return temp;
9014 
9015       /* For vector MIN <x, y>, expand it a VEC_COND_EXPR <x <= y, x, y>
9016 	 and similarly for MAX <x, y>.  */
9017       if (VECTOR_TYPE_P (type))
9018 	{
9019 	  tree t0 = make_tree (type, op0);
9020 	  tree t1 = make_tree (type, op1);
9021 	  tree comparison = build2 (code == MIN_EXPR ? LE_EXPR : GE_EXPR,
9022 				    type, t0, t1);
9023 	  return expand_vec_cond_expr (type, comparison, t0, t1,
9024 				       original_target);
9025 	}
9026 
9027       /* At this point, a MEM target is no longer useful; we will get better
9028 	 code without it.  */
9029 
9030       if (! REG_P (target))
9031 	target = gen_reg_rtx (mode);
9032 
9033       /* If op1 was placed in target, swap op0 and op1.  */
9034       if (target != op0 && target == op1)
9035 	std::swap (op0, op1);
9036 
9037       /* We generate better code and avoid problems with op1 mentioning
9038 	 target by forcing op1 into a pseudo if it isn't a constant.  */
9039       if (! CONSTANT_P (op1))
9040 	op1 = force_reg (mode, op1);
9041 
9042       {
9043 	enum rtx_code comparison_code;
9044 	rtx cmpop1 = op1;
9045 
9046 	if (code == MAX_EXPR)
9047 	  comparison_code = unsignedp ? GEU : GE;
9048 	else
9049 	  comparison_code = unsignedp ? LEU : LE;
9050 
9051 	/* Canonicalize to comparisons against 0.  */
9052 	if (op1 == const1_rtx)
9053 	  {
9054 	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
9055 	       or (a != 0 ? a : 1) for unsigned.
9056 	       For MIN we are safe converting (a <= 1 ? a : 1)
9057 	       into (a <= 0 ? a : 1)  */
9058 	    cmpop1 = const0_rtx;
9059 	    if (code == MAX_EXPR)
9060 	      comparison_code = unsignedp ? NE : GT;
9061 	  }
9062 	if (op1 == constm1_rtx && !unsignedp)
9063 	  {
9064 	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
9065 	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
9066 	    cmpop1 = const0_rtx;
9067 	    if (code == MIN_EXPR)
9068 	      comparison_code = LT;
9069 	  }
9070 
9071 	/* Use a conditional move if possible.  */
9072 	if (can_conditionally_move_p (mode))
9073 	  {
9074 	    rtx insn;
9075 
9076 	    start_sequence ();
9077 
9078 	    /* Try to emit the conditional move.  */
9079 	    insn = emit_conditional_move (target, comparison_code,
9080 					  op0, cmpop1, mode,
9081 					  op0, op1, mode,
9082 					  unsignedp);
9083 
9084 	    /* If we could do the conditional move, emit the sequence,
9085 	       and return.  */
9086 	    if (insn)
9087 	      {
9088 		rtx_insn *seq = get_insns ();
9089 		end_sequence ();
9090 		emit_insn (seq);
9091 		return target;
9092 	      }
9093 
9094 	    /* Otherwise discard the sequence and fall back to code with
9095 	       branches.  */
9096 	    end_sequence ();
9097 	  }
9098 
9099 	if (target != op0)
9100 	  emit_move_insn (target, op0);
9101 
9102 	lab = gen_label_rtx ();
9103 	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
9104 				 unsignedp, mode, NULL_RTX, NULL, lab,
9105 				 -1);
9106       }
9107       emit_move_insn (target, op1);
9108       emit_label (lab);
9109       return target;
9110 
9111     case BIT_NOT_EXPR:
9112       op0 = expand_expr (treeop0, subtarget,
9113 			 VOIDmode, EXPAND_NORMAL);
9114       if (modifier == EXPAND_STACK_PARM)
9115 	target = 0;
9116       /* In case we have to reduce the result to bitfield precision
9117 	 for unsigned bitfield expand this as XOR with a proper constant
9118 	 instead.  */
9119       if (reduce_bit_field && TYPE_UNSIGNED (type))
9120 	{
9121 	  wide_int mask = wi::mask (TYPE_PRECISION (type),
9122 				    false, GET_MODE_PRECISION (mode));
9123 
9124 	  temp = expand_binop (mode, xor_optab, op0,
9125 			       immed_wide_int_const (mask, mode),
9126 			       target, 1, OPTAB_LIB_WIDEN);
9127 	}
9128       else
9129 	temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
9130       gcc_assert (temp);
9131       return temp;
9132 
9133       /* ??? Can optimize bitwise operations with one arg constant.
9134 	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
9135 	 and (a bitwise1 b) bitwise2 b (etc)
9136 	 but that is probably not worth while.  */
9137 
9138     case BIT_AND_EXPR:
9139     case BIT_IOR_EXPR:
9140     case BIT_XOR_EXPR:
9141       goto binop;
9142 
9143     case LROTATE_EXPR:
9144     case RROTATE_EXPR:
9145       gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
9146 		  || (GET_MODE_PRECISION (TYPE_MODE (type))
9147 		      == TYPE_PRECISION (type)));
9148       /* fall through */
9149 
9150     case LSHIFT_EXPR:
9151     case RSHIFT_EXPR:
9152       {
9153 	/* If this is a fixed-point operation, then we cannot use the code
9154 	   below because "expand_shift" doesn't support sat/no-sat fixed-point
9155 	   shifts.  */
9156 	if (ALL_FIXED_POINT_MODE_P (mode))
9157 	  goto binop;
9158 
9159 	if (! safe_from_p (subtarget, treeop1, 1))
9160 	  subtarget = 0;
9161 	if (modifier == EXPAND_STACK_PARM)
9162 	  target = 0;
9163 	op0 = expand_expr (treeop0, subtarget,
9164 			   VOIDmode, EXPAND_NORMAL);
9165 
9166 	/* Left shift optimization when shifting across word_size boundary.
9167 
9168 	   If mode == GET_MODE_WIDER_MODE (word_mode), then normally
9169 	   there isn't native instruction to support this wide mode
9170 	   left shift.  Given below scenario:
9171 
9172 	    Type A = (Type) B  << C
9173 
9174 	    |<		 T	    >|
9175 	    | dest_high  |  dest_low |
9176 
9177 			 | word_size |
9178 
9179 	   If the shift amount C caused we shift B to across the word
9180 	   size boundary, i.e part of B shifted into high half of
9181 	   destination register, and part of B remains in the low
9182 	   half, then GCC will use the following left shift expand
9183 	   logic:
9184 
9185 	   1. Initialize dest_low to B.
9186 	   2. Initialize every bit of dest_high to the sign bit of B.
9187 	   3. Logic left shift dest_low by C bit to finalize dest_low.
9188 	      The value of dest_low before this shift is kept in a temp D.
9189 	   4. Logic left shift dest_high by C.
9190 	   5. Logic right shift D by (word_size - C).
9191 	   6. Or the result of 4 and 5 to finalize dest_high.
9192 
9193 	   While, by checking gimple statements, if operand B is
9194 	   coming from signed extension, then we can simplify above
9195 	   expand logic into:
9196 
9197 	      1. dest_high = src_low >> (word_size - C).
9198 	      2. dest_low = src_low << C.
9199 
9200 	   We can use one arithmetic right shift to finish all the
9201 	   purpose of steps 2, 4, 5, 6, thus we reduce the steps
9202 	   needed from 6 into 2.
9203 
9204 	   The case is similar for zero extension, except that we
9205 	   initialize dest_high to zero rather than copies of the sign
9206 	   bit from B.  Furthermore, we need to use a logical right shift
9207 	   in this case.
9208 
9209 	   The choice of sign-extension versus zero-extension is
9210 	   determined entirely by whether or not B is signed and is
9211 	   independent of the current setting of unsignedp.  */
9212 
9213 	temp = NULL_RTX;
9214 	if (code == LSHIFT_EXPR
9215 	    && target
9216 	    && REG_P (target)
9217 	    && mode == GET_MODE_WIDER_MODE (word_mode)
9218 	    && GET_MODE_SIZE (mode) == 2 * GET_MODE_SIZE (word_mode)
9219 	    && TREE_CONSTANT (treeop1)
9220 	    && TREE_CODE (treeop0) == SSA_NAME)
9221 	  {
9222 	    gimple *def = SSA_NAME_DEF_STMT (treeop0);
9223 	    if (is_gimple_assign (def)
9224 		&& gimple_assign_rhs_code (def) == NOP_EXPR)
9225 	      {
9226 		machine_mode rmode = TYPE_MODE
9227 		  (TREE_TYPE (gimple_assign_rhs1 (def)));
9228 
9229 		if (GET_MODE_SIZE (rmode) < GET_MODE_SIZE (mode)
9230 		    && TREE_INT_CST_LOW (treeop1) < GET_MODE_BITSIZE (word_mode)
9231 		    && ((TREE_INT_CST_LOW (treeop1) + GET_MODE_BITSIZE (rmode))
9232 			>= GET_MODE_BITSIZE (word_mode)))
9233 		  {
9234 		    rtx_insn *seq, *seq_old;
9235 		    unsigned int high_off = subreg_highpart_offset (word_mode,
9236 								    mode);
9237 		    bool extend_unsigned
9238 		      = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def)));
9239 		    rtx low = lowpart_subreg (word_mode, op0, mode);
9240 		    rtx dest_low = lowpart_subreg (word_mode, target, mode);
9241 		    rtx dest_high = simplify_gen_subreg (word_mode, target,
9242 							 mode, high_off);
9243 		    HOST_WIDE_INT ramount = (BITS_PER_WORD
9244 					     - TREE_INT_CST_LOW (treeop1));
9245 		    tree rshift = build_int_cst (TREE_TYPE (treeop1), ramount);
9246 
9247 		    start_sequence ();
9248 		    /* dest_high = src_low >> (word_size - C).  */
9249 		    temp = expand_variable_shift (RSHIFT_EXPR, word_mode, low,
9250 						  rshift, dest_high,
9251 						  extend_unsigned);
9252 		    if (temp != dest_high)
9253 		      emit_move_insn (dest_high, temp);
9254 
9255 		    /* dest_low = src_low << C.  */
9256 		    temp = expand_variable_shift (LSHIFT_EXPR, word_mode, low,
9257 						  treeop1, dest_low, unsignedp);
9258 		    if (temp != dest_low)
9259 		      emit_move_insn (dest_low, temp);
9260 
9261 		    seq = get_insns ();
9262 		    end_sequence ();
9263 		    temp = target ;
9264 
9265 		    if (have_insn_for (ASHIFT, mode))
9266 		      {
9267 			bool speed_p = optimize_insn_for_speed_p ();
9268 			start_sequence ();
9269 			rtx ret_old = expand_variable_shift (code, mode, op0,
9270 							     treeop1, target,
9271 							     unsignedp);
9272 
9273 			seq_old = get_insns ();
9274 			end_sequence ();
9275 			if (seq_cost (seq, speed_p)
9276 			    >= seq_cost (seq_old, speed_p))
9277 			  {
9278 			    seq = seq_old;
9279 			    temp = ret_old;
9280 			  }
9281 		      }
9282 		      emit_insn (seq);
9283 		  }
9284 	      }
9285 	  }
9286 
9287 	if (temp == NULL_RTX)
9288 	  temp = expand_variable_shift (code, mode, op0, treeop1, target,
9289 					unsignedp);
9290 	if (code == LSHIFT_EXPR)
9291 	  temp = REDUCE_BIT_FIELD (temp);
9292 	return temp;
9293       }
9294 
9295       /* Could determine the answer when only additive constants differ.  Also,
9296 	 the addition of one can be handled by changing the condition.  */
9297     case LT_EXPR:
9298     case LE_EXPR:
9299     case GT_EXPR:
9300     case GE_EXPR:
9301     case EQ_EXPR:
9302     case NE_EXPR:
9303     case UNORDERED_EXPR:
9304     case ORDERED_EXPR:
9305     case UNLT_EXPR:
9306     case UNLE_EXPR:
9307     case UNGT_EXPR:
9308     case UNGE_EXPR:
9309     case UNEQ_EXPR:
9310     case LTGT_EXPR:
9311       {
9312 	temp = do_store_flag (ops,
9313 			      modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9314 			      tmode != VOIDmode ? tmode : mode);
9315 	if (temp)
9316 	  return temp;
9317 
9318 	/* Use a compare and a jump for BLKmode comparisons, or for function
9319 	   type comparisons is have_canonicalize_funcptr_for_compare.  */
9320 
9321 	if ((target == 0
9322 	     || modifier == EXPAND_STACK_PARM
9323 	     || ! safe_from_p (target, treeop0, 1)
9324 	     || ! safe_from_p (target, treeop1, 1)
9325 	     /* Make sure we don't have a hard reg (such as function's return
9326 		value) live across basic blocks, if not optimizing.  */
9327 	     || (!optimize && REG_P (target)
9328 		 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9329 	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9330 
9331 	emit_move_insn (target, const0_rtx);
9332 
9333 	rtx_code_label *lab1 = gen_label_rtx ();
9334 	jumpifnot_1 (code, treeop0, treeop1, lab1, -1);
9335 
9336 	if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9337 	  emit_move_insn (target, constm1_rtx);
9338 	else
9339 	  emit_move_insn (target, const1_rtx);
9340 
9341 	emit_label (lab1);
9342 	return target;
9343       }
9344     case COMPLEX_EXPR:
9345       /* Get the rtx code of the operands.  */
9346       op0 = expand_normal (treeop0);
9347       op1 = expand_normal (treeop1);
9348 
9349       if (!target)
9350 	target = gen_reg_rtx (TYPE_MODE (type));
9351       else
9352 	/* If target overlaps with op1, then either we need to force
9353 	   op1 into a pseudo (if target also overlaps with op0),
9354 	   or write the complex parts in reverse order.  */
9355 	switch (GET_CODE (target))
9356 	  {
9357 	  case CONCAT:
9358 	    if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9359 	      {
9360 		if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9361 		  {
9362 		  complex_expr_force_op1:
9363 		    temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9364 		    emit_move_insn (temp, op1);
9365 		    op1 = temp;
9366 		    break;
9367 		  }
9368 	      complex_expr_swap_order:
9369 		/* Move the imaginary (op1) and real (op0) parts to their
9370 		   location.  */
9371 		write_complex_part (target, op1, true);
9372 		write_complex_part (target, op0, false);
9373 
9374 		return target;
9375 	      }
9376 	    break;
9377 	  case MEM:
9378 	    temp = adjust_address_nv (target,
9379 				      GET_MODE_INNER (GET_MODE (target)), 0);
9380 	    if (reg_overlap_mentioned_p (temp, op1))
9381 	      {
9382 		machine_mode imode = GET_MODE_INNER (GET_MODE (target));
9383 		temp = adjust_address_nv (target, imode,
9384 					  GET_MODE_SIZE (imode));
9385 		if (reg_overlap_mentioned_p (temp, op0))
9386 		  goto complex_expr_force_op1;
9387 		goto complex_expr_swap_order;
9388 	      }
9389 	    break;
9390 	  default:
9391 	    if (reg_overlap_mentioned_p (target, op1))
9392 	      {
9393 		if (reg_overlap_mentioned_p (target, op0))
9394 		  goto complex_expr_force_op1;
9395 		goto complex_expr_swap_order;
9396 	      }
9397 	    break;
9398 	  }
9399 
9400       /* Move the real (op0) and imaginary (op1) parts to their location.  */
9401       write_complex_part (target, op0, false);
9402       write_complex_part (target, op1, true);
9403 
9404       return target;
9405 
9406     case WIDEN_SUM_EXPR:
9407       {
9408         tree oprnd0 = treeop0;
9409         tree oprnd1 = treeop1;
9410 
9411         expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9412         target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9413                                             target, unsignedp);
9414         return target;
9415       }
9416 
9417     case REDUC_MAX_EXPR:
9418     case REDUC_MIN_EXPR:
9419     case REDUC_PLUS_EXPR:
9420       {
9421         op0 = expand_normal (treeop0);
9422         this_optab = optab_for_tree_code (code, type, optab_default);
9423         machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
9424 
9425 	struct expand_operand ops[2];
9426 	enum insn_code icode = optab_handler (this_optab, vec_mode);
9427 
9428 	create_output_operand (&ops[0], target, mode);
9429 	create_input_operand (&ops[1], op0, vec_mode);
9430 	expand_insn (icode, 2, ops);
9431 	target = ops[0].value;
9432 	if (GET_MODE (target) != mode)
9433 	  return gen_lowpart (tmode, target);
9434 	return target;
9435       }
9436 
9437     case VEC_UNPACK_HI_EXPR:
9438     case VEC_UNPACK_LO_EXPR:
9439       {
9440 	op0 = expand_normal (treeop0);
9441 	temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9442 					  target, unsignedp);
9443 	gcc_assert (temp);
9444 	return temp;
9445       }
9446 
9447     case VEC_UNPACK_FLOAT_HI_EXPR:
9448     case VEC_UNPACK_FLOAT_LO_EXPR:
9449       {
9450 	op0 = expand_normal (treeop0);
9451 	/* The signedness is determined from input operand.  */
9452 	temp = expand_widen_pattern_expr
9453 	  (ops, op0, NULL_RTX, NULL_RTX,
9454 	   target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9455 
9456 	gcc_assert (temp);
9457 	return temp;
9458       }
9459 
9460     case VEC_WIDEN_MULT_HI_EXPR:
9461     case VEC_WIDEN_MULT_LO_EXPR:
9462     case VEC_WIDEN_MULT_EVEN_EXPR:
9463     case VEC_WIDEN_MULT_ODD_EXPR:
9464     case VEC_WIDEN_LSHIFT_HI_EXPR:
9465     case VEC_WIDEN_LSHIFT_LO_EXPR:
9466       expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9467       target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9468 					  target, unsignedp);
9469       gcc_assert (target);
9470       return target;
9471 
9472     case VEC_PACK_TRUNC_EXPR:
9473     case VEC_PACK_SAT_EXPR:
9474     case VEC_PACK_FIX_TRUNC_EXPR:
9475       mode = TYPE_MODE (TREE_TYPE (treeop0));
9476       goto binop;
9477 
9478     case VEC_PERM_EXPR:
9479       expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9480       op2 = expand_normal (treeop2);
9481 
9482       /* Careful here: if the target doesn't support integral vector modes,
9483 	 a constant selection vector could wind up smooshed into a normal
9484 	 integral constant.  */
9485       if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9486 	{
9487 	  tree sel_type = TREE_TYPE (treeop2);
9488 	  machine_mode vmode
9489 	    = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9490 			       TYPE_VECTOR_SUBPARTS (sel_type));
9491 	  gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9492 	  op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9493 	  gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9494 	}
9495       else
9496         gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9497 
9498       temp = expand_vec_perm (mode, op0, op1, op2, target);
9499       gcc_assert (temp);
9500       return temp;
9501 
9502     case DOT_PROD_EXPR:
9503       {
9504 	tree oprnd0 = treeop0;
9505 	tree oprnd1 = treeop1;
9506 	tree oprnd2 = treeop2;
9507 	rtx op2;
9508 
9509 	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9510 	op2 = expand_normal (oprnd2);
9511 	target = expand_widen_pattern_expr (ops, op0, op1, op2,
9512 					    target, unsignedp);
9513 	return target;
9514       }
9515 
9516       case SAD_EXPR:
9517       {
9518 	tree oprnd0 = treeop0;
9519 	tree oprnd1 = treeop1;
9520 	tree oprnd2 = treeop2;
9521 	rtx op2;
9522 
9523 	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9524 	op2 = expand_normal (oprnd2);
9525 	target = expand_widen_pattern_expr (ops, op0, op1, op2,
9526 					    target, unsignedp);
9527 	return target;
9528       }
9529 
9530     case REALIGN_LOAD_EXPR:
9531       {
9532         tree oprnd0 = treeop0;
9533         tree oprnd1 = treeop1;
9534         tree oprnd2 = treeop2;
9535         rtx op2;
9536 
9537         this_optab = optab_for_tree_code (code, type, optab_default);
9538         expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9539         op2 = expand_normal (oprnd2);
9540         temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9541 				  target, unsignedp);
9542         gcc_assert (temp);
9543         return temp;
9544       }
9545 
9546     case COND_EXPR:
9547       {
9548 	/* A COND_EXPR with its type being VOID_TYPE represents a
9549 	   conditional jump and is handled in
9550 	   expand_gimple_cond_expr.  */
9551 	gcc_assert (!VOID_TYPE_P (type));
9552 
9553 	/* Note that COND_EXPRs whose type is a structure or union
9554 	   are required to be constructed to contain assignments of
9555 	   a temporary variable, so that we can evaluate them here
9556 	   for side effect only.  If type is void, we must do likewise.  */
9557 
9558 	gcc_assert (!TREE_ADDRESSABLE (type)
9559 		    && !ignore
9560 		    && TREE_TYPE (treeop1) != void_type_node
9561 		    && TREE_TYPE (treeop2) != void_type_node);
9562 
9563 	temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9564 	if (temp)
9565 	  return temp;
9566 
9567 	/* If we are not to produce a result, we have no target.  Otherwise,
9568 	   if a target was specified use it; it will not be used as an
9569 	   intermediate target unless it is safe.  If no target, use a
9570 	   temporary.  */
9571 
9572 	if (modifier != EXPAND_STACK_PARM
9573 	    && original_target
9574 	    && safe_from_p (original_target, treeop0, 1)
9575 	    && GET_MODE (original_target) == mode
9576 	    && !MEM_P (original_target))
9577 	  temp = original_target;
9578 	else
9579 	  temp = assign_temp (type, 0, 1);
9580 
9581 	do_pending_stack_adjust ();
9582 	NO_DEFER_POP;
9583 	rtx_code_label *lab0 = gen_label_rtx ();
9584 	rtx_code_label *lab1 = gen_label_rtx ();
9585 	jumpifnot (treeop0, lab0, -1);
9586 	store_expr (treeop1, temp,
9587 		    modifier == EXPAND_STACK_PARM,
9588 		    false, false);
9589 
9590 	emit_jump_insn (targetm.gen_jump (lab1));
9591 	emit_barrier ();
9592 	emit_label (lab0);
9593 	store_expr (treeop2, temp,
9594 		    modifier == EXPAND_STACK_PARM,
9595 		    false, false);
9596 
9597 	emit_label (lab1);
9598 	OK_DEFER_POP;
9599 	return temp;
9600       }
9601 
9602     case VEC_COND_EXPR:
9603       target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9604       return target;
9605 
9606     case BIT_INSERT_EXPR:
9607       {
9608 	unsigned bitpos = tree_to_uhwi (treeop2);
9609 	unsigned bitsize;
9610 	if (INTEGRAL_TYPE_P (TREE_TYPE (treeop1)))
9611 	  bitsize = TYPE_PRECISION (TREE_TYPE (treeop1));
9612 	else
9613 	  bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (treeop1)));
9614 	rtx op0 = expand_normal (treeop0);
9615 	rtx op1 = expand_normal (treeop1);
9616 	rtx dst = gen_reg_rtx (mode);
9617 	emit_move_insn (dst, op0);
9618 	store_bit_field (dst, bitsize, bitpos, 0, 0,
9619 			 TYPE_MODE (TREE_TYPE (treeop1)), op1, false);
9620 	return dst;
9621       }
9622 
9623     default:
9624       gcc_unreachable ();
9625     }
9626 
9627   /* Here to do an ordinary binary operator.  */
9628  binop:
9629   expand_operands (treeop0, treeop1,
9630 		   subtarget, &op0, &op1, EXPAND_NORMAL);
9631  binop2:
9632   this_optab = optab_for_tree_code (code, type, optab_default);
9633  binop3:
9634   if (modifier == EXPAND_STACK_PARM)
9635     target = 0;
9636   temp = expand_binop (mode, this_optab, op0, op1, target,
9637 		       unsignedp, OPTAB_LIB_WIDEN);
9638   gcc_assert (temp);
9639   /* Bitwise operations do not need bitfield reduction as we expect their
9640      operands being properly truncated.  */
9641   if (code == BIT_XOR_EXPR
9642       || code == BIT_AND_EXPR
9643       || code == BIT_IOR_EXPR)
9644     return temp;
9645   return REDUCE_BIT_FIELD (temp);
9646 }
9647 #undef REDUCE_BIT_FIELD
9648 
9649 
9650 /* Return TRUE if expression STMT is suitable for replacement.
9651    Never consider memory loads as replaceable, because those don't ever lead
9652    into constant expressions.  */
9653 
9654 static bool
9655 stmt_is_replaceable_p (gimple *stmt)
9656 {
9657   if (ssa_is_replaceable_p (stmt))
9658     {
9659       /* Don't move around loads.  */
9660       if (!gimple_assign_single_p (stmt)
9661 	  || is_gimple_val (gimple_assign_rhs1 (stmt)))
9662 	return true;
9663     }
9664   return false;
9665 }
9666 
9667 rtx
9668 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9669 		    enum expand_modifier modifier, rtx *alt_rtl,
9670 		    bool inner_reference_p)
9671 {
9672   rtx op0, op1, temp, decl_rtl;
9673   tree type;
9674   int unsignedp;
9675   machine_mode mode, dmode;
9676   enum tree_code code = TREE_CODE (exp);
9677   rtx subtarget, original_target;
9678   int ignore;
9679   tree context;
9680   bool reduce_bit_field;
9681   location_t loc = EXPR_LOCATION (exp);
9682   struct separate_ops ops;
9683   tree treeop0, treeop1, treeop2;
9684   tree ssa_name = NULL_TREE;
9685   gimple *g;
9686 
9687   type = TREE_TYPE (exp);
9688   mode = TYPE_MODE (type);
9689   unsignedp = TYPE_UNSIGNED (type);
9690 
9691   treeop0 = treeop1 = treeop2 = NULL_TREE;
9692   if (!VL_EXP_CLASS_P (exp))
9693     switch (TREE_CODE_LENGTH (code))
9694       {
9695 	default:
9696 	case 3: treeop2 = TREE_OPERAND (exp, 2); /* FALLTHRU */
9697 	case 2: treeop1 = TREE_OPERAND (exp, 1); /* FALLTHRU */
9698 	case 1: treeop0 = TREE_OPERAND (exp, 0); /* FALLTHRU */
9699 	case 0: break;
9700       }
9701   ops.code = code;
9702   ops.type = type;
9703   ops.op0 = treeop0;
9704   ops.op1 = treeop1;
9705   ops.op2 = treeop2;
9706   ops.location = loc;
9707 
9708   ignore = (target == const0_rtx
9709 	    || ((CONVERT_EXPR_CODE_P (code)
9710 		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9711 		&& TREE_CODE (type) == VOID_TYPE));
9712 
9713   /* An operation in what may be a bit-field type needs the
9714      result to be reduced to the precision of the bit-field type,
9715      which is narrower than that of the type's mode.  */
9716   reduce_bit_field = (!ignore
9717 		      && INTEGRAL_TYPE_P (type)
9718 		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9719 
9720   /* If we are going to ignore this result, we need only do something
9721      if there is a side-effect somewhere in the expression.  If there
9722      is, short-circuit the most common cases here.  Note that we must
9723      not call expand_expr with anything but const0_rtx in case this
9724      is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
9725 
9726   if (ignore)
9727     {
9728       if (! TREE_SIDE_EFFECTS (exp))
9729 	return const0_rtx;
9730 
9731       /* Ensure we reference a volatile object even if value is ignored, but
9732 	 don't do this if all we are doing is taking its address.  */
9733       if (TREE_THIS_VOLATILE (exp)
9734 	  && TREE_CODE (exp) != FUNCTION_DECL
9735 	  && mode != VOIDmode && mode != BLKmode
9736 	  && modifier != EXPAND_CONST_ADDRESS)
9737 	{
9738 	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9739 	  if (MEM_P (temp))
9740 	    copy_to_reg (temp);
9741 	  return const0_rtx;
9742 	}
9743 
9744       if (TREE_CODE_CLASS (code) == tcc_unary
9745 	  || code == BIT_FIELD_REF
9746 	  || code == COMPONENT_REF
9747 	  || code == INDIRECT_REF)
9748 	return expand_expr (treeop0, const0_rtx, VOIDmode,
9749 			    modifier);
9750 
9751       else if (TREE_CODE_CLASS (code) == tcc_binary
9752 	       || TREE_CODE_CLASS (code) == tcc_comparison
9753 	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9754 	{
9755 	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9756 	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9757 	  return const0_rtx;
9758 	}
9759 
9760       target = 0;
9761     }
9762 
9763   if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9764     target = 0;
9765 
9766   /* Use subtarget as the target for operand 0 of a binary operation.  */
9767   subtarget = get_subtarget (target);
9768   original_target = target;
9769 
9770   switch (code)
9771     {
9772     case LABEL_DECL:
9773       {
9774 	tree function = decl_function_context (exp);
9775 
9776 	temp = label_rtx (exp);
9777 	temp = gen_rtx_LABEL_REF (Pmode, temp);
9778 
9779 	if (function != current_function_decl
9780 	    && function != 0)
9781 	  LABEL_REF_NONLOCAL_P (temp) = 1;
9782 
9783 	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9784 	return temp;
9785       }
9786 
9787     case SSA_NAME:
9788       /* ??? ivopts calls expander, without any preparation from
9789          out-of-ssa.  So fake instructions as if this was an access to the
9790 	 base variable.  This unnecessarily allocates a pseudo, see how we can
9791 	 reuse it, if partition base vars have it set already.  */
9792       if (!currently_expanding_to_rtl)
9793 	{
9794 	  tree var = SSA_NAME_VAR (exp);
9795 	  if (var && DECL_RTL_SET_P (var))
9796 	    return DECL_RTL (var);
9797 	  return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9798 			      LAST_VIRTUAL_REGISTER + 1);
9799 	}
9800 
9801       g = get_gimple_for_ssa_name (exp);
9802       /* For EXPAND_INITIALIZER try harder to get something simpler.  */
9803       if (g == NULL
9804 	  && modifier == EXPAND_INITIALIZER
9805 	  && !SSA_NAME_IS_DEFAULT_DEF (exp)
9806 	  && (optimize || !SSA_NAME_VAR (exp)
9807 	      || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9808 	  && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9809 	g = SSA_NAME_DEF_STMT (exp);
9810       if (g)
9811 	{
9812 	  rtx r;
9813 	  location_t saved_loc = curr_insn_location ();
9814 	  location_t loc = gimple_location (g);
9815 	  if (loc != UNKNOWN_LOCATION)
9816 	    set_curr_insn_location (loc);
9817 	  ops.code = gimple_assign_rhs_code (g);
9818           switch (get_gimple_rhs_class (ops.code))
9819 	    {
9820 	    case GIMPLE_TERNARY_RHS:
9821 	      ops.op2 = gimple_assign_rhs3 (g);
9822 	      /* Fallthru */
9823 	    case GIMPLE_BINARY_RHS:
9824 	      ops.op1 = gimple_assign_rhs2 (g);
9825 
9826 	      /* Try to expand conditonal compare.  */
9827 	      if (targetm.gen_ccmp_first)
9828 		{
9829 		  gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9830 		  r = expand_ccmp_expr (g);
9831 		  if (r)
9832 		    break;
9833 		}
9834 	      /* Fallthru */
9835 	    case GIMPLE_UNARY_RHS:
9836 	      ops.op0 = gimple_assign_rhs1 (g);
9837 	      ops.type = TREE_TYPE (gimple_assign_lhs (g));
9838 	      ops.location = loc;
9839 	      r = expand_expr_real_2 (&ops, target, tmode, modifier);
9840 	      break;
9841 	    case GIMPLE_SINGLE_RHS:
9842 	      {
9843 		r = expand_expr_real (gimple_assign_rhs1 (g), target,
9844 				      tmode, modifier, NULL, inner_reference_p);
9845 		break;
9846 	      }
9847 	    default:
9848 	      gcc_unreachable ();
9849 	    }
9850 	  set_curr_insn_location (saved_loc);
9851 	  if (REG_P (r) && !REG_EXPR (r))
9852 	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9853 	  return r;
9854 	}
9855 
9856       ssa_name = exp;
9857       decl_rtl = get_rtx_for_ssa_name (ssa_name);
9858       exp = SSA_NAME_VAR (ssa_name);
9859       goto expand_decl_rtl;
9860 
9861     case PARM_DECL:
9862     case VAR_DECL:
9863       /* If a static var's type was incomplete when the decl was written,
9864 	 but the type is complete now, lay out the decl now.  */
9865       if (DECL_SIZE (exp) == 0
9866 	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9867 	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9868 	layout_decl (exp, 0);
9869 
9870       /* fall through */
9871 
9872     case FUNCTION_DECL:
9873     case RESULT_DECL:
9874       decl_rtl = DECL_RTL (exp);
9875     expand_decl_rtl:
9876       gcc_assert (decl_rtl);
9877 
9878       /* DECL_MODE might change when TYPE_MODE depends on attribute target
9879 	 settings for VECTOR_TYPE_P that might switch for the function.  */
9880       if (currently_expanding_to_rtl
9881 	  && code == VAR_DECL && MEM_P (decl_rtl)
9882 	  && VECTOR_TYPE_P (type) && exp && DECL_MODE (exp) != mode)
9883 	decl_rtl = change_address (decl_rtl, TYPE_MODE (type), 0);
9884       else
9885 	decl_rtl = copy_rtx (decl_rtl);
9886 
9887       /* Record writes to register variables.  */
9888       if (modifier == EXPAND_WRITE
9889 	  && REG_P (decl_rtl)
9890 	  && HARD_REGISTER_P (decl_rtl))
9891         add_to_hard_reg_set (&crtl->asm_clobbers,
9892 			     GET_MODE (decl_rtl), REGNO (decl_rtl));
9893 
9894       /* Ensure variable marked as used even if it doesn't go through
9895 	 a parser.  If it hasn't be used yet, write out an external
9896 	 definition.  */
9897       if (exp)
9898 	TREE_USED (exp) = 1;
9899 
9900       /* Show we haven't gotten RTL for this yet.  */
9901       temp = 0;
9902 
9903       /* Variables inherited from containing functions should have
9904 	 been lowered by this point.  */
9905       if (exp)
9906 	context = decl_function_context (exp);
9907       gcc_assert (!exp
9908 		  || SCOPE_FILE_SCOPE_P (context)
9909 		  || context == current_function_decl
9910 		  || TREE_STATIC (exp)
9911 		  || DECL_EXTERNAL (exp)
9912 		  /* ??? C++ creates functions that are not TREE_STATIC.  */
9913 		  || TREE_CODE (exp) == FUNCTION_DECL);
9914 
9915       /* This is the case of an array whose size is to be determined
9916 	 from its initializer, while the initializer is still being parsed.
9917 	 ??? We aren't parsing while expanding anymore.  */
9918 
9919       if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9920 	temp = validize_mem (decl_rtl);
9921 
9922       /* If DECL_RTL is memory, we are in the normal case and the
9923 	 address is not valid, get the address into a register.  */
9924 
9925       else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9926 	{
9927 	  if (alt_rtl)
9928 	    *alt_rtl = decl_rtl;
9929 	  decl_rtl = use_anchored_address (decl_rtl);
9930 	  if (modifier != EXPAND_CONST_ADDRESS
9931 	      && modifier != EXPAND_SUM
9932 	      && !memory_address_addr_space_p (exp ? DECL_MODE (exp)
9933 					       : GET_MODE (decl_rtl),
9934 					       XEXP (decl_rtl, 0),
9935 					       MEM_ADDR_SPACE (decl_rtl)))
9936 	    temp = replace_equiv_address (decl_rtl,
9937 					  copy_rtx (XEXP (decl_rtl, 0)));
9938 	}
9939 
9940       /* If we got something, return it.  But first, set the alignment
9941 	 if the address is a register.  */
9942       if (temp != 0)
9943 	{
9944 	  if (exp && MEM_P (temp) && REG_P (XEXP (temp, 0)))
9945 	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9946 
9947 	  return temp;
9948 	}
9949 
9950       if (exp)
9951 	dmode = DECL_MODE (exp);
9952       else
9953 	dmode = TYPE_MODE (TREE_TYPE (ssa_name));
9954 
9955       /* If the mode of DECL_RTL does not match that of the decl,
9956 	 there are two cases: we are dealing with a BLKmode value
9957 	 that is returned in a register, or we are dealing with
9958 	 a promoted value.  In the latter case, return a SUBREG
9959 	 of the wanted mode, but mark it so that we know that it
9960 	 was already extended.  */
9961       if (REG_P (decl_rtl)
9962 	  && dmode != BLKmode
9963 	  && GET_MODE (decl_rtl) != dmode)
9964 	{
9965 	  machine_mode pmode;
9966 
9967 	  /* Get the signedness to be used for this variable.  Ensure we get
9968 	     the same mode we got when the variable was declared.  */
9969 	  if (code != SSA_NAME)
9970 	    pmode = promote_decl_mode (exp, &unsignedp);
9971 	  else if ((g = SSA_NAME_DEF_STMT (ssa_name))
9972 		   && gimple_code (g) == GIMPLE_CALL
9973 		   && !gimple_call_internal_p (g))
9974 	    pmode = promote_function_mode (type, mode, &unsignedp,
9975 					   gimple_call_fntype (g),
9976 					   2);
9977 	  else
9978 	    pmode = promote_ssa_mode (ssa_name, &unsignedp);
9979 	  gcc_assert (GET_MODE (decl_rtl) == pmode);
9980 
9981 	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
9982 	  SUBREG_PROMOTED_VAR_P (temp) = 1;
9983 	  SUBREG_PROMOTED_SET (temp, unsignedp);
9984 	  return temp;
9985 	}
9986 
9987       return decl_rtl;
9988 
9989     case INTEGER_CST:
9990       /* Given that TYPE_PRECISION (type) is not always equal to
9991          GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9992          the former to the latter according to the signedness of the
9993          type. */
9994       temp = immed_wide_int_const (wi::to_wide
9995 				   (exp,
9996 				    GET_MODE_PRECISION (TYPE_MODE (type))),
9997 				   TYPE_MODE (type));
9998       return temp;
9999 
10000     case VECTOR_CST:
10001       {
10002 	tree tmp = NULL_TREE;
10003 	if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
10004 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
10005 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
10006 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
10007 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
10008 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
10009 	  return const_vector_from_tree (exp);
10010 	if (GET_MODE_CLASS (mode) == MODE_INT)
10011 	  {
10012 	    if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
10013 	      return const_scalar_mask_from_tree (exp);
10014 	    else
10015 	      {
10016 		tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
10017 		if (type_for_mode)
10018 		  tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR,
10019 					type_for_mode, exp);
10020 	      }
10021 	  }
10022 	if (!tmp)
10023 	  {
10024 	    vec<constructor_elt, va_gc> *v;
10025 	    unsigned i;
10026 	    vec_alloc (v, VECTOR_CST_NELTS (exp));
10027 	    for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
10028 	      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
10029 	    tmp = build_constructor (type, v);
10030 	  }
10031 	return expand_expr (tmp, ignore ? const0_rtx : target,
10032 			    tmode, modifier);
10033       }
10034 
10035     case CONST_DECL:
10036       if (modifier == EXPAND_WRITE)
10037 	{
10038 	  /* Writing into CONST_DECL is always invalid, but handle it
10039 	     gracefully.  */
10040 	  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
10041 	  machine_mode address_mode = targetm.addr_space.address_mode (as);
10042 	  op0 = expand_expr_addr_expr_1 (exp, NULL_RTX, address_mode,
10043 					 EXPAND_NORMAL, as);
10044 	  op0 = memory_address_addr_space (mode, op0, as);
10045 	  temp = gen_rtx_MEM (mode, op0);
10046 	  set_mem_addr_space (temp, as);
10047 	  return temp;
10048 	}
10049       return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
10050 
10051     case REAL_CST:
10052       /* If optimized, generate immediate CONST_DOUBLE
10053 	 which will be turned into memory by reload if necessary.
10054 
10055 	 We used to force a register so that loop.c could see it.  But
10056 	 this does not allow gen_* patterns to perform optimizations with
10057 	 the constants.  It also produces two insns in cases like "x = 1.0;".
10058 	 On most machines, floating-point constants are not permitted in
10059 	 many insns, so we'd end up copying it to a register in any case.
10060 
10061 	 Now, we do the copying in expand_binop, if appropriate.  */
10062       return const_double_from_real_value (TREE_REAL_CST (exp),
10063 					   TYPE_MODE (TREE_TYPE (exp)));
10064 
10065     case FIXED_CST:
10066       return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
10067 					   TYPE_MODE (TREE_TYPE (exp)));
10068 
10069     case COMPLEX_CST:
10070       /* Handle evaluating a complex constant in a CONCAT target.  */
10071       if (original_target && GET_CODE (original_target) == CONCAT)
10072 	{
10073 	  machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
10074 	  rtx rtarg, itarg;
10075 
10076 	  rtarg = XEXP (original_target, 0);
10077 	  itarg = XEXP (original_target, 1);
10078 
10079 	  /* Move the real and imaginary parts separately.  */
10080 	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
10081 	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
10082 
10083 	  if (op0 != rtarg)
10084 	    emit_move_insn (rtarg, op0);
10085 	  if (op1 != itarg)
10086 	    emit_move_insn (itarg, op1);
10087 
10088 	  return original_target;
10089 	}
10090 
10091       /* fall through */
10092 
10093     case STRING_CST:
10094       temp = expand_expr_constant (exp, 1, modifier);
10095 
10096       /* temp contains a constant address.
10097 	 On RISC machines where a constant address isn't valid,
10098 	 make some insns to get that address into a register.  */
10099       if (modifier != EXPAND_CONST_ADDRESS
10100 	  && modifier != EXPAND_INITIALIZER
10101 	  && modifier != EXPAND_SUM
10102 	  && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
10103 					    MEM_ADDR_SPACE (temp)))
10104 	return replace_equiv_address (temp,
10105 				      copy_rtx (XEXP (temp, 0)));
10106       return temp;
10107 
10108     case SAVE_EXPR:
10109       {
10110 	tree val = treeop0;
10111 	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
10112 				      inner_reference_p);
10113 
10114 	if (!SAVE_EXPR_RESOLVED_P (exp))
10115 	  {
10116 	    /* We can indeed still hit this case, typically via builtin
10117 	       expanders calling save_expr immediately before expanding
10118 	       something.  Assume this means that we only have to deal
10119 	       with non-BLKmode values.  */
10120 	    gcc_assert (GET_MODE (ret) != BLKmode);
10121 
10122 	    val = build_decl (curr_insn_location (),
10123 			      VAR_DECL, NULL, TREE_TYPE (exp));
10124 	    DECL_ARTIFICIAL (val) = 1;
10125 	    DECL_IGNORED_P (val) = 1;
10126 	    treeop0 = val;
10127 	    TREE_OPERAND (exp, 0) = treeop0;
10128 	    SAVE_EXPR_RESOLVED_P (exp) = 1;
10129 
10130 	    if (!CONSTANT_P (ret))
10131 	      ret = copy_to_reg (ret);
10132 	    SET_DECL_RTL (val, ret);
10133 	  }
10134 
10135         return ret;
10136       }
10137 
10138 
10139     case CONSTRUCTOR:
10140       /* If we don't need the result, just ensure we evaluate any
10141 	 subexpressions.  */
10142       if (ignore)
10143 	{
10144 	  unsigned HOST_WIDE_INT idx;
10145 	  tree value;
10146 
10147 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
10148 	    expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
10149 
10150 	  return const0_rtx;
10151 	}
10152 
10153       return expand_constructor (exp, target, modifier, false);
10154 
10155     case TARGET_MEM_REF:
10156       {
10157 	addr_space_t as
10158 	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
10159 	enum insn_code icode;
10160 	unsigned int align;
10161 
10162 	op0 = addr_for_mem_ref (exp, as, true);
10163 	op0 = memory_address_addr_space (mode, op0, as);
10164 	temp = gen_rtx_MEM (mode, op0);
10165 	set_mem_attributes (temp, exp, 0);
10166 	set_mem_addr_space (temp, as);
10167 	align = get_object_alignment (exp);
10168 	if (modifier != EXPAND_WRITE
10169 	    && modifier != EXPAND_MEMORY
10170 	    && mode != BLKmode
10171 	    && align < GET_MODE_ALIGNMENT (mode)
10172 	    /* If the target does not have special handling for unaligned
10173 	       loads of mode then it can use regular moves for them.  */
10174 	    && ((icode = optab_handler (movmisalign_optab, mode))
10175 		!= CODE_FOR_nothing))
10176 	  {
10177 	    struct expand_operand ops[2];
10178 
10179 	    /* We've already validated the memory, and we're creating a
10180 	       new pseudo destination.  The predicates really can't fail,
10181 	       nor can the generator.  */
10182 	    create_output_operand (&ops[0], NULL_RTX, mode);
10183 	    create_fixed_operand (&ops[1], temp);
10184 	    expand_insn (icode, 2, ops);
10185 	    temp = ops[0].value;
10186 	  }
10187 	return temp;
10188       }
10189 
10190     case MEM_REF:
10191       {
10192 	const bool reverse = REF_REVERSE_STORAGE_ORDER (exp);
10193 	addr_space_t as
10194 	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
10195 	machine_mode address_mode;
10196 	tree base = TREE_OPERAND (exp, 0);
10197 	gimple *def_stmt;
10198 	enum insn_code icode;
10199 	unsigned align;
10200 	/* Handle expansion of non-aliased memory with non-BLKmode.  That
10201 	   might end up in a register.  */
10202 	if (mem_ref_refers_to_non_mem_p (exp))
10203 	  {
10204 	    HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
10205 	    base = TREE_OPERAND (base, 0);
10206 	    if (offset == 0
10207 	        && !reverse
10208 		&& tree_fits_uhwi_p (TYPE_SIZE (type))
10209 		&& (GET_MODE_BITSIZE (DECL_MODE (base))
10210 		    == tree_to_uhwi (TYPE_SIZE (type))))
10211 	      return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
10212 				  target, tmode, modifier);
10213 	    if (TYPE_MODE (type) == BLKmode)
10214 	      {
10215 		temp = assign_stack_temp (DECL_MODE (base),
10216 					  GET_MODE_SIZE (DECL_MODE (base)));
10217 		store_expr (base, temp, 0, false, false);
10218 		temp = adjust_address (temp, BLKmode, offset);
10219 		set_mem_size (temp, int_size_in_bytes (type));
10220 		return temp;
10221 	      }
10222 	    exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
10223 			  bitsize_int (offset * BITS_PER_UNIT));
10224 	    REF_REVERSE_STORAGE_ORDER (exp) = reverse;
10225 	    return expand_expr (exp, target, tmode, modifier);
10226 	  }
10227 	address_mode = targetm.addr_space.address_mode (as);
10228 	base = TREE_OPERAND (exp, 0);
10229 	if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
10230 	  {
10231 	    tree mask = gimple_assign_rhs2 (def_stmt);
10232 	    base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
10233 			   gimple_assign_rhs1 (def_stmt), mask);
10234 	    TREE_OPERAND (exp, 0) = base;
10235 	  }
10236 	align = get_object_alignment (exp);
10237 	op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
10238 	op0 = memory_address_addr_space (mode, op0, as);
10239 	if (!integer_zerop (TREE_OPERAND (exp, 1)))
10240 	  {
10241 	    rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
10242 	    op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
10243 	    op0 = memory_address_addr_space (mode, op0, as);
10244 	  }
10245 	temp = gen_rtx_MEM (mode, op0);
10246 	set_mem_attributes (temp, exp, 0);
10247 	set_mem_addr_space (temp, as);
10248 	if (TREE_THIS_VOLATILE (exp))
10249 	  MEM_VOLATILE_P (temp) = 1;
10250 	if (modifier != EXPAND_WRITE
10251 	    && modifier != EXPAND_MEMORY
10252 	    && !inner_reference_p
10253 	    && mode != BLKmode
10254 	    && align < GET_MODE_ALIGNMENT (mode))
10255 	  {
10256 	    if ((icode = optab_handler (movmisalign_optab, mode))
10257 		!= CODE_FOR_nothing)
10258 	      {
10259 		struct expand_operand ops[2];
10260 
10261 		/* We've already validated the memory, and we're creating a
10262 		   new pseudo destination.  The predicates really can't fail,
10263 		   nor can the generator.  */
10264 		create_output_operand (&ops[0], NULL_RTX, mode);
10265 		create_fixed_operand (&ops[1], temp);
10266 		expand_insn (icode, 2, ops);
10267 		temp = ops[0].value;
10268 	      }
10269 	    else if (SLOW_UNALIGNED_ACCESS (mode, align))
10270 	      temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
10271 					0, TYPE_UNSIGNED (TREE_TYPE (exp)),
10272 					(modifier == EXPAND_STACK_PARM
10273 					 ? NULL_RTX : target),
10274 					mode, mode, false);
10275 	  }
10276 	if (reverse
10277 	    && modifier != EXPAND_MEMORY
10278 	    && modifier != EXPAND_WRITE)
10279 	  temp = flip_storage_order (mode, temp);
10280 	return temp;
10281       }
10282 
10283     case ARRAY_REF:
10284 
10285       {
10286 	tree array = treeop0;
10287 	tree index = treeop1;
10288 	tree init;
10289 
10290 	/* Fold an expression like: "foo"[2].
10291 	   This is not done in fold so it won't happen inside &.
10292 	   Don't fold if this is for wide characters since it's too
10293 	   difficult to do correctly and this is a very rare case.  */
10294 
10295 	if (modifier != EXPAND_CONST_ADDRESS
10296 	    && modifier != EXPAND_INITIALIZER
10297 	    && modifier != EXPAND_MEMORY)
10298 	  {
10299 	    tree t = fold_read_from_constant_string (exp);
10300 
10301 	    if (t)
10302 	      return expand_expr (t, target, tmode, modifier);
10303 	  }
10304 
10305 	/* If this is a constant index into a constant array,
10306 	   just get the value from the array.  Handle both the cases when
10307 	   we have an explicit constructor and when our operand is a variable
10308 	   that was declared const.  */
10309 
10310 	if (modifier != EXPAND_CONST_ADDRESS
10311 	    && modifier != EXPAND_INITIALIZER
10312 	    && modifier != EXPAND_MEMORY
10313 	    && TREE_CODE (array) == CONSTRUCTOR
10314 	    && ! TREE_SIDE_EFFECTS (array)
10315 	    && TREE_CODE (index) == INTEGER_CST)
10316 	  {
10317 	    unsigned HOST_WIDE_INT ix;
10318 	    tree field, value;
10319 
10320 	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
10321 				      field, value)
10322 	      if (tree_int_cst_equal (field, index))
10323 		{
10324 		  if (!TREE_SIDE_EFFECTS (value))
10325 		    return expand_expr (fold (value), target, tmode, modifier);
10326 		  break;
10327 		}
10328 	  }
10329 
10330 	else if (optimize >= 1
10331 		 && modifier != EXPAND_CONST_ADDRESS
10332 		 && modifier != EXPAND_INITIALIZER
10333 		 && modifier != EXPAND_MEMORY
10334 		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
10335 		 && TREE_CODE (index) == INTEGER_CST
10336 		 && (VAR_P (array) || TREE_CODE (array) == CONST_DECL)
10337 		 && (init = ctor_for_folding (array)) != error_mark_node)
10338 	  {
10339 	    if (init == NULL_TREE)
10340 	      {
10341 		tree value = build_zero_cst (type);
10342 		if (TREE_CODE (value) == CONSTRUCTOR)
10343 		  {
10344 		    /* If VALUE is a CONSTRUCTOR, this optimization is only
10345 		       useful if this doesn't store the CONSTRUCTOR into
10346 		       memory.  If it does, it is more efficient to just
10347 		       load the data from the array directly.  */
10348 		    rtx ret = expand_constructor (value, target,
10349 						  modifier, true);
10350 		    if (ret == NULL_RTX)
10351 		      value = NULL_TREE;
10352 		  }
10353 
10354 		if (value)
10355 		  return expand_expr (value, target, tmode, modifier);
10356 	      }
10357 	    else if (TREE_CODE (init) == CONSTRUCTOR)
10358 	      {
10359 		unsigned HOST_WIDE_INT ix;
10360 		tree field, value;
10361 
10362 		FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
10363 					  field, value)
10364 		  if (tree_int_cst_equal (field, index))
10365 		    {
10366 		      if (TREE_SIDE_EFFECTS (value))
10367 			break;
10368 
10369 		      if (TREE_CODE (value) == CONSTRUCTOR)
10370 			{
10371 			  /* If VALUE is a CONSTRUCTOR, this
10372 			     optimization is only useful if
10373 			     this doesn't store the CONSTRUCTOR
10374 			     into memory.  If it does, it is more
10375 			     efficient to just load the data from
10376 			     the array directly.  */
10377 			  rtx ret = expand_constructor (value, target,
10378 							modifier, true);
10379 			  if (ret == NULL_RTX)
10380 			    break;
10381 			}
10382 
10383 		      return
10384 		        expand_expr (fold (value), target, tmode, modifier);
10385 		    }
10386 	      }
10387 	    else if (TREE_CODE (init) == STRING_CST)
10388 	      {
10389 		tree low_bound = array_ref_low_bound (exp);
10390 		tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10391 
10392 		/* Optimize the special case of a zero lower bound.
10393 
10394 		   We convert the lower bound to sizetype to avoid problems
10395 		   with constant folding.  E.g. suppose the lower bound is
10396 		   1 and its mode is QI.  Without the conversion
10397 		      (ARRAY + (INDEX - (unsigned char)1))
10398 		   becomes
10399 		      (ARRAY + (-(unsigned char)1) + INDEX)
10400 		   which becomes
10401 		      (ARRAY + 255 + INDEX).  Oops!  */
10402 		if (!integer_zerop (low_bound))
10403 		  index1 = size_diffop_loc (loc, index1,
10404 					    fold_convert_loc (loc, sizetype,
10405 							      low_bound));
10406 
10407 		if (tree_fits_uhwi_p (index1)
10408 		    && compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10409 		  {
10410 		    tree type = TREE_TYPE (TREE_TYPE (init));
10411 		    machine_mode mode = TYPE_MODE (type);
10412 
10413 		    if (GET_MODE_CLASS (mode) == MODE_INT
10414 			&& GET_MODE_SIZE (mode) == 1)
10415 		      return gen_int_mode (TREE_STRING_POINTER (init)
10416 					   [TREE_INT_CST_LOW (index1)],
10417 					   mode);
10418 		  }
10419 	      }
10420 	  }
10421       }
10422       goto normal_inner_ref;
10423 
10424     case COMPONENT_REF:
10425       /* If the operand is a CONSTRUCTOR, we can just extract the
10426 	 appropriate field if it is present.  */
10427       if (TREE_CODE (treeop0) == CONSTRUCTOR)
10428 	{
10429 	  unsigned HOST_WIDE_INT idx;
10430 	  tree field, value;
10431 
10432 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10433 				    idx, field, value)
10434 	    if (field == treeop1
10435 		/* We can normally use the value of the field in the
10436 		   CONSTRUCTOR.  However, if this is a bitfield in
10437 		   an integral mode that we can fit in a HOST_WIDE_INT,
10438 		   we must mask only the number of bits in the bitfield,
10439 		   since this is done implicitly by the constructor.  If
10440 		   the bitfield does not meet either of those conditions,
10441 		   we can't do this optimization.  */
10442 		&& (! DECL_BIT_FIELD (field)
10443 		    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
10444 			&& (GET_MODE_PRECISION (DECL_MODE (field))
10445 			    <= HOST_BITS_PER_WIDE_INT))))
10446 	      {
10447 		if (DECL_BIT_FIELD (field)
10448 		    && modifier == EXPAND_STACK_PARM)
10449 		  target = 0;
10450 		op0 = expand_expr (value, target, tmode, modifier);
10451 		if (DECL_BIT_FIELD (field))
10452 		  {
10453 		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10454 		    machine_mode imode = TYPE_MODE (TREE_TYPE (field));
10455 
10456 		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
10457 		      {
10458 			op1 = gen_int_mode ((HOST_WIDE_INT_1 << bitsize) - 1,
10459 					    imode);
10460 			op0 = expand_and (imode, op0, op1, target);
10461 		      }
10462 		    else
10463 		      {
10464 			int count = GET_MODE_PRECISION (imode) - bitsize;
10465 
10466 			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10467 					    target, 0);
10468 			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10469 					    target, 0);
10470 		      }
10471 		  }
10472 
10473 		return op0;
10474 	      }
10475 	}
10476       goto normal_inner_ref;
10477 
10478     case BIT_FIELD_REF:
10479     case ARRAY_RANGE_REF:
10480     normal_inner_ref:
10481       {
10482 	machine_mode mode1, mode2;
10483 	HOST_WIDE_INT bitsize, bitpos;
10484 	tree offset;
10485 	int reversep, volatilep = 0, must_force_mem;
10486 	tree tem
10487 	  = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
10488 				 &unsignedp, &reversep, &volatilep);
10489 	rtx orig_op0, memloc;
10490 	bool clear_mem_expr = false;
10491 
10492 	/* If we got back the original object, something is wrong.  Perhaps
10493 	   we are evaluating an expression too early.  In any event, don't
10494 	   infinitely recurse.  */
10495 	gcc_assert (tem != exp);
10496 
10497 	/* If TEM's type is a union of variable size, pass TARGET to the inner
10498 	   computation, since it will need a temporary and TARGET is known
10499 	   to have to do.  This occurs in unchecked conversion in Ada.  */
10500 	orig_op0 = op0
10501 	  = expand_expr_real (tem,
10502 			      (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10503 			       && COMPLETE_TYPE_P (TREE_TYPE (tem))
10504 			       && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10505 				   != INTEGER_CST)
10506 			       && modifier != EXPAND_STACK_PARM
10507 			       ? target : NULL_RTX),
10508 			      VOIDmode,
10509 			      modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10510 			      NULL, true);
10511 
10512 	/* If the field has a mode, we want to access it in the
10513 	   field's mode, not the computed mode.
10514 	   If a MEM has VOIDmode (external with incomplete type),
10515 	   use BLKmode for it instead.  */
10516 	if (MEM_P (op0))
10517 	  {
10518 	    if (mode1 != VOIDmode)
10519 	      op0 = adjust_address (op0, mode1, 0);
10520 	    else if (GET_MODE (op0) == VOIDmode)
10521 	      op0 = adjust_address (op0, BLKmode, 0);
10522 	  }
10523 
10524 	mode2
10525 	  = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10526 
10527 	/* If we have either an offset, a BLKmode result, or a reference
10528 	   outside the underlying object, we must force it to memory.
10529 	   Such a case can occur in Ada if we have unchecked conversion
10530 	   of an expression from a scalar type to an aggregate type or
10531 	   for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10532 	   passed a partially uninitialized object or a view-conversion
10533 	   to a larger size.  */
10534 	must_force_mem = (offset
10535 			  || mode1 == BLKmode
10536 			  || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10537 
10538 	/* Handle CONCAT first.  */
10539 	if (GET_CODE (op0) == CONCAT && !must_force_mem)
10540 	  {
10541 	    if (bitpos == 0
10542 		&& bitsize == GET_MODE_BITSIZE (GET_MODE (op0))
10543 		&& COMPLEX_MODE_P (mode1)
10544 		&& COMPLEX_MODE_P (GET_MODE (op0))
10545 		&& (GET_MODE_PRECISION (GET_MODE_INNER (mode1))
10546 		    == GET_MODE_PRECISION (GET_MODE_INNER (GET_MODE (op0)))))
10547 	      {
10548 		if (reversep)
10549 		  op0 = flip_storage_order (GET_MODE (op0), op0);
10550 		if (mode1 != GET_MODE (op0))
10551 		  {
10552 		    rtx parts[2];
10553 		    for (int i = 0; i < 2; i++)
10554 		      {
10555 			rtx op = read_complex_part (op0, i != 0);
10556 			if (GET_CODE (op) == SUBREG)
10557 			  op = force_reg (GET_MODE (op), op);
10558 			rtx temp = gen_lowpart_common (GET_MODE_INNER (mode1),
10559 						       op);
10560 			if (temp)
10561 			  op = temp;
10562 			else
10563 			  {
10564 			    if (!REG_P (op) && !MEM_P (op))
10565 			      op = force_reg (GET_MODE (op), op);
10566 			    op = gen_lowpart (GET_MODE_INNER (mode1), op);
10567 			  }
10568 			parts[i] = op;
10569 		      }
10570 		    op0 = gen_rtx_CONCAT (mode1, parts[0], parts[1]);
10571 		  }
10572 		return op0;
10573 	      }
10574 	    if (bitpos == 0
10575 		&& bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10576 		&& bitsize)
10577 	      {
10578 		op0 = XEXP (op0, 0);
10579 		mode2 = GET_MODE (op0);
10580 	      }
10581 	    else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10582 		     && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10583 		     && bitpos
10584 		     && bitsize)
10585 	      {
10586 		op0 = XEXP (op0, 1);
10587 		bitpos = 0;
10588 		mode2 = GET_MODE (op0);
10589 	      }
10590 	    else
10591 	      /* Otherwise force into memory.  */
10592 	      must_force_mem = 1;
10593 	  }
10594 
10595 	/* If this is a constant, put it in a register if it is a legitimate
10596 	   constant and we don't need a memory reference.  */
10597 	if (CONSTANT_P (op0)
10598 	    && mode2 != BLKmode
10599 	    && targetm.legitimate_constant_p (mode2, op0)
10600 	    && !must_force_mem)
10601 	  op0 = force_reg (mode2, op0);
10602 
10603 	/* Otherwise, if this is a constant, try to force it to the constant
10604 	   pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
10605 	   is a legitimate constant.  */
10606 	else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10607 	  op0 = validize_mem (memloc);
10608 
10609 	/* Otherwise, if this is a constant or the object is not in memory
10610 	   and need be, put it there.  */
10611 	else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10612 	  {
10613 	    memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10614 	    emit_move_insn (memloc, op0);
10615 	    op0 = memloc;
10616 	    clear_mem_expr = true;
10617 	  }
10618 
10619 	if (offset)
10620 	  {
10621 	    machine_mode address_mode;
10622 	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10623 					  EXPAND_SUM);
10624 
10625 	    gcc_assert (MEM_P (op0));
10626 
10627 	    address_mode = get_address_mode (op0);
10628 	    if (GET_MODE (offset_rtx) != address_mode)
10629 	      {
10630 		/* We cannot be sure that the RTL in offset_rtx is valid outside
10631 		   of a memory address context, so force it into a register
10632 		   before attempting to convert it to the desired mode.  */
10633 		offset_rtx = force_operand (offset_rtx, NULL_RTX);
10634 		offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10635 	      }
10636 
10637 	    /* See the comment in expand_assignment for the rationale.  */
10638 	    if (mode1 != VOIDmode
10639 		&& bitpos != 0
10640 		&& bitsize > 0
10641 		&& (bitpos % bitsize) == 0
10642 		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10643 		&& MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10644 	      {
10645 		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10646 		bitpos = 0;
10647 	      }
10648 
10649 	    op0 = offset_address (op0, offset_rtx,
10650 				  highest_pow2_factor (offset));
10651 	  }
10652 
10653 	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10654 	   record its alignment as BIGGEST_ALIGNMENT.  */
10655 	if (MEM_P (op0) && bitpos == 0 && offset != 0
10656 	    && is_aligning_offset (offset, tem))
10657 	  set_mem_align (op0, BIGGEST_ALIGNMENT);
10658 
10659 	/* Don't forget about volatility even if this is a bitfield.  */
10660 	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10661 	  {
10662 	    if (op0 == orig_op0)
10663 	      op0 = copy_rtx (op0);
10664 
10665 	    MEM_VOLATILE_P (op0) = 1;
10666 	  }
10667 
10668 	/* In cases where an aligned union has an unaligned object
10669 	   as a field, we might be extracting a BLKmode value from
10670 	   an integer-mode (e.g., SImode) object.  Handle this case
10671 	   by doing the extract into an object as wide as the field
10672 	   (which we know to be the width of a basic mode), then
10673 	   storing into memory, and changing the mode to BLKmode.  */
10674 	if (mode1 == VOIDmode
10675 	    || REG_P (op0) || GET_CODE (op0) == SUBREG
10676 	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
10677 		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10678 		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10679 		&& modifier != EXPAND_CONST_ADDRESS
10680 		&& modifier != EXPAND_INITIALIZER
10681 		&& modifier != EXPAND_MEMORY)
10682 	    /* If the bitfield is volatile and the bitsize
10683 	       is narrower than the access size of the bitfield,
10684 	       we need to extract bitfields from the access.  */
10685 	    || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10686 		&& DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10687 		&& mode1 != BLKmode
10688 		&& bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10689 	    /* If the field isn't aligned enough to fetch as a memref,
10690 	       fetch it as a bit field.  */
10691 	    || (mode1 != BLKmode
10692 		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10693 		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10694 		      || (MEM_P (op0)
10695 			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10696 			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10697 		     && modifier != EXPAND_MEMORY
10698 		     && ((modifier == EXPAND_CONST_ADDRESS
10699 			  || modifier == EXPAND_INITIALIZER)
10700 			 ? STRICT_ALIGNMENT
10701 			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10702 		    || (bitpos % BITS_PER_UNIT != 0)))
10703 	    /* If the type and the field are a constant size and the
10704 	       size of the type isn't the same size as the bitfield,
10705 	       we must use bitfield operations.  */
10706 	    || (bitsize >= 0
10707 		&& TYPE_SIZE (TREE_TYPE (exp))
10708 		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10709 		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10710 					  bitsize)))
10711 	  {
10712 	    machine_mode ext_mode = mode;
10713 
10714 	    if (ext_mode == BLKmode
10715 		&& ! (target != 0 && MEM_P (op0)
10716 		      && MEM_P (target)
10717 		      && bitpos % BITS_PER_UNIT == 0))
10718 	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10719 
10720 	    if (ext_mode == BLKmode)
10721 	      {
10722 		if (target == 0)
10723 		  target = assign_temp (type, 1, 1);
10724 
10725 		/* ??? Unlike the similar test a few lines below, this one is
10726 		   very likely obsolete.  */
10727 		if (bitsize == 0)
10728 		  return target;
10729 
10730 		/* In this case, BITPOS must start at a byte boundary and
10731 		   TARGET, if specified, must be a MEM.  */
10732 		gcc_assert (MEM_P (op0)
10733 			    && (!target || MEM_P (target))
10734 			    && !(bitpos % BITS_PER_UNIT));
10735 
10736 		emit_block_move (target,
10737 				 adjust_address (op0, VOIDmode,
10738 						 bitpos / BITS_PER_UNIT),
10739 				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10740 					  / BITS_PER_UNIT),
10741 				 (modifier == EXPAND_STACK_PARM
10742 				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10743 
10744 		return target;
10745 	      }
10746 
10747 	    /* If we have nothing to extract, the result will be 0 for targets
10748 	       with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise.  Always
10749 	       return 0 for the sake of consistency, as reading a zero-sized
10750 	       bitfield is valid in Ada and the value is fully specified.  */
10751 	    if (bitsize == 0)
10752 	      return const0_rtx;
10753 
10754 	    op0 = validize_mem (op0);
10755 
10756 	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10757 	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10758 
10759 	    /* If the result has a record type and the extraction is done in
10760 	       an integral mode, then the field may be not aligned on a byte
10761 	       boundary; in this case, if it has reverse storage order, it
10762 	       needs to be extracted as a scalar field with reverse storage
10763 	       order and put back into memory order afterwards.  */
10764 	    if (TREE_CODE (type) == RECORD_TYPE
10765 		&& GET_MODE_CLASS (ext_mode) == MODE_INT)
10766 	      reversep = TYPE_REVERSE_STORAGE_ORDER (type);
10767 
10768 	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10769 				     (modifier == EXPAND_STACK_PARM
10770 				      ? NULL_RTX : target),
10771 				     ext_mode, ext_mode, reversep);
10772 
10773 	    /* If the result has a record type and the mode of OP0 is an
10774 	       integral mode then, if BITSIZE is narrower than this mode
10775 	       and this is for big-endian data, we must put the field
10776 	       into the high-order bits.  And we must also put it back
10777 	       into memory order if it has been previously reversed.  */
10778 	    if (TREE_CODE (type) == RECORD_TYPE
10779 		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
10780 	      {
10781 		HOST_WIDE_INT size = GET_MODE_BITSIZE (GET_MODE (op0));
10782 
10783 		if (bitsize < size
10784 		    && reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
10785 		  op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10786 				      size - bitsize, op0, 1);
10787 
10788 		if (reversep)
10789 		  op0 = flip_storage_order (GET_MODE (op0), op0);
10790 	      }
10791 
10792 	    /* If the result type is BLKmode, store the data into a temporary
10793 	       of the appropriate type, but with the mode corresponding to the
10794 	       mode for the data we have (op0's mode).  */
10795 	    if (mode == BLKmode)
10796 	      {
10797 		rtx new_rtx
10798 		  = assign_stack_temp_for_type (ext_mode,
10799 						GET_MODE_BITSIZE (ext_mode),
10800 						type);
10801 		emit_move_insn (new_rtx, op0);
10802 		op0 = copy_rtx (new_rtx);
10803 		PUT_MODE (op0, BLKmode);
10804 	      }
10805 
10806 	    return op0;
10807 	  }
10808 
10809 	/* If the result is BLKmode, use that to access the object
10810 	   now as well.  */
10811 	if (mode == BLKmode)
10812 	  mode1 = BLKmode;
10813 
10814 	/* Get a reference to just this component.  */
10815 	if (modifier == EXPAND_CONST_ADDRESS
10816 	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10817 	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10818 	else
10819 	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10820 
10821 	if (op0 == orig_op0)
10822 	  op0 = copy_rtx (op0);
10823 
10824 	/* Don't set memory attributes if the base expression is
10825 	   SSA_NAME that got expanded as a MEM.  In that case, we should
10826 	   just honor its original memory attributes.  */
10827 	if (TREE_CODE (tem) != SSA_NAME || !MEM_P (orig_op0))
10828 	  set_mem_attributes (op0, exp, 0);
10829 
10830 	if (REG_P (XEXP (op0, 0)))
10831 	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10832 
10833 	/* If op0 is a temporary because the original expressions was forced
10834 	   to memory, clear MEM_EXPR so that the original expression cannot
10835 	   be marked as addressable through MEM_EXPR of the temporary.  */
10836 	if (clear_mem_expr)
10837 	  set_mem_expr (op0, NULL_TREE);
10838 
10839 	MEM_VOLATILE_P (op0) |= volatilep;
10840 
10841         if (reversep
10842 	    && modifier != EXPAND_MEMORY
10843 	    && modifier != EXPAND_WRITE)
10844 	  op0 = flip_storage_order (mode1, op0);
10845 
10846 	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10847 	    || modifier == EXPAND_CONST_ADDRESS
10848 	    || modifier == EXPAND_INITIALIZER)
10849 	  return op0;
10850 
10851 	if (target == 0)
10852 	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10853 
10854 	convert_move (target, op0, unsignedp);
10855 	return target;
10856       }
10857 
10858     case OBJ_TYPE_REF:
10859       return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10860 
10861     case CALL_EXPR:
10862       /* All valid uses of __builtin_va_arg_pack () are removed during
10863 	 inlining.  */
10864       if (CALL_EXPR_VA_ARG_PACK (exp))
10865 	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10866       {
10867 	tree fndecl = get_callee_fndecl (exp), attr;
10868 
10869 	if (fndecl
10870 	    /* Don't diagnose the error attribute in thunks, those are
10871 	       artificially created.  */
10872 	    && !CALL_FROM_THUNK_P (exp)
10873 	    && (attr = lookup_attribute ("error",
10874 					 DECL_ATTRIBUTES (fndecl))) != NULL)
10875 	  {
10876 	    const char *ident = lang_hooks.decl_printable_name (fndecl, 1);
10877 	    error ("%Kcall to %qs declared with attribute error: %s", exp,
10878 		   identifier_to_locale (ident),
10879 		   TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10880 	  }
10881 	if (fndecl
10882 	    /* Don't diagnose the warning attribute in thunks, those are
10883 	       artificially created.  */
10884 	    && !CALL_FROM_THUNK_P (exp)
10885 	    && (attr = lookup_attribute ("warning",
10886 					 DECL_ATTRIBUTES (fndecl))) != NULL)
10887 	  {
10888 	    const char *ident = lang_hooks.decl_printable_name (fndecl, 1);
10889 	    warning_at (tree_nonartificial_location (exp), 0,
10890 			"%Kcall to %qs declared with attribute warning: %s",
10891 			exp, identifier_to_locale (ident),
10892 			TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10893 	  }
10894 
10895 	/* Check for a built-in function.  */
10896 	if (fndecl && DECL_BUILT_IN (fndecl))
10897 	  {
10898 	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10899 	    if (CALL_WITH_BOUNDS_P (exp))
10900 	      return expand_builtin_with_bounds (exp, target, subtarget,
10901 						 tmode, ignore);
10902 	    else
10903 	      return expand_builtin (exp, target, subtarget, tmode, ignore);
10904 	  }
10905       }
10906       return expand_call (exp, target, ignore);
10907 
10908     case VIEW_CONVERT_EXPR:
10909       op0 = NULL_RTX;
10910 
10911       /* If we are converting to BLKmode, try to avoid an intermediate
10912 	 temporary by fetching an inner memory reference.  */
10913       if (mode == BLKmode
10914 	  && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10915 	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10916 	  && handled_component_p (treeop0))
10917       {
10918 	machine_mode mode1;
10919 	HOST_WIDE_INT bitsize, bitpos;
10920 	tree offset;
10921 	int unsignedp, reversep, volatilep = 0;
10922 	tree tem
10923 	  = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1,
10924 				 &unsignedp, &reversep, &volatilep);
10925 	rtx orig_op0;
10926 
10927 	/* ??? We should work harder and deal with non-zero offsets.  */
10928 	if (!offset
10929 	    && (bitpos % BITS_PER_UNIT) == 0
10930 	    && !reversep
10931 	    && bitsize >= 0
10932 	    && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10933 	  {
10934 	    /* See the normal_inner_ref case for the rationale.  */
10935 	    orig_op0
10936 	      = expand_expr_real (tem,
10937 				  (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10938 				   && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10939 				       != INTEGER_CST)
10940 				   && modifier != EXPAND_STACK_PARM
10941 				   ? target : NULL_RTX),
10942 				  VOIDmode,
10943 				  modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10944 				  NULL, true);
10945 
10946 	    if (MEM_P (orig_op0))
10947 	      {
10948 		op0 = orig_op0;
10949 
10950 		/* Get a reference to just this component.  */
10951 		if (modifier == EXPAND_CONST_ADDRESS
10952 		    || modifier == EXPAND_SUM
10953 		    || modifier == EXPAND_INITIALIZER)
10954 		  op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10955 		else
10956 		  op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10957 
10958 		if (op0 == orig_op0)
10959 		  op0 = copy_rtx (op0);
10960 
10961 		set_mem_attributes (op0, treeop0, 0);
10962 		if (REG_P (XEXP (op0, 0)))
10963 		  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10964 
10965 		MEM_VOLATILE_P (op0) |= volatilep;
10966 	      }
10967 	  }
10968       }
10969 
10970       if (!op0)
10971 	op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10972 				NULL, inner_reference_p);
10973 
10974       /* If the input and output modes are both the same, we are done.  */
10975       if (mode == GET_MODE (op0))
10976 	;
10977       /* If neither mode is BLKmode, and both modes are the same size
10978 	 then we can use gen_lowpart.  */
10979       else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10980 	       && (GET_MODE_PRECISION (mode)
10981 		   == GET_MODE_PRECISION (GET_MODE (op0)))
10982 	       && !COMPLEX_MODE_P (GET_MODE (op0)))
10983 	{
10984 	  if (GET_CODE (op0) == SUBREG)
10985 	    op0 = force_reg (GET_MODE (op0), op0);
10986 	  temp = gen_lowpart_common (mode, op0);
10987 	  if (temp)
10988 	    op0 = temp;
10989 	  else
10990 	    {
10991 	      if (!REG_P (op0) && !MEM_P (op0))
10992 		op0 = force_reg (GET_MODE (op0), op0);
10993 	      op0 = gen_lowpart (mode, op0);
10994 	    }
10995 	}
10996       /* If both types are integral, convert from one mode to the other.  */
10997       else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10998 	op0 = convert_modes (mode, GET_MODE (op0), op0,
10999 			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
11000       /* If the output type is a bit-field type, do an extraction.  */
11001       else if (reduce_bit_field)
11002 	return extract_bit_field (op0, TYPE_PRECISION (type), 0,
11003 				  TYPE_UNSIGNED (type), NULL_RTX,
11004 				  mode, mode, false);
11005       /* As a last resort, spill op0 to memory, and reload it in a
11006 	 different mode.  */
11007       else if (!MEM_P (op0))
11008 	{
11009 	  /* If the operand is not a MEM, force it into memory.  Since we
11010 	     are going to be changing the mode of the MEM, don't call
11011 	     force_const_mem for constants because we don't allow pool
11012 	     constants to change mode.  */
11013 	  tree inner_type = TREE_TYPE (treeop0);
11014 
11015 	  gcc_assert (!TREE_ADDRESSABLE (exp));
11016 
11017 	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
11018 	    target
11019 	      = assign_stack_temp_for_type
11020 		(TYPE_MODE (inner_type),
11021 		 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
11022 
11023 	  emit_move_insn (target, op0);
11024 	  op0 = target;
11025 	}
11026 
11027       /* If OP0 is (now) a MEM, we need to deal with alignment issues.  If the
11028 	 output type is such that the operand is known to be aligned, indicate
11029 	 that it is.  Otherwise, we need only be concerned about alignment for
11030 	 non-BLKmode results.  */
11031       if (MEM_P (op0))
11032 	{
11033 	  enum insn_code icode;
11034 
11035 	  if (modifier != EXPAND_WRITE
11036 	      && modifier != EXPAND_MEMORY
11037 	      && !inner_reference_p
11038 	      && mode != BLKmode
11039 	      && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
11040 	    {
11041 	      /* If the target does have special handling for unaligned
11042 		 loads of mode then use them.  */
11043 	      if ((icode = optab_handler (movmisalign_optab, mode))
11044 		  != CODE_FOR_nothing)
11045 		{
11046 		  rtx reg;
11047 
11048 		  op0 = adjust_address (op0, mode, 0);
11049 		  /* We've already validated the memory, and we're creating a
11050 		     new pseudo destination.  The predicates really can't
11051 		     fail.  */
11052 		  reg = gen_reg_rtx (mode);
11053 
11054 		  /* Nor can the insn generator.  */
11055 		  rtx_insn *insn = GEN_FCN (icode) (reg, op0);
11056 		  emit_insn (insn);
11057 		  return reg;
11058 		}
11059 	      else if (STRICT_ALIGNMENT)
11060 		{
11061 		  tree inner_type = TREE_TYPE (treeop0);
11062 		  HOST_WIDE_INT temp_size
11063 		    = MAX (int_size_in_bytes (inner_type),
11064 			   (HOST_WIDE_INT) GET_MODE_SIZE (mode));
11065 		  rtx new_rtx
11066 		    = assign_stack_temp_for_type (mode, temp_size, type);
11067 		  rtx new_with_op0_mode
11068 		    = adjust_address (new_rtx, GET_MODE (op0), 0);
11069 
11070 		  gcc_assert (!TREE_ADDRESSABLE (exp));
11071 
11072 		  if (GET_MODE (op0) == BLKmode)
11073 		    emit_block_move (new_with_op0_mode, op0,
11074 				     GEN_INT (GET_MODE_SIZE (mode)),
11075 				     (modifier == EXPAND_STACK_PARM
11076 				      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
11077 		  else
11078 		    emit_move_insn (new_with_op0_mode, op0);
11079 
11080 		  op0 = new_rtx;
11081 		}
11082 	    }
11083 
11084 	  op0 = adjust_address (op0, mode, 0);
11085 	}
11086 
11087       return op0;
11088 
11089     case MODIFY_EXPR:
11090       {
11091 	tree lhs = treeop0;
11092 	tree rhs = treeop1;
11093 	gcc_assert (ignore);
11094 
11095 	/* Check for |= or &= of a bitfield of size one into another bitfield
11096 	   of size 1.  In this case, (unless we need the result of the
11097 	   assignment) we can do this more efficiently with a
11098 	   test followed by an assignment, if necessary.
11099 
11100 	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
11101 	   things change so we do, this code should be enhanced to
11102 	   support it.  */
11103 	if (TREE_CODE (lhs) == COMPONENT_REF
11104 	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
11105 		|| TREE_CODE (rhs) == BIT_AND_EXPR)
11106 	    && TREE_OPERAND (rhs, 0) == lhs
11107 	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
11108 	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
11109 	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
11110 	  {
11111 	    rtx_code_label *label = gen_label_rtx ();
11112 	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
11113 	    do_jump (TREE_OPERAND (rhs, 1),
11114 		     value ? label : 0,
11115 		     value ? 0 : label, -1);
11116 	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
11117 			       false);
11118 	    do_pending_stack_adjust ();
11119 	    emit_label (label);
11120 	    return const0_rtx;
11121 	  }
11122 
11123 	expand_assignment (lhs, rhs, false);
11124 	return const0_rtx;
11125       }
11126 
11127     case ADDR_EXPR:
11128       return expand_expr_addr_expr (exp, target, tmode, modifier);
11129 
11130     case REALPART_EXPR:
11131       op0 = expand_normal (treeop0);
11132       return read_complex_part (op0, false);
11133 
11134     case IMAGPART_EXPR:
11135       op0 = expand_normal (treeop0);
11136       return read_complex_part (op0, true);
11137 
11138     case RETURN_EXPR:
11139     case LABEL_EXPR:
11140     case GOTO_EXPR:
11141     case SWITCH_EXPR:
11142     case ASM_EXPR:
11143       /* Expanded in cfgexpand.c.  */
11144       gcc_unreachable ();
11145 
11146     case TRY_CATCH_EXPR:
11147     case CATCH_EXPR:
11148     case EH_FILTER_EXPR:
11149     case TRY_FINALLY_EXPR:
11150       /* Lowered by tree-eh.c.  */
11151       gcc_unreachable ();
11152 
11153     case WITH_CLEANUP_EXPR:
11154     case CLEANUP_POINT_EXPR:
11155     case TARGET_EXPR:
11156     case CASE_LABEL_EXPR:
11157     case VA_ARG_EXPR:
11158     case BIND_EXPR:
11159     case INIT_EXPR:
11160     case CONJ_EXPR:
11161     case COMPOUND_EXPR:
11162     case PREINCREMENT_EXPR:
11163     case PREDECREMENT_EXPR:
11164     case POSTINCREMENT_EXPR:
11165     case POSTDECREMENT_EXPR:
11166     case LOOP_EXPR:
11167     case EXIT_EXPR:
11168     case COMPOUND_LITERAL_EXPR:
11169       /* Lowered by gimplify.c.  */
11170       gcc_unreachable ();
11171 
11172     case FDESC_EXPR:
11173       /* Function descriptors are not valid except for as
11174 	 initialization constants, and should not be expanded.  */
11175       gcc_unreachable ();
11176 
11177     case WITH_SIZE_EXPR:
11178       /* WITH_SIZE_EXPR expands to its first argument.  The caller should
11179 	 have pulled out the size to use in whatever context it needed.  */
11180       return expand_expr_real (treeop0, original_target, tmode,
11181 			       modifier, alt_rtl, inner_reference_p);
11182 
11183     default:
11184       return expand_expr_real_2 (&ops, target, tmode, modifier);
11185     }
11186 }
11187 
11188 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
11189    signedness of TYPE), possibly returning the result in TARGET.  */
11190 static rtx
11191 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
11192 {
11193   HOST_WIDE_INT prec = TYPE_PRECISION (type);
11194   if (target && GET_MODE (target) != GET_MODE (exp))
11195     target = 0;
11196   /* For constant values, reduce using build_int_cst_type. */
11197   if (CONST_INT_P (exp))
11198     {
11199       HOST_WIDE_INT value = INTVAL (exp);
11200       tree t = build_int_cst_type (type, value);
11201       return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
11202     }
11203   else if (TYPE_UNSIGNED (type))
11204     {
11205       machine_mode mode = GET_MODE (exp);
11206       rtx mask = immed_wide_int_const
11207 	(wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
11208       return expand_and (mode, exp, mask, target);
11209     }
11210   else
11211     {
11212       int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
11213       exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
11214 			  exp, count, target, 0);
11215       return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
11216 			   exp, count, target, 0);
11217     }
11218 }
11219 
11220 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
11221    when applied to the address of EXP produces an address known to be
11222    aligned more than BIGGEST_ALIGNMENT.  */
11223 
11224 static int
11225 is_aligning_offset (const_tree offset, const_tree exp)
11226 {
11227   /* Strip off any conversions.  */
11228   while (CONVERT_EXPR_P (offset))
11229     offset = TREE_OPERAND (offset, 0);
11230 
11231   /* We must now have a BIT_AND_EXPR with a constant that is one less than
11232      power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
11233   if (TREE_CODE (offset) != BIT_AND_EXPR
11234       || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
11235       || compare_tree_int (TREE_OPERAND (offset, 1),
11236 			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
11237       || !pow2p_hwi (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1))
11238     return 0;
11239 
11240   /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
11241      It must be NEGATE_EXPR.  Then strip any more conversions.  */
11242   offset = TREE_OPERAND (offset, 0);
11243   while (CONVERT_EXPR_P (offset))
11244     offset = TREE_OPERAND (offset, 0);
11245 
11246   if (TREE_CODE (offset) != NEGATE_EXPR)
11247     return 0;
11248 
11249   offset = TREE_OPERAND (offset, 0);
11250   while (CONVERT_EXPR_P (offset))
11251     offset = TREE_OPERAND (offset, 0);
11252 
11253   /* This must now be the address of EXP.  */
11254   return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
11255 }
11256 
11257 /* Return the tree node if an ARG corresponds to a string constant or zero
11258    if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
11259    in bytes within the string that ARG is accessing.  The type of the
11260    offset will be `sizetype'.  */
11261 
11262 tree
11263 string_constant (tree arg, tree *ptr_offset)
11264 {
11265   tree array, offset, lower_bound;
11266   STRIP_NOPS (arg);
11267 
11268   if (TREE_CODE (arg) == ADDR_EXPR)
11269     {
11270       if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
11271 	{
11272 	  *ptr_offset = size_zero_node;
11273 	  return TREE_OPERAND (arg, 0);
11274 	}
11275       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
11276 	{
11277 	  array = TREE_OPERAND (arg, 0);
11278 	  offset = size_zero_node;
11279 	}
11280       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
11281 	{
11282 	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
11283 	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
11284 	  if (TREE_CODE (array) != STRING_CST && !VAR_P (array))
11285 	    return 0;
11286 
11287 	  /* Check if the array has a nonzero lower bound.  */
11288 	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
11289 	  if (!integer_zerop (lower_bound))
11290 	    {
11291 	      /* If the offset and base aren't both constants, return 0.  */
11292 	      if (TREE_CODE (lower_bound) != INTEGER_CST)
11293 	        return 0;
11294 	      if (TREE_CODE (offset) != INTEGER_CST)
11295 		return 0;
11296 	      /* Adjust offset by the lower bound.  */
11297 	      offset = size_diffop (fold_convert (sizetype, offset),
11298 				    fold_convert (sizetype, lower_bound));
11299 	    }
11300 	}
11301       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
11302 	{
11303 	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
11304 	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
11305 	  if (TREE_CODE (array) != ADDR_EXPR)
11306 	    return 0;
11307 	  array = TREE_OPERAND (array, 0);
11308 	  if (TREE_CODE (array) != STRING_CST && !VAR_P (array))
11309 	    return 0;
11310 	}
11311       else
11312 	return 0;
11313     }
11314   else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
11315     {
11316       tree arg0 = TREE_OPERAND (arg, 0);
11317       tree arg1 = TREE_OPERAND (arg, 1);
11318 
11319       STRIP_NOPS (arg0);
11320       STRIP_NOPS (arg1);
11321 
11322       if (TREE_CODE (arg0) == ADDR_EXPR
11323 	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
11324 	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
11325 	{
11326 	  array = TREE_OPERAND (arg0, 0);
11327 	  offset = arg1;
11328 	}
11329       else if (TREE_CODE (arg1) == ADDR_EXPR
11330 	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
11331 		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
11332 	{
11333 	  array = TREE_OPERAND (arg1, 0);
11334 	  offset = arg0;
11335 	}
11336       else
11337 	return 0;
11338     }
11339   else
11340     return 0;
11341 
11342   if (TREE_CODE (array) == STRING_CST)
11343     {
11344       *ptr_offset = fold_convert (sizetype, offset);
11345       return array;
11346     }
11347   else if (VAR_P (array) || TREE_CODE (array) == CONST_DECL)
11348     {
11349       int length;
11350       tree init = ctor_for_folding (array);
11351 
11352       /* Variables initialized to string literals can be handled too.  */
11353       if (init == error_mark_node
11354 	  || !init
11355 	  || TREE_CODE (init) != STRING_CST)
11356 	return 0;
11357 
11358       /* Avoid const char foo[4] = "abcde";  */
11359       if (DECL_SIZE_UNIT (array) == NULL_TREE
11360 	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
11361 	  || (length = TREE_STRING_LENGTH (init)) <= 0
11362 	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
11363 	return 0;
11364 
11365       /* If variable is bigger than the string literal, OFFSET must be constant
11366 	 and inside of the bounds of the string literal.  */
11367       offset = fold_convert (sizetype, offset);
11368       if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
11369 	  && (! tree_fits_uhwi_p (offset)
11370 	      || compare_tree_int (offset, length) >= 0))
11371 	return 0;
11372 
11373       *ptr_offset = offset;
11374       return init;
11375     }
11376 
11377   return 0;
11378 }
11379 
11380 /* Generate code to calculate OPS, and exploded expression
11381    using a store-flag instruction and return an rtx for the result.
11382    OPS reflects a comparison.
11383 
11384    If TARGET is nonzero, store the result there if convenient.
11385 
11386    Return zero if there is no suitable set-flag instruction
11387    available on this machine.
11388 
11389    Once expand_expr has been called on the arguments of the comparison,
11390    we are committed to doing the store flag, since it is not safe to
11391    re-evaluate the expression.  We emit the store-flag insn by calling
11392    emit_store_flag, but only expand the arguments if we have a reason
11393    to believe that emit_store_flag will be successful.  If we think that
11394    it will, but it isn't, we have to simulate the store-flag with a
11395    set/jump/set sequence.  */
11396 
11397 static rtx
11398 do_store_flag (sepops ops, rtx target, machine_mode mode)
11399 {
11400   enum rtx_code code;
11401   tree arg0, arg1, type;
11402   machine_mode operand_mode;
11403   int unsignedp;
11404   rtx op0, op1;
11405   rtx subtarget = target;
11406   location_t loc = ops->location;
11407 
11408   arg0 = ops->op0;
11409   arg1 = ops->op1;
11410 
11411   /* Don't crash if the comparison was erroneous.  */
11412   if (arg0 == error_mark_node || arg1 == error_mark_node)
11413     return const0_rtx;
11414 
11415   type = TREE_TYPE (arg0);
11416   operand_mode = TYPE_MODE (type);
11417   unsignedp = TYPE_UNSIGNED (type);
11418 
11419   /* We won't bother with BLKmode store-flag operations because it would mean
11420      passing a lot of information to emit_store_flag.  */
11421   if (operand_mode == BLKmode)
11422     return 0;
11423 
11424   /* We won't bother with store-flag operations involving function pointers
11425      when function pointers must be canonicalized before comparisons.  */
11426   if (targetm.have_canonicalize_funcptr_for_compare ()
11427       && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
11428 	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
11429 	       == FUNCTION_TYPE))
11430 	  || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
11431 	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
11432 		  == FUNCTION_TYPE))))
11433     return 0;
11434 
11435   STRIP_NOPS (arg0);
11436   STRIP_NOPS (arg1);
11437 
11438   /* For vector typed comparisons emit code to generate the desired
11439      all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
11440      expander for this.  */
11441   if (TREE_CODE (ops->type) == VECTOR_TYPE)
11442     {
11443       tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
11444       if (VECTOR_BOOLEAN_TYPE_P (ops->type)
11445 	  && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type, ops->code))
11446 	return expand_vec_cmp_expr (ops->type, ifexp, target);
11447       else
11448 	{
11449 	  tree if_true = constant_boolean_node (true, ops->type);
11450 	  tree if_false = constant_boolean_node (false, ops->type);
11451 	  return expand_vec_cond_expr (ops->type, ifexp, if_true,
11452 				       if_false, target);
11453 	}
11454     }
11455 
11456   /* Get the rtx comparison code to use.  We know that EXP is a comparison
11457      operation of some type.  Some comparisons against 1 and -1 can be
11458      converted to comparisons with zero.  Do so here so that the tests
11459      below will be aware that we have a comparison with zero.   These
11460      tests will not catch constants in the first operand, but constants
11461      are rarely passed as the first operand.  */
11462 
11463   switch (ops->code)
11464     {
11465     case EQ_EXPR:
11466       code = EQ;
11467       break;
11468     case NE_EXPR:
11469       code = NE;
11470       break;
11471     case LT_EXPR:
11472       if (integer_onep (arg1))
11473 	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11474       else
11475 	code = unsignedp ? LTU : LT;
11476       break;
11477     case LE_EXPR:
11478       if (! unsignedp && integer_all_onesp (arg1))
11479 	arg1 = integer_zero_node, code = LT;
11480       else
11481 	code = unsignedp ? LEU : LE;
11482       break;
11483     case GT_EXPR:
11484       if (! unsignedp && integer_all_onesp (arg1))
11485 	arg1 = integer_zero_node, code = GE;
11486       else
11487 	code = unsignedp ? GTU : GT;
11488       break;
11489     case GE_EXPR:
11490       if (integer_onep (arg1))
11491 	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11492       else
11493 	code = unsignedp ? GEU : GE;
11494       break;
11495 
11496     case UNORDERED_EXPR:
11497       code = UNORDERED;
11498       break;
11499     case ORDERED_EXPR:
11500       code = ORDERED;
11501       break;
11502     case UNLT_EXPR:
11503       code = UNLT;
11504       break;
11505     case UNLE_EXPR:
11506       code = UNLE;
11507       break;
11508     case UNGT_EXPR:
11509       code = UNGT;
11510       break;
11511     case UNGE_EXPR:
11512       code = UNGE;
11513       break;
11514     case UNEQ_EXPR:
11515       code = UNEQ;
11516       break;
11517     case LTGT_EXPR:
11518       code = LTGT;
11519       break;
11520 
11521     default:
11522       gcc_unreachable ();
11523     }
11524 
11525   /* Put a constant second.  */
11526   if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11527       || TREE_CODE (arg0) == FIXED_CST)
11528     {
11529       std::swap (arg0, arg1);
11530       code = swap_condition (code);
11531     }
11532 
11533   /* If this is an equality or inequality test of a single bit, we can
11534      do this by shifting the bit being tested to the low-order bit and
11535      masking the result with the constant 1.  If the condition was EQ,
11536      we xor it with 1.  This does not require an scc insn and is faster
11537      than an scc insn even if we have it.
11538 
11539      The code to make this transformation was moved into fold_single_bit_test,
11540      so we just call into the folder and expand its result.  */
11541 
11542   if ((code == NE || code == EQ)
11543       && integer_zerop (arg1)
11544       && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11545     {
11546       gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11547       if (srcstmt
11548 	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11549 	{
11550 	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11551 	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11552 	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11553 				       gimple_assign_rhs1 (srcstmt),
11554 				       gimple_assign_rhs2 (srcstmt));
11555 	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11556 	  if (temp)
11557 	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11558 	}
11559     }
11560 
11561   if (! get_subtarget (target)
11562       || GET_MODE (subtarget) != operand_mode)
11563     subtarget = 0;
11564 
11565   expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11566 
11567   if (target == 0)
11568     target = gen_reg_rtx (mode);
11569 
11570   /* Try a cstore if possible.  */
11571   return emit_store_flag_force (target, code, op0, op1,
11572 				operand_mode, unsignedp,
11573 				(TYPE_PRECISION (ops->type) == 1
11574 				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11575 }
11576 
11577 /* Attempt to generate a casesi instruction.  Returns 1 if successful,
11578    0 otherwise (i.e. if there is no casesi instruction).
11579 
11580    DEFAULT_PROBABILITY is the probability of jumping to the default
11581    label.  */
11582 int
11583 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11584 	    rtx table_label, rtx default_label, rtx fallback_label,
11585             int default_probability)
11586 {
11587   struct expand_operand ops[5];
11588   machine_mode index_mode = SImode;
11589   rtx op1, op2, index;
11590 
11591   if (! targetm.have_casesi ())
11592     return 0;
11593 
11594   /* Convert the index to SImode.  */
11595   if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11596     {
11597       machine_mode omode = TYPE_MODE (index_type);
11598       rtx rangertx = expand_normal (range);
11599 
11600       /* We must handle the endpoints in the original mode.  */
11601       index_expr = build2 (MINUS_EXPR, index_type,
11602 			   index_expr, minval);
11603       minval = integer_zero_node;
11604       index = expand_normal (index_expr);
11605       if (default_label)
11606         emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11607 				 omode, 1, default_label,
11608                                  default_probability);
11609       /* Now we can safely truncate.  */
11610       index = convert_to_mode (index_mode, index, 0);
11611     }
11612   else
11613     {
11614       if (TYPE_MODE (index_type) != index_mode)
11615 	{
11616 	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11617 	  index_expr = fold_convert (index_type, index_expr);
11618 	}
11619 
11620       index = expand_normal (index_expr);
11621     }
11622 
11623   do_pending_stack_adjust ();
11624 
11625   op1 = expand_normal (minval);
11626   op2 = expand_normal (range);
11627 
11628   create_input_operand (&ops[0], index, index_mode);
11629   create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11630   create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11631   create_fixed_operand (&ops[3], table_label);
11632   create_fixed_operand (&ops[4], (default_label
11633 				  ? default_label
11634 				  : fallback_label));
11635   expand_jump_insn (targetm.code_for_casesi, 5, ops);
11636   return 1;
11637 }
11638 
11639 /* Attempt to generate a tablejump instruction; same concept.  */
11640 /* Subroutine of the next function.
11641 
11642    INDEX is the value being switched on, with the lowest value
11643    in the table already subtracted.
11644    MODE is its expected mode (needed if INDEX is constant).
11645    RANGE is the length of the jump table.
11646    TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11647 
11648    DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11649    index value is out of range.
11650    DEFAULT_PROBABILITY is the probability of jumping to
11651    the default label.  */
11652 
11653 static void
11654 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11655 	      rtx default_label, int default_probability)
11656 {
11657   rtx temp, vector;
11658 
11659   if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11660     cfun->cfg->max_jumptable_ents = INTVAL (range);
11661 
11662   /* Do an unsigned comparison (in the proper mode) between the index
11663      expression and the value which represents the length of the range.
11664      Since we just finished subtracting the lower bound of the range
11665      from the index expression, this comparison allows us to simultaneously
11666      check that the original index expression value is both greater than
11667      or equal to the minimum value of the range and less than or equal to
11668      the maximum value of the range.  */
11669 
11670   if (default_label)
11671     emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11672 			     default_label, default_probability);
11673 
11674 
11675   /* If index is in range, it must fit in Pmode.
11676      Convert to Pmode so we can index with it.  */
11677   if (mode != Pmode)
11678     index = convert_to_mode (Pmode, index, 1);
11679 
11680   /* Don't let a MEM slip through, because then INDEX that comes
11681      out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11682      and break_out_memory_refs will go to work on it and mess it up.  */
11683 #ifdef PIC_CASE_VECTOR_ADDRESS
11684   if (flag_pic && !REG_P (index))
11685     index = copy_to_mode_reg (Pmode, index);
11686 #endif
11687 
11688   /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11689      GET_MODE_SIZE, because this indicates how large insns are.  The other
11690      uses should all be Pmode, because they are addresses.  This code
11691      could fail if addresses and insns are not the same size.  */
11692   index = simplify_gen_binary (MULT, Pmode, index,
11693 			       gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11694 					     Pmode));
11695   index = simplify_gen_binary (PLUS, Pmode, index,
11696 			       gen_rtx_LABEL_REF (Pmode, table_label));
11697 
11698 #ifdef PIC_CASE_VECTOR_ADDRESS
11699   if (flag_pic)
11700     index = PIC_CASE_VECTOR_ADDRESS (index);
11701   else
11702 #endif
11703     index = memory_address (CASE_VECTOR_MODE, index);
11704   temp = gen_reg_rtx (CASE_VECTOR_MODE);
11705   vector = gen_const_mem (CASE_VECTOR_MODE, index);
11706   convert_move (temp, vector, 0);
11707 
11708   emit_jump_insn (targetm.gen_tablejump (temp, table_label));
11709 
11710   /* If we are generating PIC code or if the table is PC-relative, the
11711      table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
11712   if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11713     emit_barrier ();
11714 }
11715 
11716 int
11717 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11718 	       rtx table_label, rtx default_label, int default_probability)
11719 {
11720   rtx index;
11721 
11722   if (! targetm.have_tablejump ())
11723     return 0;
11724 
11725   index_expr = fold_build2 (MINUS_EXPR, index_type,
11726 			    fold_convert (index_type, index_expr),
11727 			    fold_convert (index_type, minval));
11728   index = expand_normal (index_expr);
11729   do_pending_stack_adjust ();
11730 
11731   do_tablejump (index, TYPE_MODE (index_type),
11732 		convert_modes (TYPE_MODE (index_type),
11733 			       TYPE_MODE (TREE_TYPE (range)),
11734 			       expand_normal (range),
11735 			       TYPE_UNSIGNED (TREE_TYPE (range))),
11736 		table_label, default_label, default_probability);
11737   return 1;
11738 }
11739 
11740 /* Return a CONST_VECTOR rtx representing vector mask for
11741    a VECTOR_CST of booleans.  */
11742 static rtx
11743 const_vector_mask_from_tree (tree exp)
11744 {
11745   rtvec v;
11746   unsigned i;
11747   int units;
11748   tree elt;
11749   machine_mode inner, mode;
11750 
11751   mode = TYPE_MODE (TREE_TYPE (exp));
11752   units = GET_MODE_NUNITS (mode);
11753   inner = GET_MODE_INNER (mode);
11754 
11755   v = rtvec_alloc (units);
11756 
11757   for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11758     {
11759       elt = VECTOR_CST_ELT (exp, i);
11760 
11761       gcc_assert (TREE_CODE (elt) == INTEGER_CST);
11762       if (integer_zerop (elt))
11763 	RTVEC_ELT (v, i) = CONST0_RTX (inner);
11764       else if (integer_onep (elt)
11765 	       || integer_minus_onep (elt))
11766 	RTVEC_ELT (v, i) = CONSTM1_RTX (inner);
11767       else
11768 	gcc_unreachable ();
11769     }
11770 
11771   return gen_rtx_CONST_VECTOR (mode, v);
11772 }
11773 
11774 /* Return a CONST_INT rtx representing vector mask for
11775    a VECTOR_CST of booleans.  */
11776 static rtx
11777 const_scalar_mask_from_tree (tree exp)
11778 {
11779   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
11780   wide_int res = wi::zero (GET_MODE_PRECISION (mode));
11781   tree elt;
11782   unsigned i;
11783 
11784   for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11785     {
11786       elt = VECTOR_CST_ELT (exp, i);
11787       gcc_assert (TREE_CODE (elt) == INTEGER_CST);
11788       if (integer_all_onesp (elt))
11789 	res = wi::set_bit (res, i);
11790       else
11791 	gcc_assert (integer_zerop (elt));
11792     }
11793 
11794   return immed_wide_int_const (res, mode);
11795 }
11796 
11797 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
11798 static rtx
11799 const_vector_from_tree (tree exp)
11800 {
11801   rtvec v;
11802   unsigned i;
11803   int units;
11804   tree elt;
11805   machine_mode inner, mode;
11806 
11807   mode = TYPE_MODE (TREE_TYPE (exp));
11808 
11809   if (initializer_zerop (exp))
11810     return CONST0_RTX (mode);
11811 
11812   if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
11813     return const_vector_mask_from_tree (exp);
11814 
11815   units = GET_MODE_NUNITS (mode);
11816   inner = GET_MODE_INNER (mode);
11817 
11818   v = rtvec_alloc (units);
11819 
11820   for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11821     {
11822       elt = VECTOR_CST_ELT (exp, i);
11823 
11824       if (TREE_CODE (elt) == REAL_CST)
11825 	RTVEC_ELT (v, i) = const_double_from_real_value (TREE_REAL_CST (elt),
11826 							 inner);
11827       else if (TREE_CODE (elt) == FIXED_CST)
11828 	RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11829 							 inner);
11830       else
11831 	RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11832     }
11833 
11834   return gen_rtx_CONST_VECTOR (mode, v);
11835 }
11836 
11837 /* Build a decl for a personality function given a language prefix.  */
11838 
11839 tree
11840 build_personality_function (const char *lang)
11841 {
11842   const char *unwind_and_version;
11843   tree decl, type;
11844   char *name;
11845 
11846   switch (targetm_common.except_unwind_info (&global_options))
11847     {
11848     case UI_NONE:
11849       return NULL;
11850     case UI_SJLJ:
11851       unwind_and_version = "_sj0";
11852       break;
11853     case UI_DWARF2:
11854     case UI_TARGET:
11855       unwind_and_version = "_v0";
11856       break;
11857     case UI_SEH:
11858       unwind_and_version = "_seh0";
11859       break;
11860     default:
11861       gcc_unreachable ();
11862     }
11863 
11864   name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11865 
11866   type = build_function_type_list (integer_type_node, integer_type_node,
11867 				   long_long_unsigned_type_node,
11868 				   ptr_type_node, ptr_type_node, NULL_TREE);
11869   decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11870 		     get_identifier (name), type);
11871   DECL_ARTIFICIAL (decl) = 1;
11872   DECL_EXTERNAL (decl) = 1;
11873   TREE_PUBLIC (decl) = 1;
11874 
11875   /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
11876      are the flags assigned by targetm.encode_section_info.  */
11877   SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11878 
11879   return decl;
11880 }
11881 
11882 /* Extracts the personality function of DECL and returns the corresponding
11883    libfunc.  */
11884 
11885 rtx
11886 get_personality_function (tree decl)
11887 {
11888   tree personality = DECL_FUNCTION_PERSONALITY (decl);
11889   enum eh_personality_kind pk;
11890 
11891   pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11892   if (pk == eh_personality_none)
11893     return NULL;
11894 
11895   if (!personality
11896       && pk == eh_personality_any)
11897     personality = lang_hooks.eh_personality ();
11898 
11899   if (pk == eh_personality_lang)
11900     gcc_assert (personality != NULL_TREE);
11901 
11902   return XEXP (DECL_RTL (personality), 0);
11903 }
11904 
11905 /* Returns a tree for the size of EXP in bytes.  */
11906 
11907 static tree
11908 tree_expr_size (const_tree exp)
11909 {
11910   if (DECL_P (exp)
11911       && DECL_SIZE_UNIT (exp) != 0)
11912     return DECL_SIZE_UNIT (exp);
11913   else
11914     return size_in_bytes (TREE_TYPE (exp));
11915 }
11916 
11917 /* Return an rtx for the size in bytes of the value of EXP.  */
11918 
11919 rtx
11920 expr_size (tree exp)
11921 {
11922   tree size;
11923 
11924   if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11925     size = TREE_OPERAND (exp, 1);
11926   else
11927     {
11928       size = tree_expr_size (exp);
11929       gcc_assert (size);
11930       gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
11931     }
11932 
11933   return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
11934 }
11935 
11936 /* Return a wide integer for the size in bytes of the value of EXP, or -1
11937    if the size can vary or is larger than an integer.  */
11938 
11939 static HOST_WIDE_INT
11940 int_expr_size (tree exp)
11941 {
11942   tree size;
11943 
11944   if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11945     size = TREE_OPERAND (exp, 1);
11946   else
11947     {
11948       size = tree_expr_size (exp);
11949       gcc_assert (size);
11950     }
11951 
11952   if (size == 0 || !tree_fits_shwi_p (size))
11953     return -1;
11954 
11955   return tree_to_shwi (size);
11956 }
11957