xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/expr.c (revision 627f7eb200a4419d89b531d55fccd2ee3ffdcde0)
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2    Copyright (C) 1988-2018 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "predict.h"
29 #include "memmodel.h"
30 #include "tm_p.h"
31 #include "ssa.h"
32 #include "expmed.h"
33 #include "optabs.h"
34 #include "regs.h"
35 #include "emit-rtl.h"
36 #include "recog.h"
37 #include "cgraph.h"
38 #include "diagnostic.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "stor-layout.h"
42 #include "attribs.h"
43 #include "varasm.h"
44 #include "except.h"
45 #include "insn-attr.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "stmt.h"
50 /* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
51 #include "expr.h"
52 #include "optabs-tree.h"
53 #include "libfuncs.h"
54 #include "reload.h"
55 #include "langhooks.h"
56 #include "common/common-target.h"
57 #include "tree-ssa-live.h"
58 #include "tree-outof-ssa.h"
59 #include "tree-ssa-address.h"
60 #include "builtins.h"
61 #include "tree-chkp.h"
62 #include "rtl-chkp.h"
63 #include "ccmp.h"
64 #include "rtx-vector-builder.h"
65 
66 
67 /* If this is nonzero, we do not bother generating VOLATILE
68    around volatile memory references, and we are willing to
69    output indirect addresses.  If cse is to follow, we reject
70    indirect addresses so a useful potential cse is generated;
71    if it is used only once, instruction combination will produce
72    the same indirect address eventually.  */
73 int cse_not_expected;
74 
75 static bool block_move_libcall_safe_for_call_parm (void);
76 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
77 					unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
78 					unsigned HOST_WIDE_INT);
79 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
80 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
81 static rtx_insn *compress_float_constant (rtx, rtx);
82 static rtx get_subtarget (rtx);
83 static void store_constructor (tree, rtx, int, poly_int64, bool);
84 static rtx store_field (rtx, poly_int64, poly_int64, poly_uint64, poly_uint64,
85 			machine_mode, tree, alias_set_type, bool, bool);
86 
87 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
88 
89 static int is_aligning_offset (const_tree, const_tree);
90 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
91 static rtx do_store_flag (sepops, rtx, machine_mode);
92 #ifdef PUSH_ROUNDING
93 static void emit_single_push_insn (machine_mode, rtx, tree);
94 #endif
95 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx,
96 			  profile_probability);
97 static rtx const_vector_from_tree (tree);
98 static rtx const_scalar_mask_from_tree (scalar_int_mode, tree);
99 static tree tree_expr_size (const_tree);
100 static HOST_WIDE_INT int_expr_size (tree);
101 static void convert_mode_scalar (rtx, rtx, int);
102 
103 
104 /* This is run to set up which modes can be used
105    directly in memory and to initialize the block move optab.  It is run
106    at the beginning of compilation and when the target is reinitialized.  */
107 
108 void
109 init_expr_target (void)
110 {
111   rtx pat;
112   int num_clobbers;
113   rtx mem, mem1;
114   rtx reg;
115 
116   /* Try indexing by frame ptr and try by stack ptr.
117      It is known that on the Convex the stack ptr isn't a valid index.
118      With luck, one or the other is valid on any machine.  */
119   mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
120   mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);
121 
122   /* A scratch register we can modify in-place below to avoid
123      useless RTL allocations.  */
124   reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
125 
126   rtx_insn *insn = as_a<rtx_insn *> (rtx_alloc (INSN));
127   pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
128   PATTERN (insn) = pat;
129 
130   for (machine_mode mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
131        mode = (machine_mode) ((int) mode + 1))
132     {
133       int regno;
134 
135       direct_load[(int) mode] = direct_store[(int) mode] = 0;
136       PUT_MODE (mem, mode);
137       PUT_MODE (mem1, mode);
138 
139       /* See if there is some register that can be used in this mode and
140 	 directly loaded or stored from memory.  */
141 
142       if (mode != VOIDmode && mode != BLKmode)
143 	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
144 	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
145 	     regno++)
146 	  {
147 	    if (!targetm.hard_regno_mode_ok (regno, mode))
148 	      continue;
149 
150 	    set_mode_and_regno (reg, mode, regno);
151 
152 	    SET_SRC (pat) = mem;
153 	    SET_DEST (pat) = reg;
154 	    if (recog (pat, insn, &num_clobbers) >= 0)
155 	      direct_load[(int) mode] = 1;
156 
157 	    SET_SRC (pat) = mem1;
158 	    SET_DEST (pat) = reg;
159 	    if (recog (pat, insn, &num_clobbers) >= 0)
160 	      direct_load[(int) mode] = 1;
161 
162 	    SET_SRC (pat) = reg;
163 	    SET_DEST (pat) = mem;
164 	    if (recog (pat, insn, &num_clobbers) >= 0)
165 	      direct_store[(int) mode] = 1;
166 
167 	    SET_SRC (pat) = reg;
168 	    SET_DEST (pat) = mem1;
169 	    if (recog (pat, insn, &num_clobbers) >= 0)
170 	      direct_store[(int) mode] = 1;
171 	  }
172     }
173 
174   mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));
175 
176   opt_scalar_float_mode mode_iter;
177   FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_FLOAT)
178     {
179       scalar_float_mode mode = mode_iter.require ();
180       scalar_float_mode srcmode;
181       FOR_EACH_MODE_UNTIL (srcmode, mode)
182 	{
183 	  enum insn_code ic;
184 
185 	  ic = can_extend_p (mode, srcmode, 0);
186 	  if (ic == CODE_FOR_nothing)
187 	    continue;
188 
189 	  PUT_MODE (mem, srcmode);
190 
191 	  if (insn_operand_matches (ic, 1, mem))
192 	    float_extend_from_mem[mode][srcmode] = true;
193 	}
194     }
195 }
196 
197 /* This is run at the start of compiling a function.  */
198 
199 void
200 init_expr (void)
201 {
202   memset (&crtl->expr, 0, sizeof (crtl->expr));
203 }
204 
205 /* Copy data from FROM to TO, where the machine modes are not the same.
206    Both modes may be integer, or both may be floating, or both may be
207    fixed-point.
208    UNSIGNEDP should be nonzero if FROM is an unsigned type.
209    This causes zero-extension instead of sign-extension.  */
210 
211 void
212 convert_move (rtx to, rtx from, int unsignedp)
213 {
214   machine_mode to_mode = GET_MODE (to);
215   machine_mode from_mode = GET_MODE (from);
216 
217   gcc_assert (to_mode != BLKmode);
218   gcc_assert (from_mode != BLKmode);
219 
220   /* If the source and destination are already the same, then there's
221      nothing to do.  */
222   if (to == from)
223     return;
224 
225   /* If FROM is a SUBREG that indicates that we have already done at least
226      the required extension, strip it.  We don't handle such SUBREGs as
227      TO here.  */
228 
229   scalar_int_mode to_int_mode;
230   if (GET_CODE (from) == SUBREG
231       && SUBREG_PROMOTED_VAR_P (from)
232       && is_a <scalar_int_mode> (to_mode, &to_int_mode)
233       && (GET_MODE_PRECISION (subreg_promoted_mode (from))
234 	  >= GET_MODE_PRECISION (to_int_mode))
235       && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
236     from = gen_lowpart (to_int_mode, from), from_mode = to_int_mode;
237 
238   gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
239 
240   if (to_mode == from_mode
241       || (from_mode == VOIDmode && CONSTANT_P (from)))
242     {
243       emit_move_insn (to, from);
244       return;
245     }
246 
247   if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
248     {
249       gcc_assert (known_eq (GET_MODE_BITSIZE (from_mode),
250 			    GET_MODE_BITSIZE (to_mode)));
251 
252       if (VECTOR_MODE_P (to_mode))
253 	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
254       else
255 	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
256 
257       emit_move_insn (to, from);
258       return;
259     }
260 
261   if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
262     {
263       convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
264       convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
265       return;
266     }
267 
268   convert_mode_scalar (to, from, unsignedp);
269 }
270 
271 /* Like convert_move, but deals only with scalar modes.  */
272 
273 static void
274 convert_mode_scalar (rtx to, rtx from, int unsignedp)
275 {
276   /* Both modes should be scalar types.  */
277   scalar_mode from_mode = as_a <scalar_mode> (GET_MODE (from));
278   scalar_mode to_mode = as_a <scalar_mode> (GET_MODE (to));
279   bool to_real = SCALAR_FLOAT_MODE_P (to_mode);
280   bool from_real = SCALAR_FLOAT_MODE_P (from_mode);
281   enum insn_code code;
282   rtx libcall;
283 
284   gcc_assert (to_real == from_real);
285 
286   /* rtx code for making an equivalent value.  */
287   enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
288 			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
289 
290   if (to_real)
291     {
292       rtx value;
293       rtx_insn *insns;
294       convert_optab tab;
295 
296       gcc_assert ((GET_MODE_PRECISION (from_mode)
297 		   != GET_MODE_PRECISION (to_mode))
298 		  || (DECIMAL_FLOAT_MODE_P (from_mode)
299 		      != DECIMAL_FLOAT_MODE_P (to_mode)));
300 
301       if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
302 	/* Conversion between decimal float and binary float, same size.  */
303 	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
304       else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
305 	tab = sext_optab;
306       else
307 	tab = trunc_optab;
308 
309       /* Try converting directly if the insn is supported.  */
310 
311       code = convert_optab_handler (tab, to_mode, from_mode);
312       if (code != CODE_FOR_nothing)
313 	{
314 	  emit_unop_insn (code, to, from,
315 			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
316 	  return;
317 	}
318 
319       /* Otherwise use a libcall.  */
320       libcall = convert_optab_libfunc (tab, to_mode, from_mode);
321 
322       /* Is this conversion implemented yet?  */
323       gcc_assert (libcall);
324 
325       start_sequence ();
326       value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
327 				       from, from_mode);
328       insns = get_insns ();
329       end_sequence ();
330       emit_libcall_block (insns, to, value,
331 			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
332 								       from)
333 			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
334       return;
335     }
336 
337   /* Handle pointer conversion.  */			/* SPEE 900220.  */
338   /* If the target has a converter from FROM_MODE to TO_MODE, use it.  */
339   {
340     convert_optab ctab;
341 
342     if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
343       ctab = trunc_optab;
344     else if (unsignedp)
345       ctab = zext_optab;
346     else
347       ctab = sext_optab;
348 
349     if (convert_optab_handler (ctab, to_mode, from_mode)
350 	!= CODE_FOR_nothing)
351       {
352 	emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
353 			to, from, UNKNOWN);
354 	return;
355       }
356   }
357 
358   /* Targets are expected to provide conversion insns between PxImode and
359      xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
360   if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
361     {
362       scalar_int_mode full_mode
363 	= smallest_int_mode_for_size (GET_MODE_BITSIZE (to_mode));
364 
365       gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
366 		  != CODE_FOR_nothing);
367 
368       if (full_mode != from_mode)
369 	from = convert_to_mode (full_mode, from, unsignedp);
370       emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
371 		      to, from, UNKNOWN);
372       return;
373     }
374   if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
375     {
376       rtx new_from;
377       scalar_int_mode full_mode
378 	= smallest_int_mode_for_size (GET_MODE_BITSIZE (from_mode));
379       convert_optab ctab = unsignedp ? zext_optab : sext_optab;
380       enum insn_code icode;
381 
382       icode = convert_optab_handler (ctab, full_mode, from_mode);
383       gcc_assert (icode != CODE_FOR_nothing);
384 
385       if (to_mode == full_mode)
386 	{
387 	  emit_unop_insn (icode, to, from, UNKNOWN);
388 	  return;
389 	}
390 
391       new_from = gen_reg_rtx (full_mode);
392       emit_unop_insn (icode, new_from, from, UNKNOWN);
393 
394       /* else proceed to integer conversions below.  */
395       from_mode = full_mode;
396       from = new_from;
397     }
398 
399    /* Make sure both are fixed-point modes or both are not.  */
400    gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
401 	       ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
402    if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
403     {
404       /* If we widen from_mode to to_mode and they are in the same class,
405 	 we won't saturate the result.
406 	 Otherwise, always saturate the result to play safe.  */
407       if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
408 	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
409 	expand_fixed_convert (to, from, 0, 0);
410       else
411 	expand_fixed_convert (to, from, 0, 1);
412       return;
413     }
414 
415   /* Now both modes are integers.  */
416 
417   /* Handle expanding beyond a word.  */
418   if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
419       && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
420     {
421       rtx_insn *insns;
422       rtx lowpart;
423       rtx fill_value;
424       rtx lowfrom;
425       int i;
426       scalar_mode lowpart_mode;
427       int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
428 
429       /* Try converting directly if the insn is supported.  */
430       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
431 	  != CODE_FOR_nothing)
432 	{
433 	  /* If FROM is a SUBREG, put it into a register.  Do this
434 	     so that we always generate the same set of insns for
435 	     better cse'ing; if an intermediate assignment occurred,
436 	     we won't be doing the operation directly on the SUBREG.  */
437 	  if (optimize > 0 && GET_CODE (from) == SUBREG)
438 	    from = force_reg (from_mode, from);
439 	  emit_unop_insn (code, to, from, equiv_code);
440 	  return;
441 	}
442       /* Next, try converting via full word.  */
443       else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
444 	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
445 		   != CODE_FOR_nothing))
446 	{
447 	  rtx word_to = gen_reg_rtx (word_mode);
448 	  if (REG_P (to))
449 	    {
450 	      if (reg_overlap_mentioned_p (to, from))
451 		from = force_reg (from_mode, from);
452 	      emit_clobber (to);
453 	    }
454 	  convert_move (word_to, from, unsignedp);
455 	  emit_unop_insn (code, to, word_to, equiv_code);
456 	  return;
457 	}
458 
459       /* No special multiword conversion insn; do it by hand.  */
460       start_sequence ();
461 
462       /* Since we will turn this into a no conflict block, we must ensure
463          the source does not overlap the target so force it into an isolated
464          register when maybe so.  Likewise for any MEM input, since the
465          conversion sequence might require several references to it and we
466          must ensure we're getting the same value every time.  */
467 
468       if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
469 	from = force_reg (from_mode, from);
470 
471       /* Get a copy of FROM widened to a word, if necessary.  */
472       if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
473 	lowpart_mode = word_mode;
474       else
475 	lowpart_mode = from_mode;
476 
477       lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
478 
479       lowpart = gen_lowpart (lowpart_mode, to);
480       emit_move_insn (lowpart, lowfrom);
481 
482       /* Compute the value to put in each remaining word.  */
483       if (unsignedp)
484 	fill_value = const0_rtx;
485       else
486 	fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
487 					    LT, lowfrom, const0_rtx,
488 					    lowpart_mode, 0, -1);
489 
490       /* Fill the remaining words.  */
491       for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
492 	{
493 	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
494 	  rtx subword = operand_subword (to, index, 1, to_mode);
495 
496 	  gcc_assert (subword);
497 
498 	  if (fill_value != subword)
499 	    emit_move_insn (subword, fill_value);
500 	}
501 
502       insns = get_insns ();
503       end_sequence ();
504 
505       emit_insn (insns);
506       return;
507     }
508 
509   /* Truncating multi-word to a word or less.  */
510   if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
511       && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
512     {
513       if (!((MEM_P (from)
514 	     && ! MEM_VOLATILE_P (from)
515 	     && direct_load[(int) to_mode]
516 	     && ! mode_dependent_address_p (XEXP (from, 0),
517 					    MEM_ADDR_SPACE (from)))
518 	    || REG_P (from)
519 	    || GET_CODE (from) == SUBREG))
520 	from = force_reg (from_mode, from);
521       convert_move (to, gen_lowpart (word_mode, from), 0);
522       return;
523     }
524 
525   /* Now follow all the conversions between integers
526      no more than a word long.  */
527 
528   /* For truncation, usually we can just refer to FROM in a narrower mode.  */
529   if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
530       && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
531     {
532       if (!((MEM_P (from)
533 	     && ! MEM_VOLATILE_P (from)
534 	     && direct_load[(int) to_mode]
535 	     && ! mode_dependent_address_p (XEXP (from, 0),
536 					    MEM_ADDR_SPACE (from)))
537 	    || REG_P (from)
538 	    || GET_CODE (from) == SUBREG))
539 	from = force_reg (from_mode, from);
540       if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
541 	  && !targetm.hard_regno_mode_ok (REGNO (from), to_mode))
542 	from = copy_to_reg (from);
543       emit_move_insn (to, gen_lowpart (to_mode, from));
544       return;
545     }
546 
547   /* Handle extension.  */
548   if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
549     {
550       /* Convert directly if that works.  */
551       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
552 	  != CODE_FOR_nothing)
553 	{
554 	  emit_unop_insn (code, to, from, equiv_code);
555 	  return;
556 	}
557       else
558 	{
559 	  scalar_mode intermediate;
560 	  rtx tmp;
561 	  int shift_amount;
562 
563 	  /* Search for a mode to convert via.  */
564 	  opt_scalar_mode intermediate_iter;
565 	  FOR_EACH_MODE_FROM (intermediate_iter, from_mode)
566 	    {
567 	      scalar_mode intermediate = intermediate_iter.require ();
568 	      if (((can_extend_p (to_mode, intermediate, unsignedp)
569 		    != CODE_FOR_nothing)
570 		   || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
571 		       && TRULY_NOOP_TRUNCATION_MODES_P (to_mode,
572 							 intermediate)))
573 		  && (can_extend_p (intermediate, from_mode, unsignedp)
574 		      != CODE_FOR_nothing))
575 		{
576 		  convert_move (to, convert_to_mode (intermediate, from,
577 						     unsignedp), unsignedp);
578 		  return;
579 		}
580 	    }
581 
582 	  /* No suitable intermediate mode.
583 	     Generate what we need with	shifts.  */
584 	  shift_amount = (GET_MODE_PRECISION (to_mode)
585 			  - GET_MODE_PRECISION (from_mode));
586 	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
587 	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
588 			      to, unsignedp);
589 	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
590 			      to, unsignedp);
591 	  if (tmp != to)
592 	    emit_move_insn (to, tmp);
593 	  return;
594 	}
595     }
596 
597   /* Support special truncate insns for certain modes.  */
598   if (convert_optab_handler (trunc_optab, to_mode,
599 			     from_mode) != CODE_FOR_nothing)
600     {
601       emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
602 		      to, from, UNKNOWN);
603       return;
604     }
605 
606   /* Handle truncation of volatile memrefs, and so on;
607      the things that couldn't be truncated directly,
608      and for which there was no special instruction.
609 
610      ??? Code above formerly short-circuited this, for most integer
611      mode pairs, with a force_reg in from_mode followed by a recursive
612      call to this routine.  Appears always to have been wrong.  */
613   if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
614     {
615       rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
616       emit_move_insn (to, temp);
617       return;
618     }
619 
620   /* Mode combination is not recognized.  */
621   gcc_unreachable ();
622 }
623 
624 /* Return an rtx for a value that would result
625    from converting X to mode MODE.
626    Both X and MODE may be floating, or both integer.
627    UNSIGNEDP is nonzero if X is an unsigned value.
628    This can be done by referring to a part of X in place
629    or by copying to a new temporary with conversion.  */
630 
631 rtx
632 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
633 {
634   return convert_modes (mode, VOIDmode, x, unsignedp);
635 }
636 
637 /* Return an rtx for a value that would result
638    from converting X from mode OLDMODE to mode MODE.
639    Both modes may be floating, or both integer.
640    UNSIGNEDP is nonzero if X is an unsigned value.
641 
642    This can be done by referring to a part of X in place
643    or by copying to a new temporary with conversion.
644 
645    You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
646 
647 rtx
648 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
649 {
650   rtx temp;
651   scalar_int_mode int_mode;
652 
653   /* If FROM is a SUBREG that indicates that we have already done at least
654      the required extension, strip it.  */
655 
656   if (GET_CODE (x) == SUBREG
657       && SUBREG_PROMOTED_VAR_P (x)
658       && is_a <scalar_int_mode> (mode, &int_mode)
659       && (GET_MODE_PRECISION (subreg_promoted_mode (x))
660 	  >= GET_MODE_PRECISION (int_mode))
661       && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
662     x = gen_lowpart (int_mode, SUBREG_REG (x));
663 
664   if (GET_MODE (x) != VOIDmode)
665     oldmode = GET_MODE (x);
666 
667   if (mode == oldmode)
668     return x;
669 
670   if (CONST_SCALAR_INT_P (x)
671       && is_int_mode (mode, &int_mode))
672     {
673       /* If the caller did not tell us the old mode, then there is not
674 	 much to do with respect to canonicalization.  We have to
675 	 assume that all the bits are significant.  */
676       if (GET_MODE_CLASS (oldmode) != MODE_INT)
677 	oldmode = MAX_MODE_INT;
678       wide_int w = wide_int::from (rtx_mode_t (x, oldmode),
679 				   GET_MODE_PRECISION (int_mode),
680 				   unsignedp ? UNSIGNED : SIGNED);
681       return immed_wide_int_const (w, int_mode);
682     }
683 
684   /* We can do this with a gen_lowpart if both desired and current modes
685      are integer, and this is either a constant integer, a register, or a
686      non-volatile MEM. */
687   scalar_int_mode int_oldmode;
688   if (is_int_mode (mode, &int_mode)
689       && is_int_mode (oldmode, &int_oldmode)
690       && GET_MODE_PRECISION (int_mode) <= GET_MODE_PRECISION (int_oldmode)
691       && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) int_mode])
692 	  || CONST_POLY_INT_P (x)
693           || (REG_P (x)
694               && (!HARD_REGISTER_P (x)
695 		  || targetm.hard_regno_mode_ok (REGNO (x), int_mode))
696               && TRULY_NOOP_TRUNCATION_MODES_P (int_mode, GET_MODE (x)))))
697    return gen_lowpart (int_mode, x);
698 
699   /* Converting from integer constant into mode is always equivalent to an
700      subreg operation.  */
701   if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
702     {
703       gcc_assert (known_eq (GET_MODE_BITSIZE (mode),
704 			    GET_MODE_BITSIZE (oldmode)));
705       return simplify_gen_subreg (mode, x, oldmode, 0);
706     }
707 
708   temp = gen_reg_rtx (mode);
709   convert_move (temp, x, unsignedp);
710   return temp;
711 }
712 
713 /* Return the largest alignment we can use for doing a move (or store)
714    of MAX_PIECES.  ALIGN is the largest alignment we could use.  */
715 
716 static unsigned int
717 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
718 {
719   scalar_int_mode tmode
720     = int_mode_for_size (max_pieces * BITS_PER_UNIT, 1).require ();
721 
722   if (align >= GET_MODE_ALIGNMENT (tmode))
723     align = GET_MODE_ALIGNMENT (tmode);
724   else
725     {
726       scalar_int_mode xmode = NARROWEST_INT_MODE;
727       opt_scalar_int_mode mode_iter;
728       FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
729 	{
730 	  tmode = mode_iter.require ();
731 	  if (GET_MODE_SIZE (tmode) > max_pieces
732 	      || targetm.slow_unaligned_access (tmode, align))
733 	    break;
734 	  xmode = tmode;
735 	}
736 
737       align = MAX (align, GET_MODE_ALIGNMENT (xmode));
738     }
739 
740   return align;
741 }
742 
743 /* Return the widest integer mode that is narrower than SIZE bytes.  */
744 
745 static scalar_int_mode
746 widest_int_mode_for_size (unsigned int size)
747 {
748   scalar_int_mode result = NARROWEST_INT_MODE;
749 
750   gcc_checking_assert (size > 1);
751 
752   opt_scalar_int_mode tmode;
753   FOR_EACH_MODE_IN_CLASS (tmode, MODE_INT)
754     if (GET_MODE_SIZE (tmode.require ()) < size)
755       result = tmode.require ();
756 
757   return result;
758 }
759 
760 /* Determine whether an operation OP on LEN bytes with alignment ALIGN can
761    and should be performed piecewise.  */
762 
763 static bool
764 can_do_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align,
765 		  enum by_pieces_operation op)
766 {
767   return targetm.use_by_pieces_infrastructure_p (len, align, op,
768 						 optimize_insn_for_speed_p ());
769 }
770 
771 /* Determine whether the LEN bytes can be moved by using several move
772    instructions.  Return nonzero if a call to move_by_pieces should
773    succeed.  */
774 
775 bool
776 can_move_by_pieces (unsigned HOST_WIDE_INT len, unsigned int align)
777 {
778   return can_do_by_pieces (len, align, MOVE_BY_PIECES);
779 }
780 
781 /* Return number of insns required to perform operation OP by pieces
782    for L bytes.  ALIGN (in bits) is maximum alignment we can assume.  */
783 
784 unsigned HOST_WIDE_INT
785 by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
786 		  unsigned int max_size, by_pieces_operation op)
787 {
788   unsigned HOST_WIDE_INT n_insns = 0;
789 
790   align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
791 
792   while (max_size > 1 && l > 0)
793     {
794       scalar_int_mode mode = widest_int_mode_for_size (max_size);
795       enum insn_code icode;
796 
797       unsigned int modesize = GET_MODE_SIZE (mode);
798 
799       icode = optab_handler (mov_optab, mode);
800       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
801 	{
802 	  unsigned HOST_WIDE_INT n_pieces = l / modesize;
803 	  l %= modesize;
804 	  switch (op)
805 	    {
806 	    default:
807 	      n_insns += n_pieces;
808 	      break;
809 
810 	    case COMPARE_BY_PIECES:
811 	      int batch = targetm.compare_by_pieces_branch_ratio (mode);
812 	      int batch_ops = 4 * batch - 1;
813 	      unsigned HOST_WIDE_INT full = n_pieces / batch;
814 	      n_insns += full * batch_ops;
815 	      if (n_pieces % batch != 0)
816 		n_insns++;
817 	      break;
818 
819 	    }
820 	}
821       max_size = modesize;
822     }
823 
824   gcc_assert (!l);
825   return n_insns;
826 }
827 
828 /* Used when performing piecewise block operations, holds information
829    about one of the memory objects involved.  The member functions
830    can be used to generate code for loading from the object and
831    updating the address when iterating.  */
832 
833 class pieces_addr
834 {
835   /* The object being referenced, a MEM.  Can be NULL_RTX to indicate
836      stack pushes.  */
837   rtx m_obj;
838   /* The address of the object.  Can differ from that seen in the
839      MEM rtx if we copied the address to a register.  */
840   rtx m_addr;
841   /* Nonzero if the address on the object has an autoincrement already,
842      signifies whether that was an increment or decrement.  */
843   signed char m_addr_inc;
844   /* Nonzero if we intend to use autoinc without the address already
845      having autoinc form.  We will insert add insns around each memory
846      reference, expecting later passes to form autoinc addressing modes.
847      The only supported options are predecrement and postincrement.  */
848   signed char m_explicit_inc;
849   /* True if we have either of the two possible cases of using
850      autoincrement.  */
851   bool m_auto;
852   /* True if this is an address to be used for load operations rather
853      than stores.  */
854   bool m_is_load;
855 
856   /* Optionally, a function to obtain constants for any given offset into
857      the objects, and data associated with it.  */
858   by_pieces_constfn m_constfn;
859   void *m_cfndata;
860 public:
861   pieces_addr (rtx, bool, by_pieces_constfn, void *);
862   rtx adjust (scalar_int_mode, HOST_WIDE_INT);
863   void increment_address (HOST_WIDE_INT);
864   void maybe_predec (HOST_WIDE_INT);
865   void maybe_postinc (HOST_WIDE_INT);
866   void decide_autoinc (machine_mode, bool, HOST_WIDE_INT);
867   int get_addr_inc ()
868   {
869     return m_addr_inc;
870   }
871 };
872 
873 /* Initialize a pieces_addr structure from an object OBJ.  IS_LOAD is
874    true if the operation to be performed on this object is a load
875    rather than a store.  For stores, OBJ can be NULL, in which case we
876    assume the operation is a stack push.  For loads, the optional
877    CONSTFN and its associated CFNDATA can be used in place of the
878    memory load.  */
879 
880 pieces_addr::pieces_addr (rtx obj, bool is_load, by_pieces_constfn constfn,
881 			  void *cfndata)
882   : m_obj (obj), m_is_load (is_load), m_constfn (constfn), m_cfndata (cfndata)
883 {
884   m_addr_inc = 0;
885   m_auto = false;
886   if (obj)
887     {
888       rtx addr = XEXP (obj, 0);
889       rtx_code code = GET_CODE (addr);
890       m_addr = addr;
891       bool dec = code == PRE_DEC || code == POST_DEC;
892       bool inc = code == PRE_INC || code == POST_INC;
893       m_auto = inc || dec;
894       if (m_auto)
895 	m_addr_inc = dec ? -1 : 1;
896 
897       /* While we have always looked for these codes here, the code
898 	 implementing the memory operation has never handled them.
899 	 Support could be added later if necessary or beneficial.  */
900       gcc_assert (code != PRE_INC && code != POST_DEC);
901     }
902   else
903     {
904       m_addr = NULL_RTX;
905       if (!is_load)
906 	{
907 	  m_auto = true;
908 	  if (STACK_GROWS_DOWNWARD)
909 	    m_addr_inc = -1;
910 	  else
911 	    m_addr_inc = 1;
912 	}
913       else
914 	gcc_assert (constfn != NULL);
915     }
916   m_explicit_inc = 0;
917   if (constfn)
918     gcc_assert (is_load);
919 }
920 
921 /* Decide whether to use autoinc for an address involved in a memory op.
922    MODE is the mode of the accesses, REVERSE is true if we've decided to
923    perform the operation starting from the end, and LEN is the length of
924    the operation.  Don't override an earlier decision to set m_auto.  */
925 
926 void
927 pieces_addr::decide_autoinc (machine_mode ARG_UNUSED (mode), bool reverse,
928 			     HOST_WIDE_INT len)
929 {
930   if (m_auto || m_obj == NULL_RTX)
931     return;
932 
933   bool use_predec = (m_is_load
934 		     ? USE_LOAD_PRE_DECREMENT (mode)
935 		     : USE_STORE_PRE_DECREMENT (mode));
936   bool use_postinc = (m_is_load
937 		      ? USE_LOAD_POST_INCREMENT (mode)
938 		      : USE_STORE_POST_INCREMENT (mode));
939   machine_mode addr_mode = get_address_mode (m_obj);
940 
941   if (use_predec && reverse)
942     {
943       m_addr = copy_to_mode_reg (addr_mode,
944 				 plus_constant (addr_mode,
945 						m_addr, len));
946       m_auto = true;
947       m_explicit_inc = -1;
948     }
949   else if (use_postinc && !reverse)
950     {
951       m_addr = copy_to_mode_reg (addr_mode, m_addr);
952       m_auto = true;
953       m_explicit_inc = 1;
954     }
955   else if (CONSTANT_P (m_addr))
956     m_addr = copy_to_mode_reg (addr_mode, m_addr);
957 }
958 
959 /* Adjust the address to refer to the data at OFFSET in MODE.  If we
960    are using autoincrement for this address, we don't add the offset,
961    but we still modify the MEM's properties.  */
962 
963 rtx
964 pieces_addr::adjust (scalar_int_mode mode, HOST_WIDE_INT offset)
965 {
966   if (m_constfn)
967     return m_constfn (m_cfndata, offset, mode);
968   if (m_obj == NULL_RTX)
969     return NULL_RTX;
970   if (m_auto)
971     return adjust_automodify_address (m_obj, mode, m_addr, offset);
972   else
973     return adjust_address (m_obj, mode, offset);
974 }
975 
976 /* Emit an add instruction to increment the address by SIZE.  */
977 
978 void
979 pieces_addr::increment_address (HOST_WIDE_INT size)
980 {
981   rtx amount = gen_int_mode (size, GET_MODE (m_addr));
982   emit_insn (gen_add2_insn (m_addr, amount));
983 }
984 
985 /* If we are supposed to decrement the address after each access, emit code
986    to do so now.  Increment by SIZE (which has should have the correct sign
987    already).  */
988 
989 void
990 pieces_addr::maybe_predec (HOST_WIDE_INT size)
991 {
992   if (m_explicit_inc >= 0)
993     return;
994   gcc_assert (HAVE_PRE_DECREMENT);
995   increment_address (size);
996 }
997 
998 /* If we are supposed to decrement the address after each access, emit code
999    to do so now.  Increment by SIZE.  */
1000 
1001 void
1002 pieces_addr::maybe_postinc (HOST_WIDE_INT size)
1003 {
1004   if (m_explicit_inc <= 0)
1005     return;
1006   gcc_assert (HAVE_POST_INCREMENT);
1007   increment_address (size);
1008 }
1009 
1010 /* This structure is used by do_op_by_pieces to describe the operation
1011    to be performed.  */
1012 
1013 class op_by_pieces_d
1014 {
1015  protected:
1016   pieces_addr m_to, m_from;
1017   unsigned HOST_WIDE_INT m_len;
1018   HOST_WIDE_INT m_offset;
1019   unsigned int m_align;
1020   unsigned int m_max_size;
1021   bool m_reverse;
1022 
1023   /* Virtual functions, overriden by derived classes for the specific
1024      operation.  */
1025   virtual void generate (rtx, rtx, machine_mode) = 0;
1026   virtual bool prepare_mode (machine_mode, unsigned int) = 0;
1027   virtual void finish_mode (machine_mode)
1028   {
1029   }
1030 
1031  public:
1032   op_by_pieces_d (rtx, bool, rtx, bool, by_pieces_constfn, void *,
1033 		  unsigned HOST_WIDE_INT, unsigned int);
1034   void run ();
1035 };
1036 
1037 /* The constructor for an op_by_pieces_d structure.  We require two
1038    objects named TO and FROM, which are identified as loads or stores
1039    by TO_LOAD and FROM_LOAD.  If FROM is a load, the optional FROM_CFN
1040    and its associated FROM_CFN_DATA can be used to replace loads with
1041    constant values.  LEN describes the length of the operation.  */
1042 
1043 op_by_pieces_d::op_by_pieces_d (rtx to, bool to_load,
1044 				rtx from, bool from_load,
1045 				by_pieces_constfn from_cfn,
1046 				void *from_cfn_data,
1047 				unsigned HOST_WIDE_INT len,
1048 				unsigned int align)
1049   : m_to (to, to_load, NULL, NULL),
1050     m_from (from, from_load, from_cfn, from_cfn_data),
1051     m_len (len), m_max_size (MOVE_MAX_PIECES + 1)
1052 {
1053   int toi = m_to.get_addr_inc ();
1054   int fromi = m_from.get_addr_inc ();
1055   if (toi >= 0 && fromi >= 0)
1056     m_reverse = false;
1057   else if (toi <= 0 && fromi <= 0)
1058     m_reverse = true;
1059   else
1060     gcc_unreachable ();
1061 
1062   m_offset = m_reverse ? len : 0;
1063   align = MIN (to ? MEM_ALIGN (to) : align,
1064 	       from ? MEM_ALIGN (from) : align);
1065 
1066   /* If copying requires more than two move insns,
1067      copy addresses to registers (to make displacements shorter)
1068      and use post-increment if available.  */
1069   if (by_pieces_ninsns (len, align, m_max_size, MOVE_BY_PIECES) > 2)
1070     {
1071       /* Find the mode of the largest comparison.  */
1072       scalar_int_mode mode = widest_int_mode_for_size (m_max_size);
1073 
1074       m_from.decide_autoinc (mode, m_reverse, len);
1075       m_to.decide_autoinc (mode, m_reverse, len);
1076     }
1077 
1078   align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1079   m_align = align;
1080 }
1081 
1082 /* This function contains the main loop used for expanding a block
1083    operation.  First move what we can in the largest integer mode,
1084    then go to successively smaller modes.  For every access, call
1085    GENFUN with the two operands and the EXTRA_DATA.  */
1086 
1087 void
1088 op_by_pieces_d::run ()
1089 {
1090   while (m_max_size > 1 && m_len > 0)
1091     {
1092       scalar_int_mode mode = widest_int_mode_for_size (m_max_size);
1093 
1094       if (prepare_mode (mode, m_align))
1095 	{
1096 	  unsigned int size = GET_MODE_SIZE (mode);
1097 	  rtx to1 = NULL_RTX, from1;
1098 
1099 	  while (m_len >= size)
1100 	    {
1101 	      if (m_reverse)
1102 		m_offset -= size;
1103 
1104 	      to1 = m_to.adjust (mode, m_offset);
1105 	      from1 = m_from.adjust (mode, m_offset);
1106 
1107 	      m_to.maybe_predec (-(HOST_WIDE_INT)size);
1108 	      m_from.maybe_predec (-(HOST_WIDE_INT)size);
1109 
1110 	      generate (to1, from1, mode);
1111 
1112 	      m_to.maybe_postinc (size);
1113 	      m_from.maybe_postinc (size);
1114 
1115 	      if (!m_reverse)
1116 		m_offset += size;
1117 
1118 	      m_len -= size;
1119 	    }
1120 
1121 	  finish_mode (mode);
1122 	}
1123 
1124       m_max_size = GET_MODE_SIZE (mode);
1125     }
1126 
1127   /* The code above should have handled everything.  */
1128   gcc_assert (!m_len);
1129 }
1130 
1131 /* Derived class from op_by_pieces_d, providing support for block move
1132    operations.  */
1133 
1134 class move_by_pieces_d : public op_by_pieces_d
1135 {
1136   insn_gen_fn m_gen_fun;
1137   void generate (rtx, rtx, machine_mode);
1138   bool prepare_mode (machine_mode, unsigned int);
1139 
1140  public:
1141   move_by_pieces_d (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1142 		    unsigned int align)
1143     : op_by_pieces_d (to, false, from, true, NULL, NULL, len, align)
1144   {
1145   }
1146   rtx finish_endp (int);
1147 };
1148 
1149 /* Return true if MODE can be used for a set of copies, given an
1150    alignment ALIGN.  Prepare whatever data is necessary for later
1151    calls to generate.  */
1152 
1153 bool
1154 move_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
1155 {
1156   insn_code icode = optab_handler (mov_optab, mode);
1157   m_gen_fun = GEN_FCN (icode);
1158   return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
1159 }
1160 
1161 /* A callback used when iterating for a compare_by_pieces_operation.
1162    OP0 and OP1 are the values that have been loaded and should be
1163    compared in MODE.  If OP0 is NULL, this means we should generate a
1164    push; otherwise EXTRA_DATA holds a pointer to a pointer to the insn
1165    gen function that should be used to generate the mode.  */
1166 
1167 void
1168 move_by_pieces_d::generate (rtx op0, rtx op1,
1169 			    machine_mode mode ATTRIBUTE_UNUSED)
1170 {
1171 #ifdef PUSH_ROUNDING
1172   if (op0 == NULL_RTX)
1173     {
1174       emit_single_push_insn (mode, op1, NULL);
1175       return;
1176     }
1177 #endif
1178   emit_insn (m_gen_fun (op0, op1));
1179 }
1180 
1181 /* Perform the final adjustment at the end of a string to obtain the
1182    correct return value for the block operation.  If ENDP is 1 return
1183    memory at the end ala mempcpy, and if ENDP is 2 return memory the
1184    end minus one byte ala stpcpy.  */
1185 
1186 rtx
1187 move_by_pieces_d::finish_endp (int endp)
1188 {
1189   gcc_assert (!m_reverse);
1190   if (endp == 2)
1191     {
1192       m_to.maybe_postinc (-1);
1193       --m_offset;
1194     }
1195   return m_to.adjust (QImode, m_offset);
1196 }
1197 
1198 /* Generate several move instructions to copy LEN bytes from block FROM to
1199    block TO.  (These are MEM rtx's with BLKmode).
1200 
1201    If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1202    used to push FROM to the stack.
1203 
1204    ALIGN is maximum stack alignment we can assume.
1205 
1206    If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1207    mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1208    stpcpy.  */
1209 
1210 rtx
1211 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
1212 		unsigned int align, int endp)
1213 {
1214 #ifndef PUSH_ROUNDING
1215   if (to == NULL)
1216     gcc_unreachable ();
1217 #endif
1218 
1219   move_by_pieces_d data (to, from, len, align);
1220 
1221   data.run ();
1222 
1223   if (endp)
1224     return data.finish_endp (endp);
1225   else
1226     return to;
1227 }
1228 
1229 /* Derived class from op_by_pieces_d, providing support for block move
1230    operations.  */
1231 
1232 class store_by_pieces_d : public op_by_pieces_d
1233 {
1234   insn_gen_fn m_gen_fun;
1235   void generate (rtx, rtx, machine_mode);
1236   bool prepare_mode (machine_mode, unsigned int);
1237 
1238  public:
1239   store_by_pieces_d (rtx to, by_pieces_constfn cfn, void *cfn_data,
1240 		     unsigned HOST_WIDE_INT len, unsigned int align)
1241     : op_by_pieces_d (to, false, NULL_RTX, true, cfn, cfn_data, len, align)
1242   {
1243   }
1244   rtx finish_endp (int);
1245 };
1246 
1247 /* Return true if MODE can be used for a set of stores, given an
1248    alignment ALIGN.  Prepare whatever data is necessary for later
1249    calls to generate.  */
1250 
1251 bool
1252 store_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
1253 {
1254   insn_code icode = optab_handler (mov_optab, mode);
1255   m_gen_fun = GEN_FCN (icode);
1256   return icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode);
1257 }
1258 
1259 /* A callback used when iterating for a store_by_pieces_operation.
1260    OP0 and OP1 are the values that have been loaded and should be
1261    compared in MODE.  If OP0 is NULL, this means we should generate a
1262    push; otherwise EXTRA_DATA holds a pointer to a pointer to the insn
1263    gen function that should be used to generate the mode.  */
1264 
1265 void
1266 store_by_pieces_d::generate (rtx op0, rtx op1, machine_mode)
1267 {
1268   emit_insn (m_gen_fun (op0, op1));
1269 }
1270 
1271 /* Perform the final adjustment at the end of a string to obtain the
1272    correct return value for the block operation.  If ENDP is 1 return
1273    memory at the end ala mempcpy, and if ENDP is 2 return memory the
1274    end minus one byte ala stpcpy.  */
1275 
1276 rtx
1277 store_by_pieces_d::finish_endp (int endp)
1278 {
1279   gcc_assert (!m_reverse);
1280   if (endp == 2)
1281     {
1282       m_to.maybe_postinc (-1);
1283       --m_offset;
1284     }
1285   return m_to.adjust (QImode, m_offset);
1286 }
1287 
1288 /* Determine whether the LEN bytes generated by CONSTFUN can be
1289    stored to memory using several move instructions.  CONSTFUNDATA is
1290    a pointer which will be passed as argument in every CONSTFUN call.
1291    ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
1292    a memset operation and false if it's a copy of a constant string.
1293    Return nonzero if a call to store_by_pieces should succeed.  */
1294 
1295 int
1296 can_store_by_pieces (unsigned HOST_WIDE_INT len,
1297 		     rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
1298 		     void *constfundata, unsigned int align, bool memsetp)
1299 {
1300   unsigned HOST_WIDE_INT l;
1301   unsigned int max_size;
1302   HOST_WIDE_INT offset = 0;
1303   enum insn_code icode;
1304   int reverse;
1305   /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
1306   rtx cst ATTRIBUTE_UNUSED;
1307 
1308   if (len == 0)
1309     return 1;
1310 
1311   if (!targetm.use_by_pieces_infrastructure_p (len, align,
1312 					       memsetp
1313 						 ? SET_BY_PIECES
1314 						 : STORE_BY_PIECES,
1315 					       optimize_insn_for_speed_p ()))
1316     return 0;
1317 
1318   align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
1319 
1320   /* We would first store what we can in the largest integer mode, then go to
1321      successively smaller modes.  */
1322 
1323   for (reverse = 0;
1324        reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
1325        reverse++)
1326     {
1327       l = len;
1328       max_size = STORE_MAX_PIECES + 1;
1329       while (max_size > 1 && l > 0)
1330 	{
1331 	  scalar_int_mode mode = widest_int_mode_for_size (max_size);
1332 
1333 	  icode = optab_handler (mov_optab, mode);
1334 	  if (icode != CODE_FOR_nothing
1335 	      && align >= GET_MODE_ALIGNMENT (mode))
1336 	    {
1337 	      unsigned int size = GET_MODE_SIZE (mode);
1338 
1339 	      while (l >= size)
1340 		{
1341 		  if (reverse)
1342 		    offset -= size;
1343 
1344 		  cst = (*constfun) (constfundata, offset, mode);
1345 		  if (!targetm.legitimate_constant_p (mode, cst))
1346 		    return 0;
1347 
1348 		  if (!reverse)
1349 		    offset += size;
1350 
1351 		  l -= size;
1352 		}
1353 	    }
1354 
1355 	  max_size = GET_MODE_SIZE (mode);
1356 	}
1357 
1358       /* The code above should have handled everything.  */
1359       gcc_assert (!l);
1360     }
1361 
1362   return 1;
1363 }
1364 
1365 /* Generate several move instructions to store LEN bytes generated by
1366    CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
1367    pointer which will be passed as argument in every CONSTFUN call.
1368    ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
1369    a memset operation and false if it's a copy of a constant string.
1370    If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
1371    mempcpy, and if ENDP is 2 return memory the end minus one byte ala
1372    stpcpy.  */
1373 
1374 rtx
1375 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
1376 		 rtx (*constfun) (void *, HOST_WIDE_INT, scalar_int_mode),
1377 		 void *constfundata, unsigned int align, bool memsetp, int endp)
1378 {
1379   if (len == 0)
1380     {
1381       gcc_assert (endp != 2);
1382       return to;
1383     }
1384 
1385   gcc_assert (targetm.use_by_pieces_infrastructure_p
1386 		(len, align,
1387 		 memsetp ? SET_BY_PIECES : STORE_BY_PIECES,
1388 		 optimize_insn_for_speed_p ()));
1389 
1390   store_by_pieces_d data (to, constfun, constfundata, len, align);
1391   data.run ();
1392 
1393   if (endp)
1394     return data.finish_endp (endp);
1395   else
1396     return to;
1397 }
1398 
1399 /* Callback routine for clear_by_pieces.
1400    Return const0_rtx unconditionally.  */
1401 
1402 static rtx
1403 clear_by_pieces_1 (void *, HOST_WIDE_INT, scalar_int_mode)
1404 {
1405   return const0_rtx;
1406 }
1407 
1408 /* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
1409    rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
1410 
1411 static void
1412 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
1413 {
1414   if (len == 0)
1415     return;
1416 
1417   store_by_pieces_d data (to, clear_by_pieces_1, NULL, len, align);
1418   data.run ();
1419 }
1420 
1421 /* Context used by compare_by_pieces_genfn.  It stores the fail label
1422    to jump to in case of miscomparison, and for branch ratios greater than 1,
1423    it stores an accumulator and the current and maximum counts before
1424    emitting another branch.  */
1425 
1426 class compare_by_pieces_d : public op_by_pieces_d
1427 {
1428   rtx_code_label *m_fail_label;
1429   rtx m_accumulator;
1430   int m_count, m_batch;
1431 
1432   void generate (rtx, rtx, machine_mode);
1433   bool prepare_mode (machine_mode, unsigned int);
1434   void finish_mode (machine_mode);
1435  public:
1436   compare_by_pieces_d (rtx op0, rtx op1, by_pieces_constfn op1_cfn,
1437 		       void *op1_cfn_data, HOST_WIDE_INT len, int align,
1438 		       rtx_code_label *fail_label)
1439     : op_by_pieces_d (op0, true, op1, true, op1_cfn, op1_cfn_data, len, align)
1440   {
1441     m_fail_label = fail_label;
1442   }
1443 };
1444 
1445 /* A callback used when iterating for a compare_by_pieces_operation.
1446    OP0 and OP1 are the values that have been loaded and should be
1447    compared in MODE.  DATA holds a pointer to the compare_by_pieces_data
1448    context structure.  */
1449 
1450 void
1451 compare_by_pieces_d::generate (rtx op0, rtx op1, machine_mode mode)
1452 {
1453   if (m_batch > 1)
1454     {
1455       rtx temp = expand_binop (mode, sub_optab, op0, op1, NULL_RTX,
1456 			       true, OPTAB_LIB_WIDEN);
1457       if (m_count != 0)
1458 	temp = expand_binop (mode, ior_optab, m_accumulator, temp, temp,
1459 			     true, OPTAB_LIB_WIDEN);
1460       m_accumulator = temp;
1461 
1462       if (++m_count < m_batch)
1463 	return;
1464 
1465       m_count = 0;
1466       op0 = m_accumulator;
1467       op1 = const0_rtx;
1468       m_accumulator = NULL_RTX;
1469     }
1470   do_compare_rtx_and_jump (op0, op1, NE, true, mode, NULL_RTX, NULL,
1471 			   m_fail_label, profile_probability::uninitialized ());
1472 }
1473 
1474 /* Return true if MODE can be used for a set of moves and comparisons,
1475    given an alignment ALIGN.  Prepare whatever data is necessary for
1476    later calls to generate.  */
1477 
1478 bool
1479 compare_by_pieces_d::prepare_mode (machine_mode mode, unsigned int align)
1480 {
1481   insn_code icode = optab_handler (mov_optab, mode);
1482   if (icode == CODE_FOR_nothing
1483       || align < GET_MODE_ALIGNMENT (mode)
1484       || !can_compare_p (EQ, mode, ccp_jump))
1485     return false;
1486   m_batch = targetm.compare_by_pieces_branch_ratio (mode);
1487   if (m_batch < 0)
1488     return false;
1489   m_accumulator = NULL_RTX;
1490   m_count = 0;
1491   return true;
1492 }
1493 
1494 /* Called after expanding a series of comparisons in MODE.  If we have
1495    accumulated results for which we haven't emitted a branch yet, do
1496    so now.  */
1497 
1498 void
1499 compare_by_pieces_d::finish_mode (machine_mode mode)
1500 {
1501   if (m_accumulator != NULL_RTX)
1502     do_compare_rtx_and_jump (m_accumulator, const0_rtx, NE, true, mode,
1503 			     NULL_RTX, NULL, m_fail_label,
1504 			     profile_probability::uninitialized ());
1505 }
1506 
1507 /* Generate several move instructions to compare LEN bytes from blocks
1508    ARG0 and ARG1.  (These are MEM rtx's with BLKmode).
1509 
1510    If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
1511    used to push FROM to the stack.
1512 
1513    ALIGN is maximum stack alignment we can assume.
1514 
1515    Optionally, the caller can pass a constfn and associated data in A1_CFN
1516    and A1_CFN_DATA. describing that the second operand being compared is a
1517    known constant and how to obtain its data.  */
1518 
1519 static rtx
1520 compare_by_pieces (rtx arg0, rtx arg1, unsigned HOST_WIDE_INT len,
1521 		   rtx target, unsigned int align,
1522 		   by_pieces_constfn a1_cfn, void *a1_cfn_data)
1523 {
1524   rtx_code_label *fail_label = gen_label_rtx ();
1525   rtx_code_label *end_label = gen_label_rtx ();
1526 
1527   if (target == NULL_RTX
1528       || !REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
1529     target = gen_reg_rtx (TYPE_MODE (integer_type_node));
1530 
1531   compare_by_pieces_d data (arg0, arg1, a1_cfn, a1_cfn_data, len, align,
1532 			    fail_label);
1533 
1534   data.run ();
1535 
1536   emit_move_insn (target, const0_rtx);
1537   emit_jump (end_label);
1538   emit_barrier ();
1539   emit_label (fail_label);
1540   emit_move_insn (target, const1_rtx);
1541   emit_label (end_label);
1542 
1543   return target;
1544 }
1545 
1546 /* Emit code to move a block Y to a block X.  This may be done with
1547    string-move instructions, with multiple scalar move instructions,
1548    or with a library call.
1549 
1550    Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1551    SIZE is an rtx that says how long they are.
1552    ALIGN is the maximum alignment we can assume they have.
1553    METHOD describes what kind of copy this is, and what mechanisms may be used.
1554    MIN_SIZE is the minimal size of block to move
1555    MAX_SIZE is the maximal size of block to move, if it can not be represented
1556    in unsigned HOST_WIDE_INT, than it is mask of all ones.
1557 
1558    Return the address of the new block, if memcpy is called and returns it,
1559    0 otherwise.  */
1560 
1561 rtx
1562 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1563 		       unsigned int expected_align, HOST_WIDE_INT expected_size,
1564 		       unsigned HOST_WIDE_INT min_size,
1565 		       unsigned HOST_WIDE_INT max_size,
1566 		       unsigned HOST_WIDE_INT probable_max_size)
1567 {
1568   int may_use_call;
1569   rtx retval = 0;
1570   unsigned int align;
1571 
1572   gcc_assert (size);
1573   if (CONST_INT_P (size) && INTVAL (size) == 0)
1574     return 0;
1575 
1576   switch (method)
1577     {
1578     case BLOCK_OP_NORMAL:
1579     case BLOCK_OP_TAILCALL:
1580       may_use_call = 1;
1581       break;
1582 
1583     case BLOCK_OP_CALL_PARM:
1584       may_use_call = block_move_libcall_safe_for_call_parm ();
1585 
1586       /* Make inhibit_defer_pop nonzero around the library call
1587 	 to force it to pop the arguments right away.  */
1588       NO_DEFER_POP;
1589       break;
1590 
1591     case BLOCK_OP_NO_LIBCALL:
1592       may_use_call = 0;
1593       break;
1594 
1595     case BLOCK_OP_NO_LIBCALL_RET:
1596       may_use_call = -1;
1597       break;
1598 
1599     default:
1600       gcc_unreachable ();
1601     }
1602 
1603   gcc_assert (MEM_P (x) && MEM_P (y));
1604   align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1605   gcc_assert (align >= BITS_PER_UNIT);
1606 
1607   /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1608      block copy is more efficient for other large modes, e.g. DCmode.  */
1609   x = adjust_address (x, BLKmode, 0);
1610   y = adjust_address (y, BLKmode, 0);
1611 
1612   /* Set MEM_SIZE as appropriate for this block copy.  The main place this
1613      can be incorrect is coming from __builtin_memcpy.  */
1614   if (CONST_INT_P (size))
1615     {
1616       x = shallow_copy_rtx (x);
1617       y = shallow_copy_rtx (y);
1618       set_mem_size (x, INTVAL (size));
1619       set_mem_size (y, INTVAL (size));
1620     }
1621 
1622   if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
1623     move_by_pieces (x, y, INTVAL (size), align, 0);
1624   else if (emit_block_move_via_movmem (x, y, size, align,
1625 				       expected_align, expected_size,
1626 				       min_size, max_size, probable_max_size))
1627     ;
1628   else if (may_use_call
1629 	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1630 	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1631     {
1632       if (may_use_call < 0)
1633 	return pc_rtx;
1634 
1635       retval = emit_block_copy_via_libcall (x, y, size,
1636 					    method == BLOCK_OP_TAILCALL);
1637     }
1638 
1639   else
1640     emit_block_move_via_loop (x, y, size, align);
1641 
1642   if (method == BLOCK_OP_CALL_PARM)
1643     OK_DEFER_POP;
1644 
1645   return retval;
1646 }
1647 
1648 rtx
1649 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1650 {
1651   unsigned HOST_WIDE_INT max, min = 0;
1652   if (GET_CODE (size) == CONST_INT)
1653     min = max = UINTVAL (size);
1654   else
1655     max = GET_MODE_MASK (GET_MODE (size));
1656   return emit_block_move_hints (x, y, size, method, 0, -1,
1657 				min, max, max);
1658 }
1659 
1660 /* A subroutine of emit_block_move.  Returns true if calling the
1661    block move libcall will not clobber any parameters which may have
1662    already been placed on the stack.  */
1663 
1664 static bool
1665 block_move_libcall_safe_for_call_parm (void)
1666 {
1667 #if defined (REG_PARM_STACK_SPACE)
1668   tree fn;
1669 #endif
1670 
1671   /* If arguments are pushed on the stack, then they're safe.  */
1672   if (PUSH_ARGS)
1673     return true;
1674 
1675   /* If registers go on the stack anyway, any argument is sure to clobber
1676      an outgoing argument.  */
1677 #if defined (REG_PARM_STACK_SPACE)
1678   fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1679   /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1680      depend on its argument.  */
1681   (void) fn;
1682   if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1683       && REG_PARM_STACK_SPACE (fn) != 0)
1684     return false;
1685 #endif
1686 
1687   /* If any argument goes in memory, then it might clobber an outgoing
1688      argument.  */
1689   {
1690     CUMULATIVE_ARGS args_so_far_v;
1691     cumulative_args_t args_so_far;
1692     tree fn, arg;
1693 
1694     fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1695     INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1696     args_so_far = pack_cumulative_args (&args_so_far_v);
1697 
1698     arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1699     for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1700       {
1701 	machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1702 	rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1703 					      NULL_TREE, true);
1704 	if (!tmp || !REG_P (tmp))
1705 	  return false;
1706 	if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1707 	  return false;
1708 	targetm.calls.function_arg_advance (args_so_far, mode,
1709 					    NULL_TREE, true);
1710       }
1711   }
1712   return true;
1713 }
1714 
1715 /* A subroutine of emit_block_move.  Expand a movmem pattern;
1716    return true if successful.  */
1717 
1718 static bool
1719 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1720 			    unsigned int expected_align, HOST_WIDE_INT expected_size,
1721 			    unsigned HOST_WIDE_INT min_size,
1722 			    unsigned HOST_WIDE_INT max_size,
1723 			    unsigned HOST_WIDE_INT probable_max_size)
1724 {
1725   int save_volatile_ok = volatile_ok;
1726 
1727   if (expected_align < align)
1728     expected_align = align;
1729   if (expected_size != -1)
1730     {
1731       if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1732 	expected_size = probable_max_size;
1733       if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1734 	expected_size = min_size;
1735     }
1736 
1737   /* Since this is a move insn, we don't care about volatility.  */
1738   volatile_ok = 1;
1739 
1740   /* Try the most limited insn first, because there's no point
1741      including more than one in the machine description unless
1742      the more limited one has some advantage.  */
1743 
1744   opt_scalar_int_mode mode_iter;
1745   FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
1746     {
1747       scalar_int_mode mode = mode_iter.require ();
1748       enum insn_code code = direct_optab_handler (movmem_optab, mode);
1749 
1750       if (code != CODE_FOR_nothing
1751 	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1752 	     here because if SIZE is less than the mode mask, as it is
1753 	     returned by the macro, it will definitely be less than the
1754 	     actual mode mask.  Since SIZE is within the Pmode address
1755 	     space, we limit MODE to Pmode.  */
1756 	  && ((CONST_INT_P (size)
1757 	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
1758 		   <= (GET_MODE_MASK (mode) >> 1)))
1759 	      || max_size <= (GET_MODE_MASK (mode) >> 1)
1760 	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1761 	{
1762 	  struct expand_operand ops[9];
1763 	  unsigned int nops;
1764 
1765 	  /* ??? When called via emit_block_move_for_call, it'd be
1766 	     nice if there were some way to inform the backend, so
1767 	     that it doesn't fail the expansion because it thinks
1768 	     emitting the libcall would be more efficient.  */
1769 	  nops = insn_data[(int) code].n_generator_args;
1770 	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1771 
1772 	  create_fixed_operand (&ops[0], x);
1773 	  create_fixed_operand (&ops[1], y);
1774 	  /* The check above guarantees that this size conversion is valid.  */
1775 	  create_convert_operand_to (&ops[2], size, mode, true);
1776 	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1777 	  if (nops >= 6)
1778 	    {
1779 	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1780 	      create_integer_operand (&ops[5], expected_size);
1781 	    }
1782 	  if (nops >= 8)
1783 	    {
1784 	      create_integer_operand (&ops[6], min_size);
1785 	      /* If we can not represent the maximal size,
1786 		 make parameter NULL.  */
1787 	      if ((HOST_WIDE_INT) max_size != -1)
1788 	        create_integer_operand (&ops[7], max_size);
1789 	      else
1790 		create_fixed_operand (&ops[7], NULL);
1791 	    }
1792 	  if (nops == 9)
1793 	    {
1794 	      /* If we can not represent the maximal size,
1795 		 make parameter NULL.  */
1796 	      if ((HOST_WIDE_INT) probable_max_size != -1)
1797 	        create_integer_operand (&ops[8], probable_max_size);
1798 	      else
1799 		create_fixed_operand (&ops[8], NULL);
1800 	    }
1801 	  if (maybe_expand_insn (code, nops, ops))
1802 	    {
1803 	      volatile_ok = save_volatile_ok;
1804 	      return true;
1805 	    }
1806 	}
1807     }
1808 
1809   volatile_ok = save_volatile_ok;
1810   return false;
1811 }
1812 
1813 /* A subroutine of emit_block_move.  Copy the data via an explicit
1814    loop.  This is used only when libcalls are forbidden.  */
1815 /* ??? It'd be nice to copy in hunks larger than QImode.  */
1816 
1817 static void
1818 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1819 			  unsigned int align ATTRIBUTE_UNUSED)
1820 {
1821   rtx_code_label *cmp_label, *top_label;
1822   rtx iter, x_addr, y_addr, tmp;
1823   machine_mode x_addr_mode = get_address_mode (x);
1824   machine_mode y_addr_mode = get_address_mode (y);
1825   machine_mode iter_mode;
1826 
1827   iter_mode = GET_MODE (size);
1828   if (iter_mode == VOIDmode)
1829     iter_mode = word_mode;
1830 
1831   top_label = gen_label_rtx ();
1832   cmp_label = gen_label_rtx ();
1833   iter = gen_reg_rtx (iter_mode);
1834 
1835   emit_move_insn (iter, const0_rtx);
1836 
1837   x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1838   y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1839   do_pending_stack_adjust ();
1840 
1841   emit_jump (cmp_label);
1842   emit_label (top_label);
1843 
1844   tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1845   x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1846 
1847   if (x_addr_mode != y_addr_mode)
1848     tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1849   y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1850 
1851   x = change_address (x, QImode, x_addr);
1852   y = change_address (y, QImode, y_addr);
1853 
1854   emit_move_insn (x, y);
1855 
1856   tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1857 			     true, OPTAB_LIB_WIDEN);
1858   if (tmp != iter)
1859     emit_move_insn (iter, tmp);
1860 
1861   emit_label (cmp_label);
1862 
1863   emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1864 			   true, top_label,
1865 			   profile_probability::guessed_always ()
1866 				.apply_scale (9, 10));
1867 }
1868 
1869 /* Expand a call to memcpy or memmove or memcmp, and return the result.
1870    TAILCALL is true if this is a tail call.  */
1871 
1872 rtx
1873 emit_block_op_via_libcall (enum built_in_function fncode, rtx dst, rtx src,
1874 			   rtx size, bool tailcall)
1875 {
1876   rtx dst_addr, src_addr;
1877   tree call_expr, dst_tree, src_tree, size_tree;
1878   machine_mode size_mode;
1879 
1880   /* Since dst and src are passed to a libcall, mark the corresponding
1881      tree EXPR as addressable.  */
1882   tree dst_expr = MEM_EXPR (dst);
1883   tree src_expr = MEM_EXPR (src);
1884   if (dst_expr)
1885     mark_addressable (dst_expr);
1886   if (src_expr)
1887     mark_addressable (src_expr);
1888 
1889   dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1890   dst_addr = convert_memory_address (ptr_mode, dst_addr);
1891   dst_tree = make_tree (ptr_type_node, dst_addr);
1892 
1893   src_addr = copy_addr_to_reg (XEXP (src, 0));
1894   src_addr = convert_memory_address (ptr_mode, src_addr);
1895   src_tree = make_tree (ptr_type_node, src_addr);
1896 
1897   size_mode = TYPE_MODE (sizetype);
1898   size = convert_to_mode (size_mode, size, 1);
1899   size = copy_to_mode_reg (size_mode, size);
1900   size_tree = make_tree (sizetype, size);
1901 
1902   /* It is incorrect to use the libcall calling conventions for calls to
1903      memcpy/memmove/memcmp because they can be provided by the user.  */
1904   tree fn = builtin_decl_implicit (fncode);
1905   call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1906   CALL_EXPR_TAILCALL (call_expr) = tailcall;
1907 
1908   return expand_call (call_expr, NULL_RTX, false);
1909 }
1910 
1911 /* Try to expand cmpstrn or cmpmem operation ICODE with the given operands.
1912    ARG3_TYPE is the type of ARG3_RTX.  Return the result rtx on success,
1913    otherwise return null.  */
1914 
1915 rtx
1916 expand_cmpstrn_or_cmpmem (insn_code icode, rtx target, rtx arg1_rtx,
1917 			  rtx arg2_rtx, tree arg3_type, rtx arg3_rtx,
1918 			  HOST_WIDE_INT align)
1919 {
1920   machine_mode insn_mode = insn_data[icode].operand[0].mode;
1921 
1922   if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
1923     target = NULL_RTX;
1924 
1925   struct expand_operand ops[5];
1926   create_output_operand (&ops[0], target, insn_mode);
1927   create_fixed_operand (&ops[1], arg1_rtx);
1928   create_fixed_operand (&ops[2], arg2_rtx);
1929   create_convert_operand_from (&ops[3], arg3_rtx, TYPE_MODE (arg3_type),
1930 			       TYPE_UNSIGNED (arg3_type));
1931   create_integer_operand (&ops[4], align);
1932   if (maybe_expand_insn (icode, 5, ops))
1933     return ops[0].value;
1934   return NULL_RTX;
1935 }
1936 
1937 /* Expand a block compare between X and Y with length LEN using the
1938    cmpmem optab, placing the result in TARGET.  LEN_TYPE is the type
1939    of the expression that was used to calculate the length.  ALIGN
1940    gives the known minimum common alignment.  */
1941 
1942 static rtx
1943 emit_block_cmp_via_cmpmem (rtx x, rtx y, rtx len, tree len_type, rtx target,
1944 			   unsigned align)
1945 {
1946   /* Note: The cmpstrnsi pattern, if it exists, is not suitable for
1947      implementing memcmp because it will stop if it encounters two
1948      zero bytes.  */
1949   insn_code icode = direct_optab_handler (cmpmem_optab, SImode);
1950 
1951   if (icode == CODE_FOR_nothing)
1952     return NULL_RTX;
1953 
1954   return expand_cmpstrn_or_cmpmem (icode, target, x, y, len_type, len, align);
1955 }
1956 
1957 /* Emit code to compare a block Y to a block X.  This may be done with
1958    string-compare instructions, with multiple scalar instructions,
1959    or with a library call.
1960 
1961    Both X and Y must be MEM rtx's.  LEN is an rtx that says how long
1962    they are.  LEN_TYPE is the type of the expression that was used to
1963    calculate it.
1964 
1965    If EQUALITY_ONLY is true, it means we don't have to return the tri-state
1966    value of a normal memcmp call, instead we can just compare for equality.
1967    If FORCE_LIBCALL is true, we should emit a call to memcmp rather than
1968    returning NULL_RTX.
1969 
1970    Optionally, the caller can pass a constfn and associated data in Y_CFN
1971    and Y_CFN_DATA. describing that the second operand being compared is a
1972    known constant and how to obtain its data.
1973    Return the result of the comparison, or NULL_RTX if we failed to
1974    perform the operation.  */
1975 
1976 rtx
1977 emit_block_cmp_hints (rtx x, rtx y, rtx len, tree len_type, rtx target,
1978 		      bool equality_only, by_pieces_constfn y_cfn,
1979 		      void *y_cfndata)
1980 {
1981   rtx result = 0;
1982 
1983   if (CONST_INT_P (len) && INTVAL (len) == 0)
1984     return const0_rtx;
1985 
1986   gcc_assert (MEM_P (x) && MEM_P (y));
1987   unsigned int align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1988   gcc_assert (align >= BITS_PER_UNIT);
1989 
1990   x = adjust_address (x, BLKmode, 0);
1991   y = adjust_address (y, BLKmode, 0);
1992 
1993   if (equality_only
1994       && CONST_INT_P (len)
1995       && can_do_by_pieces (INTVAL (len), align, COMPARE_BY_PIECES))
1996     result = compare_by_pieces (x, y, INTVAL (len), target, align,
1997 				y_cfn, y_cfndata);
1998   else
1999     result = emit_block_cmp_via_cmpmem (x, y, len, len_type, target, align);
2000 
2001   return result;
2002 }
2003 
2004 /* Copy all or part of a value X into registers starting at REGNO.
2005    The number of registers to be filled is NREGS.  */
2006 
2007 void
2008 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
2009 {
2010   if (nregs == 0)
2011     return;
2012 
2013   if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
2014     x = validize_mem (force_const_mem (mode, x));
2015 
2016   /* See if the machine can do this with a load multiple insn.  */
2017   if (targetm.have_load_multiple ())
2018     {
2019       rtx_insn *last = get_last_insn ();
2020       rtx first = gen_rtx_REG (word_mode, regno);
2021       if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
2022 						     GEN_INT (nregs)))
2023 	{
2024 	  emit_insn (pat);
2025 	  return;
2026 	}
2027       else
2028 	delete_insns_since (last);
2029     }
2030 
2031   for (int i = 0; i < nregs; i++)
2032     emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2033 		    operand_subword_force (x, i, mode));
2034 }
2035 
2036 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2037    The number of registers to be filled is NREGS.  */
2038 
2039 void
2040 move_block_from_reg (int regno, rtx x, int nregs)
2041 {
2042   if (nregs == 0)
2043     return;
2044 
2045   /* See if the machine can do this with a store multiple insn.  */
2046   if (targetm.have_store_multiple ())
2047     {
2048       rtx_insn *last = get_last_insn ();
2049       rtx first = gen_rtx_REG (word_mode, regno);
2050       if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
2051 						      GEN_INT (nregs)))
2052 	{
2053 	  emit_insn (pat);
2054 	  return;
2055 	}
2056       else
2057 	delete_insns_since (last);
2058     }
2059 
2060   for (int i = 0; i < nregs; i++)
2061     {
2062       rtx tem = operand_subword (x, i, 1, BLKmode);
2063 
2064       gcc_assert (tem);
2065 
2066       emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2067     }
2068 }
2069 
2070 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2071    ORIG, where ORIG is a non-consecutive group of registers represented by
2072    a PARALLEL.  The clone is identical to the original except in that the
2073    original set of registers is replaced by a new set of pseudo registers.
2074    The new set has the same modes as the original set.  */
2075 
2076 rtx
2077 gen_group_rtx (rtx orig)
2078 {
2079   int i, length;
2080   rtx *tmps;
2081 
2082   gcc_assert (GET_CODE (orig) == PARALLEL);
2083 
2084   length = XVECLEN (orig, 0);
2085   tmps = XALLOCAVEC (rtx, length);
2086 
2087   /* Skip a NULL entry in first slot.  */
2088   i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2089 
2090   if (i)
2091     tmps[0] = 0;
2092 
2093   for (; i < length; i++)
2094     {
2095       machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2096       rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2097 
2098       tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2099     }
2100 
2101   return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2102 }
2103 
2104 /* A subroutine of emit_group_load.  Arguments as for emit_group_load,
2105    except that values are placed in TMPS[i], and must later be moved
2106    into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */
2107 
2108 static void
2109 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type,
2110 		   poly_int64 ssize)
2111 {
2112   rtx src;
2113   int start, i;
2114   machine_mode m = GET_MODE (orig_src);
2115 
2116   gcc_assert (GET_CODE (dst) == PARALLEL);
2117 
2118   if (m != VOIDmode
2119       && !SCALAR_INT_MODE_P (m)
2120       && !MEM_P (orig_src)
2121       && GET_CODE (orig_src) != CONCAT)
2122     {
2123       scalar_int_mode imode;
2124       if (int_mode_for_mode (GET_MODE (orig_src)).exists (&imode))
2125 	{
2126 	  src = gen_reg_rtx (imode);
2127 	  emit_move_insn (gen_lowpart (GET_MODE (orig_src), src), orig_src);
2128 	}
2129       else
2130 	{
2131 	  src = assign_stack_temp (GET_MODE (orig_src), ssize);
2132 	  emit_move_insn (src, orig_src);
2133 	}
2134       emit_group_load_1 (tmps, dst, src, type, ssize);
2135       return;
2136     }
2137 
2138   /* Check for a NULL entry, used to indicate that the parameter goes
2139      both on the stack and in registers.  */
2140   if (XEXP (XVECEXP (dst, 0, 0), 0))
2141     start = 0;
2142   else
2143     start = 1;
2144 
2145   /* Process the pieces.  */
2146   for (i = start; i < XVECLEN (dst, 0); i++)
2147     {
2148       machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2149       poly_int64 bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2150       poly_int64 bytelen = GET_MODE_SIZE (mode);
2151       poly_int64 shift = 0;
2152 
2153       /* Handle trailing fragments that run over the size of the struct.
2154 	 It's the target's responsibility to make sure that the fragment
2155 	 cannot be strictly smaller in some cases and strictly larger
2156 	 in others.  */
2157       gcc_checking_assert (ordered_p (bytepos + bytelen, ssize));
2158       if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
2159 	{
2160 	  /* Arrange to shift the fragment to where it belongs.
2161 	     extract_bit_field loads to the lsb of the reg.  */
2162 	  if (
2163 #ifdef BLOCK_REG_PADDING
2164 	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
2165 	      == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
2166 #else
2167 	      BYTES_BIG_ENDIAN
2168 #endif
2169 	      )
2170 	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2171 	  bytelen = ssize - bytepos;
2172 	  gcc_assert (maybe_gt (bytelen, 0));
2173 	}
2174 
2175       /* If we won't be loading directly from memory, protect the real source
2176 	 from strange tricks we might play; but make sure that the source can
2177 	 be loaded directly into the destination.  */
2178       src = orig_src;
2179       if (!MEM_P (orig_src)
2180 	  && (!CONSTANT_P (orig_src)
2181 	      || (GET_MODE (orig_src) != mode
2182 		  && GET_MODE (orig_src) != VOIDmode)))
2183 	{
2184 	  if (GET_MODE (orig_src) == VOIDmode)
2185 	    src = gen_reg_rtx (mode);
2186 	  else
2187 	    src = gen_reg_rtx (GET_MODE (orig_src));
2188 
2189 	  emit_move_insn (src, orig_src);
2190 	}
2191 
2192       /* Optimize the access just a bit.  */
2193       if (MEM_P (src)
2194 	  && (! targetm.slow_unaligned_access (mode, MEM_ALIGN (src))
2195 	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
2196 	  && multiple_p (bytepos * BITS_PER_UNIT, GET_MODE_ALIGNMENT (mode))
2197 	  && known_eq (bytelen, GET_MODE_SIZE (mode)))
2198 	{
2199 	  tmps[i] = gen_reg_rtx (mode);
2200 	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2201 	}
2202       else if (COMPLEX_MODE_P (mode)
2203 	       && GET_MODE (src) == mode
2204 	       && known_eq (bytelen, GET_MODE_SIZE (mode)))
2205 	/* Let emit_move_complex do the bulk of the work.  */
2206 	tmps[i] = src;
2207       else if (GET_CODE (src) == CONCAT)
2208 	{
2209 	  poly_int64 slen = GET_MODE_SIZE (GET_MODE (src));
2210 	  poly_int64 slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2211 	  unsigned int elt;
2212 	  poly_int64 subpos;
2213 
2214 	  if (can_div_trunc_p (bytepos, slen0, &elt, &subpos)
2215 	      && known_le (subpos + bytelen, slen0))
2216 	    {
2217 	      /* The following assumes that the concatenated objects all
2218 		 have the same size.  In this case, a simple calculation
2219 		 can be used to determine the object and the bit field
2220 		 to be extracted.  */
2221 	      tmps[i] = XEXP (src, elt);
2222 	      if (maybe_ne (subpos, 0)
2223 		  || maybe_ne (subpos + bytelen, slen0)
2224 		  || (!CONSTANT_P (tmps[i])
2225 		      && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode)))
2226 		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2227 					     subpos * BITS_PER_UNIT,
2228 					     1, NULL_RTX, mode, mode, false,
2229 					     NULL);
2230 	    }
2231 	  else
2232 	    {
2233 	      rtx mem;
2234 
2235 	      gcc_assert (known_eq (bytepos, 0));
2236 	      mem = assign_stack_temp (GET_MODE (src), slen);
2237 	      emit_move_insn (mem, src);
2238 	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
2239 					   0, 1, NULL_RTX, mode, mode, false,
2240 					   NULL);
2241 	    }
2242 	}
2243       /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2244 	 SIMD register, which is currently broken.  While we get GCC
2245 	 to emit proper RTL for these cases, let's dump to memory.  */
2246       else if (VECTOR_MODE_P (GET_MODE (dst))
2247 	       && REG_P (src))
2248 	{
2249 	  poly_uint64 slen = GET_MODE_SIZE (GET_MODE (src));
2250 	  rtx mem;
2251 
2252 	  mem = assign_stack_temp (GET_MODE (src), slen);
2253 	  emit_move_insn (mem, src);
2254 	  tmps[i] = adjust_address (mem, mode, bytepos);
2255 	}
2256       else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
2257                && XVECLEN (dst, 0) > 1)
2258         tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
2259       else if (CONSTANT_P (src))
2260 	{
2261 	  if (known_eq (bytelen, ssize))
2262 	    tmps[i] = src;
2263 	  else
2264 	    {
2265 	      rtx first, second;
2266 
2267 	      /* TODO: const_wide_int can have sizes other than this...  */
2268 	      gcc_assert (known_eq (2 * bytelen, ssize));
2269 	      split_double (src, &first, &second);
2270 	      if (i)
2271 		tmps[i] = second;
2272 	      else
2273 		tmps[i] = first;
2274 	    }
2275 	}
2276       else if (REG_P (src) && GET_MODE (src) == mode)
2277 	tmps[i] = src;
2278       else
2279 	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2280 				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2281 				     mode, mode, false, NULL);
2282 
2283       if (maybe_ne (shift, 0))
2284 	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
2285 				shift, tmps[i], 0);
2286     }
2287 }
2288 
2289 /* Emit code to move a block SRC of type TYPE to a block DST,
2290    where DST is non-consecutive registers represented by a PARALLEL.
2291    SSIZE represents the total size of block ORIG_SRC in bytes, or -1
2292    if not known.  */
2293 
2294 void
2295 emit_group_load (rtx dst, rtx src, tree type, poly_int64 ssize)
2296 {
2297   rtx *tmps;
2298   int i;
2299 
2300   tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
2301   emit_group_load_1 (tmps, dst, src, type, ssize);
2302 
2303   /* Copy the extracted pieces into the proper (probable) hard regs.  */
2304   for (i = 0; i < XVECLEN (dst, 0); i++)
2305     {
2306       rtx d = XEXP (XVECEXP (dst, 0, i), 0);
2307       if (d == NULL)
2308 	continue;
2309       emit_move_insn (d, tmps[i]);
2310     }
2311 }
2312 
2313 /* Similar, but load SRC into new pseudos in a format that looks like
2314    PARALLEL.  This can later be fed to emit_group_move to get things
2315    in the right place.  */
2316 
2317 rtx
2318 emit_group_load_into_temps (rtx parallel, rtx src, tree type, poly_int64 ssize)
2319 {
2320   rtvec vec;
2321   int i;
2322 
2323   vec = rtvec_alloc (XVECLEN (parallel, 0));
2324   emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
2325 
2326   /* Convert the vector to look just like the original PARALLEL, except
2327      with the computed values.  */
2328   for (i = 0; i < XVECLEN (parallel, 0); i++)
2329     {
2330       rtx e = XVECEXP (parallel, 0, i);
2331       rtx d = XEXP (e, 0);
2332 
2333       if (d)
2334 	{
2335 	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
2336 	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
2337 	}
2338       RTVEC_ELT (vec, i) = e;
2339     }
2340 
2341   return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
2342 }
2343 
2344 /* Emit code to move a block SRC to block DST, where SRC and DST are
2345    non-consecutive groups of registers, each represented by a PARALLEL.  */
2346 
2347 void
2348 emit_group_move (rtx dst, rtx src)
2349 {
2350   int i;
2351 
2352   gcc_assert (GET_CODE (src) == PARALLEL
2353 	      && GET_CODE (dst) == PARALLEL
2354 	      && XVECLEN (src, 0) == XVECLEN (dst, 0));
2355 
2356   /* Skip first entry if NULL.  */
2357   for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2358     emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2359 		    XEXP (XVECEXP (src, 0, i), 0));
2360 }
2361 
2362 /* Move a group of registers represented by a PARALLEL into pseudos.  */
2363 
2364 rtx
2365 emit_group_move_into_temps (rtx src)
2366 {
2367   rtvec vec = rtvec_alloc (XVECLEN (src, 0));
2368   int i;
2369 
2370   for (i = 0; i < XVECLEN (src, 0); i++)
2371     {
2372       rtx e = XVECEXP (src, 0, i);
2373       rtx d = XEXP (e, 0);
2374 
2375       if (d)
2376 	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
2377       RTVEC_ELT (vec, i) = e;
2378     }
2379 
2380   return gen_rtx_PARALLEL (GET_MODE (src), vec);
2381 }
2382 
2383 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
2384    where SRC is non-consecutive registers represented by a PARALLEL.
2385    SSIZE represents the total size of block ORIG_DST, or -1 if not
2386    known.  */
2387 
2388 void
2389 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED,
2390 		  poly_int64 ssize)
2391 {
2392   rtx *tmps, dst;
2393   int start, finish, i;
2394   machine_mode m = GET_MODE (orig_dst);
2395 
2396   gcc_assert (GET_CODE (src) == PARALLEL);
2397 
2398   if (!SCALAR_INT_MODE_P (m)
2399       && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
2400     {
2401       scalar_int_mode imode;
2402       if (int_mode_for_mode (GET_MODE (orig_dst)).exists (&imode))
2403 	{
2404 	  dst = gen_reg_rtx (imode);
2405 	  emit_group_store (dst, src, type, ssize);
2406 	  dst = gen_lowpart (GET_MODE (orig_dst), dst);
2407 	}
2408       else
2409 	{
2410 	  dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
2411 	  emit_group_store (dst, src, type, ssize);
2412 	}
2413       emit_move_insn (orig_dst, dst);
2414       return;
2415     }
2416 
2417   /* Check for a NULL entry, used to indicate that the parameter goes
2418      both on the stack and in registers.  */
2419   if (XEXP (XVECEXP (src, 0, 0), 0))
2420     start = 0;
2421   else
2422     start = 1;
2423   finish = XVECLEN (src, 0);
2424 
2425   tmps = XALLOCAVEC (rtx, finish);
2426 
2427   /* Copy the (probable) hard regs into pseudos.  */
2428   for (i = start; i < finish; i++)
2429     {
2430       rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2431       if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
2432 	{
2433 	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
2434 	  emit_move_insn (tmps[i], reg);
2435 	}
2436       else
2437 	tmps[i] = reg;
2438     }
2439 
2440   /* If we won't be storing directly into memory, protect the real destination
2441      from strange tricks we might play.  */
2442   dst = orig_dst;
2443   if (GET_CODE (dst) == PARALLEL)
2444     {
2445       rtx temp;
2446 
2447       /* We can get a PARALLEL dst if there is a conditional expression in
2448 	 a return statement.  In that case, the dst and src are the same,
2449 	 so no action is necessary.  */
2450       if (rtx_equal_p (dst, src))
2451 	return;
2452 
2453       /* It is unclear if we can ever reach here, but we may as well handle
2454 	 it.  Allocate a temporary, and split this into a store/load to/from
2455 	 the temporary.  */
2456       temp = assign_stack_temp (GET_MODE (dst), ssize);
2457       emit_group_store (temp, src, type, ssize);
2458       emit_group_load (dst, temp, type, ssize);
2459       return;
2460     }
2461   else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
2462     {
2463       machine_mode outer = GET_MODE (dst);
2464       machine_mode inner;
2465       poly_int64 bytepos;
2466       bool done = false;
2467       rtx temp;
2468 
2469       if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
2470 	dst = gen_reg_rtx (outer);
2471 
2472       /* Make life a bit easier for combine.  */
2473       /* If the first element of the vector is the low part
2474 	 of the destination mode, use a paradoxical subreg to
2475 	 initialize the destination.  */
2476       if (start < finish)
2477 	{
2478 	  inner = GET_MODE (tmps[start]);
2479 	  bytepos = subreg_lowpart_offset (inner, outer);
2480 	  if (known_eq (INTVAL (XEXP (XVECEXP (src, 0, start), 1)), bytepos))
2481 	    {
2482 	      temp = simplify_gen_subreg (outer, tmps[start],
2483 					  inner, 0);
2484 	      if (temp)
2485 		{
2486 		  emit_move_insn (dst, temp);
2487 		  done = true;
2488 		  start++;
2489 		}
2490 	    }
2491 	}
2492 
2493       /* If the first element wasn't the low part, try the last.  */
2494       if (!done
2495 	  && start < finish - 1)
2496 	{
2497 	  inner = GET_MODE (tmps[finish - 1]);
2498 	  bytepos = subreg_lowpart_offset (inner, outer);
2499 	  if (known_eq (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)),
2500 			bytepos))
2501 	    {
2502 	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
2503 					  inner, 0);
2504 	      if (temp)
2505 		{
2506 		  emit_move_insn (dst, temp);
2507 		  done = true;
2508 		  finish--;
2509 		}
2510 	    }
2511 	}
2512 
2513       /* Otherwise, simply initialize the result to zero.  */
2514       if (!done)
2515         emit_move_insn (dst, CONST0_RTX (outer));
2516     }
2517 
2518   /* Process the pieces.  */
2519   for (i = start; i < finish; i++)
2520     {
2521       poly_int64 bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2522       machine_mode mode = GET_MODE (tmps[i]);
2523       poly_int64 bytelen = GET_MODE_SIZE (mode);
2524       poly_uint64 adj_bytelen;
2525       rtx dest = dst;
2526 
2527       /* Handle trailing fragments that run over the size of the struct.
2528 	 It's the target's responsibility to make sure that the fragment
2529 	 cannot be strictly smaller in some cases and strictly larger
2530 	 in others.  */
2531       gcc_checking_assert (ordered_p (bytepos + bytelen, ssize));
2532       if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
2533 	adj_bytelen = ssize - bytepos;
2534       else
2535 	adj_bytelen = bytelen;
2536 
2537       if (GET_CODE (dst) == CONCAT)
2538 	{
2539 	  if (known_le (bytepos + adj_bytelen,
2540 			GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))))
2541 	    dest = XEXP (dst, 0);
2542 	  else if (known_ge (bytepos, GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)))))
2543 	    {
2544 	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2545 	      dest = XEXP (dst, 1);
2546 	    }
2547 	  else
2548 	    {
2549 	      machine_mode dest_mode = GET_MODE (dest);
2550 	      machine_mode tmp_mode = GET_MODE (tmps[i]);
2551 
2552 	      gcc_assert (known_eq (bytepos, 0) && XVECLEN (src, 0));
2553 
2554 	      if (GET_MODE_ALIGNMENT (dest_mode)
2555 		  >= GET_MODE_ALIGNMENT (tmp_mode))
2556 		{
2557 		  dest = assign_stack_temp (dest_mode,
2558 					    GET_MODE_SIZE (dest_mode));
2559 		  emit_move_insn (adjust_address (dest,
2560 						  tmp_mode,
2561 						  bytepos),
2562 				  tmps[i]);
2563 		  dst = dest;
2564 		}
2565 	      else
2566 		{
2567 		  dest = assign_stack_temp (tmp_mode,
2568 					    GET_MODE_SIZE (tmp_mode));
2569 		  emit_move_insn (dest, tmps[i]);
2570 		  dst = adjust_address (dest, dest_mode, bytepos);
2571 		}
2572 	      break;
2573 	    }
2574 	}
2575 
2576       /* Handle trailing fragments that run over the size of the struct.  */
2577       if (known_size_p (ssize) && maybe_gt (bytepos + bytelen, ssize))
2578 	{
2579 	  /* store_bit_field always takes its value from the lsb.
2580 	     Move the fragment to the lsb if it's not already there.  */
2581 	  if (
2582 #ifdef BLOCK_REG_PADDING
2583 	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2584 	      == (BYTES_BIG_ENDIAN ? PAD_UPWARD : PAD_DOWNWARD)
2585 #else
2586 	      BYTES_BIG_ENDIAN
2587 #endif
2588 	      )
2589 	    {
2590 	      poly_int64 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2591 	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2592 				      shift, tmps[i], 0);
2593 	    }
2594 
2595 	  /* Make sure not to write past the end of the struct.  */
2596 	  store_bit_field (dest,
2597 			   adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2598 			   bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2599 			   VOIDmode, tmps[i], false);
2600 	}
2601 
2602       /* Optimize the access just a bit.  */
2603       else if (MEM_P (dest)
2604 	       && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (dest))
2605 		   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2606 	       && multiple_p (bytepos * BITS_PER_UNIT,
2607 			      GET_MODE_ALIGNMENT (mode))
2608 	       && known_eq (bytelen, GET_MODE_SIZE (mode)))
2609 	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2610 
2611       else
2612 	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2613 			 0, 0, mode, tmps[i], false);
2614     }
2615 
2616   /* Copy from the pseudo into the (probable) hard reg.  */
2617   if (orig_dst != dst)
2618     emit_move_insn (orig_dst, dst);
2619 }
2620 
2621 /* Return a form of X that does not use a PARALLEL.  TYPE is the type
2622    of the value stored in X.  */
2623 
2624 rtx
2625 maybe_emit_group_store (rtx x, tree type)
2626 {
2627   machine_mode mode = TYPE_MODE (type);
2628   gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2629   if (GET_CODE (x) == PARALLEL)
2630     {
2631       rtx result = gen_reg_rtx (mode);
2632       emit_group_store (result, x, type, int_size_in_bytes (type));
2633       return result;
2634     }
2635   return x;
2636 }
2637 
2638 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2639 
2640    This is used on targets that return BLKmode values in registers.  */
2641 
2642 static void
2643 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2644 {
2645   unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2646   rtx src = NULL, dst = NULL;
2647   unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2648   unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2649   /* No current ABI uses variable-sized modes to pass a BLKmnode type.  */
2650   fixed_size_mode mode = as_a <fixed_size_mode> (GET_MODE (srcreg));
2651   fixed_size_mode tmode = as_a <fixed_size_mode> (GET_MODE (target));
2652   fixed_size_mode copy_mode;
2653 
2654   /* BLKmode registers created in the back-end shouldn't have survived.  */
2655   gcc_assert (mode != BLKmode);
2656 
2657   /* If the structure doesn't take up a whole number of words, see whether
2658      SRCREG is padded on the left or on the right.  If it's on the left,
2659      set PADDING_CORRECTION to the number of bits to skip.
2660 
2661      In most ABIs, the structure will be returned at the least end of
2662      the register, which translates to right padding on little-endian
2663      targets and left padding on big-endian targets.  The opposite
2664      holds if the structure is returned at the most significant
2665      end of the register.  */
2666   if (bytes % UNITS_PER_WORD != 0
2667       && (targetm.calls.return_in_msb (type)
2668 	  ? !BYTES_BIG_ENDIAN
2669 	  : BYTES_BIG_ENDIAN))
2670     padding_correction
2671       = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2672 
2673   /* We can use a single move if we have an exact mode for the size.  */
2674   else if (MEM_P (target)
2675 	   && (!targetm.slow_unaligned_access (mode, MEM_ALIGN (target))
2676 	       || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2677 	   && bytes == GET_MODE_SIZE (mode))
2678   {
2679     emit_move_insn (adjust_address (target, mode, 0), srcreg);
2680     return;
2681   }
2682 
2683   /* And if we additionally have the same mode for a register.  */
2684   else if (REG_P (target)
2685 	   && GET_MODE (target) == mode
2686 	   && bytes == GET_MODE_SIZE (mode))
2687   {
2688     emit_move_insn (target, srcreg);
2689     return;
2690   }
2691 
2692   /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2693      into a new pseudo which is a full word.  */
2694   if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2695     {
2696       srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2697       mode = word_mode;
2698     }
2699 
2700   /* Copy the structure BITSIZE bits at a time.  If the target lives in
2701      memory, take care of not reading/writing past its end by selecting
2702      a copy mode suited to BITSIZE.  This should always be possible given
2703      how it is computed.
2704 
2705      If the target lives in register, make sure not to select a copy mode
2706      larger than the mode of the register.
2707 
2708      We could probably emit more efficient code for machines which do not use
2709      strict alignment, but it doesn't seem worth the effort at the current
2710      time.  */
2711 
2712   copy_mode = word_mode;
2713   if (MEM_P (target))
2714     {
2715       opt_scalar_int_mode mem_mode = int_mode_for_size (bitsize, 1);
2716       if (mem_mode.exists ())
2717 	copy_mode = mem_mode.require ();
2718     }
2719   else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2720     copy_mode = tmode;
2721 
2722   for (bitpos = 0, xbitpos = padding_correction;
2723        bitpos < bytes * BITS_PER_UNIT;
2724        bitpos += bitsize, xbitpos += bitsize)
2725     {
2726       /* We need a new source operand each time xbitpos is on a
2727 	 word boundary and when xbitpos == padding_correction
2728 	 (the first time through).  */
2729       if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2730 	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2731 
2732       /* We need a new destination operand each time bitpos is on
2733 	 a word boundary.  */
2734       if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2735 	dst = target;
2736       else if (bitpos % BITS_PER_WORD == 0)
2737 	dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2738 
2739       /* Use xbitpos for the source extraction (right justified) and
2740 	 bitpos for the destination store (left justified).  */
2741       store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2742 		       extract_bit_field (src, bitsize,
2743 					  xbitpos % BITS_PER_WORD, 1,
2744 					  NULL_RTX, copy_mode, copy_mode,
2745 					  false, NULL),
2746 		       false);
2747     }
2748 }
2749 
2750 /* Copy BLKmode value SRC into a register of mode MODE_IN.  Return the
2751    register if it contains any data, otherwise return null.
2752 
2753    This is used on targets that return BLKmode values in registers.  */
2754 
2755 rtx
2756 copy_blkmode_to_reg (machine_mode mode_in, tree src)
2757 {
2758   int i, n_regs;
2759   unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2760   unsigned int bitsize;
2761   rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2762   /* No current ABI uses variable-sized modes to pass a BLKmnode type.  */
2763   fixed_size_mode mode = as_a <fixed_size_mode> (mode_in);
2764   fixed_size_mode dst_mode;
2765 
2766   gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2767 
2768   x = expand_normal (src);
2769 
2770   bytes = arg_int_size_in_bytes (TREE_TYPE (src));
2771   if (bytes == 0)
2772     return NULL_RTX;
2773 
2774   /* If the structure doesn't take up a whole number of words, see
2775      whether the register value should be padded on the left or on
2776      the right.  Set PADDING_CORRECTION to the number of padding
2777      bits needed on the left side.
2778 
2779      In most ABIs, the structure will be returned at the least end of
2780      the register, which translates to right padding on little-endian
2781      targets and left padding on big-endian targets.  The opposite
2782      holds if the structure is returned at the most significant
2783      end of the register.  */
2784   if (bytes % UNITS_PER_WORD != 0
2785       && (targetm.calls.return_in_msb (TREE_TYPE (src))
2786 	  ? !BYTES_BIG_ENDIAN
2787 	  : BYTES_BIG_ENDIAN))
2788     padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2789 					   * BITS_PER_UNIT));
2790 
2791   n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2792   dst_words = XALLOCAVEC (rtx, n_regs);
2793   bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2794 
2795   /* Copy the structure BITSIZE bits at a time.  */
2796   for (bitpos = 0, xbitpos = padding_correction;
2797        bitpos < bytes * BITS_PER_UNIT;
2798        bitpos += bitsize, xbitpos += bitsize)
2799     {
2800       /* We need a new destination pseudo each time xbitpos is
2801 	 on a word boundary and when xbitpos == padding_correction
2802 	 (the first time through).  */
2803       if (xbitpos % BITS_PER_WORD == 0
2804 	  || xbitpos == padding_correction)
2805 	{
2806 	  /* Generate an appropriate register.  */
2807 	  dst_word = gen_reg_rtx (word_mode);
2808 	  dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2809 
2810 	  /* Clear the destination before we move anything into it.  */
2811 	  emit_move_insn (dst_word, CONST0_RTX (word_mode));
2812 	}
2813 
2814       /* We need a new source operand each time bitpos is on a word
2815 	 boundary.  */
2816       if (bitpos % BITS_PER_WORD == 0)
2817 	src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2818 
2819       /* Use bitpos for the source extraction (left justified) and
2820 	 xbitpos for the destination store (right justified).  */
2821       store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2822 		       0, 0, word_mode,
2823 		       extract_bit_field (src_word, bitsize,
2824 					  bitpos % BITS_PER_WORD, 1,
2825 					  NULL_RTX, word_mode, word_mode,
2826 					  false, NULL),
2827 		       false);
2828     }
2829 
2830   if (mode == BLKmode)
2831     {
2832       /* Find the smallest integer mode large enough to hold the
2833 	 entire structure.  */
2834       opt_scalar_int_mode mode_iter;
2835       FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
2836 	if (GET_MODE_SIZE (mode_iter.require ()) >= bytes)
2837 	  break;
2838 
2839       /* A suitable mode should have been found.  */
2840       mode = mode_iter.require ();
2841     }
2842 
2843   if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2844     dst_mode = word_mode;
2845   else
2846     dst_mode = mode;
2847   dst = gen_reg_rtx (dst_mode);
2848 
2849   for (i = 0; i < n_regs; i++)
2850     emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2851 
2852   if (mode != dst_mode)
2853     dst = gen_lowpart (mode, dst);
2854 
2855   return dst;
2856 }
2857 
2858 /* Add a USE expression for REG to the (possibly empty) list pointed
2859    to by CALL_FUSAGE.  REG must denote a hard register.  */
2860 
2861 void
2862 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2863 {
2864   gcc_assert (REG_P (reg));
2865 
2866   if (!HARD_REGISTER_P (reg))
2867     return;
2868 
2869   *call_fusage
2870     = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2871 }
2872 
2873 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2874    to by CALL_FUSAGE.  REG must denote a hard register.  */
2875 
2876 void
2877 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2878 {
2879   gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2880 
2881   *call_fusage
2882     = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2883 }
2884 
2885 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2886    starting at REGNO.  All of these registers must be hard registers.  */
2887 
2888 void
2889 use_regs (rtx *call_fusage, int regno, int nregs)
2890 {
2891   int i;
2892 
2893   gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2894 
2895   for (i = 0; i < nregs; i++)
2896     use_reg (call_fusage, regno_reg_rtx[regno + i]);
2897 }
2898 
2899 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2900    PARALLEL REGS.  This is for calls that pass values in multiple
2901    non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2902 
2903 void
2904 use_group_regs (rtx *call_fusage, rtx regs)
2905 {
2906   int i;
2907 
2908   for (i = 0; i < XVECLEN (regs, 0); i++)
2909     {
2910       rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2911 
2912       /* A NULL entry means the parameter goes both on the stack and in
2913 	 registers.  This can also be a MEM for targets that pass values
2914 	 partially on the stack and partially in registers.  */
2915       if (reg != 0 && REG_P (reg))
2916 	use_reg (call_fusage, reg);
2917     }
2918 }
2919 
2920 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2921    assigment and the code of the expresion on the RHS is CODE.  Return
2922    NULL otherwise.  */
2923 
2924 static gimple *
2925 get_def_for_expr (tree name, enum tree_code code)
2926 {
2927   gimple *def_stmt;
2928 
2929   if (TREE_CODE (name) != SSA_NAME)
2930     return NULL;
2931 
2932   def_stmt = get_gimple_for_ssa_name (name);
2933   if (!def_stmt
2934       || gimple_assign_rhs_code (def_stmt) != code)
2935     return NULL;
2936 
2937   return def_stmt;
2938 }
2939 
2940 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2941    assigment and the class of the expresion on the RHS is CLASS.  Return
2942    NULL otherwise.  */
2943 
2944 static gimple *
2945 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2946 {
2947   gimple *def_stmt;
2948 
2949   if (TREE_CODE (name) != SSA_NAME)
2950     return NULL;
2951 
2952   def_stmt = get_gimple_for_ssa_name (name);
2953   if (!def_stmt
2954       || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2955     return NULL;
2956 
2957   return def_stmt;
2958 }
2959 
2960 /* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2961    its length in bytes.  */
2962 
2963 rtx
2964 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2965 		     unsigned int expected_align, HOST_WIDE_INT expected_size,
2966 		     unsigned HOST_WIDE_INT min_size,
2967 		     unsigned HOST_WIDE_INT max_size,
2968 		     unsigned HOST_WIDE_INT probable_max_size)
2969 {
2970   machine_mode mode = GET_MODE (object);
2971   unsigned int align;
2972 
2973   gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2974 
2975   /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2976      just move a zero.  Otherwise, do this a piece at a time.  */
2977   if (mode != BLKmode
2978       && CONST_INT_P (size)
2979       && known_eq (INTVAL (size), GET_MODE_SIZE (mode)))
2980     {
2981       rtx zero = CONST0_RTX (mode);
2982       if (zero != NULL)
2983 	{
2984 	  emit_move_insn (object, zero);
2985 	  return NULL;
2986 	}
2987 
2988       if (COMPLEX_MODE_P (mode))
2989 	{
2990 	  zero = CONST0_RTX (GET_MODE_INNER (mode));
2991 	  if (zero != NULL)
2992 	    {
2993 	      write_complex_part (object, zero, 0);
2994 	      write_complex_part (object, zero, 1);
2995 	      return NULL;
2996 	    }
2997 	}
2998     }
2999 
3000   if (size == const0_rtx)
3001     return NULL;
3002 
3003   align = MEM_ALIGN (object);
3004 
3005   if (CONST_INT_P (size)
3006       && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
3007 						 CLEAR_BY_PIECES,
3008 						 optimize_insn_for_speed_p ()))
3009     clear_by_pieces (object, INTVAL (size), align);
3010   else if (set_storage_via_setmem (object, size, const0_rtx, align,
3011 				   expected_align, expected_size,
3012 				   min_size, max_size, probable_max_size))
3013     ;
3014   else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
3015     return set_storage_via_libcall (object, size, const0_rtx,
3016 				    method == BLOCK_OP_TAILCALL);
3017   else
3018     gcc_unreachable ();
3019 
3020   return NULL;
3021 }
3022 
3023 rtx
3024 clear_storage (rtx object, rtx size, enum block_op_methods method)
3025 {
3026   unsigned HOST_WIDE_INT max, min = 0;
3027   if (GET_CODE (size) == CONST_INT)
3028     min = max = UINTVAL (size);
3029   else
3030     max = GET_MODE_MASK (GET_MODE (size));
3031   return clear_storage_hints (object, size, method, 0, -1, min, max, max);
3032 }
3033 
3034 
3035 /* A subroutine of clear_storage.  Expand a call to memset.
3036    Return the return value of memset, 0 otherwise.  */
3037 
3038 rtx
3039 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
3040 {
3041   tree call_expr, fn, object_tree, size_tree, val_tree;
3042   machine_mode size_mode;
3043 
3044   object = copy_addr_to_reg (XEXP (object, 0));
3045   object_tree = make_tree (ptr_type_node, object);
3046 
3047   if (!CONST_INT_P (val))
3048     val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
3049   val_tree = make_tree (integer_type_node, val);
3050 
3051   size_mode = TYPE_MODE (sizetype);
3052   size = convert_to_mode (size_mode, size, 1);
3053   size = copy_to_mode_reg (size_mode, size);
3054   size_tree = make_tree (sizetype, size);
3055 
3056   /* It is incorrect to use the libcall calling conventions for calls to
3057      memset because it can be provided by the user.  */
3058   fn = builtin_decl_implicit (BUILT_IN_MEMSET);
3059   call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
3060   CALL_EXPR_TAILCALL (call_expr) = tailcall;
3061 
3062   return expand_call (call_expr, NULL_RTX, false);
3063 }
3064 
3065 /* Expand a setmem pattern; return true if successful.  */
3066 
3067 bool
3068 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
3069 			unsigned int expected_align, HOST_WIDE_INT expected_size,
3070 			unsigned HOST_WIDE_INT min_size,
3071 			unsigned HOST_WIDE_INT max_size,
3072 			unsigned HOST_WIDE_INT probable_max_size)
3073 {
3074   /* Try the most limited insn first, because there's no point
3075      including more than one in the machine description unless
3076      the more limited one has some advantage.  */
3077 
3078   if (expected_align < align)
3079     expected_align = align;
3080   if (expected_size != -1)
3081     {
3082       if ((unsigned HOST_WIDE_INT)expected_size > max_size)
3083 	expected_size = max_size;
3084       if ((unsigned HOST_WIDE_INT)expected_size < min_size)
3085 	expected_size = min_size;
3086     }
3087 
3088   opt_scalar_int_mode mode_iter;
3089   FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
3090     {
3091       scalar_int_mode mode = mode_iter.require ();
3092       enum insn_code code = direct_optab_handler (setmem_optab, mode);
3093 
3094       if (code != CODE_FOR_nothing
3095 	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
3096 	     here because if SIZE is less than the mode mask, as it is
3097 	     returned by the macro, it will definitely be less than the
3098 	     actual mode mask.  Since SIZE is within the Pmode address
3099 	     space, we limit MODE to Pmode.  */
3100 	  && ((CONST_INT_P (size)
3101 	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
3102 		   <= (GET_MODE_MASK (mode) >> 1)))
3103 	      || max_size <= (GET_MODE_MASK (mode) >> 1)
3104 	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
3105 	{
3106 	  struct expand_operand ops[9];
3107 	  unsigned int nops;
3108 
3109 	  nops = insn_data[(int) code].n_generator_args;
3110 	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
3111 
3112 	  create_fixed_operand (&ops[0], object);
3113 	  /* The check above guarantees that this size conversion is valid.  */
3114 	  create_convert_operand_to (&ops[1], size, mode, true);
3115 	  create_convert_operand_from (&ops[2], val, byte_mode, true);
3116 	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
3117 	  if (nops >= 6)
3118 	    {
3119 	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
3120 	      create_integer_operand (&ops[5], expected_size);
3121 	    }
3122 	  if (nops >= 8)
3123 	    {
3124 	      create_integer_operand (&ops[6], min_size);
3125 	      /* If we can not represent the maximal size,
3126 		 make parameter NULL.  */
3127 	      if ((HOST_WIDE_INT) max_size != -1)
3128 	        create_integer_operand (&ops[7], max_size);
3129 	      else
3130 		create_fixed_operand (&ops[7], NULL);
3131 	    }
3132 	  if (nops == 9)
3133 	    {
3134 	      /* If we can not represent the maximal size,
3135 		 make parameter NULL.  */
3136 	      if ((HOST_WIDE_INT) probable_max_size != -1)
3137 	        create_integer_operand (&ops[8], probable_max_size);
3138 	      else
3139 		create_fixed_operand (&ops[8], NULL);
3140 	    }
3141 	  if (maybe_expand_insn (code, nops, ops))
3142 	    return true;
3143 	}
3144     }
3145 
3146   return false;
3147 }
3148 
3149 
3150 /* Write to one of the components of the complex value CPLX.  Write VAL to
3151    the real part if IMAG_P is false, and the imaginary part if its true.  */
3152 
3153 void
3154 write_complex_part (rtx cplx, rtx val, bool imag_p)
3155 {
3156   machine_mode cmode;
3157   scalar_mode imode;
3158   unsigned ibitsize;
3159 
3160   if (GET_CODE (cplx) == CONCAT)
3161     {
3162       emit_move_insn (XEXP (cplx, imag_p), val);
3163       return;
3164     }
3165 
3166   cmode = GET_MODE (cplx);
3167   imode = GET_MODE_INNER (cmode);
3168   ibitsize = GET_MODE_BITSIZE (imode);
3169 
3170   /* For MEMs simplify_gen_subreg may generate an invalid new address
3171      because, e.g., the original address is considered mode-dependent
3172      by the target, which restricts simplify_subreg from invoking
3173      adjust_address_nv.  Instead of preparing fallback support for an
3174      invalid address, we call adjust_address_nv directly.  */
3175   if (MEM_P (cplx))
3176     {
3177       emit_move_insn (adjust_address_nv (cplx, imode,
3178 					 imag_p ? GET_MODE_SIZE (imode) : 0),
3179 		      val);
3180       return;
3181     }
3182 
3183   /* If the sub-object is at least word sized, then we know that subregging
3184      will work.  This special case is important, since store_bit_field
3185      wants to operate on integer modes, and there's rarely an OImode to
3186      correspond to TCmode.  */
3187   if (ibitsize >= BITS_PER_WORD
3188       /* For hard regs we have exact predicates.  Assume we can split
3189 	 the original object if it spans an even number of hard regs.
3190 	 This special case is important for SCmode on 64-bit platforms
3191 	 where the natural size of floating-point regs is 32-bit.  */
3192       || (REG_P (cplx)
3193 	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3194 	  && REG_NREGS (cplx) % 2 == 0))
3195     {
3196       rtx part = simplify_gen_subreg (imode, cplx, cmode,
3197 				      imag_p ? GET_MODE_SIZE (imode) : 0);
3198       if (part)
3199         {
3200 	  emit_move_insn (part, val);
3201 	  return;
3202 	}
3203       else
3204 	/* simplify_gen_subreg may fail for sub-word MEMs.  */
3205 	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3206     }
3207 
3208   store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val,
3209 		   false);
3210 }
3211 
3212 /* Extract one of the components of the complex value CPLX.  Extract the
3213    real part if IMAG_P is false, and the imaginary part if it's true.  */
3214 
3215 rtx
3216 read_complex_part (rtx cplx, bool imag_p)
3217 {
3218   machine_mode cmode;
3219   scalar_mode imode;
3220   unsigned ibitsize;
3221 
3222   if (GET_CODE (cplx) == CONCAT)
3223     return XEXP (cplx, imag_p);
3224 
3225   cmode = GET_MODE (cplx);
3226   imode = GET_MODE_INNER (cmode);
3227   ibitsize = GET_MODE_BITSIZE (imode);
3228 
3229   /* Special case reads from complex constants that got spilled to memory.  */
3230   if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3231     {
3232       tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3233       if (decl && TREE_CODE (decl) == COMPLEX_CST)
3234 	{
3235 	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3236 	  if (CONSTANT_CLASS_P (part))
3237 	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3238 	}
3239     }
3240 
3241   /* For MEMs simplify_gen_subreg may generate an invalid new address
3242      because, e.g., the original address is considered mode-dependent
3243      by the target, which restricts simplify_subreg from invoking
3244      adjust_address_nv.  Instead of preparing fallback support for an
3245      invalid address, we call adjust_address_nv directly.  */
3246   if (MEM_P (cplx))
3247     return adjust_address_nv (cplx, imode,
3248 			      imag_p ? GET_MODE_SIZE (imode) : 0);
3249 
3250   /* If the sub-object is at least word sized, then we know that subregging
3251      will work.  This special case is important, since extract_bit_field
3252      wants to operate on integer modes, and there's rarely an OImode to
3253      correspond to TCmode.  */
3254   if (ibitsize >= BITS_PER_WORD
3255       /* For hard regs we have exact predicates.  Assume we can split
3256 	 the original object if it spans an even number of hard regs.
3257 	 This special case is important for SCmode on 64-bit platforms
3258 	 where the natural size of floating-point regs is 32-bit.  */
3259       || (REG_P (cplx)
3260 	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3261 	  && REG_NREGS (cplx) % 2 == 0))
3262     {
3263       rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3264 				     imag_p ? GET_MODE_SIZE (imode) : 0);
3265       if (ret)
3266         return ret;
3267       else
3268 	/* simplify_gen_subreg may fail for sub-word MEMs.  */
3269 	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3270     }
3271 
3272   return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3273 			    true, NULL_RTX, imode, imode, false, NULL);
3274 }
3275 
3276 /* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
3277    NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
3278    represented in NEW_MODE.  If FORCE is true, this will never happen, as
3279    we'll force-create a SUBREG if needed.  */
3280 
3281 static rtx
3282 emit_move_change_mode (machine_mode new_mode,
3283 		       machine_mode old_mode, rtx x, bool force)
3284 {
3285   rtx ret;
3286 
3287   if (push_operand (x, GET_MODE (x)))
3288     {
3289       ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3290       MEM_COPY_ATTRIBUTES (ret, x);
3291     }
3292   else if (MEM_P (x))
3293     {
3294       /* We don't have to worry about changing the address since the
3295 	 size in bytes is supposed to be the same.  */
3296       if (reload_in_progress)
3297 	{
3298 	  /* Copy the MEM to change the mode and move any
3299 	     substitutions from the old MEM to the new one.  */
3300 	  ret = adjust_address_nv (x, new_mode, 0);
3301 	  copy_replacements (x, ret);
3302 	}
3303       else
3304 	ret = adjust_address (x, new_mode, 0);
3305     }
3306   else
3307     {
3308       /* Note that we do want simplify_subreg's behavior of validating
3309 	 that the new mode is ok for a hard register.  If we were to use
3310 	 simplify_gen_subreg, we would create the subreg, but would
3311 	 probably run into the target not being able to implement it.  */
3312       /* Except, of course, when FORCE is true, when this is exactly what
3313 	 we want.  Which is needed for CCmodes on some targets.  */
3314       if (force)
3315 	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3316       else
3317 	ret = simplify_subreg (new_mode, x, old_mode, 0);
3318     }
3319 
3320   return ret;
3321 }
3322 
3323 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
3324    an integer mode of the same size as MODE.  Returns the instruction
3325    emitted, or NULL if such a move could not be generated.  */
3326 
3327 static rtx_insn *
3328 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3329 {
3330   scalar_int_mode imode;
3331   enum insn_code code;
3332 
3333   /* There must exist a mode of the exact size we require.  */
3334   if (!int_mode_for_mode (mode).exists (&imode))
3335     return NULL;
3336 
3337   /* The target must support moves in this mode.  */
3338   code = optab_handler (mov_optab, imode);
3339   if (code == CODE_FOR_nothing)
3340     return NULL;
3341 
3342   x = emit_move_change_mode (imode, mode, x, force);
3343   if (x == NULL_RTX)
3344     return NULL;
3345   y = emit_move_change_mode (imode, mode, y, force);
3346   if (y == NULL_RTX)
3347     return NULL;
3348   return emit_insn (GEN_FCN (code) (x, y));
3349 }
3350 
3351 /* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
3352    Return an equivalent MEM that does not use an auto-increment.  */
3353 
3354 rtx
3355 emit_move_resolve_push (machine_mode mode, rtx x)
3356 {
3357   enum rtx_code code = GET_CODE (XEXP (x, 0));
3358   rtx temp;
3359 
3360   poly_int64 adjust = GET_MODE_SIZE (mode);
3361 #ifdef PUSH_ROUNDING
3362   adjust = PUSH_ROUNDING (adjust);
3363 #endif
3364   if (code == PRE_DEC || code == POST_DEC)
3365     adjust = -adjust;
3366   else if (code == PRE_MODIFY || code == POST_MODIFY)
3367     {
3368       rtx expr = XEXP (XEXP (x, 0), 1);
3369 
3370       gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3371       poly_int64 val = rtx_to_poly_int64 (XEXP (expr, 1));
3372       if (GET_CODE (expr) == MINUS)
3373 	val = -val;
3374       gcc_assert (known_eq (adjust, val) || known_eq (adjust, -val));
3375       adjust = val;
3376     }
3377 
3378   /* Do not use anti_adjust_stack, since we don't want to update
3379      stack_pointer_delta.  */
3380   temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3381 			      gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3382 			      0, OPTAB_LIB_WIDEN);
3383   if (temp != stack_pointer_rtx)
3384     emit_move_insn (stack_pointer_rtx, temp);
3385 
3386   switch (code)
3387     {
3388     case PRE_INC:
3389     case PRE_DEC:
3390     case PRE_MODIFY:
3391       temp = stack_pointer_rtx;
3392       break;
3393     case POST_INC:
3394     case POST_DEC:
3395     case POST_MODIFY:
3396       temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3397       break;
3398     default:
3399       gcc_unreachable ();
3400     }
3401 
3402   return replace_equiv_address (x, temp);
3403 }
3404 
3405 /* A subroutine of emit_move_complex.  Generate a move from Y into X.
3406    X is known to satisfy push_operand, and MODE is known to be complex.
3407    Returns the last instruction emitted.  */
3408 
3409 rtx_insn *
3410 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3411 {
3412   scalar_mode submode = GET_MODE_INNER (mode);
3413   bool imag_first;
3414 
3415 #ifdef PUSH_ROUNDING
3416   poly_int64 submodesize = GET_MODE_SIZE (submode);
3417 
3418   /* In case we output to the stack, but the size is smaller than the
3419      machine can push exactly, we need to use move instructions.  */
3420   if (maybe_ne (PUSH_ROUNDING (submodesize), submodesize))
3421     {
3422       x = emit_move_resolve_push (mode, x);
3423       return emit_move_insn (x, y);
3424     }
3425 #endif
3426 
3427   /* Note that the real part always precedes the imag part in memory
3428      regardless of machine's endianness.  */
3429   switch (GET_CODE (XEXP (x, 0)))
3430     {
3431     case PRE_DEC:
3432     case POST_DEC:
3433       imag_first = true;
3434       break;
3435     case PRE_INC:
3436     case POST_INC:
3437       imag_first = false;
3438       break;
3439     default:
3440       gcc_unreachable ();
3441     }
3442 
3443   emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3444 		  read_complex_part (y, imag_first));
3445   return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3446 			 read_complex_part (y, !imag_first));
3447 }
3448 
3449 /* A subroutine of emit_move_complex.  Perform the move from Y to X
3450    via two moves of the parts.  Returns the last instruction emitted.  */
3451 
3452 rtx_insn *
3453 emit_move_complex_parts (rtx x, rtx y)
3454 {
3455   /* Show the output dies here.  This is necessary for SUBREGs
3456      of pseudos since we cannot track their lifetimes correctly;
3457      hard regs shouldn't appear here except as return values.  */
3458   if (!reload_completed && !reload_in_progress
3459       && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3460     emit_clobber (x);
3461 
3462   write_complex_part (x, read_complex_part (y, false), false);
3463   write_complex_part (x, read_complex_part (y, true), true);
3464 
3465   return get_last_insn ();
3466 }
3467 
3468 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3469    MODE is known to be complex.  Returns the last instruction emitted.  */
3470 
3471 static rtx_insn *
3472 emit_move_complex (machine_mode mode, rtx x, rtx y)
3473 {
3474   bool try_int;
3475 
3476   /* Need to take special care for pushes, to maintain proper ordering
3477      of the data, and possibly extra padding.  */
3478   if (push_operand (x, mode))
3479     return emit_move_complex_push (mode, x, y);
3480 
3481   /* See if we can coerce the target into moving both values at once, except
3482      for floating point where we favor moving as parts if this is easy.  */
3483   if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3484       && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3485       && !(REG_P (x)
3486 	   && HARD_REGISTER_P (x)
3487 	   && REG_NREGS (x) == 1)
3488       && !(REG_P (y)
3489 	   && HARD_REGISTER_P (y)
3490 	   && REG_NREGS (y) == 1))
3491     try_int = false;
3492   /* Not possible if the values are inherently not adjacent.  */
3493   else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3494     try_int = false;
3495   /* Is possible if both are registers (or subregs of registers).  */
3496   else if (register_operand (x, mode) && register_operand (y, mode))
3497     try_int = true;
3498   /* If one of the operands is a memory, and alignment constraints
3499      are friendly enough, we may be able to do combined memory operations.
3500      We do not attempt this if Y is a constant because that combination is
3501      usually better with the by-parts thing below.  */
3502   else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3503 	   && (!STRICT_ALIGNMENT
3504 	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3505     try_int = true;
3506   else
3507     try_int = false;
3508 
3509   if (try_int)
3510     {
3511       rtx_insn *ret;
3512 
3513       /* For memory to memory moves, optimal behavior can be had with the
3514 	 existing block move logic.  */
3515       if (MEM_P (x) && MEM_P (y))
3516 	{
3517 	  emit_block_move (x, y, gen_int_mode (GET_MODE_SIZE (mode), Pmode),
3518 			   BLOCK_OP_NO_LIBCALL);
3519 	  return get_last_insn ();
3520 	}
3521 
3522       ret = emit_move_via_integer (mode, x, y, true);
3523       if (ret)
3524 	return ret;
3525     }
3526 
3527   return emit_move_complex_parts (x, y);
3528 }
3529 
3530 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3531    MODE is known to be MODE_CC.  Returns the last instruction emitted.  */
3532 
3533 static rtx_insn *
3534 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3535 {
3536   rtx_insn *ret;
3537 
3538   /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
3539   if (mode != CCmode)
3540     {
3541       enum insn_code code = optab_handler (mov_optab, CCmode);
3542       if (code != CODE_FOR_nothing)
3543 	{
3544 	  x = emit_move_change_mode (CCmode, mode, x, true);
3545 	  y = emit_move_change_mode (CCmode, mode, y, true);
3546 	  return emit_insn (GEN_FCN (code) (x, y));
3547 	}
3548     }
3549 
3550   /* Otherwise, find the MODE_INT mode of the same width.  */
3551   ret = emit_move_via_integer (mode, x, y, false);
3552   gcc_assert (ret != NULL);
3553   return ret;
3554 }
3555 
3556 /* Return true if word I of OP lies entirely in the
3557    undefined bits of a paradoxical subreg.  */
3558 
3559 static bool
3560 undefined_operand_subword_p (const_rtx op, int i)
3561 {
3562   if (GET_CODE (op) != SUBREG)
3563     return false;
3564   machine_mode innermostmode = GET_MODE (SUBREG_REG (op));
3565   poly_int64 offset = i * UNITS_PER_WORD + subreg_memory_offset (op);
3566   return (known_ge (offset, GET_MODE_SIZE (innermostmode))
3567 	  || known_le (offset, -UNITS_PER_WORD));
3568 }
3569 
3570 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3571    MODE is any multi-word or full-word mode that lacks a move_insn
3572    pattern.  Note that you will get better code if you define such
3573    patterns, even if they must turn into multiple assembler instructions.  */
3574 
3575 static rtx_insn *
3576 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3577 {
3578   rtx_insn *last_insn = 0;
3579   rtx_insn *seq;
3580   rtx inner;
3581   bool need_clobber;
3582   int i, mode_size;
3583 
3584   /* This function can only handle cases where the number of words is
3585      known at compile time.  */
3586   mode_size = GET_MODE_SIZE (mode).to_constant ();
3587   gcc_assert (mode_size >= UNITS_PER_WORD);
3588 
3589   /* If X is a push on the stack, do the push now and replace
3590      X with a reference to the stack pointer.  */
3591   if (push_operand (x, mode))
3592     x = emit_move_resolve_push (mode, x);
3593 
3594   /* If we are in reload, see if either operand is a MEM whose address
3595      is scheduled for replacement.  */
3596   if (reload_in_progress && MEM_P (x)
3597       && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3598     x = replace_equiv_address_nv (x, inner);
3599   if (reload_in_progress && MEM_P (y)
3600       && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3601     y = replace_equiv_address_nv (y, inner);
3602 
3603   start_sequence ();
3604 
3605   need_clobber = false;
3606   for (i = 0; i < CEIL (mode_size, UNITS_PER_WORD); i++)
3607     {
3608       rtx xpart = operand_subword (x, i, 1, mode);
3609       rtx ypart;
3610 
3611       /* Do not generate code for a move if it would come entirely
3612 	 from the undefined bits of a paradoxical subreg.  */
3613       if (undefined_operand_subword_p (y, i))
3614 	continue;
3615 
3616       ypart = operand_subword (y, i, 1, mode);
3617 
3618       /* If we can't get a part of Y, put Y into memory if it is a
3619 	 constant.  Otherwise, force it into a register.  Then we must
3620 	 be able to get a part of Y.  */
3621       if (ypart == 0 && CONSTANT_P (y))
3622 	{
3623 	  y = use_anchored_address (force_const_mem (mode, y));
3624 	  ypart = operand_subword (y, i, 1, mode);
3625 	}
3626       else if (ypart == 0)
3627 	ypart = operand_subword_force (y, i, mode);
3628 
3629       gcc_assert (xpart && ypart);
3630 
3631       need_clobber |= (GET_CODE (xpart) == SUBREG);
3632 
3633       last_insn = emit_move_insn (xpart, ypart);
3634     }
3635 
3636   seq = get_insns ();
3637   end_sequence ();
3638 
3639   /* Show the output dies here.  This is necessary for SUBREGs
3640      of pseudos since we cannot track their lifetimes correctly;
3641      hard regs shouldn't appear here except as return values.
3642      We never want to emit such a clobber after reload.  */
3643   if (x != y
3644       && ! (reload_in_progress || reload_completed)
3645       && need_clobber != 0)
3646     emit_clobber (x);
3647 
3648   emit_insn (seq);
3649 
3650   return last_insn;
3651 }
3652 
3653 /* Low level part of emit_move_insn.
3654    Called just like emit_move_insn, but assumes X and Y
3655    are basically valid.  */
3656 
3657 rtx_insn *
3658 emit_move_insn_1 (rtx x, rtx y)
3659 {
3660   machine_mode mode = GET_MODE (x);
3661   enum insn_code code;
3662 
3663   gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3664 
3665   code = optab_handler (mov_optab, mode);
3666   if (code != CODE_FOR_nothing)
3667     return emit_insn (GEN_FCN (code) (x, y));
3668 
3669   /* Expand complex moves by moving real part and imag part.  */
3670   if (COMPLEX_MODE_P (mode))
3671     return emit_move_complex (mode, x, y);
3672 
3673   if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3674       || ALL_FIXED_POINT_MODE_P (mode))
3675     {
3676       rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3677 
3678       /* If we can't find an integer mode, use multi words.  */
3679       if (result)
3680 	return result;
3681       else
3682 	return emit_move_multi_word (mode, x, y);
3683     }
3684 
3685   if (GET_MODE_CLASS (mode) == MODE_CC)
3686     return emit_move_ccmode (mode, x, y);
3687 
3688   /* Try using a move pattern for the corresponding integer mode.  This is
3689      only safe when simplify_subreg can convert MODE constants into integer
3690      constants.  At present, it can only do this reliably if the value
3691      fits within a HOST_WIDE_INT.  */
3692   if (!CONSTANT_P (y)
3693       || known_le (GET_MODE_BITSIZE (mode), HOST_BITS_PER_WIDE_INT))
3694     {
3695       rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3696 
3697       if (ret)
3698 	{
3699 	  if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3700 	    return ret;
3701 	}
3702     }
3703 
3704   return emit_move_multi_word (mode, x, y);
3705 }
3706 
3707 /* Generate code to copy Y into X.
3708    Both Y and X must have the same mode, except that
3709    Y can be a constant with VOIDmode.
3710    This mode cannot be BLKmode; use emit_block_move for that.
3711 
3712    Return the last instruction emitted.  */
3713 
3714 rtx_insn *
3715 emit_move_insn (rtx x, rtx y)
3716 {
3717   machine_mode mode = GET_MODE (x);
3718   rtx y_cst = NULL_RTX;
3719   rtx_insn *last_insn;
3720   rtx set;
3721 
3722   gcc_assert (mode != BLKmode
3723 	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3724 
3725   if (CONSTANT_P (y))
3726     {
3727       if (optimize
3728 	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3729 	  && (last_insn = compress_float_constant (x, y)))
3730 	return last_insn;
3731 
3732       y_cst = y;
3733 
3734       if (!targetm.legitimate_constant_p (mode, y))
3735 	{
3736 	  y = force_const_mem (mode, y);
3737 
3738 	  /* If the target's cannot_force_const_mem prevented the spill,
3739 	     assume that the target's move expanders will also take care
3740 	     of the non-legitimate constant.  */
3741 	  if (!y)
3742 	    y = y_cst;
3743 	  else
3744 	    y = use_anchored_address (y);
3745 	}
3746     }
3747 
3748   /* If X or Y are memory references, verify that their addresses are valid
3749      for the machine.  */
3750   if (MEM_P (x)
3751       && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3752 					 MEM_ADDR_SPACE (x))
3753 	  && ! push_operand (x, GET_MODE (x))))
3754     x = validize_mem (x);
3755 
3756   if (MEM_P (y)
3757       && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3758 					MEM_ADDR_SPACE (y)))
3759     y = validize_mem (y);
3760 
3761   gcc_assert (mode != BLKmode);
3762 
3763   last_insn = emit_move_insn_1 (x, y);
3764 
3765   if (y_cst && REG_P (x)
3766       && (set = single_set (last_insn)) != NULL_RTX
3767       && SET_DEST (set) == x
3768       && ! rtx_equal_p (y_cst, SET_SRC (set)))
3769     set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3770 
3771   return last_insn;
3772 }
3773 
3774 /* Generate the body of an instruction to copy Y into X.
3775    It may be a list of insns, if one insn isn't enough.  */
3776 
3777 rtx_insn *
3778 gen_move_insn (rtx x, rtx y)
3779 {
3780   rtx_insn *seq;
3781 
3782   start_sequence ();
3783   emit_move_insn_1 (x, y);
3784   seq = get_insns ();
3785   end_sequence ();
3786   return seq;
3787 }
3788 
3789 /* If Y is representable exactly in a narrower mode, and the target can
3790    perform the extension directly from constant or memory, then emit the
3791    move as an extension.  */
3792 
3793 static rtx_insn *
3794 compress_float_constant (rtx x, rtx y)
3795 {
3796   machine_mode dstmode = GET_MODE (x);
3797   machine_mode orig_srcmode = GET_MODE (y);
3798   machine_mode srcmode;
3799   const REAL_VALUE_TYPE *r;
3800   int oldcost, newcost;
3801   bool speed = optimize_insn_for_speed_p ();
3802 
3803   r = CONST_DOUBLE_REAL_VALUE (y);
3804 
3805   if (targetm.legitimate_constant_p (dstmode, y))
3806     oldcost = set_src_cost (y, orig_srcmode, speed);
3807   else
3808     oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);
3809 
3810   FOR_EACH_MODE_UNTIL (srcmode, orig_srcmode)
3811     {
3812       enum insn_code ic;
3813       rtx trunc_y;
3814       rtx_insn *last_insn;
3815 
3816       /* Skip if the target can't extend this way.  */
3817       ic = can_extend_p (dstmode, srcmode, 0);
3818       if (ic == CODE_FOR_nothing)
3819 	continue;
3820 
3821       /* Skip if the narrowed value isn't exact.  */
3822       if (! exact_real_truncate (srcmode, r))
3823 	continue;
3824 
3825       trunc_y = const_double_from_real_value (*r, srcmode);
3826 
3827       if (targetm.legitimate_constant_p (srcmode, trunc_y))
3828 	{
3829 	  /* Skip if the target needs extra instructions to perform
3830 	     the extension.  */
3831 	  if (!insn_operand_matches (ic, 1, trunc_y))
3832 	    continue;
3833 	  /* This is valid, but may not be cheaper than the original. */
3834 	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3835 				  dstmode, speed);
3836 	  if (oldcost < newcost)
3837 	    continue;
3838 	}
3839       else if (float_extend_from_mem[dstmode][srcmode])
3840 	{
3841 	  trunc_y = force_const_mem (srcmode, trunc_y);
3842 	  /* This is valid, but may not be cheaper than the original. */
3843 	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3844 				  dstmode, speed);
3845 	  if (oldcost < newcost)
3846 	    continue;
3847 	  trunc_y = validize_mem (trunc_y);
3848 	}
3849       else
3850 	continue;
3851 
3852       /* For CSE's benefit, force the compressed constant pool entry
3853 	 into a new pseudo.  This constant may be used in different modes,
3854 	 and if not, combine will put things back together for us.  */
3855       trunc_y = force_reg (srcmode, trunc_y);
3856 
3857       /* If x is a hard register, perform the extension into a pseudo,
3858 	 so that e.g. stack realignment code is aware of it.  */
3859       rtx target = x;
3860       if (REG_P (x) && HARD_REGISTER_P (x))
3861 	target = gen_reg_rtx (dstmode);
3862 
3863       emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3864       last_insn = get_last_insn ();
3865 
3866       if (REG_P (target))
3867 	set_unique_reg_note (last_insn, REG_EQUAL, y);
3868 
3869       if (target != x)
3870 	return emit_move_insn (x, target);
3871       return last_insn;
3872     }
3873 
3874   return NULL;
3875 }
3876 
3877 /* Pushing data onto the stack.  */
3878 
3879 /* Push a block of length SIZE (perhaps variable)
3880    and return an rtx to address the beginning of the block.
3881    The value may be virtual_outgoing_args_rtx.
3882 
3883    EXTRA is the number of bytes of padding to push in addition to SIZE.
3884    BELOW nonzero means this padding comes at low addresses;
3885    otherwise, the padding comes at high addresses.  */
3886 
3887 rtx
3888 push_block (rtx size, poly_int64 extra, int below)
3889 {
3890   rtx temp;
3891 
3892   size = convert_modes (Pmode, ptr_mode, size, 1);
3893   if (CONSTANT_P (size))
3894     anti_adjust_stack (plus_constant (Pmode, size, extra));
3895   else if (REG_P (size) && known_eq (extra, 0))
3896     anti_adjust_stack (size);
3897   else
3898     {
3899       temp = copy_to_mode_reg (Pmode, size);
3900       if (maybe_ne (extra, 0))
3901 	temp = expand_binop (Pmode, add_optab, temp,
3902 			     gen_int_mode (extra, Pmode),
3903 			     temp, 0, OPTAB_LIB_WIDEN);
3904       anti_adjust_stack (temp);
3905     }
3906 
3907   if (STACK_GROWS_DOWNWARD)
3908     {
3909       temp = virtual_outgoing_args_rtx;
3910       if (maybe_ne (extra, 0) && below)
3911 	temp = plus_constant (Pmode, temp, extra);
3912     }
3913   else
3914     {
3915       if (CONST_INT_P (size))
3916 	temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3917 			      -INTVAL (size) - (below ? 0 : extra));
3918       else if (maybe_ne (extra, 0) && !below)
3919 	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3920 			     negate_rtx (Pmode, plus_constant (Pmode, size,
3921 							       extra)));
3922       else
3923 	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3924 			     negate_rtx (Pmode, size));
3925     }
3926 
3927   return memory_address (NARROWEST_INT_MODE, temp);
3928 }
3929 
3930 /* A utility routine that returns the base of an auto-inc memory, or NULL.  */
3931 
3932 static rtx
3933 mem_autoinc_base (rtx mem)
3934 {
3935   if (MEM_P (mem))
3936     {
3937       rtx addr = XEXP (mem, 0);
3938       if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3939 	return XEXP (addr, 0);
3940     }
3941   return NULL;
3942 }
3943 
3944 /* A utility routine used here, in reload, and in try_split.  The insns
3945    after PREV up to and including LAST are known to adjust the stack,
3946    with a final value of END_ARGS_SIZE.  Iterate backward from LAST
3947    placing notes as appropriate.  PREV may be NULL, indicating the
3948    entire insn sequence prior to LAST should be scanned.
3949 
3950    The set of allowed stack pointer modifications is small:
3951      (1) One or more auto-inc style memory references (aka pushes),
3952      (2) One or more addition/subtraction with the SP as destination,
3953      (3) A single move insn with the SP as destination,
3954      (4) A call_pop insn,
3955      (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3956 
3957    Insns in the sequence that do not modify the SP are ignored,
3958    except for noreturn calls.
3959 
3960    The return value is the amount of adjustment that can be trivially
3961    verified, via immediate operand or auto-inc.  If the adjustment
3962    cannot be trivially extracted, the return value is HOST_WIDE_INT_MIN.  */
3963 
3964 poly_int64
3965 find_args_size_adjust (rtx_insn *insn)
3966 {
3967   rtx dest, set, pat;
3968   int i;
3969 
3970   pat = PATTERN (insn);
3971   set = NULL;
3972 
3973   /* Look for a call_pop pattern.  */
3974   if (CALL_P (insn))
3975     {
3976       /* We have to allow non-call_pop patterns for the case
3977 	 of emit_single_push_insn of a TLS address.  */
3978       if (GET_CODE (pat) != PARALLEL)
3979 	return 0;
3980 
3981       /* All call_pop have a stack pointer adjust in the parallel.
3982 	 The call itself is always first, and the stack adjust is
3983 	 usually last, so search from the end.  */
3984       for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3985 	{
3986 	  set = XVECEXP (pat, 0, i);
3987 	  if (GET_CODE (set) != SET)
3988 	    continue;
3989 	  dest = SET_DEST (set);
3990 	  if (dest == stack_pointer_rtx)
3991 	    break;
3992 	}
3993       /* We'd better have found the stack pointer adjust.  */
3994       if (i == 0)
3995 	return 0;
3996       /* Fall through to process the extracted SET and DEST
3997 	 as if it was a standalone insn.  */
3998     }
3999   else if (GET_CODE (pat) == SET)
4000     set = pat;
4001   else if ((set = single_set (insn)) != NULL)
4002     ;
4003   else if (GET_CODE (pat) == PARALLEL)
4004     {
4005       /* ??? Some older ports use a parallel with a stack adjust
4006 	 and a store for a PUSH_ROUNDING pattern, rather than a
4007 	 PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
4008       /* ??? See h8300 and m68k, pushqi1.  */
4009       for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
4010 	{
4011 	  set = XVECEXP (pat, 0, i);
4012 	  if (GET_CODE (set) != SET)
4013 	    continue;
4014 	  dest = SET_DEST (set);
4015 	  if (dest == stack_pointer_rtx)
4016 	    break;
4017 
4018 	  /* We do not expect an auto-inc of the sp in the parallel.  */
4019 	  gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
4020 	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
4021 			       != stack_pointer_rtx);
4022 	}
4023       if (i < 0)
4024 	return 0;
4025     }
4026   else
4027     return 0;
4028 
4029   dest = SET_DEST (set);
4030 
4031   /* Look for direct modifications of the stack pointer.  */
4032   if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
4033     {
4034       /* Look for a trivial adjustment, otherwise assume nothing.  */
4035       /* Note that the SPU restore_stack_block pattern refers to
4036 	 the stack pointer in V4SImode.  Consider that non-trivial.  */
4037       if (SCALAR_INT_MODE_P (GET_MODE (dest))
4038 	  && GET_CODE (SET_SRC (set)) == PLUS
4039 	  && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
4040 	  && CONST_INT_P (XEXP (SET_SRC (set), 1)))
4041 	return INTVAL (XEXP (SET_SRC (set), 1));
4042       /* ??? Reload can generate no-op moves, which will be cleaned
4043 	 up later.  Recognize it and continue searching.  */
4044       else if (rtx_equal_p (dest, SET_SRC (set)))
4045 	return 0;
4046       else
4047 	return HOST_WIDE_INT_MIN;
4048     }
4049   else
4050     {
4051       rtx mem, addr;
4052 
4053       /* Otherwise only think about autoinc patterns.  */
4054       if (mem_autoinc_base (dest) == stack_pointer_rtx)
4055 	{
4056 	  mem = dest;
4057 	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
4058 			       != stack_pointer_rtx);
4059 	}
4060       else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
4061 	mem = SET_SRC (set);
4062       else
4063 	return 0;
4064 
4065       addr = XEXP (mem, 0);
4066       switch (GET_CODE (addr))
4067 	{
4068 	case PRE_INC:
4069 	case POST_INC:
4070 	  return GET_MODE_SIZE (GET_MODE (mem));
4071 	case PRE_DEC:
4072 	case POST_DEC:
4073 	  return -GET_MODE_SIZE (GET_MODE (mem));
4074 	case PRE_MODIFY:
4075 	case POST_MODIFY:
4076 	  addr = XEXP (addr, 1);
4077 	  gcc_assert (GET_CODE (addr) == PLUS);
4078 	  gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
4079 	  gcc_assert (CONST_INT_P (XEXP (addr, 1)));
4080 	  return INTVAL (XEXP (addr, 1));
4081 	default:
4082 	  gcc_unreachable ();
4083 	}
4084     }
4085 }
4086 
4087 poly_int64
4088 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last,
4089 		       poly_int64 end_args_size)
4090 {
4091   poly_int64 args_size = end_args_size;
4092   bool saw_unknown = false;
4093   rtx_insn *insn;
4094 
4095   for (insn = last; insn != prev; insn = PREV_INSN (insn))
4096     {
4097       if (!NONDEBUG_INSN_P (insn))
4098 	continue;
4099 
4100       /* We might have existing REG_ARGS_SIZE notes, e.g. when pushing
4101 	 a call argument containing a TLS address that itself requires
4102 	 a call to __tls_get_addr.  The handling of stack_pointer_delta
4103 	 in emit_single_push_insn is supposed to ensure that any such
4104 	 notes are already correct.  */
4105       rtx note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
4106       gcc_assert (!note || known_eq (args_size, get_args_size (note)));
4107 
4108       poly_int64 this_delta = find_args_size_adjust (insn);
4109       if (known_eq (this_delta, 0))
4110 	{
4111 	  if (!CALL_P (insn)
4112 	      || ACCUMULATE_OUTGOING_ARGS
4113 	      || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
4114 	    continue;
4115 	}
4116 
4117       gcc_assert (!saw_unknown);
4118       if (known_eq (this_delta, HOST_WIDE_INT_MIN))
4119 	saw_unknown = true;
4120 
4121       if (!note)
4122 	add_args_size_note (insn, args_size);
4123       if (STACK_GROWS_DOWNWARD)
4124 	this_delta = -poly_uint64 (this_delta);
4125 
4126       if (saw_unknown)
4127 	args_size = HOST_WIDE_INT_MIN;
4128       else
4129 	args_size -= this_delta;
4130     }
4131 
4132   return args_size;
4133 }
4134 
4135 #ifdef PUSH_ROUNDING
4136 /* Emit single push insn.  */
4137 
4138 static void
4139 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
4140 {
4141   rtx dest_addr;
4142   poly_int64 rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
4143   rtx dest;
4144   enum insn_code icode;
4145 
4146   /* If there is push pattern, use it.  Otherwise try old way of throwing
4147      MEM representing push operation to move expander.  */
4148   icode = optab_handler (push_optab, mode);
4149   if (icode != CODE_FOR_nothing)
4150     {
4151       struct expand_operand ops[1];
4152 
4153       create_input_operand (&ops[0], x, mode);
4154       if (maybe_expand_insn (icode, 1, ops))
4155 	return;
4156     }
4157   if (known_eq (GET_MODE_SIZE (mode), rounded_size))
4158     dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4159   /* If we are to pad downward, adjust the stack pointer first and
4160      then store X into the stack location using an offset.  This is
4161      because emit_move_insn does not know how to pad; it does not have
4162      access to type.  */
4163   else if (targetm.calls.function_arg_padding (mode, type) == PAD_DOWNWARD)
4164     {
4165       emit_move_insn (stack_pointer_rtx,
4166 		      expand_binop (Pmode,
4167 				    STACK_GROWS_DOWNWARD ? sub_optab
4168 				    : add_optab,
4169 				    stack_pointer_rtx,
4170 				    gen_int_mode (rounded_size, Pmode),
4171 				    NULL_RTX, 0, OPTAB_LIB_WIDEN));
4172 
4173       poly_int64 offset = rounded_size - GET_MODE_SIZE (mode);
4174       if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
4175 	/* We have already decremented the stack pointer, so get the
4176 	   previous value.  */
4177 	offset += rounded_size;
4178 
4179       if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
4180 	/* We have already incremented the stack pointer, so get the
4181 	   previous value.  */
4182 	offset -= rounded_size;
4183 
4184       dest_addr = plus_constant (Pmode, stack_pointer_rtx, offset);
4185     }
4186   else
4187     {
4188       if (STACK_GROWS_DOWNWARD)
4189 	/* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
4190 	dest_addr = plus_constant (Pmode, stack_pointer_rtx, -rounded_size);
4191       else
4192 	/* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
4193 	dest_addr = plus_constant (Pmode, stack_pointer_rtx, rounded_size);
4194 
4195       dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4196     }
4197 
4198   dest = gen_rtx_MEM (mode, dest_addr);
4199 
4200   if (type != 0)
4201     {
4202       set_mem_attributes (dest, type, 1);
4203 
4204       if (cfun->tail_call_marked)
4205 	/* Function incoming arguments may overlap with sibling call
4206 	   outgoing arguments and we cannot allow reordering of reads
4207 	   from function arguments with stores to outgoing arguments
4208 	   of sibling calls.  */
4209 	set_mem_alias_set (dest, 0);
4210     }
4211   emit_move_insn (dest, x);
4212 }
4213 
4214 /* Emit and annotate a single push insn.  */
4215 
4216 static void
4217 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4218 {
4219   poly_int64 delta, old_delta = stack_pointer_delta;
4220   rtx_insn *prev = get_last_insn ();
4221   rtx_insn *last;
4222 
4223   emit_single_push_insn_1 (mode, x, type);
4224 
4225   /* Adjust stack_pointer_delta to describe the situation after the push
4226      we just performed.  Note that we must do this after the push rather
4227      than before the push in case calculating X needs pushes and pops of
4228      its own (e.g. if calling __tls_get_addr).  The REG_ARGS_SIZE notes
4229      for such pushes and pops must not include the effect of the future
4230      push of X.  */
4231   stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4232 
4233   last = get_last_insn ();
4234 
4235   /* Notice the common case where we emitted exactly one insn.  */
4236   if (PREV_INSN (last) == prev)
4237     {
4238       add_args_size_note (last, stack_pointer_delta);
4239       return;
4240     }
4241 
4242   delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4243   gcc_assert (known_eq (delta, HOST_WIDE_INT_MIN)
4244 	      || known_eq (delta, old_delta));
4245 }
4246 #endif
4247 
4248 /* If reading SIZE bytes from X will end up reading from
4249    Y return the number of bytes that overlap.  Return -1
4250    if there is no overlap or -2 if we can't determine
4251    (for example when X and Y have different base registers).  */
4252 
4253 static int
4254 memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
4255 {
4256   rtx tmp = plus_constant (Pmode, x, size);
4257   rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);
4258 
4259   if (!CONST_INT_P (sub))
4260     return -2;
4261 
4262   HOST_WIDE_INT val = INTVAL (sub);
4263 
4264   return IN_RANGE (val, 1, size) ? val : -1;
4265 }
4266 
4267 /* Generate code to push X onto the stack, assuming it has mode MODE and
4268    type TYPE.
4269    MODE is redundant except when X is a CONST_INT (since they don't
4270    carry mode info).
4271    SIZE is an rtx for the size of data to be copied (in bytes),
4272    needed only if X is BLKmode.
4273    Return true if successful.  May return false if asked to push a
4274    partial argument during a sibcall optimization (as specified by
4275    SIBCALL_P) and the incoming and outgoing pointers cannot be shown
4276    to not overlap.
4277 
4278    ALIGN (in bits) is maximum alignment we can assume.
4279 
4280    If PARTIAL and REG are both nonzero, then copy that many of the first
4281    bytes of X into registers starting with REG, and push the rest of X.
4282    The amount of space pushed is decreased by PARTIAL bytes.
4283    REG must be a hard register in this case.
4284    If REG is zero but PARTIAL is not, take any all others actions for an
4285    argument partially in registers, but do not actually load any
4286    registers.
4287 
4288    EXTRA is the amount in bytes of extra space to leave next to this arg.
4289    This is ignored if an argument block has already been allocated.
4290 
4291    On a machine that lacks real push insns, ARGS_ADDR is the address of
4292    the bottom of the argument block for this call.  We use indexing off there
4293    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when a
4294    argument block has not been preallocated.
4295 
4296    ARGS_SO_FAR is the size of args previously pushed for this call.
4297 
4298    REG_PARM_STACK_SPACE is nonzero if functions require stack space
4299    for arguments passed in registers.  If nonzero, it will be the number
4300    of bytes required.  */
4301 
4302 bool
4303 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4304 		unsigned int align, int partial, rtx reg, poly_int64 extra,
4305 		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4306 		rtx alignment_pad, bool sibcall_p)
4307 {
4308   rtx xinner;
4309   pad_direction stack_direction
4310     = STACK_GROWS_DOWNWARD ? PAD_DOWNWARD : PAD_UPWARD;
4311 
4312   /* Decide where to pad the argument: PAD_DOWNWARD for below,
4313      PAD_UPWARD for above, or PAD_NONE for don't pad it.
4314      Default is below for small data on big-endian machines; else above.  */
4315   pad_direction where_pad = targetm.calls.function_arg_padding (mode, type);
4316 
4317   /* Invert direction if stack is post-decrement.
4318      FIXME: why?  */
4319   if (STACK_PUSH_CODE == POST_DEC)
4320     if (where_pad != PAD_NONE)
4321       where_pad = (where_pad == PAD_DOWNWARD ? PAD_UPWARD : PAD_DOWNWARD);
4322 
4323   xinner = x;
4324 
4325   int nregs = partial / UNITS_PER_WORD;
4326   rtx *tmp_regs = NULL;
4327   int overlapping = 0;
4328 
4329   if (mode == BLKmode
4330       || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)
4331 	  && type != NULL_TREE))
4332     {
4333       /* Copy a block into the stack, entirely or partially.  */
4334 
4335       rtx temp;
4336       int used;
4337       int offset;
4338       int skip;
4339 
4340       offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4341       used = partial - offset;
4342 
4343       if (mode != BLKmode)
4344 	{
4345 	  /* A value is to be stored in an insufficiently aligned
4346 	     stack slot; copy via a suitably aligned slot if
4347 	     necessary.  */
4348 	  size = gen_int_mode (GET_MODE_SIZE (mode), Pmode);
4349 	  if (!MEM_P (xinner))
4350 	    {
4351 	      temp = assign_temp (type, 1, 1);
4352 	      emit_move_insn (temp, xinner);
4353 	      xinner = temp;
4354 	    }
4355 	}
4356 
4357       gcc_assert (size);
4358 
4359       /* USED is now the # of bytes we need not copy to the stack
4360 	 because registers will take care of them.  */
4361 
4362       if (partial != 0)
4363 	xinner = adjust_address (xinner, BLKmode, used);
4364 
4365       /* If the partial register-part of the arg counts in its stack size,
4366 	 skip the part of stack space corresponding to the registers.
4367 	 Otherwise, start copying to the beginning of the stack space,
4368 	 by setting SKIP to 0.  */
4369       skip = (reg_parm_stack_space == 0) ? 0 : used;
4370 
4371 #ifdef PUSH_ROUNDING
4372       /* Do it with several push insns if that doesn't take lots of insns
4373 	 and if there is no difficulty with push insns that skip bytes
4374 	 on the stack for alignment purposes.  */
4375       if (args_addr == 0
4376 	  && PUSH_ARGS
4377 	  && CONST_INT_P (size)
4378 	  && skip == 0
4379 	  && MEM_ALIGN (xinner) >= align
4380 	  && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4381 	  /* Here we avoid the case of a structure whose weak alignment
4382 	     forces many pushes of a small amount of data,
4383 	     and such small pushes do rounding that causes trouble.  */
4384 	  && ((!targetm.slow_unaligned_access (word_mode, align))
4385 	      || align >= BIGGEST_ALIGNMENT
4386 	      || known_eq (PUSH_ROUNDING (align / BITS_PER_UNIT),
4387 			   align / BITS_PER_UNIT))
4388 	  && known_eq (PUSH_ROUNDING (INTVAL (size)), INTVAL (size)))
4389 	{
4390 	  /* Push padding now if padding above and stack grows down,
4391 	     or if padding below and stack grows up.
4392 	     But if space already allocated, this has already been done.  */
4393 	  if (maybe_ne (extra, 0)
4394 	      && args_addr == 0
4395 	      && where_pad != PAD_NONE
4396 	      && where_pad != stack_direction)
4397 	    anti_adjust_stack (gen_int_mode (extra, Pmode));
4398 
4399 	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4400 	}
4401       else
4402 #endif /* PUSH_ROUNDING  */
4403 	{
4404 	  rtx target;
4405 
4406 	  /* Otherwise make space on the stack and copy the data
4407 	     to the address of that space.  */
4408 
4409 	  /* Deduct words put into registers from the size we must copy.  */
4410 	  if (partial != 0)
4411 	    {
4412 	      if (CONST_INT_P (size))
4413 		size = GEN_INT (INTVAL (size) - used);
4414 	      else
4415 		size = expand_binop (GET_MODE (size), sub_optab, size,
4416 				     gen_int_mode (used, GET_MODE (size)),
4417 				     NULL_RTX, 0, OPTAB_LIB_WIDEN);
4418 	    }
4419 
4420 	  /* Get the address of the stack space.
4421 	     In this case, we do not deal with EXTRA separately.
4422 	     A single stack adjust will do.  */
4423 	  if (! args_addr)
4424 	    {
4425 	      temp = push_block (size, extra, where_pad == PAD_DOWNWARD);
4426 	      extra = 0;
4427 	    }
4428 	  else if (CONST_INT_P (args_so_far))
4429 	    temp = memory_address (BLKmode,
4430 				   plus_constant (Pmode, args_addr,
4431 						  skip + INTVAL (args_so_far)));
4432 	  else
4433 	    temp = memory_address (BLKmode,
4434 				   plus_constant (Pmode,
4435 						  gen_rtx_PLUS (Pmode,
4436 								args_addr,
4437 								args_so_far),
4438 						  skip));
4439 
4440 	  if (!ACCUMULATE_OUTGOING_ARGS)
4441 	    {
4442 	      /* If the source is referenced relative to the stack pointer,
4443 		 copy it to another register to stabilize it.  We do not need
4444 		 to do this if we know that we won't be changing sp.  */
4445 
4446 	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4447 		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4448 		temp = copy_to_reg (temp);
4449 	    }
4450 
4451 	  target = gen_rtx_MEM (BLKmode, temp);
4452 
4453 	  /* We do *not* set_mem_attributes here, because incoming arguments
4454 	     may overlap with sibling call outgoing arguments and we cannot
4455 	     allow reordering of reads from function arguments with stores
4456 	     to outgoing arguments of sibling calls.  We do, however, want
4457 	     to record the alignment of the stack slot.  */
4458 	  /* ALIGN may well be better aligned than TYPE, e.g. due to
4459 	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
4460 	  set_mem_align (target, align);
4461 
4462 	  /* If part should go in registers and pushing to that part would
4463 	     overwrite some of the values that need to go into regs, load the
4464 	     overlapping values into temporary pseudos to be moved into the hard
4465 	     regs at the end after the stack pushing has completed.
4466 	     We cannot load them directly into the hard regs here because
4467 	     they can be clobbered by the block move expansions.
4468 	     See PR 65358.  */
4469 
4470 	  if (partial > 0 && reg != 0 && mode == BLKmode
4471 	      && GET_CODE (reg) != PARALLEL)
4472 	    {
4473 	      overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
4474 	      if (overlapping > 0)
4475 	        {
4476 		  gcc_assert (overlapping % UNITS_PER_WORD == 0);
4477 		  overlapping /= UNITS_PER_WORD;
4478 
4479 		  tmp_regs = XALLOCAVEC (rtx, overlapping);
4480 
4481 		  for (int i = 0; i < overlapping; i++)
4482 		    tmp_regs[i] = gen_reg_rtx (word_mode);
4483 
4484 		  for (int i = 0; i < overlapping; i++)
4485 		    emit_move_insn (tmp_regs[i],
4486 				    operand_subword_force (target, i, mode));
4487 	        }
4488 	      else if (overlapping == -1)
4489 		overlapping = 0;
4490 	      /* Could not determine whether there is overlap.
4491 	         Fail the sibcall.  */
4492 	      else
4493 		{
4494 		  overlapping = 0;
4495 		  if (sibcall_p)
4496 		    return false;
4497 		}
4498 	    }
4499 	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4500 	}
4501     }
4502   else if (partial > 0)
4503     {
4504       /* Scalar partly in registers.  This case is only supported
4505 	 for fixed-wdth modes.  */
4506       int size = GET_MODE_SIZE (mode).to_constant ();
4507       size /= UNITS_PER_WORD;
4508       int i;
4509       int not_stack;
4510       /* # bytes of start of argument
4511 	 that we must make space for but need not store.  */
4512       int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4513       int args_offset = INTVAL (args_so_far);
4514       int skip;
4515 
4516       /* Push padding now if padding above and stack grows down,
4517 	 or if padding below and stack grows up.
4518 	 But if space already allocated, this has already been done.  */
4519       if (maybe_ne (extra, 0)
4520 	  && args_addr == 0
4521 	  && where_pad != PAD_NONE
4522 	  && where_pad != stack_direction)
4523 	anti_adjust_stack (gen_int_mode (extra, Pmode));
4524 
4525       /* If we make space by pushing it, we might as well push
4526 	 the real data.  Otherwise, we can leave OFFSET nonzero
4527 	 and leave the space uninitialized.  */
4528       if (args_addr == 0)
4529 	offset = 0;
4530 
4531       /* Now NOT_STACK gets the number of words that we don't need to
4532 	 allocate on the stack.  Convert OFFSET to words too.  */
4533       not_stack = (partial - offset) / UNITS_PER_WORD;
4534       offset /= UNITS_PER_WORD;
4535 
4536       /* If the partial register-part of the arg counts in its stack size,
4537 	 skip the part of stack space corresponding to the registers.
4538 	 Otherwise, start copying to the beginning of the stack space,
4539 	 by setting SKIP to 0.  */
4540       skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4541 
4542       if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4543 	x = validize_mem (force_const_mem (mode, x));
4544 
4545       /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4546 	 SUBREGs of such registers are not allowed.  */
4547       if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4548 	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4549 	x = copy_to_reg (x);
4550 
4551       /* Loop over all the words allocated on the stack for this arg.  */
4552       /* We can do it by words, because any scalar bigger than a word
4553 	 has a size a multiple of a word.  */
4554       for (i = size - 1; i >= not_stack; i--)
4555 	if (i >= not_stack + offset)
4556 	  if (!emit_push_insn (operand_subword_force (x, i, mode),
4557 			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4558 			  0, args_addr,
4559 			  GEN_INT (args_offset + ((i - not_stack + skip)
4560 						  * UNITS_PER_WORD)),
4561 			  reg_parm_stack_space, alignment_pad, sibcall_p))
4562 	    return false;
4563     }
4564   else
4565     {
4566       rtx addr;
4567       rtx dest;
4568 
4569       /* Push padding now if padding above and stack grows down,
4570 	 or if padding below and stack grows up.
4571 	 But if space already allocated, this has already been done.  */
4572       if (maybe_ne (extra, 0)
4573 	  && args_addr == 0
4574 	  && where_pad != PAD_NONE
4575 	  && where_pad != stack_direction)
4576 	anti_adjust_stack (gen_int_mode (extra, Pmode));
4577 
4578 #ifdef PUSH_ROUNDING
4579       if (args_addr == 0 && PUSH_ARGS)
4580 	emit_single_push_insn (mode, x, type);
4581       else
4582 #endif
4583 	{
4584 	  addr = simplify_gen_binary (PLUS, Pmode, args_addr, args_so_far);
4585 	  dest = gen_rtx_MEM (mode, memory_address (mode, addr));
4586 
4587 	  /* We do *not* set_mem_attributes here, because incoming arguments
4588 	     may overlap with sibling call outgoing arguments and we cannot
4589 	     allow reordering of reads from function arguments with stores
4590 	     to outgoing arguments of sibling calls.  We do, however, want
4591 	     to record the alignment of the stack slot.  */
4592 	  /* ALIGN may well be better aligned than TYPE, e.g. due to
4593 	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
4594 	  set_mem_align (dest, align);
4595 
4596 	  emit_move_insn (dest, x);
4597 	}
4598     }
4599 
4600   /* Move the partial arguments into the registers and any overlapping
4601      values that we moved into the pseudos in tmp_regs.  */
4602   if (partial > 0 && reg != 0)
4603     {
4604       /* Handle calls that pass values in multiple non-contiguous locations.
4605 	 The Irix 6 ABI has examples of this.  */
4606       if (GET_CODE (reg) == PARALLEL)
4607 	emit_group_load (reg, x, type, -1);
4608       else
4609         {
4610 	  gcc_assert (partial % UNITS_PER_WORD == 0);
4611 	  move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);
4612 
4613 	  for (int i = 0; i < overlapping; i++)
4614 	    emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
4615 						    + nregs - overlapping + i),
4616 			    tmp_regs[i]);
4617 
4618 	}
4619     }
4620 
4621   if (maybe_ne (extra, 0) && args_addr == 0 && where_pad == stack_direction)
4622     anti_adjust_stack (gen_int_mode (extra, Pmode));
4623 
4624   if (alignment_pad && args_addr == 0)
4625     anti_adjust_stack (alignment_pad);
4626 
4627   return true;
4628 }
4629 
4630 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4631    operations.  */
4632 
4633 static rtx
4634 get_subtarget (rtx x)
4635 {
4636   return (optimize
4637           || x == 0
4638 	   /* Only registers can be subtargets.  */
4639 	   || !REG_P (x)
4640 	   /* Don't use hard regs to avoid extending their life.  */
4641 	   || REGNO (x) < FIRST_PSEUDO_REGISTER
4642 	  ? 0 : x);
4643 }
4644 
4645 /* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
4646    FIELD is a bitfield.  Returns true if the optimization was successful,
4647    and there's nothing else to do.  */
4648 
4649 static bool
4650 optimize_bitfield_assignment_op (poly_uint64 pbitsize,
4651 				 poly_uint64 pbitpos,
4652 				 poly_uint64 pbitregion_start,
4653 				 poly_uint64 pbitregion_end,
4654 				 machine_mode mode1, rtx str_rtx,
4655 				 tree to, tree src, bool reverse)
4656 {
4657   /* str_mode is not guaranteed to be a scalar type.  */
4658   machine_mode str_mode = GET_MODE (str_rtx);
4659   unsigned int str_bitsize;
4660   tree op0, op1;
4661   rtx value, result;
4662   optab binop;
4663   gimple *srcstmt;
4664   enum tree_code code;
4665 
4666   unsigned HOST_WIDE_INT bitsize, bitpos, bitregion_start, bitregion_end;
4667   if (mode1 != VOIDmode
4668       || !pbitsize.is_constant (&bitsize)
4669       || !pbitpos.is_constant (&bitpos)
4670       || !pbitregion_start.is_constant (&bitregion_start)
4671       || !pbitregion_end.is_constant (&bitregion_end)
4672       || bitsize >= BITS_PER_WORD
4673       || !GET_MODE_BITSIZE (str_mode).is_constant (&str_bitsize)
4674       || str_bitsize > BITS_PER_WORD
4675       || TREE_SIDE_EFFECTS (to)
4676       || TREE_THIS_VOLATILE (to))
4677     return false;
4678 
4679   STRIP_NOPS (src);
4680   if (TREE_CODE (src) != SSA_NAME)
4681     return false;
4682   if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4683     return false;
4684 
4685   srcstmt = get_gimple_for_ssa_name (src);
4686   if (!srcstmt
4687       || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4688     return false;
4689 
4690   code = gimple_assign_rhs_code (srcstmt);
4691 
4692   op0 = gimple_assign_rhs1 (srcstmt);
4693 
4694   /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4695      to find its initialization.  Hopefully the initialization will
4696      be from a bitfield load.  */
4697   if (TREE_CODE (op0) == SSA_NAME)
4698     {
4699       gimple *op0stmt = get_gimple_for_ssa_name (op0);
4700 
4701       /* We want to eventually have OP0 be the same as TO, which
4702 	 should be a bitfield.  */
4703       if (!op0stmt
4704 	  || !is_gimple_assign (op0stmt)
4705 	  || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4706 	return false;
4707       op0 = gimple_assign_rhs1 (op0stmt);
4708     }
4709 
4710   op1 = gimple_assign_rhs2 (srcstmt);
4711 
4712   if (!operand_equal_p (to, op0, 0))
4713     return false;
4714 
4715   if (MEM_P (str_rtx))
4716     {
4717       unsigned HOST_WIDE_INT offset1;
4718 
4719       if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4720 	str_bitsize = BITS_PER_WORD;
4721 
4722       scalar_int_mode best_mode;
4723       if (!get_best_mode (bitsize, bitpos, bitregion_start, bitregion_end,
4724 			  MEM_ALIGN (str_rtx), str_bitsize, false, &best_mode))
4725 	return false;
4726       str_mode = best_mode;
4727       str_bitsize = GET_MODE_BITSIZE (best_mode);
4728 
4729       offset1 = bitpos;
4730       bitpos %= str_bitsize;
4731       offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4732       str_rtx = adjust_address (str_rtx, str_mode, offset1);
4733     }
4734   else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4735     return false;
4736 
4737   /* If the bit field covers the whole REG/MEM, store_field
4738      will likely generate better code.  */
4739   if (bitsize >= str_bitsize)
4740     return false;
4741 
4742   /* We can't handle fields split across multiple entities.  */
4743   if (bitpos + bitsize > str_bitsize)
4744     return false;
4745 
4746   if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4747     bitpos = str_bitsize - bitpos - bitsize;
4748 
4749   switch (code)
4750     {
4751     case PLUS_EXPR:
4752     case MINUS_EXPR:
4753       /* For now, just optimize the case of the topmost bitfield
4754 	 where we don't need to do any masking and also
4755 	 1 bit bitfields where xor can be used.
4756 	 We might win by one instruction for the other bitfields
4757 	 too if insv/extv instructions aren't used, so that
4758 	 can be added later.  */
4759       if ((reverse || bitpos + bitsize != str_bitsize)
4760 	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4761 	break;
4762 
4763       value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4764       value = convert_modes (str_mode,
4765 			     TYPE_MODE (TREE_TYPE (op1)), value,
4766 			     TYPE_UNSIGNED (TREE_TYPE (op1)));
4767 
4768       /* We may be accessing data outside the field, which means
4769 	 we can alias adjacent data.  */
4770       if (MEM_P (str_rtx))
4771 	{
4772 	  str_rtx = shallow_copy_rtx (str_rtx);
4773 	  set_mem_alias_set (str_rtx, 0);
4774 	  set_mem_expr (str_rtx, 0);
4775 	}
4776 
4777       if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
4778 	{
4779 	  value = expand_and (str_mode, value, const1_rtx, NULL);
4780 	  binop = xor_optab;
4781 	}
4782       else
4783 	binop = code == PLUS_EXPR ? add_optab : sub_optab;
4784 
4785       value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4786       if (reverse)
4787 	value = flip_storage_order (str_mode, value);
4788       result = expand_binop (str_mode, binop, str_rtx,
4789 			     value, str_rtx, 1, OPTAB_WIDEN);
4790       if (result != str_rtx)
4791 	emit_move_insn (str_rtx, result);
4792       return true;
4793 
4794     case BIT_IOR_EXPR:
4795     case BIT_XOR_EXPR:
4796       if (TREE_CODE (op1) != INTEGER_CST)
4797 	break;
4798       value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4799       value = convert_modes (str_mode,
4800 			     TYPE_MODE (TREE_TYPE (op1)), value,
4801 			     TYPE_UNSIGNED (TREE_TYPE (op1)));
4802 
4803       /* We may be accessing data outside the field, which means
4804 	 we can alias adjacent data.  */
4805       if (MEM_P (str_rtx))
4806 	{
4807 	  str_rtx = shallow_copy_rtx (str_rtx);
4808 	  set_mem_alias_set (str_rtx, 0);
4809 	  set_mem_expr (str_rtx, 0);
4810 	}
4811 
4812       binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4813       if (bitpos + bitsize != str_bitsize)
4814 	{
4815 	  rtx mask = gen_int_mode ((HOST_WIDE_INT_1U << bitsize) - 1,
4816 				   str_mode);
4817 	  value = expand_and (str_mode, value, mask, NULL_RTX);
4818 	}
4819       value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4820       if (reverse)
4821 	value = flip_storage_order (str_mode, value);
4822       result = expand_binop (str_mode, binop, str_rtx,
4823 			     value, str_rtx, 1, OPTAB_WIDEN);
4824       if (result != str_rtx)
4825 	emit_move_insn (str_rtx, result);
4826       return true;
4827 
4828     default:
4829       break;
4830     }
4831 
4832   return false;
4833 }
4834 
4835 /* In the C++ memory model, consecutive bit fields in a structure are
4836    considered one memory location.
4837 
4838    Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4839    returns the bit range of consecutive bits in which this COMPONENT_REF
4840    belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
4841    and *OFFSET may be adjusted in the process.
4842 
4843    If the access does not need to be restricted, 0 is returned in both
4844    *BITSTART and *BITEND.  */
4845 
4846 void
4847 get_bit_range (poly_uint64_pod *bitstart, poly_uint64_pod *bitend, tree exp,
4848 	       poly_int64_pod *bitpos, tree *offset)
4849 {
4850   poly_int64 bitoffset;
4851   tree field, repr;
4852 
4853   gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4854 
4855   field = TREE_OPERAND (exp, 1);
4856   repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4857   /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4858      need to limit the range we can access.  */
4859   if (!repr)
4860     {
4861       *bitstart = *bitend = 0;
4862       return;
4863     }
4864 
4865   /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4866      part of a larger bit field, then the representative does not serve any
4867      useful purpose.  This can occur in Ada.  */
4868   if (handled_component_p (TREE_OPERAND (exp, 0)))
4869     {
4870       machine_mode rmode;
4871       poly_int64 rbitsize, rbitpos;
4872       tree roffset;
4873       int unsignedp, reversep, volatilep = 0;
4874       get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4875 			   &roffset, &rmode, &unsignedp, &reversep,
4876 			   &volatilep);
4877       if (!multiple_p (rbitpos, BITS_PER_UNIT))
4878 	{
4879 	  *bitstart = *bitend = 0;
4880 	  return;
4881 	}
4882     }
4883 
4884   /* Compute the adjustment to bitpos from the offset of the field
4885      relative to the representative.  DECL_FIELD_OFFSET of field and
4886      repr are the same by construction if they are not constants,
4887      see finish_bitfield_layout.  */
4888   poly_uint64 field_offset, repr_offset;
4889   if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
4890       && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
4891     bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
4892   else
4893     bitoffset = 0;
4894   bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4895 		- tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4896 
4897   /* If the adjustment is larger than bitpos, we would have a negative bit
4898      position for the lower bound and this may wreak havoc later.  Adjust
4899      offset and bitpos to make the lower bound non-negative in that case.  */
4900   if (maybe_gt (bitoffset, *bitpos))
4901     {
4902       poly_int64 adjust_bits = upper_bound (bitoffset, *bitpos) - *bitpos;
4903       poly_int64 adjust_bytes = exact_div (adjust_bits, BITS_PER_UNIT);
4904 
4905       *bitpos += adjust_bits;
4906       if (*offset == NULL_TREE)
4907 	*offset = size_int (-adjust_bytes);
4908       else
4909 	*offset = size_binop (MINUS_EXPR, *offset, size_int (adjust_bytes));
4910       *bitstart = 0;
4911     }
4912   else
4913     *bitstart = *bitpos - bitoffset;
4914 
4915   *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4916 }
4917 
4918 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4919    in memory and has non-BLKmode.  DECL_RTL must not be a MEM; if
4920    DECL_RTL was not set yet, return NORTL.  */
4921 
4922 static inline bool
4923 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4924 {
4925   if (TREE_CODE (addr) != ADDR_EXPR)
4926     return false;
4927 
4928   tree base = TREE_OPERAND (addr, 0);
4929 
4930   if (!DECL_P (base)
4931       || TREE_ADDRESSABLE (base)
4932       || DECL_MODE (base) == BLKmode)
4933     return false;
4934 
4935   if (!DECL_RTL_SET_P (base))
4936     return nortl;
4937 
4938   return (!MEM_P (DECL_RTL (base)));
4939 }
4940 
4941 /* Returns true if the MEM_REF REF refers to an object that does not
4942    reside in memory and has non-BLKmode.  */
4943 
4944 static inline bool
4945 mem_ref_refers_to_non_mem_p (tree ref)
4946 {
4947   tree base = TREE_OPERAND (ref, 0);
4948   return addr_expr_of_non_mem_decl_p_1 (base, false);
4949 }
4950 
4951 /* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
4952    is true, try generating a nontemporal store.  */
4953 
4954 void
4955 expand_assignment (tree to, tree from, bool nontemporal)
4956 {
4957   rtx to_rtx = 0;
4958   rtx result;
4959   machine_mode mode;
4960   unsigned int align;
4961   enum insn_code icode;
4962 
4963   /* Don't crash if the lhs of the assignment was erroneous.  */
4964   if (TREE_CODE (to) == ERROR_MARK)
4965     {
4966       expand_normal (from);
4967       return;
4968     }
4969 
4970   /* Optimize away no-op moves without side-effects.  */
4971   if (operand_equal_p (to, from, 0))
4972     return;
4973 
4974   /* Handle misaligned stores.  */
4975   mode = TYPE_MODE (TREE_TYPE (to));
4976   if ((TREE_CODE (to) == MEM_REF
4977        || TREE_CODE (to) == TARGET_MEM_REF)
4978       && mode != BLKmode
4979       && !mem_ref_refers_to_non_mem_p (to)
4980       && ((align = get_object_alignment (to))
4981 	  < GET_MODE_ALIGNMENT (mode))
4982       && (((icode = optab_handler (movmisalign_optab, mode))
4983 	   != CODE_FOR_nothing)
4984 	  || targetm.slow_unaligned_access (mode, align)))
4985     {
4986       rtx reg, mem;
4987 
4988       reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4989       reg = force_not_mem (reg);
4990       mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4991       if (TREE_CODE (to) == MEM_REF && REF_REVERSE_STORAGE_ORDER (to))
4992 	reg = flip_storage_order (mode, reg);
4993 
4994       if (icode != CODE_FOR_nothing)
4995 	{
4996 	  struct expand_operand ops[2];
4997 
4998 	  create_fixed_operand (&ops[0], mem);
4999 	  create_input_operand (&ops[1], reg, mode);
5000 	  /* The movmisalign<mode> pattern cannot fail, else the assignment
5001 	     would silently be omitted.  */
5002 	  expand_insn (icode, 2, ops);
5003 	}
5004       else
5005 	store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg,
5006 			 false);
5007       return;
5008     }
5009 
5010   /* Assignment of a structure component needs special treatment
5011      if the structure component's rtx is not simply a MEM.
5012      Assignment of an array element at a constant index, and assignment of
5013      an array element in an unaligned packed structure field, has the same
5014      problem.  Same for (partially) storing into a non-memory object.  */
5015   if (handled_component_p (to)
5016       || (TREE_CODE (to) == MEM_REF
5017 	  && (REF_REVERSE_STORAGE_ORDER (to)
5018 	      || mem_ref_refers_to_non_mem_p (to)))
5019       || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
5020     {
5021       machine_mode mode1;
5022       poly_int64 bitsize, bitpos;
5023       poly_uint64 bitregion_start = 0;
5024       poly_uint64 bitregion_end = 0;
5025       tree offset;
5026       int unsignedp, reversep, volatilep = 0;
5027       tree tem;
5028 
5029       push_temp_slots ();
5030       tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
5031 				 &unsignedp, &reversep, &volatilep);
5032 
5033       /* Make sure bitpos is not negative, it can wreak havoc later.  */
5034       if (maybe_lt (bitpos, 0))
5035 	{
5036 	  gcc_assert (offset == NULL_TREE);
5037 	  offset = size_int (bits_to_bytes_round_down (bitpos));
5038 	  bitpos = num_trailing_bits (bitpos);
5039 	}
5040 
5041       if (TREE_CODE (to) == COMPONENT_REF
5042 	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
5043 	get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
5044       /* The C++ memory model naturally applies to byte-aligned fields.
5045 	 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
5046 	 BITSIZE are not byte-aligned, there is no need to limit the range
5047 	 we can access.  This can occur with packed structures in Ada.  */
5048       else if (maybe_gt (bitsize, 0)
5049 	       && multiple_p (bitsize, BITS_PER_UNIT)
5050 	       && multiple_p (bitpos, BITS_PER_UNIT))
5051 	{
5052 	  bitregion_start = bitpos;
5053 	  bitregion_end = bitpos + bitsize - 1;
5054 	}
5055 
5056       to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
5057 
5058       /* If the field has a mode, we want to access it in the
5059 	 field's mode, not the computed mode.
5060 	 If a MEM has VOIDmode (external with incomplete type),
5061 	 use BLKmode for it instead.  */
5062       if (MEM_P (to_rtx))
5063 	{
5064 	  if (mode1 != VOIDmode)
5065 	    to_rtx = adjust_address (to_rtx, mode1, 0);
5066 	  else if (GET_MODE (to_rtx) == VOIDmode)
5067 	    to_rtx = adjust_address (to_rtx, BLKmode, 0);
5068 	}
5069 
5070       if (offset != 0)
5071 	{
5072 	  machine_mode address_mode;
5073 	  rtx offset_rtx;
5074 
5075 	  if (!MEM_P (to_rtx))
5076 	    {
5077 	      /* We can get constant negative offsets into arrays with broken
5078 		 user code.  Translate this to a trap instead of ICEing.  */
5079 	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
5080 	      expand_builtin_trap ();
5081 	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
5082 	    }
5083 
5084 	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
5085 	  address_mode = get_address_mode (to_rtx);
5086 	  if (GET_MODE (offset_rtx) != address_mode)
5087 	    {
5088 		/* We cannot be sure that the RTL in offset_rtx is valid outside
5089 		   of a memory address context, so force it into a register
5090 		   before attempting to convert it to the desired mode.  */
5091 	      offset_rtx = force_operand (offset_rtx, NULL_RTX);
5092 	      offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5093 	    }
5094 
5095 	  /* If we have an expression in OFFSET_RTX and a non-zero
5096 	     byte offset in BITPOS, adding the byte offset before the
5097 	     OFFSET_RTX results in better intermediate code, which makes
5098 	     later rtl optimization passes perform better.
5099 
5100 	     We prefer intermediate code like this:
5101 
5102 	     r124:DI=r123:DI+0x18
5103 	     [r124:DI]=r121:DI
5104 
5105 	     ... instead of ...
5106 
5107 	     r124:DI=r123:DI+0x10
5108 	     [r124:DI+0x8]=r121:DI
5109 
5110 	     This is only done for aligned data values, as these can
5111 	     be expected to result in single move instructions.  */
5112 	  poly_int64 bytepos;
5113 	  if (mode1 != VOIDmode
5114 	      && maybe_ne (bitpos, 0)
5115 	      && maybe_gt (bitsize, 0)
5116 	      && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
5117 	      && multiple_p (bitpos, bitsize)
5118 	      && multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1))
5119 	      && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
5120 	    {
5121 	      to_rtx = adjust_address (to_rtx, mode1, bytepos);
5122 	      bitregion_start = 0;
5123 	      if (known_ge (bitregion_end, poly_uint64 (bitpos)))
5124 		bitregion_end -= bitpos;
5125 	      bitpos = 0;
5126 	    }
5127 
5128 	  to_rtx = offset_address (to_rtx, offset_rtx,
5129 				   highest_pow2_factor_for_target (to,
5130 				   				   offset));
5131 	}
5132 
5133       /* No action is needed if the target is not a memory and the field
5134 	 lies completely outside that target.  This can occur if the source
5135 	 code contains an out-of-bounds access to a small array.  */
5136       if (!MEM_P (to_rtx)
5137 	  && GET_MODE (to_rtx) != BLKmode
5138 	  && known_ge (bitpos, GET_MODE_PRECISION (GET_MODE (to_rtx))))
5139 	{
5140 	  expand_normal (from);
5141 	  result = NULL;
5142 	}
5143       /* Handle expand_expr of a complex value returning a CONCAT.  */
5144       else if (GET_CODE (to_rtx) == CONCAT)
5145 	{
5146 	  machine_mode to_mode = GET_MODE (to_rtx);
5147 	  gcc_checking_assert (COMPLEX_MODE_P (to_mode));
5148 	  poly_int64 mode_bitsize = GET_MODE_BITSIZE (to_mode);
5149 	  unsigned short inner_bitsize = GET_MODE_UNIT_BITSIZE (to_mode);
5150 	  if (TYPE_MODE (TREE_TYPE (from)) == to_mode
5151 	      && known_eq (bitpos, 0)
5152 	      && known_eq (bitsize, mode_bitsize))
5153 	    result = store_expr (from, to_rtx, false, nontemporal, reversep);
5154 	  else if (TYPE_MODE (TREE_TYPE (from)) == GET_MODE_INNER (to_mode)
5155 		   && known_eq (bitsize, inner_bitsize)
5156 		   && (known_eq (bitpos, 0)
5157 		       || known_eq (bitpos, inner_bitsize)))
5158 	    result = store_expr (from, XEXP (to_rtx, maybe_ne (bitpos, 0)),
5159 				 false, nontemporal, reversep);
5160 	  else if (known_le (bitpos + bitsize, inner_bitsize))
5161 	    result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
5162 				  bitregion_start, bitregion_end,
5163 				  mode1, from, get_alias_set (to),
5164 				  nontemporal, reversep);
5165 	  else if (known_ge (bitpos, inner_bitsize))
5166 	    result = store_field (XEXP (to_rtx, 1), bitsize,
5167 				  bitpos - inner_bitsize,
5168 				  bitregion_start, bitregion_end,
5169 				  mode1, from, get_alias_set (to),
5170 				  nontemporal, reversep);
5171 	  else if (known_eq (bitpos, 0) && known_eq (bitsize, mode_bitsize))
5172 	    {
5173 	      result = expand_normal (from);
5174 	      if (GET_CODE (result) == CONCAT)
5175 		{
5176 		  to_mode = GET_MODE_INNER (to_mode);
5177 		  machine_mode from_mode = GET_MODE_INNER (GET_MODE (result));
5178 		  rtx from_real
5179 		    = simplify_gen_subreg (to_mode, XEXP (result, 0),
5180 					   from_mode, 0);
5181 		  rtx from_imag
5182 		    = simplify_gen_subreg (to_mode, XEXP (result, 1),
5183 					   from_mode, 0);
5184 		  if (!from_real || !from_imag)
5185 		    goto concat_store_slow;
5186 		  emit_move_insn (XEXP (to_rtx, 0), from_real);
5187 		  emit_move_insn (XEXP (to_rtx, 1), from_imag);
5188 		}
5189 	      else
5190 		{
5191 		  rtx from_rtx;
5192 		  if (MEM_P (result))
5193 		    from_rtx = change_address (result, to_mode, NULL_RTX);
5194 		  else
5195 		    from_rtx
5196 		      = simplify_gen_subreg (to_mode, result,
5197 					     TYPE_MODE (TREE_TYPE (from)), 0);
5198 		  if (from_rtx)
5199 		    {
5200 		      emit_move_insn (XEXP (to_rtx, 0),
5201 				      read_complex_part (from_rtx, false));
5202 		      emit_move_insn (XEXP (to_rtx, 1),
5203 				      read_complex_part (from_rtx, true));
5204 		    }
5205 		  else
5206 		    {
5207 		      machine_mode to_mode
5208 			= GET_MODE_INNER (GET_MODE (to_rtx));
5209 		      rtx from_real
5210 			= simplify_gen_subreg (to_mode, result,
5211 					       TYPE_MODE (TREE_TYPE (from)),
5212 					       0);
5213 		      rtx from_imag
5214 			= simplify_gen_subreg (to_mode, result,
5215 					       TYPE_MODE (TREE_TYPE (from)),
5216 					       GET_MODE_SIZE (to_mode));
5217 		      if (!from_real || !from_imag)
5218 			goto concat_store_slow;
5219 		      emit_move_insn (XEXP (to_rtx, 0), from_real);
5220 		      emit_move_insn (XEXP (to_rtx, 1), from_imag);
5221 		    }
5222 		}
5223 	    }
5224 	  else
5225 	    {
5226 	    concat_store_slow:;
5227 	      rtx temp = assign_stack_temp (to_mode,
5228 					    GET_MODE_SIZE (GET_MODE (to_rtx)));
5229 	      write_complex_part (temp, XEXP (to_rtx, 0), false);
5230 	      write_complex_part (temp, XEXP (to_rtx, 1), true);
5231 	      result = store_field (temp, bitsize, bitpos,
5232 				    bitregion_start, bitregion_end,
5233 				    mode1, from, get_alias_set (to),
5234 				    nontemporal, reversep);
5235 	      emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
5236 	      emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
5237 	    }
5238 	}
5239       /* For calls to functions returning variable length structures, if TO_RTX
5240 	 is not a MEM, go through a MEM because we must not create temporaries
5241 	 of the VLA type.  */
5242       else if (!MEM_P (to_rtx)
5243 	       && TREE_CODE (from) == CALL_EXPR
5244 	       && COMPLETE_TYPE_P (TREE_TYPE (from))
5245 	       && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) != INTEGER_CST)
5246 	{
5247 	  rtx temp = assign_stack_temp (GET_MODE (to_rtx),
5248 					GET_MODE_SIZE (GET_MODE (to_rtx)));
5249 	  result = store_field (temp, bitsize, bitpos, bitregion_start,
5250 				bitregion_end, mode1, from, get_alias_set (to),
5251 				nontemporal, reversep);
5252 	  emit_move_insn (to_rtx, temp);
5253 	}
5254       else
5255 	{
5256 	  if (MEM_P (to_rtx))
5257 	    {
5258 	      /* If the field is at offset zero, we could have been given the
5259 		 DECL_RTX of the parent struct.  Don't munge it.  */
5260 	      to_rtx = shallow_copy_rtx (to_rtx);
5261 	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
5262 	      if (volatilep)
5263 		MEM_VOLATILE_P (to_rtx) = 1;
5264 	    }
5265 
5266 	  gcc_checking_assert (known_ge (bitpos, 0));
5267 	  if (optimize_bitfield_assignment_op (bitsize, bitpos,
5268 					       bitregion_start, bitregion_end,
5269 					       mode1, to_rtx, to, from,
5270 					       reversep))
5271 	    result = NULL;
5272 	  else
5273 	    result = store_field (to_rtx, bitsize, bitpos,
5274 				  bitregion_start, bitregion_end,
5275 				  mode1, from, get_alias_set (to),
5276 				  nontemporal, reversep);
5277 	}
5278 
5279       if (result)
5280 	preserve_temp_slots (result);
5281       pop_temp_slots ();
5282       return;
5283     }
5284 
5285   /* If the rhs is a function call and its value is not an aggregate,
5286      call the function before we start to compute the lhs.
5287      This is needed for correct code for cases such as
5288      val = setjmp (buf) on machines where reference to val
5289      requires loading up part of an address in a separate insn.
5290 
5291      Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5292      since it might be a promoted variable where the zero- or sign- extension
5293      needs to be done.  Handling this in the normal way is safe because no
5294      computation is done before the call.  The same is true for SSA names.  */
5295   if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5296       && COMPLETE_TYPE_P (TREE_TYPE (from))
5297       && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5298       && ! (((VAR_P (to)
5299 	      || TREE_CODE (to) == PARM_DECL
5300 	      || TREE_CODE (to) == RESULT_DECL)
5301 	     && REG_P (DECL_RTL (to)))
5302 	    || TREE_CODE (to) == SSA_NAME))
5303     {
5304       rtx value;
5305       rtx bounds;
5306 
5307       push_temp_slots ();
5308       value = expand_normal (from);
5309 
5310       /* Split value and bounds to store them separately.  */
5311       chkp_split_slot (value, &value, &bounds);
5312 
5313       if (to_rtx == 0)
5314 	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5315 
5316       /* Handle calls that return values in multiple non-contiguous locations.
5317 	 The Irix 6 ABI has examples of this.  */
5318       if (GET_CODE (to_rtx) == PARALLEL)
5319 	{
5320 	  if (GET_CODE (value) == PARALLEL)
5321 	    emit_group_move (to_rtx, value);
5322 	  else
5323 	    emit_group_load (to_rtx, value, TREE_TYPE (from),
5324 			     int_size_in_bytes (TREE_TYPE (from)));
5325 	}
5326       else if (GET_CODE (value) == PARALLEL)
5327 	emit_group_store (to_rtx, value, TREE_TYPE (from),
5328 			  int_size_in_bytes (TREE_TYPE (from)));
5329       else if (GET_MODE (to_rtx) == BLKmode)
5330 	{
5331 	  /* Handle calls that return BLKmode values in registers.  */
5332 	  if (REG_P (value))
5333 	    copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5334 	  else
5335 	    emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5336 	}
5337       else
5338 	{
5339 	  if (POINTER_TYPE_P (TREE_TYPE (to)))
5340 	    value = convert_memory_address_addr_space
5341 	      (as_a <scalar_int_mode> (GET_MODE (to_rtx)), value,
5342 	       TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5343 
5344 	  emit_move_insn (to_rtx, value);
5345 	}
5346 
5347       /* Store bounds if required.  */
5348       if (bounds
5349 	  && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5350 	{
5351 	  gcc_assert (MEM_P (to_rtx));
5352 	  chkp_emit_bounds_store (bounds, value, to_rtx);
5353 	}
5354 
5355       preserve_temp_slots (to_rtx);
5356       pop_temp_slots ();
5357       return;
5358     }
5359 
5360   /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.  */
5361   to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5362 
5363   /* Don't move directly into a return register.  */
5364   if (TREE_CODE (to) == RESULT_DECL
5365       && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5366     {
5367       rtx temp;
5368 
5369       push_temp_slots ();
5370 
5371       /* If the source is itself a return value, it still is in a pseudo at
5372 	 this point so we can move it back to the return register directly.  */
5373       if (REG_P (to_rtx)
5374 	  && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5375 	  && TREE_CODE (from) != CALL_EXPR)
5376 	temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5377       else
5378 	temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5379 
5380       /* Handle calls that return values in multiple non-contiguous locations.
5381 	 The Irix 6 ABI has examples of this.  */
5382       if (GET_CODE (to_rtx) == PARALLEL)
5383 	{
5384 	  if (GET_CODE (temp) == PARALLEL)
5385 	    emit_group_move (to_rtx, temp);
5386 	  else
5387 	    emit_group_load (to_rtx, temp, TREE_TYPE (from),
5388 			     int_size_in_bytes (TREE_TYPE (from)));
5389 	}
5390       else if (temp)
5391 	emit_move_insn (to_rtx, temp);
5392 
5393       preserve_temp_slots (to_rtx);
5394       pop_temp_slots ();
5395       return;
5396     }
5397 
5398   /* In case we are returning the contents of an object which overlaps
5399      the place the value is being stored, use a safe function when copying
5400      a value through a pointer into a structure value return block.  */
5401   if (TREE_CODE (to) == RESULT_DECL
5402       && TREE_CODE (from) == INDIRECT_REF
5403       && ADDR_SPACE_GENERIC_P
5404 	   (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5405       && refs_may_alias_p (to, from)
5406       && cfun->returns_struct
5407       && !cfun->returns_pcc_struct)
5408     {
5409       rtx from_rtx, size;
5410 
5411       push_temp_slots ();
5412       size = expr_size (from);
5413       from_rtx = expand_normal (from);
5414 
5415       emit_block_move_via_libcall (XEXP (to_rtx, 0), XEXP (from_rtx, 0), size);
5416 
5417       preserve_temp_slots (to_rtx);
5418       pop_temp_slots ();
5419       return;
5420     }
5421 
5422   /* Compute FROM and store the value in the rtx we got.  */
5423 
5424   push_temp_slots ();
5425   result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, false, to);
5426   preserve_temp_slots (result);
5427   pop_temp_slots ();
5428   return;
5429 }
5430 
5431 /* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
5432    succeeded, false otherwise.  */
5433 
5434 bool
5435 emit_storent_insn (rtx to, rtx from)
5436 {
5437   struct expand_operand ops[2];
5438   machine_mode mode = GET_MODE (to);
5439   enum insn_code code = optab_handler (storent_optab, mode);
5440 
5441   if (code == CODE_FOR_nothing)
5442     return false;
5443 
5444   create_fixed_operand (&ops[0], to);
5445   create_input_operand (&ops[1], from, mode);
5446   return maybe_expand_insn (code, 2, ops);
5447 }
5448 
5449 /* Generate code for computing expression EXP,
5450    and storing the value into TARGET.
5451 
5452    If the mode is BLKmode then we may return TARGET itself.
5453    It turns out that in BLKmode it doesn't cause a problem.
5454    because C has no operators that could combine two different
5455    assignments into the same BLKmode object with different values
5456    with no sequence point.  Will other languages need this to
5457    be more thorough?
5458 
5459    If CALL_PARAM_P is nonzero, this is a store into a call param on the
5460    stack, and block moves may need to be treated specially.
5461 
5462    If NONTEMPORAL is true, try using a nontemporal store instruction.
5463 
5464    If REVERSE is true, the store is to be done in reverse order.
5465 
5466    If BTARGET is not NULL then computed bounds of EXP are
5467    associated with BTARGET.  */
5468 
5469 rtx
5470 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5471 			bool nontemporal, bool reverse, tree btarget)
5472 {
5473   rtx temp;
5474   rtx alt_rtl = NULL_RTX;
5475   location_t loc = curr_insn_location ();
5476 
5477   if (VOID_TYPE_P (TREE_TYPE (exp)))
5478     {
5479       /* C++ can generate ?: expressions with a throw expression in one
5480 	 branch and an rvalue in the other. Here, we resolve attempts to
5481 	 store the throw expression's nonexistent result.  */
5482       gcc_assert (!call_param_p);
5483       expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5484       return NULL_RTX;
5485     }
5486   if (TREE_CODE (exp) == COMPOUND_EXPR)
5487     {
5488       /* Perform first part of compound expression, then assign from second
5489 	 part.  */
5490       expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5491 		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5492       return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5493 				     call_param_p, nontemporal, reverse,
5494 				     btarget);
5495     }
5496   else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5497     {
5498       /* For conditional expression, get safe form of the target.  Then
5499 	 test the condition, doing the appropriate assignment on either
5500 	 side.  This avoids the creation of unnecessary temporaries.
5501 	 For non-BLKmode, it is more efficient not to do this.  */
5502 
5503       rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5504 
5505       do_pending_stack_adjust ();
5506       NO_DEFER_POP;
5507       jumpifnot (TREE_OPERAND (exp, 0), lab1,
5508 		 profile_probability::uninitialized ());
5509       store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5510 			      nontemporal, reverse, btarget);
5511       emit_jump_insn (targetm.gen_jump (lab2));
5512       emit_barrier ();
5513       emit_label (lab1);
5514       store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5515 			      nontemporal, reverse, btarget);
5516       emit_label (lab2);
5517       OK_DEFER_POP;
5518 
5519       return NULL_RTX;
5520     }
5521   else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5522     /* If this is a scalar in a register that is stored in a wider mode
5523        than the declared mode, compute the result into its declared mode
5524        and then convert to the wider mode.  Our value is the computed
5525        expression.  */
5526     {
5527       rtx inner_target = 0;
5528       scalar_int_mode outer_mode = subreg_unpromoted_mode (target);
5529       scalar_int_mode inner_mode = subreg_promoted_mode (target);
5530 
5531       /* We can do the conversion inside EXP, which will often result
5532 	 in some optimizations.  Do the conversion in two steps: first
5533 	 change the signedness, if needed, then the extend.  But don't
5534 	 do this if the type of EXP is a subtype of something else
5535 	 since then the conversion might involve more than just
5536 	 converting modes.  */
5537       if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5538 	  && TREE_TYPE (TREE_TYPE (exp)) == 0
5539 	  && GET_MODE_PRECISION (outer_mode)
5540 	     == TYPE_PRECISION (TREE_TYPE (exp)))
5541 	{
5542 	  if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5543 					  TYPE_UNSIGNED (TREE_TYPE (exp))))
5544 	    {
5545 	      /* Some types, e.g. Fortran's logical*4, won't have a signed
5546 		 version, so use the mode instead.  */
5547 	      tree ntype
5548 		= (signed_or_unsigned_type_for
5549 		   (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5550 	      if (ntype == NULL)
5551 		ntype = lang_hooks.types.type_for_mode
5552 		  (TYPE_MODE (TREE_TYPE (exp)),
5553 		   SUBREG_PROMOTED_SIGN (target));
5554 
5555 	      exp = fold_convert_loc (loc, ntype, exp);
5556 	    }
5557 
5558 	  exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5559 				  (inner_mode, SUBREG_PROMOTED_SIGN (target)),
5560 				  exp);
5561 
5562 	  inner_target = SUBREG_REG (target);
5563 	}
5564 
5565       temp = expand_expr (exp, inner_target, VOIDmode,
5566 			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5567 
5568       /* Handle bounds returned by call.  */
5569       if (TREE_CODE (exp) == CALL_EXPR)
5570 	{
5571 	  rtx bounds;
5572 	  chkp_split_slot (temp, &temp, &bounds);
5573 	  if (bounds && btarget)
5574 	    {
5575 	      gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5576 	      rtx tmp = targetm.calls.load_returned_bounds (bounds);
5577 	      chkp_set_rtl_bounds (btarget, tmp);
5578 	    }
5579 	}
5580 
5581       /* If TEMP is a VOIDmode constant, use convert_modes to make
5582 	 sure that we properly convert it.  */
5583       if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5584 	{
5585 	  temp = convert_modes (outer_mode, TYPE_MODE (TREE_TYPE (exp)),
5586 				temp, SUBREG_PROMOTED_SIGN (target));
5587 	  temp = convert_modes (inner_mode, outer_mode, temp,
5588 				SUBREG_PROMOTED_SIGN (target));
5589 	}
5590 
5591       convert_move (SUBREG_REG (target), temp,
5592 		    SUBREG_PROMOTED_SIGN (target));
5593 
5594       return NULL_RTX;
5595     }
5596   else if ((TREE_CODE (exp) == STRING_CST
5597 	    || (TREE_CODE (exp) == MEM_REF
5598 		&& TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5599 		&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5600 		   == STRING_CST
5601 		&& integer_zerop (TREE_OPERAND (exp, 1))))
5602 	   && !nontemporal && !call_param_p
5603 	   && MEM_P (target))
5604     {
5605       /* Optimize initialization of an array with a STRING_CST.  */
5606       HOST_WIDE_INT exp_len, str_copy_len;
5607       rtx dest_mem;
5608       tree str = TREE_CODE (exp) == STRING_CST
5609 		 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5610 
5611       exp_len = int_expr_size (exp);
5612       if (exp_len <= 0)
5613 	goto normal_expr;
5614 
5615       if (TREE_STRING_LENGTH (str) <= 0)
5616 	goto normal_expr;
5617 
5618       str_copy_len = strlen (TREE_STRING_POINTER (str));
5619       if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5620 	goto normal_expr;
5621 
5622       str_copy_len = TREE_STRING_LENGTH (str);
5623       if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5624 	  && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5625 	{
5626 	  str_copy_len += STORE_MAX_PIECES - 1;
5627 	  str_copy_len &= ~(STORE_MAX_PIECES - 1);
5628 	}
5629       str_copy_len = MIN (str_copy_len, exp_len);
5630       if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5631 				CONST_CAST (char *, TREE_STRING_POINTER (str)),
5632 				MEM_ALIGN (target), false))
5633 	goto normal_expr;
5634 
5635       dest_mem = target;
5636 
5637       dest_mem = store_by_pieces (dest_mem,
5638 				  str_copy_len, builtin_strncpy_read_str,
5639 				  CONST_CAST (char *,
5640 					      TREE_STRING_POINTER (str)),
5641 				  MEM_ALIGN (target), false,
5642 				  exp_len > str_copy_len ? 1 : 0);
5643       if (exp_len > str_copy_len)
5644 	clear_storage (adjust_address (dest_mem, BLKmode, 0),
5645 		       GEN_INT (exp_len - str_copy_len),
5646 		       BLOCK_OP_NORMAL);
5647       return NULL_RTX;
5648     }
5649   else
5650     {
5651       rtx tmp_target;
5652 
5653   normal_expr:
5654       /* If we want to use a nontemporal or a reverse order store, force the
5655 	 value into a register first.  */
5656       tmp_target = nontemporal || reverse ? NULL_RTX : target;
5657       temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5658 			       (call_param_p
5659 				? EXPAND_STACK_PARM : EXPAND_NORMAL),
5660 			       &alt_rtl, false);
5661 
5662       /* Handle bounds returned by call.  */
5663       if (TREE_CODE (exp) == CALL_EXPR)
5664 	{
5665 	  rtx bounds;
5666 	  chkp_split_slot (temp, &temp, &bounds);
5667 	  if (bounds && btarget)
5668 	    {
5669 	      gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5670 	      rtx tmp = targetm.calls.load_returned_bounds (bounds);
5671 	      chkp_set_rtl_bounds (btarget, tmp);
5672 	    }
5673 	}
5674     }
5675 
5676   /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5677      the same as that of TARGET, adjust the constant.  This is needed, for
5678      example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5679      only a word-sized value.  */
5680   if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5681       && TREE_CODE (exp) != ERROR_MARK
5682       && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5683     {
5684       if (GET_MODE_CLASS (GET_MODE (target))
5685 	  != GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (exp)))
5686 	  && known_eq (GET_MODE_BITSIZE (GET_MODE (target)),
5687 		       GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)))))
5688 	{
5689 	  rtx t = simplify_gen_subreg (GET_MODE (target), temp,
5690 				       TYPE_MODE (TREE_TYPE (exp)), 0);
5691 	  if (t)
5692 	    temp = t;
5693 	}
5694       if (GET_MODE (temp) == VOIDmode)
5695 	temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5696 			      temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5697     }
5698 
5699   /* If value was not generated in the target, store it there.
5700      Convert the value to TARGET's type first if necessary and emit the
5701      pending incrementations that have been queued when expanding EXP.
5702      Note that we cannot emit the whole queue blindly because this will
5703      effectively disable the POST_INC optimization later.
5704 
5705      If TEMP and TARGET compare equal according to rtx_equal_p, but
5706      one or both of them are volatile memory refs, we have to distinguish
5707      two cases:
5708      - expand_expr has used TARGET.  In this case, we must not generate
5709        another copy.  This can be detected by TARGET being equal according
5710        to == .
5711      - expand_expr has not used TARGET - that means that the source just
5712        happens to have the same RTX form.  Since temp will have been created
5713        by expand_expr, it will compare unequal according to == .
5714        We must generate a copy in this case, to reach the correct number
5715        of volatile memory references.  */
5716 
5717   if ((! rtx_equal_p (temp, target)
5718        || (temp != target && (side_effects_p (temp)
5719 			      || side_effects_p (target))))
5720       && TREE_CODE (exp) != ERROR_MARK
5721       /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5722 	 but TARGET is not valid memory reference, TEMP will differ
5723 	 from TARGET although it is really the same location.  */
5724       && !(alt_rtl
5725 	   && rtx_equal_p (alt_rtl, target)
5726 	   && !side_effects_p (alt_rtl)
5727 	   && !side_effects_p (target))
5728       /* If there's nothing to copy, don't bother.  Don't call
5729 	 expr_size unless necessary, because some front-ends (C++)
5730 	 expr_size-hook must not be given objects that are not
5731 	 supposed to be bit-copied or bit-initialized.  */
5732       && expr_size (exp) != const0_rtx)
5733     {
5734       if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5735 	{
5736 	  if (GET_MODE (target) == BLKmode)
5737 	    {
5738 	      /* Handle calls that return BLKmode values in registers.  */
5739 	      if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5740 		copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5741 	      else
5742 		store_bit_field (target,
5743 				 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5744 				 0, 0, 0, GET_MODE (temp), temp, reverse);
5745 	    }
5746 	  else
5747 	    convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5748 	}
5749 
5750       else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5751 	{
5752 	  /* Handle copying a string constant into an array.  The string
5753 	     constant may be shorter than the array.  So copy just the string's
5754 	     actual length, and clear the rest.  First get the size of the data
5755 	     type of the string, which is actually the size of the target.  */
5756 	  rtx size = expr_size (exp);
5757 
5758 	  if (CONST_INT_P (size)
5759 	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
5760 	    emit_block_move (target, temp, size,
5761 			     (call_param_p
5762 			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5763 	  else
5764 	    {
5765 	      machine_mode pointer_mode
5766 		= targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5767 	      machine_mode address_mode = get_address_mode (target);
5768 
5769 	      /* Compute the size of the data to copy from the string.  */
5770 	      tree copy_size
5771 		= size_binop_loc (loc, MIN_EXPR,
5772 				  make_tree (sizetype, size),
5773 				  size_int (TREE_STRING_LENGTH (exp)));
5774 	      rtx copy_size_rtx
5775 		= expand_expr (copy_size, NULL_RTX, VOIDmode,
5776 			       (call_param_p
5777 				? EXPAND_STACK_PARM : EXPAND_NORMAL));
5778 	      rtx_code_label *label = 0;
5779 
5780 	      /* Copy that much.  */
5781 	      copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5782 					       TYPE_UNSIGNED (sizetype));
5783 	      emit_block_move (target, temp, copy_size_rtx,
5784 			       (call_param_p
5785 				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5786 
5787 	      /* Figure out how much is left in TARGET that we have to clear.
5788 		 Do all calculations in pointer_mode.  */
5789 	      if (CONST_INT_P (copy_size_rtx))
5790 		{
5791 		  size = plus_constant (address_mode, size,
5792 					-INTVAL (copy_size_rtx));
5793 		  target = adjust_address (target, BLKmode,
5794 					   INTVAL (copy_size_rtx));
5795 		}
5796 	      else
5797 		{
5798 		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5799 				       copy_size_rtx, NULL_RTX, 0,
5800 				       OPTAB_LIB_WIDEN);
5801 
5802 		  if (GET_MODE (copy_size_rtx) != address_mode)
5803 		    copy_size_rtx = convert_to_mode (address_mode,
5804 						     copy_size_rtx,
5805 						     TYPE_UNSIGNED (sizetype));
5806 
5807 		  target = offset_address (target, copy_size_rtx,
5808 					   highest_pow2_factor (copy_size));
5809 		  label = gen_label_rtx ();
5810 		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5811 					   GET_MODE (size), 0, label);
5812 		}
5813 
5814 	      if (size != const0_rtx)
5815 		clear_storage (target, size, BLOCK_OP_NORMAL);
5816 
5817 	      if (label)
5818 		emit_label (label);
5819 	    }
5820 	}
5821       /* Handle calls that return values in multiple non-contiguous locations.
5822 	 The Irix 6 ABI has examples of this.  */
5823       else if (GET_CODE (target) == PARALLEL)
5824 	{
5825 	  if (GET_CODE (temp) == PARALLEL)
5826 	    emit_group_move (target, temp);
5827 	  else
5828 	    emit_group_load (target, temp, TREE_TYPE (exp),
5829 			     int_size_in_bytes (TREE_TYPE (exp)));
5830 	}
5831       else if (GET_CODE (temp) == PARALLEL)
5832 	emit_group_store (target, temp, TREE_TYPE (exp),
5833 			  int_size_in_bytes (TREE_TYPE (exp)));
5834       else if (GET_MODE (temp) == BLKmode)
5835 	emit_block_move (target, temp, expr_size (exp),
5836 			 (call_param_p
5837 			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5838       /* If we emit a nontemporal store, there is nothing else to do.  */
5839       else if (nontemporal && emit_storent_insn (target, temp))
5840 	;
5841       else
5842 	{
5843 	  if (reverse)
5844 	    temp = flip_storage_order (GET_MODE (target), temp);
5845 	  temp = force_operand (temp, target);
5846 	  if (temp != target)
5847 	    emit_move_insn (target, temp);
5848 	}
5849     }
5850 
5851   return NULL_RTX;
5852 }
5853 
5854 /* Same as store_expr_with_bounds but ignoring bounds of EXP.  */
5855 rtx
5856 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal,
5857 	    bool reverse)
5858 {
5859   return store_expr_with_bounds (exp, target, call_param_p, nontemporal,
5860 				 reverse, NULL);
5861 }
5862 
5863 /* Return true if field F of structure TYPE is a flexible array.  */
5864 
5865 static bool
5866 flexible_array_member_p (const_tree f, const_tree type)
5867 {
5868   const_tree tf;
5869 
5870   tf = TREE_TYPE (f);
5871   return (DECL_CHAIN (f) == NULL
5872 	  && TREE_CODE (tf) == ARRAY_TYPE
5873 	  && TYPE_DOMAIN (tf)
5874 	  && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5875 	  && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5876 	  && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5877 	  && int_size_in_bytes (type) >= 0);
5878 }
5879 
5880 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5881    must have in order for it to completely initialize a value of type TYPE.
5882    Return -1 if the number isn't known.
5883 
5884    If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */
5885 
5886 static HOST_WIDE_INT
5887 count_type_elements (const_tree type, bool for_ctor_p)
5888 {
5889   switch (TREE_CODE (type))
5890     {
5891     case ARRAY_TYPE:
5892       {
5893 	tree nelts;
5894 
5895 	nelts = array_type_nelts (type);
5896 	if (nelts && tree_fits_uhwi_p (nelts))
5897 	  {
5898 	    unsigned HOST_WIDE_INT n;
5899 
5900 	    n = tree_to_uhwi (nelts) + 1;
5901 	    if (n == 0 || for_ctor_p)
5902 	      return n;
5903 	    else
5904 	      return n * count_type_elements (TREE_TYPE (type), false);
5905 	  }
5906 	return for_ctor_p ? -1 : 1;
5907       }
5908 
5909     case RECORD_TYPE:
5910       {
5911 	unsigned HOST_WIDE_INT n;
5912 	tree f;
5913 
5914 	n = 0;
5915 	for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5916 	  if (TREE_CODE (f) == FIELD_DECL)
5917 	    {
5918 	      if (!for_ctor_p)
5919 		n += count_type_elements (TREE_TYPE (f), false);
5920 	      else if (!flexible_array_member_p (f, type))
5921 		/* Don't count flexible arrays, which are not supposed
5922 		   to be initialized.  */
5923 		n += 1;
5924 	    }
5925 
5926 	return n;
5927       }
5928 
5929     case UNION_TYPE:
5930     case QUAL_UNION_TYPE:
5931       {
5932 	tree f;
5933 	HOST_WIDE_INT n, m;
5934 
5935 	gcc_assert (!for_ctor_p);
5936 	/* Estimate the number of scalars in each field and pick the
5937 	   maximum.  Other estimates would do instead; the idea is simply
5938 	   to make sure that the estimate is not sensitive to the ordering
5939 	   of the fields.  */
5940 	n = 1;
5941 	for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5942 	  if (TREE_CODE (f) == FIELD_DECL)
5943 	    {
5944 	      m = count_type_elements (TREE_TYPE (f), false);
5945 	      /* If the field doesn't span the whole union, add an extra
5946 		 scalar for the rest.  */
5947 	      if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5948 				    TYPE_SIZE (type)) != 1)
5949 		m++;
5950 	      if (n < m)
5951 		n = m;
5952 	    }
5953 	return n;
5954       }
5955 
5956     case COMPLEX_TYPE:
5957       return 2;
5958 
5959     case VECTOR_TYPE:
5960       {
5961 	unsigned HOST_WIDE_INT nelts;
5962 	if (TYPE_VECTOR_SUBPARTS (type).is_constant (&nelts))
5963 	  return nelts;
5964 	else
5965 	  return -1;
5966       }
5967 
5968     case INTEGER_TYPE:
5969     case REAL_TYPE:
5970     case FIXED_POINT_TYPE:
5971     case ENUMERAL_TYPE:
5972     case BOOLEAN_TYPE:
5973     case POINTER_TYPE:
5974     case OFFSET_TYPE:
5975     case REFERENCE_TYPE:
5976     case NULLPTR_TYPE:
5977       return 1;
5978 
5979     case ERROR_MARK:
5980       return 0;
5981 
5982     case VOID_TYPE:
5983     case METHOD_TYPE:
5984     case FUNCTION_TYPE:
5985     case LANG_TYPE:
5986     default:
5987       gcc_unreachable ();
5988     }
5989 }
5990 
5991 /* Helper for categorize_ctor_elements.  Identical interface.  */
5992 
5993 static bool
5994 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5995 			    HOST_WIDE_INT *p_unique_nz_elts,
5996 			    HOST_WIDE_INT *p_init_elts, bool *p_complete)
5997 {
5998   unsigned HOST_WIDE_INT idx;
5999   HOST_WIDE_INT nz_elts, unique_nz_elts, init_elts, num_fields;
6000   tree value, purpose, elt_type;
6001 
6002   /* Whether CTOR is a valid constant initializer, in accordance with what
6003      initializer_constant_valid_p does.  If inferred from the constructor
6004      elements, true until proven otherwise.  */
6005   bool const_from_elts_p = constructor_static_from_elts_p (ctor);
6006   bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
6007 
6008   nz_elts = 0;
6009   unique_nz_elts = 0;
6010   init_elts = 0;
6011   num_fields = 0;
6012   elt_type = NULL_TREE;
6013 
6014   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
6015     {
6016       HOST_WIDE_INT mult = 1;
6017 
6018       if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
6019 	{
6020 	  tree lo_index = TREE_OPERAND (purpose, 0);
6021 	  tree hi_index = TREE_OPERAND (purpose, 1);
6022 
6023 	  if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
6024 	    mult = (tree_to_uhwi (hi_index)
6025 		    - tree_to_uhwi (lo_index) + 1);
6026 	}
6027       num_fields += mult;
6028       elt_type = TREE_TYPE (value);
6029 
6030       switch (TREE_CODE (value))
6031 	{
6032 	case CONSTRUCTOR:
6033 	  {
6034 	    HOST_WIDE_INT nz = 0, unz = 0, ic = 0;
6035 
6036 	    bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &unz,
6037 							   &ic, p_complete);
6038 
6039 	    nz_elts += mult * nz;
6040 	    unique_nz_elts += unz;
6041  	    init_elts += mult * ic;
6042 
6043 	    if (const_from_elts_p && const_p)
6044 	      const_p = const_elt_p;
6045 	  }
6046 	  break;
6047 
6048 	case INTEGER_CST:
6049 	case REAL_CST:
6050 	case FIXED_CST:
6051 	  if (!initializer_zerop (value))
6052 	    {
6053 	      nz_elts += mult;
6054 	      unique_nz_elts++;
6055 	    }
6056 	  init_elts += mult;
6057 	  break;
6058 
6059 	case STRING_CST:
6060 	  nz_elts += mult * TREE_STRING_LENGTH (value);
6061 	  unique_nz_elts += TREE_STRING_LENGTH (value);
6062 	  init_elts += mult * TREE_STRING_LENGTH (value);
6063 	  break;
6064 
6065 	case COMPLEX_CST:
6066 	  if (!initializer_zerop (TREE_REALPART (value)))
6067 	    {
6068 	      nz_elts += mult;
6069 	      unique_nz_elts++;
6070 	    }
6071 	  if (!initializer_zerop (TREE_IMAGPART (value)))
6072 	    {
6073 	      nz_elts += mult;
6074 	      unique_nz_elts++;
6075 	    }
6076 	  init_elts += 2 * mult;
6077 	  break;
6078 
6079 	case VECTOR_CST:
6080 	  {
6081 	    /* We can only construct constant-length vectors using
6082 	       CONSTRUCTOR.  */
6083 	    unsigned int nunits = VECTOR_CST_NELTS (value).to_constant ();
6084 	    for (unsigned int i = 0; i < nunits; ++i)
6085 	      {
6086 		tree v = VECTOR_CST_ELT (value, i);
6087 		if (!initializer_zerop (v))
6088 		  {
6089 		    nz_elts += mult;
6090 		    unique_nz_elts++;
6091 		  }
6092 		init_elts += mult;
6093 	      }
6094 	  }
6095 	  break;
6096 
6097 	default:
6098 	  {
6099 	    HOST_WIDE_INT tc = count_type_elements (elt_type, false);
6100 	    nz_elts += mult * tc;
6101 	    unique_nz_elts += tc;
6102 	    init_elts += mult * tc;
6103 
6104 	    if (const_from_elts_p && const_p)
6105 	      const_p
6106 		= initializer_constant_valid_p (value,
6107 						elt_type,
6108 						TYPE_REVERSE_STORAGE_ORDER
6109 						(TREE_TYPE (ctor)))
6110 		  != NULL_TREE;
6111 	  }
6112 	  break;
6113 	}
6114     }
6115 
6116   if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
6117 						num_fields, elt_type))
6118     *p_complete = false;
6119 
6120   *p_nz_elts += nz_elts;
6121   *p_unique_nz_elts += unique_nz_elts;
6122   *p_init_elts += init_elts;
6123 
6124   return const_p;
6125 }
6126 
6127 /* Examine CTOR to discover:
6128    * how many scalar fields are set to nonzero values,
6129      and place it in *P_NZ_ELTS;
6130    * the same, but counting RANGE_EXPRs as multiplier of 1 instead of
6131      high - low + 1 (this can be useful for callers to determine ctors
6132      that could be cheaply initialized with - perhaps nested - loops
6133      compared to copied from huge read-only data),
6134      and place it in *P_UNIQUE_NZ_ELTS;
6135    * how many scalar fields in total are in CTOR,
6136      and place it in *P_ELT_COUNT.
6137    * whether the constructor is complete -- in the sense that every
6138      meaningful byte is explicitly given a value --
6139      and place it in *P_COMPLETE.
6140 
6141    Return whether or not CTOR is a valid static constant initializer, the same
6142    as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */
6143 
6144 bool
6145 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
6146 			  HOST_WIDE_INT *p_unique_nz_elts,
6147 			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
6148 {
6149   *p_nz_elts = 0;
6150   *p_unique_nz_elts = 0;
6151   *p_init_elts = 0;
6152   *p_complete = true;
6153 
6154   return categorize_ctor_elements_1 (ctor, p_nz_elts, p_unique_nz_elts,
6155 				     p_init_elts, p_complete);
6156 }
6157 
6158 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
6159    of which had type LAST_TYPE.  Each element was itself a complete
6160    initializer, in the sense that every meaningful byte was explicitly
6161    given a value.  Return true if the same is true for the constructor
6162    as a whole.  */
6163 
6164 bool
6165 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
6166 			  const_tree last_type)
6167 {
6168   if (TREE_CODE (type) == UNION_TYPE
6169       || TREE_CODE (type) == QUAL_UNION_TYPE)
6170     {
6171       if (num_elts == 0)
6172 	return false;
6173 
6174       gcc_assert (num_elts == 1 && last_type);
6175 
6176       /* ??? We could look at each element of the union, and find the
6177 	 largest element.  Which would avoid comparing the size of the
6178 	 initialized element against any tail padding in the union.
6179 	 Doesn't seem worth the effort...  */
6180       return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
6181     }
6182 
6183   return count_type_elements (type, true) == num_elts;
6184 }
6185 
6186 /* Return 1 if EXP contains mostly (3/4) zeros.  */
6187 
6188 static int
6189 mostly_zeros_p (const_tree exp)
6190 {
6191   if (TREE_CODE (exp) == CONSTRUCTOR)
6192     {
6193       HOST_WIDE_INT nz_elts, unz_elts, init_elts;
6194       bool complete_p;
6195 
6196       categorize_ctor_elements (exp, &nz_elts, &unz_elts, &init_elts,
6197 				&complete_p);
6198       return !complete_p || nz_elts < init_elts / 4;
6199     }
6200 
6201   return initializer_zerop (exp);
6202 }
6203 
6204 /* Return 1 if EXP contains all zeros.  */
6205 
6206 static int
6207 all_zeros_p (const_tree exp)
6208 {
6209   if (TREE_CODE (exp) == CONSTRUCTOR)
6210     {
6211       HOST_WIDE_INT nz_elts, unz_elts, init_elts;
6212       bool complete_p;
6213 
6214       categorize_ctor_elements (exp, &nz_elts, &unz_elts, &init_elts,
6215 				&complete_p);
6216       return nz_elts == 0;
6217     }
6218 
6219   return initializer_zerop (exp);
6220 }
6221 
6222 /* Helper function for store_constructor.
6223    TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
6224    CLEARED is as for store_constructor.
6225    ALIAS_SET is the alias set to use for any stores.
6226    If REVERSE is true, the store is to be done in reverse order.
6227 
6228    This provides a recursive shortcut back to store_constructor when it isn't
6229    necessary to go through store_field.  This is so that we can pass through
6230    the cleared field to let store_constructor know that we may not have to
6231    clear a substructure if the outer structure has already been cleared.  */
6232 
6233 static void
6234 store_constructor_field (rtx target, poly_uint64 bitsize, poly_int64 bitpos,
6235 			 poly_uint64 bitregion_start,
6236 			 poly_uint64 bitregion_end,
6237 			 machine_mode mode,
6238 			 tree exp, int cleared,
6239 			 alias_set_type alias_set, bool reverse)
6240 {
6241   poly_int64 bytepos;
6242   poly_uint64 bytesize;
6243   if (TREE_CODE (exp) == CONSTRUCTOR
6244       /* We can only call store_constructor recursively if the size and
6245 	 bit position are on a byte boundary.  */
6246       && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
6247       && maybe_ne (bitsize, 0U)
6248       && multiple_p (bitsize, BITS_PER_UNIT, &bytesize)
6249       /* If we have a nonzero bitpos for a register target, then we just
6250 	 let store_field do the bitfield handling.  This is unlikely to
6251 	 generate unnecessary clear instructions anyways.  */
6252       && (known_eq (bitpos, 0) || MEM_P (target)))
6253     {
6254       if (MEM_P (target))
6255 	{
6256 	  machine_mode target_mode = GET_MODE (target);
6257 	  if (target_mode != BLKmode
6258 	      && !multiple_p (bitpos, GET_MODE_ALIGNMENT (target_mode)))
6259 	    target_mode = BLKmode;
6260 	  target = adjust_address (target, target_mode, bytepos);
6261 	}
6262 
6263 
6264       /* Update the alias set, if required.  */
6265       if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
6266 	  && MEM_ALIAS_SET (target) != 0)
6267 	{
6268 	  target = copy_rtx (target);
6269 	  set_mem_alias_set (target, alias_set);
6270 	}
6271 
6272       store_constructor (exp, target, cleared, bytesize, reverse);
6273     }
6274   else
6275     store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode,
6276 		 exp, alias_set, false, reverse);
6277 }
6278 
6279 
6280 /* Returns the number of FIELD_DECLs in TYPE.  */
6281 
6282 static int
6283 fields_length (const_tree type)
6284 {
6285   tree t = TYPE_FIELDS (type);
6286   int count = 0;
6287 
6288   for (; t; t = DECL_CHAIN (t))
6289     if (TREE_CODE (t) == FIELD_DECL)
6290       ++count;
6291 
6292   return count;
6293 }
6294 
6295 
6296 /* Store the value of constructor EXP into the rtx TARGET.
6297    TARGET is either a REG or a MEM; we know it cannot conflict, since
6298    safe_from_p has been called.
6299    CLEARED is true if TARGET is known to have been zero'd.
6300    SIZE is the number of bytes of TARGET we are allowed to modify: this
6301    may not be the same as the size of EXP if we are assigning to a field
6302    which has been packed to exclude padding bits.
6303    If REVERSE is true, the store is to be done in reverse order.  */
6304 
6305 static void
6306 store_constructor (tree exp, rtx target, int cleared, poly_int64 size,
6307 		   bool reverse)
6308 {
6309   tree type = TREE_TYPE (exp);
6310   HOST_WIDE_INT exp_size = int_size_in_bytes (type);
6311   poly_int64 bitregion_end = known_gt (size, 0) ? size * BITS_PER_UNIT - 1 : 0;
6312 
6313   switch (TREE_CODE (type))
6314     {
6315     case RECORD_TYPE:
6316     case UNION_TYPE:
6317     case QUAL_UNION_TYPE:
6318       {
6319 	unsigned HOST_WIDE_INT idx;
6320 	tree field, value;
6321 
6322 	/* The storage order is specified for every aggregate type.  */
6323 	reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6324 
6325 	/* If size is zero or the target is already cleared, do nothing.  */
6326 	if (known_eq (size, 0) || cleared)
6327 	  cleared = 1;
6328 	/* We either clear the aggregate or indicate the value is dead.  */
6329 	else if ((TREE_CODE (type) == UNION_TYPE
6330 		  || TREE_CODE (type) == QUAL_UNION_TYPE)
6331 		 && ! CONSTRUCTOR_ELTS (exp))
6332 	  /* If the constructor is empty, clear the union.  */
6333 	  {
6334 	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6335 	    cleared = 1;
6336 	  }
6337 
6338 	/* If we are building a static constructor into a register,
6339 	   set the initial value as zero so we can fold the value into
6340 	   a constant.  But if more than one register is involved,
6341 	   this probably loses.  */
6342 	else if (REG_P (target) && TREE_STATIC (exp)
6343 		 && known_le (GET_MODE_SIZE (GET_MODE (target)),
6344 			      REGMODE_NATURAL_SIZE (GET_MODE (target))))
6345 	  {
6346 	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6347 	    cleared = 1;
6348 	  }
6349 
6350         /* If the constructor has fewer fields than the structure or
6351 	   if we are initializing the structure to mostly zeros, clear
6352 	   the whole structure first.  Don't do this if TARGET is a
6353 	   register whose mode size isn't equal to SIZE since
6354 	   clear_storage can't handle this case.  */
6355 	else if (known_size_p (size)
6356 		 && (((int) CONSTRUCTOR_NELTS (exp) != fields_length (type))
6357 		     || mostly_zeros_p (exp))
6358 		 && (!REG_P (target)
6359 		     || known_eq (GET_MODE_SIZE (GET_MODE (target)), size)))
6360 	  {
6361 	    clear_storage (target, gen_int_mode (size, Pmode),
6362 			   BLOCK_OP_NORMAL);
6363 	    cleared = 1;
6364 	  }
6365 
6366 	if (REG_P (target) && !cleared)
6367 	  emit_clobber (target);
6368 
6369 	/* Store each element of the constructor into the
6370 	   corresponding field of TARGET.  */
6371 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6372 	  {
6373 	    machine_mode mode;
6374 	    HOST_WIDE_INT bitsize;
6375 	    HOST_WIDE_INT bitpos = 0;
6376 	    tree offset;
6377 	    rtx to_rtx = target;
6378 
6379 	    /* Just ignore missing fields.  We cleared the whole
6380 	       structure, above, if any fields are missing.  */
6381 	    if (field == 0)
6382 	      continue;
6383 
6384 	    if (cleared && initializer_zerop (value))
6385 	      continue;
6386 
6387 	    if (tree_fits_uhwi_p (DECL_SIZE (field)))
6388 	      bitsize = tree_to_uhwi (DECL_SIZE (field));
6389 	    else
6390 	      gcc_unreachable ();
6391 
6392 	    mode = DECL_MODE (field);
6393 	    if (DECL_BIT_FIELD (field))
6394 	      mode = VOIDmode;
6395 
6396 	    offset = DECL_FIELD_OFFSET (field);
6397 	    if (tree_fits_shwi_p (offset)
6398 		&& tree_fits_shwi_p (bit_position (field)))
6399 	      {
6400 		bitpos = int_bit_position (field);
6401 		offset = NULL_TREE;
6402 	      }
6403 	    else
6404 	      gcc_unreachable ();
6405 
6406 	    /* If this initializes a field that is smaller than a
6407 	       word, at the start of a word, try to widen it to a full
6408 	       word.  This special case allows us to output C++ member
6409 	       function initializations in a form that the optimizers
6410 	       can understand.  */
6411 	    if (WORD_REGISTER_OPERATIONS
6412 		&& REG_P (target)
6413 		&& bitsize < BITS_PER_WORD
6414 		&& bitpos % BITS_PER_WORD == 0
6415 		&& GET_MODE_CLASS (mode) == MODE_INT
6416 		&& TREE_CODE (value) == INTEGER_CST
6417 		&& exp_size >= 0
6418 		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6419 	      {
6420 		tree type = TREE_TYPE (value);
6421 
6422 		if (TYPE_PRECISION (type) < BITS_PER_WORD)
6423 		  {
6424 		    type = lang_hooks.types.type_for_mode
6425 		      (word_mode, TYPE_UNSIGNED (type));
6426 		    value = fold_convert (type, value);
6427 		    /* Make sure the bits beyond the original bitsize are zero
6428 		       so that we can correctly avoid extra zeroing stores in
6429 		       later constructor elements.  */
6430 		    tree bitsize_mask
6431 		      = wide_int_to_tree (type, wi::mask (bitsize, false,
6432 							   BITS_PER_WORD));
6433 		    value = fold_build2 (BIT_AND_EXPR, type, value, bitsize_mask);
6434 		  }
6435 
6436 		if (BYTES_BIG_ENDIAN)
6437 		  value
6438 		   = fold_build2 (LSHIFT_EXPR, type, value,
6439 				   build_int_cst (type,
6440 						  BITS_PER_WORD - bitsize));
6441 		bitsize = BITS_PER_WORD;
6442 		mode = word_mode;
6443 	      }
6444 
6445 	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6446 		&& DECL_NONADDRESSABLE_P (field))
6447 	      {
6448 		to_rtx = copy_rtx (to_rtx);
6449 		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6450 	      }
6451 
6452 	    store_constructor_field (to_rtx, bitsize, bitpos,
6453 				     0, bitregion_end, mode,
6454 				     value, cleared,
6455 				     get_alias_set (TREE_TYPE (field)),
6456 				     reverse);
6457 	  }
6458 	break;
6459       }
6460     case ARRAY_TYPE:
6461       {
6462 	tree value, index;
6463 	unsigned HOST_WIDE_INT i;
6464 	int need_to_clear;
6465 	tree domain;
6466 	tree elttype = TREE_TYPE (type);
6467 	int const_bounds_p;
6468 	HOST_WIDE_INT minelt = 0;
6469 	HOST_WIDE_INT maxelt = 0;
6470 
6471 	/* The storage order is specified for every aggregate type.  */
6472 	reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6473 
6474 	domain = TYPE_DOMAIN (type);
6475 	const_bounds_p = (TYPE_MIN_VALUE (domain)
6476 			  && TYPE_MAX_VALUE (domain)
6477 			  && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6478 			  && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6479 
6480 	/* If we have constant bounds for the range of the type, get them.  */
6481 	if (const_bounds_p)
6482 	  {
6483 	    minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6484 	    maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6485 	  }
6486 
6487 	/* If the constructor has fewer elements than the array, clear
6488            the whole array first.  Similarly if this is static
6489            constructor of a non-BLKmode object.  */
6490 	if (cleared)
6491 	  need_to_clear = 0;
6492 	else if (REG_P (target) && TREE_STATIC (exp))
6493 	  need_to_clear = 1;
6494 	else
6495 	  {
6496 	    unsigned HOST_WIDE_INT idx;
6497 	    tree index, value;
6498 	    HOST_WIDE_INT count = 0, zero_count = 0;
6499 	    need_to_clear = ! const_bounds_p;
6500 
6501 	    /* This loop is a more accurate version of the loop in
6502 	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
6503 	       is also needed to check for missing elements.  */
6504 	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6505 	      {
6506 		HOST_WIDE_INT this_node_count;
6507 
6508 		if (need_to_clear)
6509 		  break;
6510 
6511 		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6512 		  {
6513 		    tree lo_index = TREE_OPERAND (index, 0);
6514 		    tree hi_index = TREE_OPERAND (index, 1);
6515 
6516 		    if (! tree_fits_uhwi_p (lo_index)
6517 			|| ! tree_fits_uhwi_p (hi_index))
6518 		      {
6519 			need_to_clear = 1;
6520 			break;
6521 		      }
6522 
6523 		    this_node_count = (tree_to_uhwi (hi_index)
6524 				       - tree_to_uhwi (lo_index) + 1);
6525 		  }
6526 		else
6527 		  this_node_count = 1;
6528 
6529 		count += this_node_count;
6530 		if (mostly_zeros_p (value))
6531 		  zero_count += this_node_count;
6532 	      }
6533 
6534 	    /* Clear the entire array first if there are any missing
6535 	       elements, or if the incidence of zero elements is >=
6536 	       75%.  */
6537 	    if (! need_to_clear
6538 		&& (count < maxelt - minelt + 1
6539 		    || 4 * zero_count >= 3 * count))
6540 	      need_to_clear = 1;
6541 	  }
6542 
6543 	if (need_to_clear && maybe_gt (size, 0))
6544 	  {
6545 	    if (REG_P (target))
6546 	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6547 	    else
6548 	      clear_storage (target, gen_int_mode (size, Pmode),
6549 			     BLOCK_OP_NORMAL);
6550 	    cleared = 1;
6551 	  }
6552 
6553 	if (!cleared && REG_P (target))
6554 	  /* Inform later passes that the old value is dead.  */
6555 	  emit_clobber (target);
6556 
6557 	/* Store each element of the constructor into the
6558 	   corresponding element of TARGET, determined by counting the
6559 	   elements.  */
6560 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6561 	  {
6562 	    machine_mode mode;
6563 	    poly_int64 bitsize;
6564 	    HOST_WIDE_INT bitpos;
6565 	    rtx xtarget = target;
6566 
6567 	    if (cleared && initializer_zerop (value))
6568 	      continue;
6569 
6570 	    mode = TYPE_MODE (elttype);
6571 	    if (mode == BLKmode)
6572 	      bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6573 			 ? tree_to_uhwi (TYPE_SIZE (elttype))
6574 			 : -1);
6575 	    else
6576 	      bitsize = GET_MODE_BITSIZE (mode);
6577 
6578 	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6579 	      {
6580 		tree lo_index = TREE_OPERAND (index, 0);
6581 		tree hi_index = TREE_OPERAND (index, 1);
6582 		rtx index_r, pos_rtx;
6583 		HOST_WIDE_INT lo, hi, count;
6584 		tree position;
6585 
6586 		/* If the range is constant and "small", unroll the loop.  */
6587 		if (const_bounds_p
6588 		    && tree_fits_shwi_p (lo_index)
6589 		    && tree_fits_shwi_p (hi_index)
6590 		    && (lo = tree_to_shwi (lo_index),
6591 			hi = tree_to_shwi (hi_index),
6592 			count = hi - lo + 1,
6593 			(!MEM_P (target)
6594 			 || count <= 2
6595 			 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6596 			     && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6597 				 <= 40 * 8)))))
6598 		  {
6599 		    lo -= minelt;  hi -= minelt;
6600 		    for (; lo <= hi; lo++)
6601 		      {
6602 			bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6603 
6604 			if (MEM_P (target)
6605 			    && !MEM_KEEP_ALIAS_SET_P (target)
6606 			    && TREE_CODE (type) == ARRAY_TYPE
6607 			    && TYPE_NONALIASED_COMPONENT (type))
6608 			  {
6609 			    target = copy_rtx (target);
6610 			    MEM_KEEP_ALIAS_SET_P (target) = 1;
6611 			  }
6612 
6613 			store_constructor_field
6614 			  (target, bitsize, bitpos, 0, bitregion_end,
6615 			   mode, value, cleared,
6616 			   get_alias_set (elttype), reverse);
6617 		      }
6618 		  }
6619 		else
6620 		  {
6621 		    rtx_code_label *loop_start = gen_label_rtx ();
6622 		    rtx_code_label *loop_end = gen_label_rtx ();
6623 		    tree exit_cond;
6624 
6625 		    expand_normal (hi_index);
6626 
6627 		    index = build_decl (EXPR_LOCATION (exp),
6628 					VAR_DECL, NULL_TREE, domain);
6629 		    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6630 		    SET_DECL_RTL (index, index_r);
6631 		    store_expr (lo_index, index_r, 0, false, reverse);
6632 
6633 		    /* Build the head of the loop.  */
6634 		    do_pending_stack_adjust ();
6635 		    emit_label (loop_start);
6636 
6637 		    /* Assign value to element index.  */
6638 		    position =
6639 		      fold_convert (ssizetype,
6640 				    fold_build2 (MINUS_EXPR,
6641 						 TREE_TYPE (index),
6642 						 index,
6643 						 TYPE_MIN_VALUE (domain)));
6644 
6645 		    position =
6646 			size_binop (MULT_EXPR, position,
6647 				    fold_convert (ssizetype,
6648 						  TYPE_SIZE_UNIT (elttype)));
6649 
6650 		    pos_rtx = expand_normal (position);
6651 		    xtarget = offset_address (target, pos_rtx,
6652 					      highest_pow2_factor (position));
6653 		    xtarget = adjust_address (xtarget, mode, 0);
6654 		    if (TREE_CODE (value) == CONSTRUCTOR)
6655 		      store_constructor (value, xtarget, cleared,
6656 					 exact_div (bitsize, BITS_PER_UNIT),
6657 					 reverse);
6658 		    else
6659 		      store_expr (value, xtarget, 0, false, reverse);
6660 
6661 		    /* Generate a conditional jump to exit the loop.  */
6662 		    exit_cond = build2 (LT_EXPR, integer_type_node,
6663 					index, hi_index);
6664 		    jumpif (exit_cond, loop_end,
6665 			    profile_probability::uninitialized ());
6666 
6667 		    /* Update the loop counter, and jump to the head of
6668 		       the loop.  */
6669 		    expand_assignment (index,
6670 				       build2 (PLUS_EXPR, TREE_TYPE (index),
6671 					       index, integer_one_node),
6672 				       false);
6673 
6674 		    emit_jump (loop_start);
6675 
6676 		    /* Build the end of the loop.  */
6677 		    emit_label (loop_end);
6678 		  }
6679 	      }
6680 	    else if ((index != 0 && ! tree_fits_shwi_p (index))
6681 		     || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6682 	      {
6683 		tree position;
6684 
6685 		if (index == 0)
6686 		  index = ssize_int (1);
6687 
6688 		if (minelt)
6689 		  index = fold_convert (ssizetype,
6690 					fold_build2 (MINUS_EXPR,
6691 						     TREE_TYPE (index),
6692 						     index,
6693 						     TYPE_MIN_VALUE (domain)));
6694 
6695 		position =
6696 		  size_binop (MULT_EXPR, index,
6697 			      fold_convert (ssizetype,
6698 					    TYPE_SIZE_UNIT (elttype)));
6699 		xtarget = offset_address (target,
6700 					  expand_normal (position),
6701 					  highest_pow2_factor (position));
6702 		xtarget = adjust_address (xtarget, mode, 0);
6703 		store_expr (value, xtarget, 0, false, reverse);
6704 	      }
6705 	    else
6706 	      {
6707 		if (index != 0)
6708 		  bitpos = ((tree_to_shwi (index) - minelt)
6709 			    * tree_to_uhwi (TYPE_SIZE (elttype)));
6710 		else
6711 		  bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6712 
6713 		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6714 		    && TREE_CODE (type) == ARRAY_TYPE
6715 		    && TYPE_NONALIASED_COMPONENT (type))
6716 		  {
6717 		    target = copy_rtx (target);
6718 		    MEM_KEEP_ALIAS_SET_P (target) = 1;
6719 		  }
6720 		store_constructor_field (target, bitsize, bitpos, 0,
6721 					 bitregion_end, mode, value,
6722 					 cleared, get_alias_set (elttype),
6723 					 reverse);
6724 	      }
6725 	  }
6726 	break;
6727       }
6728 
6729     case VECTOR_TYPE:
6730       {
6731 	unsigned HOST_WIDE_INT idx;
6732 	constructor_elt *ce;
6733 	int i;
6734 	int need_to_clear;
6735 	insn_code icode = CODE_FOR_nothing;
6736 	tree elt;
6737 	tree elttype = TREE_TYPE (type);
6738 	int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6739 	machine_mode eltmode = TYPE_MODE (elttype);
6740 	HOST_WIDE_INT bitsize;
6741 	HOST_WIDE_INT bitpos;
6742 	rtvec vector = NULL;
6743 	poly_uint64 n_elts;
6744 	unsigned HOST_WIDE_INT const_n_elts;
6745 	alias_set_type alias;
6746 	bool vec_vec_init_p = false;
6747 	machine_mode mode = GET_MODE (target);
6748 
6749 	gcc_assert (eltmode != BLKmode);
6750 
6751 	/* Try using vec_duplicate_optab for uniform vectors.  */
6752 	if (!TREE_SIDE_EFFECTS (exp)
6753 	    && VECTOR_MODE_P (mode)
6754 	    && eltmode == GET_MODE_INNER (mode)
6755 	    && ((icode = optab_handler (vec_duplicate_optab, mode))
6756 		!= CODE_FOR_nothing)
6757 	    && (elt = uniform_vector_p (exp)))
6758 	  {
6759 	    struct expand_operand ops[2];
6760 	    create_output_operand (&ops[0], target, mode);
6761 	    create_input_operand (&ops[1], expand_normal (elt), eltmode);
6762 	    expand_insn (icode, 2, ops);
6763 	    if (!rtx_equal_p (target, ops[0].value))
6764 	      emit_move_insn (target, ops[0].value);
6765 	    break;
6766 	  }
6767 
6768 	n_elts = TYPE_VECTOR_SUBPARTS (type);
6769 	if (REG_P (target)
6770 	    && VECTOR_MODE_P (mode)
6771 	    && n_elts.is_constant (&const_n_elts))
6772 	  {
6773 	    machine_mode emode = eltmode;
6774 
6775 	    if (CONSTRUCTOR_NELTS (exp)
6776 		&& (TREE_CODE (TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value))
6777 		    == VECTOR_TYPE))
6778 	      {
6779 		tree etype = TREE_TYPE (CONSTRUCTOR_ELT (exp, 0)->value);
6780 		gcc_assert (known_eq (CONSTRUCTOR_NELTS (exp)
6781 				      * TYPE_VECTOR_SUBPARTS (etype),
6782 				      n_elts));
6783 		emode = TYPE_MODE (etype);
6784 	      }
6785 	    icode = convert_optab_handler (vec_init_optab, mode, emode);
6786 	    if (icode != CODE_FOR_nothing)
6787 	      {
6788 		unsigned int i, n = const_n_elts;
6789 
6790 		if (emode != eltmode)
6791 		  {
6792 		    n = CONSTRUCTOR_NELTS (exp);
6793 		    vec_vec_init_p = true;
6794 		  }
6795 		vector = rtvec_alloc (n);
6796 		for (i = 0; i < n; i++)
6797 		  RTVEC_ELT (vector, i) = CONST0_RTX (emode);
6798 	      }
6799 	  }
6800 
6801 	/* If the constructor has fewer elements than the vector,
6802 	   clear the whole array first.  Similarly if this is static
6803 	   constructor of a non-BLKmode object.  */
6804 	if (cleared)
6805 	  need_to_clear = 0;
6806 	else if (REG_P (target) && TREE_STATIC (exp))
6807 	  need_to_clear = 1;
6808 	else
6809 	  {
6810 	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6811 	    tree value;
6812 
6813 	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6814 	      {
6815 		tree sz = TYPE_SIZE (TREE_TYPE (value));
6816 		int n_elts_here
6817 		  = tree_to_uhwi (int_const_binop (TRUNC_DIV_EXPR, sz,
6818 						   TYPE_SIZE (elttype)));
6819 
6820 		count += n_elts_here;
6821 		if (mostly_zeros_p (value))
6822 		  zero_count += n_elts_here;
6823 	      }
6824 
6825 	    /* Clear the entire vector first if there are any missing elements,
6826 	       or if the incidence of zero elements is >= 75%.  */
6827 	    need_to_clear = (maybe_lt (count, n_elts)
6828 			     || 4 * zero_count >= 3 * count);
6829 	  }
6830 
6831 	if (need_to_clear && maybe_gt (size, 0) && !vector)
6832 	  {
6833 	    if (REG_P (target))
6834 	      emit_move_insn (target, CONST0_RTX (mode));
6835 	    else
6836 	      clear_storage (target, gen_int_mode (size, Pmode),
6837 			     BLOCK_OP_NORMAL);
6838 	    cleared = 1;
6839 	  }
6840 
6841 	/* Inform later passes that the old value is dead.  */
6842 	if (!cleared && !vector && REG_P (target))
6843 	  emit_move_insn (target, CONST0_RTX (mode));
6844 
6845         if (MEM_P (target))
6846 	  alias = MEM_ALIAS_SET (target);
6847 	else
6848 	  alias = get_alias_set (elttype);
6849 
6850         /* Store each element of the constructor into the corresponding
6851 	   element of TARGET, determined by counting the elements.  */
6852 	for (idx = 0, i = 0;
6853 	     vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6854 	     idx++, i += bitsize / elt_size)
6855 	  {
6856 	    HOST_WIDE_INT eltpos;
6857 	    tree value = ce->value;
6858 
6859 	    bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6860 	    if (cleared && initializer_zerop (value))
6861 	      continue;
6862 
6863 	    if (ce->index)
6864 	      eltpos = tree_to_uhwi (ce->index);
6865 	    else
6866 	      eltpos = i;
6867 
6868 	    if (vector)
6869 	      {
6870 		if (vec_vec_init_p)
6871 		  {
6872 		    gcc_assert (ce->index == NULL_TREE);
6873 		    gcc_assert (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE);
6874 		    eltpos = idx;
6875 		  }
6876 		else
6877 		  gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6878 		RTVEC_ELT (vector, eltpos) = expand_normal (value);
6879 	      }
6880 	    else
6881 	      {
6882 		machine_mode value_mode
6883 		  = (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6884 		     ? TYPE_MODE (TREE_TYPE (value)) : eltmode);
6885 		bitpos = eltpos * elt_size;
6886 		store_constructor_field (target, bitsize, bitpos, 0,
6887 					 bitregion_end, value_mode,
6888 					 value, cleared, alias, reverse);
6889 	      }
6890 	  }
6891 
6892 	if (vector)
6893 	  emit_insn (GEN_FCN (icode) (target,
6894 				      gen_rtx_PARALLEL (mode, vector)));
6895 	break;
6896       }
6897 
6898     default:
6899       gcc_unreachable ();
6900     }
6901 }
6902 
6903 /* Store the value of EXP (an expression tree)
6904    into a subfield of TARGET which has mode MODE and occupies
6905    BITSIZE bits, starting BITPOS bits from the start of TARGET.
6906    If MODE is VOIDmode, it means that we are storing into a bit-field.
6907 
6908    BITREGION_START is bitpos of the first bitfield in this region.
6909    BITREGION_END is the bitpos of the ending bitfield in this region.
6910    These two fields are 0, if the C++ memory model does not apply,
6911    or we are not interested in keeping track of bitfield regions.
6912 
6913    Always return const0_rtx unless we have something particular to
6914    return.
6915 
6916    ALIAS_SET is the alias set for the destination.  This value will
6917    (in general) be different from that for TARGET, since TARGET is a
6918    reference to the containing structure.
6919 
6920    If NONTEMPORAL is true, try generating a nontemporal store.
6921 
6922    If REVERSE is true, the store is to be done in reverse order.  */
6923 
6924 static rtx
6925 store_field (rtx target, poly_int64 bitsize, poly_int64 bitpos,
6926 	     poly_uint64 bitregion_start, poly_uint64 bitregion_end,
6927 	     machine_mode mode, tree exp,
6928 	     alias_set_type alias_set, bool nontemporal,  bool reverse)
6929 {
6930   if (TREE_CODE (exp) == ERROR_MARK)
6931     return const0_rtx;
6932 
6933   /* If we have nothing to store, do nothing unless the expression has
6934      side-effects.  Don't do that for zero sized addressable lhs of
6935      calls.  */
6936   if (known_eq (bitsize, 0)
6937       && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
6938 	  || TREE_CODE (exp) != CALL_EXPR))
6939     return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6940 
6941   if (GET_CODE (target) == CONCAT)
6942     {
6943       /* We're storing into a struct containing a single __complex.  */
6944 
6945       gcc_assert (known_eq (bitpos, 0));
6946       return store_expr (exp, target, 0, nontemporal, reverse);
6947     }
6948 
6949   /* If the structure is in a register or if the component
6950      is a bit field, we cannot use addressing to access it.
6951      Use bit-field techniques or SUBREG to store in it.  */
6952 
6953   poly_int64 decl_bitsize;
6954   if (mode == VOIDmode
6955       || (mode != BLKmode && ! direct_store[(int) mode]
6956 	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6957 	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6958       || REG_P (target)
6959       || GET_CODE (target) == SUBREG
6960       /* If the field isn't aligned enough to store as an ordinary memref,
6961 	 store it as a bit field.  */
6962       || (mode != BLKmode
6963 	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6964 		|| !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode)))
6965 	       && targetm.slow_unaligned_access (mode, MEM_ALIGN (target)))
6966 	      || !multiple_p (bitpos, BITS_PER_UNIT)))
6967       || (known_size_p (bitsize)
6968 	  && mode != BLKmode
6969 	  && maybe_gt (GET_MODE_BITSIZE (mode), bitsize))
6970       /* If the RHS and field are a constant size and the size of the
6971 	 RHS isn't the same size as the bitfield, we must use bitfield
6972 	 operations.  */
6973       || (known_size_p (bitsize)
6974 	  && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
6975 	  && maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp))),
6976 		       bitsize)
6977 	  /* Except for initialization of full bytes from a CONSTRUCTOR, which
6978 	     we will handle specially below.  */
6979 	  && !(TREE_CODE (exp) == CONSTRUCTOR
6980 	       && multiple_p (bitsize, BITS_PER_UNIT))
6981 	  /* And except for bitwise copying of TREE_ADDRESSABLE types,
6982 	     where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp)
6983 	     includes some extra padding.  store_expr / expand_expr will in
6984 	     that case call get_inner_reference that will have the bitsize
6985 	     we check here and thus the block move will not clobber the
6986 	     padding that shouldn't be clobbered.  In the future we could
6987 	     replace the TREE_ADDRESSABLE check with a check that
6988 	     get_base_address needs to live in memory.  */
6989 	  && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
6990 	      || TREE_CODE (exp) != COMPONENT_REF
6991 	      || !multiple_p (bitsize, BITS_PER_UNIT)
6992 	      || !multiple_p (bitpos, BITS_PER_UNIT)
6993 	      || !poly_int_tree_p (DECL_SIZE (TREE_OPERAND (exp, 1)),
6994 				   &decl_bitsize)
6995 	      || maybe_ne (decl_bitsize, bitsize)))
6996       /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6997          decl we must use bitfield operations.  */
6998       || (known_size_p (bitsize)
6999 	  && TREE_CODE (exp) == MEM_REF
7000 	  && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7001 	  && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7002 	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7003 	  && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
7004     {
7005       rtx temp;
7006       gimple *nop_def;
7007 
7008       /* If EXP is a NOP_EXPR of precision less than its mode, then that
7009 	 implies a mask operation.  If the precision is the same size as
7010 	 the field we're storing into, that mask is redundant.  This is
7011 	 particularly common with bit field assignments generated by the
7012 	 C front end.  */
7013       nop_def = get_def_for_expr (exp, NOP_EXPR);
7014       if (nop_def)
7015 	{
7016 	  tree type = TREE_TYPE (exp);
7017 	  if (INTEGRAL_TYPE_P (type)
7018 	      && maybe_ne (TYPE_PRECISION (type),
7019 			   GET_MODE_BITSIZE (TYPE_MODE (type)))
7020 	      && known_eq (bitsize, TYPE_PRECISION (type)))
7021 	    {
7022 	      tree op = gimple_assign_rhs1 (nop_def);
7023 	      type = TREE_TYPE (op);
7024 	      if (INTEGRAL_TYPE_P (type)
7025 		  && known_ge (TYPE_PRECISION (type), bitsize))
7026 		exp = op;
7027 	    }
7028 	}
7029 
7030       temp = expand_normal (exp);
7031 
7032       /* We don't support variable-sized BLKmode bitfields, since our
7033 	 handling of BLKmode is bound up with the ability to break
7034 	 things into words.  */
7035       gcc_assert (mode != BLKmode || bitsize.is_constant ());
7036 
7037       /* Handle calls that return values in multiple non-contiguous locations.
7038 	 The Irix 6 ABI has examples of this.  */
7039       if (GET_CODE (temp) == PARALLEL)
7040 	{
7041 	  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
7042 	  machine_mode temp_mode = GET_MODE (temp);
7043 	  if (temp_mode == BLKmode || temp_mode == VOIDmode)
7044 	    temp_mode = smallest_int_mode_for_size (size * BITS_PER_UNIT);
7045 	  rtx temp_target = gen_reg_rtx (temp_mode);
7046 	  emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
7047 	  temp = temp_target;
7048 	}
7049 
7050       /* Handle calls that return BLKmode values in registers.  */
7051       else if (mode == BLKmode && REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
7052 	{
7053 	  rtx temp_target = gen_reg_rtx (GET_MODE (temp));
7054 	  copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
7055 	  temp = temp_target;
7056 	}
7057 
7058       /* If the value has aggregate type and an integral mode then, if BITSIZE
7059 	 is narrower than this mode and this is for big-endian data, we first
7060 	 need to put the value into the low-order bits for store_bit_field,
7061 	 except when MODE is BLKmode and BITSIZE larger than the word size
7062 	 (see the handling of fields larger than a word in store_bit_field).
7063 	 Moreover, the field may be not aligned on a byte boundary; in this
7064 	 case, if it has reverse storage order, it needs to be accessed as a
7065 	 scalar field with reverse storage order and we must first put the
7066 	 value into target order.  */
7067       scalar_int_mode temp_mode;
7068       if (AGGREGATE_TYPE_P (TREE_TYPE (exp))
7069 	  && is_int_mode (GET_MODE (temp), &temp_mode))
7070 	{
7071 	  HOST_WIDE_INT size = GET_MODE_BITSIZE (temp_mode);
7072 
7073 	  reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp));
7074 
7075 	  if (reverse)
7076 	    temp = flip_storage_order (temp_mode, temp);
7077 
7078 	  gcc_checking_assert (known_le (bitsize, size));
7079 	  if (maybe_lt (bitsize, size)
7080 	      && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN
7081 	      /* Use of to_constant for BLKmode was checked above.  */
7082 	      && !(mode == BLKmode && bitsize.to_constant () > BITS_PER_WORD))
7083 	    temp = expand_shift (RSHIFT_EXPR, temp_mode, temp,
7084 				 size - bitsize, NULL_RTX, 1);
7085 	}
7086 
7087       /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
7088       if (mode != VOIDmode && mode != BLKmode
7089 	  && mode != TYPE_MODE (TREE_TYPE (exp)))
7090 	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
7091 
7092       /* If the mode of TEMP and TARGET is BLKmode, both must be in memory
7093 	 and BITPOS must be aligned on a byte boundary.  If so, we simply do
7094 	 a block copy.  Likewise for a BLKmode-like TARGET.  */
7095       if (GET_MODE (temp) == BLKmode
7096 	  && (GET_MODE (target) == BLKmode
7097 	      || (MEM_P (target)
7098 		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
7099 		  && multiple_p (bitpos, BITS_PER_UNIT)
7100 		  && multiple_p (bitsize, BITS_PER_UNIT))))
7101 	{
7102 	  gcc_assert (MEM_P (target) && MEM_P (temp));
7103 	  poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT);
7104 	  poly_int64 bytesize = bits_to_bytes_round_up (bitsize);
7105 
7106 	  target = adjust_address (target, VOIDmode, bytepos);
7107 	  emit_block_move (target, temp,
7108 			   gen_int_mode (bytesize, Pmode),
7109 			   BLOCK_OP_NORMAL);
7110 
7111 	  return const0_rtx;
7112 	}
7113 
7114       /* If the mode of TEMP is still BLKmode and BITSIZE not larger than the
7115 	 word size, we need to load the value (see again store_bit_field).  */
7116       if (GET_MODE (temp) == BLKmode && known_le (bitsize, BITS_PER_WORD))
7117 	{
7118 	  scalar_int_mode temp_mode = smallest_int_mode_for_size (bitsize);
7119 	  temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode,
7120 				    temp_mode, false, NULL);
7121 	}
7122 
7123       /* Store the value in the bitfield.  */
7124       gcc_assert (known_ge (bitpos, 0));
7125       store_bit_field (target, bitsize, bitpos,
7126 		       bitregion_start, bitregion_end,
7127 		       mode, temp, reverse);
7128 
7129       return const0_rtx;
7130     }
7131   else
7132     {
7133       /* Now build a reference to just the desired component.  */
7134       rtx to_rtx = adjust_address (target, mode,
7135 				   exact_div (bitpos, BITS_PER_UNIT));
7136 
7137       if (to_rtx == target)
7138 	to_rtx = copy_rtx (to_rtx);
7139 
7140       if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
7141 	set_mem_alias_set (to_rtx, alias_set);
7142 
7143       /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
7144 	 into a target smaller than its type; handle that case now.  */
7145       if (TREE_CODE (exp) == CONSTRUCTOR && known_size_p (bitsize))
7146 	{
7147 	  poly_int64 bytesize = exact_div (bitsize, BITS_PER_UNIT);
7148 	  store_constructor (exp, to_rtx, 0, bytesize, reverse);
7149 	  return to_rtx;
7150 	}
7151 
7152       return store_expr (exp, to_rtx, 0, nontemporal, reverse);
7153     }
7154 }
7155 
7156 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
7157    an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
7158    codes and find the ultimate containing object, which we return.
7159 
7160    We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
7161    bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
7162    storage order of the field.
7163    If the position of the field is variable, we store a tree
7164    giving the variable offset (in units) in *POFFSET.
7165    This offset is in addition to the bit position.
7166    If the position is not variable, we store 0 in *POFFSET.
7167 
7168    If any of the extraction expressions is volatile,
7169    we store 1 in *PVOLATILEP.  Otherwise we don't change that.
7170 
7171    If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
7172    Otherwise, it is a mode that can be used to access the field.
7173 
7174    If the field describes a variable-sized object, *PMODE is set to
7175    BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
7176    this case, but the address of the object can be found.  */
7177 
7178 tree
7179 get_inner_reference (tree exp, poly_int64_pod *pbitsize,
7180 		     poly_int64_pod *pbitpos, tree *poffset,
7181 		     machine_mode *pmode, int *punsignedp,
7182 		     int *preversep, int *pvolatilep)
7183 {
7184   tree size_tree = 0;
7185   machine_mode mode = VOIDmode;
7186   bool blkmode_bitfield = false;
7187   tree offset = size_zero_node;
7188   poly_offset_int bit_offset = 0;
7189 
7190   /* First get the mode, signedness, storage order and size.  We do this from
7191      just the outermost expression.  */
7192   *pbitsize = -1;
7193   if (TREE_CODE (exp) == COMPONENT_REF)
7194     {
7195       tree field = TREE_OPERAND (exp, 1);
7196       size_tree = DECL_SIZE (field);
7197       if (flag_strict_volatile_bitfields > 0
7198 	  && TREE_THIS_VOLATILE (exp)
7199 	  && DECL_BIT_FIELD_TYPE (field)
7200 	  && DECL_MODE (field) != BLKmode)
7201 	/* Volatile bitfields should be accessed in the mode of the
7202 	     field's type, not the mode computed based on the bit
7203 	     size.  */
7204 	mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
7205       else if (!DECL_BIT_FIELD (field))
7206 	{
7207 	  mode = DECL_MODE (field);
7208 	  /* For vector fields re-check the target flags, as DECL_MODE
7209 	     could have been set with different target flags than
7210 	     the current function has.  */
7211 	  if (mode == BLKmode
7212 	      && VECTOR_TYPE_P (TREE_TYPE (field))
7213 	      && VECTOR_MODE_P (TYPE_MODE_RAW (TREE_TYPE (field))))
7214 	    mode = TYPE_MODE (TREE_TYPE (field));
7215 	}
7216       else if (DECL_MODE (field) == BLKmode)
7217 	blkmode_bitfield = true;
7218 
7219       *punsignedp = DECL_UNSIGNED (field);
7220     }
7221   else if (TREE_CODE (exp) == BIT_FIELD_REF)
7222     {
7223       size_tree = TREE_OPERAND (exp, 1);
7224       *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
7225 		     || TYPE_UNSIGNED (TREE_TYPE (exp)));
7226 
7227       /* For vector types, with the correct size of access, use the mode of
7228 	 inner type.  */
7229       if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
7230 	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
7231 	  && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
7232         mode = TYPE_MODE (TREE_TYPE (exp));
7233     }
7234   else
7235     {
7236       mode = TYPE_MODE (TREE_TYPE (exp));
7237       *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
7238 
7239       if (mode == BLKmode)
7240 	size_tree = TYPE_SIZE (TREE_TYPE (exp));
7241       else
7242 	*pbitsize = GET_MODE_BITSIZE (mode);
7243     }
7244 
7245   if (size_tree != 0)
7246     {
7247       if (! tree_fits_uhwi_p (size_tree))
7248 	mode = BLKmode, *pbitsize = -1;
7249       else
7250 	*pbitsize = tree_to_uhwi (size_tree);
7251     }
7252 
7253   *preversep = reverse_storage_order_for_component_p (exp);
7254 
7255   /* Compute cumulative bit-offset for nested component-refs and array-refs,
7256      and find the ultimate containing object.  */
7257   while (1)
7258     {
7259       switch (TREE_CODE (exp))
7260 	{
7261 	case BIT_FIELD_REF:
7262 	  bit_offset += wi::to_poly_offset (TREE_OPERAND (exp, 2));
7263 	  break;
7264 
7265 	case COMPONENT_REF:
7266 	  {
7267 	    tree field = TREE_OPERAND (exp, 1);
7268 	    tree this_offset = component_ref_field_offset (exp);
7269 
7270 	    /* If this field hasn't been filled in yet, don't go past it.
7271 	       This should only happen when folding expressions made during
7272 	       type construction.  */
7273 	    if (this_offset == 0)
7274 	      break;
7275 
7276 	    offset = size_binop (PLUS_EXPR, offset, this_offset);
7277 	    bit_offset += wi::to_poly_offset (DECL_FIELD_BIT_OFFSET (field));
7278 
7279 	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
7280 	  }
7281 	  break;
7282 
7283 	case ARRAY_REF:
7284 	case ARRAY_RANGE_REF:
7285 	  {
7286 	    tree index = TREE_OPERAND (exp, 1);
7287 	    tree low_bound = array_ref_low_bound (exp);
7288 	    tree unit_size = array_ref_element_size (exp);
7289 
7290 	    /* We assume all arrays have sizes that are a multiple of a byte.
7291 	       First subtract the lower bound, if any, in the type of the
7292 	       index, then convert to sizetype and multiply by the size of
7293 	       the array element.  */
7294 	    if (! integer_zerop (low_bound))
7295 	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
7296 				   index, low_bound);
7297 
7298 	    offset = size_binop (PLUS_EXPR, offset,
7299 			         size_binop (MULT_EXPR,
7300 					     fold_convert (sizetype, index),
7301 					     unit_size));
7302 	  }
7303 	  break;
7304 
7305 	case REALPART_EXPR:
7306 	  break;
7307 
7308 	case IMAGPART_EXPR:
7309 	  bit_offset += *pbitsize;
7310 	  break;
7311 
7312 	case VIEW_CONVERT_EXPR:
7313 	  break;
7314 
7315 	case MEM_REF:
7316 	  /* Hand back the decl for MEM[&decl, off].  */
7317 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
7318 	    {
7319 	      tree off = TREE_OPERAND (exp, 1);
7320 	      if (!integer_zerop (off))
7321 		{
7322 		  poly_offset_int boff = mem_ref_offset (exp);
7323 		  boff <<= LOG2_BITS_PER_UNIT;
7324 		  bit_offset += boff;
7325 		}
7326 	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7327 	    }
7328 	  goto done;
7329 
7330 	default:
7331 	  goto done;
7332 	}
7333 
7334       /* If any reference in the chain is volatile, the effect is volatile.  */
7335       if (TREE_THIS_VOLATILE (exp))
7336 	*pvolatilep = 1;
7337 
7338       exp = TREE_OPERAND (exp, 0);
7339     }
7340  done:
7341 
7342   /* If OFFSET is constant, see if we can return the whole thing as a
7343      constant bit position.  Make sure to handle overflow during
7344      this conversion.  */
7345   if (poly_int_tree_p (offset))
7346     {
7347       poly_offset_int tem = wi::sext (wi::to_poly_offset (offset),
7348 				      TYPE_PRECISION (sizetype));
7349       tem <<= LOG2_BITS_PER_UNIT;
7350       tem += bit_offset;
7351       if (tem.to_shwi (pbitpos))
7352 	*poffset = offset = NULL_TREE;
7353     }
7354 
7355   /* Otherwise, split it up.  */
7356   if (offset)
7357     {
7358       /* Avoid returning a negative bitpos as this may wreak havoc later.  */
7359       if (!bit_offset.to_shwi (pbitpos) || maybe_lt (*pbitpos, 0))
7360         {
7361 	  *pbitpos = num_trailing_bits (bit_offset.force_shwi ());
7362 	  poly_offset_int bytes = bits_to_bytes_round_down (bit_offset);
7363 	  offset = size_binop (PLUS_EXPR, offset,
7364 			       build_int_cst (sizetype, bytes.force_shwi ()));
7365 	}
7366 
7367       *poffset = offset;
7368     }
7369 
7370   /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
7371   if (mode == VOIDmode
7372       && blkmode_bitfield
7373       && multiple_p (*pbitpos, BITS_PER_UNIT)
7374       && multiple_p (*pbitsize, BITS_PER_UNIT))
7375     *pmode = BLKmode;
7376   else
7377     *pmode = mode;
7378 
7379   return exp;
7380 }
7381 
7382 /* Alignment in bits the TARGET of an assignment may be assumed to have.  */
7383 
7384 static unsigned HOST_WIDE_INT
7385 target_align (const_tree target)
7386 {
7387   /* We might have a chain of nested references with intermediate misaligning
7388      bitfields components, so need to recurse to find out.  */
7389 
7390   unsigned HOST_WIDE_INT this_align, outer_align;
7391 
7392   switch (TREE_CODE (target))
7393     {
7394     case BIT_FIELD_REF:
7395       return 1;
7396 
7397     case COMPONENT_REF:
7398       this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7399       outer_align = target_align (TREE_OPERAND (target, 0));
7400       return MIN (this_align, outer_align);
7401 
7402     case ARRAY_REF:
7403     case ARRAY_RANGE_REF:
7404       this_align = TYPE_ALIGN (TREE_TYPE (target));
7405       outer_align = target_align (TREE_OPERAND (target, 0));
7406       return MIN (this_align, outer_align);
7407 
7408     CASE_CONVERT:
7409     case NON_LVALUE_EXPR:
7410     case VIEW_CONVERT_EXPR:
7411       this_align = TYPE_ALIGN (TREE_TYPE (target));
7412       outer_align = target_align (TREE_OPERAND (target, 0));
7413       return MAX (this_align, outer_align);
7414 
7415     default:
7416       return TYPE_ALIGN (TREE_TYPE (target));
7417     }
7418 }
7419 
7420 
7421 /* Given an rtx VALUE that may contain additions and multiplications, return
7422    an equivalent value that just refers to a register, memory, or constant.
7423    This is done by generating instructions to perform the arithmetic and
7424    returning a pseudo-register containing the value.
7425 
7426    The returned value may be a REG, SUBREG, MEM or constant.  */
7427 
7428 rtx
7429 force_operand (rtx value, rtx target)
7430 {
7431   rtx op1, op2;
7432   /* Use subtarget as the target for operand 0 of a binary operation.  */
7433   rtx subtarget = get_subtarget (target);
7434   enum rtx_code code = GET_CODE (value);
7435 
7436   /* Check for subreg applied to an expression produced by loop optimizer.  */
7437   if (code == SUBREG
7438       && !REG_P (SUBREG_REG (value))
7439       && !MEM_P (SUBREG_REG (value)))
7440     {
7441       value
7442 	= simplify_gen_subreg (GET_MODE (value),
7443 			       force_reg (GET_MODE (SUBREG_REG (value)),
7444 					  force_operand (SUBREG_REG (value),
7445 							 NULL_RTX)),
7446 			       GET_MODE (SUBREG_REG (value)),
7447 			       SUBREG_BYTE (value));
7448       code = GET_CODE (value);
7449     }
7450 
7451   /* Check for a PIC address load.  */
7452   if ((code == PLUS || code == MINUS)
7453       && XEXP (value, 0) == pic_offset_table_rtx
7454       && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7455 	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
7456 	  || GET_CODE (XEXP (value, 1)) == CONST))
7457     {
7458       if (!subtarget)
7459 	subtarget = gen_reg_rtx (GET_MODE (value));
7460       emit_move_insn (subtarget, value);
7461       return subtarget;
7462     }
7463 
7464   if (ARITHMETIC_P (value))
7465     {
7466       op2 = XEXP (value, 1);
7467       if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7468 	subtarget = 0;
7469       if (code == MINUS && CONST_INT_P (op2))
7470 	{
7471 	  code = PLUS;
7472 	  op2 = negate_rtx (GET_MODE (value), op2);
7473 	}
7474 
7475       /* Check for an addition with OP2 a constant integer and our first
7476          operand a PLUS of a virtual register and something else.  In that
7477          case, we want to emit the sum of the virtual register and the
7478          constant first and then add the other value.  This allows virtual
7479          register instantiation to simply modify the constant rather than
7480          creating another one around this addition.  */
7481       if (code == PLUS && CONST_INT_P (op2)
7482 	  && GET_CODE (XEXP (value, 0)) == PLUS
7483 	  && REG_P (XEXP (XEXP (value, 0), 0))
7484 	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7485 	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7486 	{
7487 	  rtx temp = expand_simple_binop (GET_MODE (value), code,
7488 					  XEXP (XEXP (value, 0), 0), op2,
7489 					  subtarget, 0, OPTAB_LIB_WIDEN);
7490 	  return expand_simple_binop (GET_MODE (value), code, temp,
7491 				      force_operand (XEXP (XEXP (value,
7492 								 0), 1), 0),
7493 				      target, 0, OPTAB_LIB_WIDEN);
7494 	}
7495 
7496       op1 = force_operand (XEXP (value, 0), subtarget);
7497       op2 = force_operand (op2, NULL_RTX);
7498       switch (code)
7499 	{
7500 	case MULT:
7501 	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
7502 	case DIV:
7503 	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
7504 	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
7505 					target, 1, OPTAB_LIB_WIDEN);
7506 	  else
7507 	    return expand_divmod (0,
7508 				  FLOAT_MODE_P (GET_MODE (value))
7509 				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
7510 				  GET_MODE (value), op1, op2, target, 0);
7511 	case MOD:
7512 	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7513 				target, 0);
7514 	case UDIV:
7515 	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7516 				target, 1);
7517 	case UMOD:
7518 	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7519 				target, 1);
7520 	case ASHIFTRT:
7521 	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
7522 				      target, 0, OPTAB_LIB_WIDEN);
7523 	default:
7524 	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
7525 				      target, 1, OPTAB_LIB_WIDEN);
7526 	}
7527     }
7528   if (UNARY_P (value))
7529     {
7530       if (!target)
7531 	target = gen_reg_rtx (GET_MODE (value));
7532       op1 = force_operand (XEXP (value, 0), NULL_RTX);
7533       switch (code)
7534 	{
7535 	case ZERO_EXTEND:
7536 	case SIGN_EXTEND:
7537 	case TRUNCATE:
7538 	case FLOAT_EXTEND:
7539 	case FLOAT_TRUNCATE:
7540 	  convert_move (target, op1, code == ZERO_EXTEND);
7541 	  return target;
7542 
7543 	case FIX:
7544 	case UNSIGNED_FIX:
7545 	  expand_fix (target, op1, code == UNSIGNED_FIX);
7546 	  return target;
7547 
7548 	case FLOAT:
7549 	case UNSIGNED_FLOAT:
7550 	  expand_float (target, op1, code == UNSIGNED_FLOAT);
7551 	  return target;
7552 
7553 	default:
7554 	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7555 	}
7556     }
7557 
7558 #ifdef INSN_SCHEDULING
7559   /* On machines that have insn scheduling, we want all memory reference to be
7560      explicit, so we need to deal with such paradoxical SUBREGs.  */
7561   if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7562     value
7563       = simplify_gen_subreg (GET_MODE (value),
7564 			     force_reg (GET_MODE (SUBREG_REG (value)),
7565 					force_operand (SUBREG_REG (value),
7566 						       NULL_RTX)),
7567 			     GET_MODE (SUBREG_REG (value)),
7568 			     SUBREG_BYTE (value));
7569 #endif
7570 
7571   return value;
7572 }
7573 
7574 /* Subroutine of expand_expr: return nonzero iff there is no way that
7575    EXP can reference X, which is being modified.  TOP_P is nonzero if this
7576    call is going to be used to determine whether we need a temporary
7577    for EXP, as opposed to a recursive call to this function.
7578 
7579    It is always safe for this routine to return zero since it merely
7580    searches for optimization opportunities.  */
7581 
7582 int
7583 safe_from_p (const_rtx x, tree exp, int top_p)
7584 {
7585   rtx exp_rtl = 0;
7586   int i, nops;
7587 
7588   if (x == 0
7589       /* If EXP has varying size, we MUST use a target since we currently
7590 	 have no way of allocating temporaries of variable size
7591 	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7592 	 So we assume here that something at a higher level has prevented a
7593 	 clash.  This is somewhat bogus, but the best we can do.  Only
7594 	 do this when X is BLKmode and when we are at the top level.  */
7595       || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7596 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7597 	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7598 	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7599 	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7600 	      != INTEGER_CST)
7601 	  && GET_MODE (x) == BLKmode)
7602       /* If X is in the outgoing argument area, it is always safe.  */
7603       || (MEM_P (x)
7604 	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
7605 	      || (GET_CODE (XEXP (x, 0)) == PLUS
7606 		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7607     return 1;
7608 
7609   /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7610      find the underlying pseudo.  */
7611   if (GET_CODE (x) == SUBREG)
7612     {
7613       x = SUBREG_REG (x);
7614       if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7615 	return 0;
7616     }
7617 
7618   /* Now look at our tree code and possibly recurse.  */
7619   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7620     {
7621     case tcc_declaration:
7622       exp_rtl = DECL_RTL_IF_SET (exp);
7623       break;
7624 
7625     case tcc_constant:
7626       return 1;
7627 
7628     case tcc_exceptional:
7629       if (TREE_CODE (exp) == TREE_LIST)
7630 	{
7631 	  while (1)
7632 	    {
7633 	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7634 		return 0;
7635 	      exp = TREE_CHAIN (exp);
7636 	      if (!exp)
7637 		return 1;
7638 	      if (TREE_CODE (exp) != TREE_LIST)
7639 		return safe_from_p (x, exp, 0);
7640 	    }
7641 	}
7642       else if (TREE_CODE (exp) == CONSTRUCTOR)
7643 	{
7644 	  constructor_elt *ce;
7645 	  unsigned HOST_WIDE_INT idx;
7646 
7647 	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7648 	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7649 		|| !safe_from_p (x, ce->value, 0))
7650 	      return 0;
7651 	  return 1;
7652 	}
7653       else if (TREE_CODE (exp) == ERROR_MARK)
7654 	return 1;	/* An already-visited SAVE_EXPR? */
7655       else
7656 	return 0;
7657 
7658     case tcc_statement:
7659       /* The only case we look at here is the DECL_INITIAL inside a
7660 	 DECL_EXPR.  */
7661       return (TREE_CODE (exp) != DECL_EXPR
7662 	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7663 	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7664 	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7665 
7666     case tcc_binary:
7667     case tcc_comparison:
7668       if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7669 	return 0;
7670       /* Fall through.  */
7671 
7672     case tcc_unary:
7673       return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7674 
7675     case tcc_expression:
7676     case tcc_reference:
7677     case tcc_vl_exp:
7678       /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
7679 	 the expression.  If it is set, we conflict iff we are that rtx or
7680 	 both are in memory.  Otherwise, we check all operands of the
7681 	 expression recursively.  */
7682 
7683       switch (TREE_CODE (exp))
7684 	{
7685 	case ADDR_EXPR:
7686 	  /* If the operand is static or we are static, we can't conflict.
7687 	     Likewise if we don't conflict with the operand at all.  */
7688 	  if (staticp (TREE_OPERAND (exp, 0))
7689 	      || TREE_STATIC (exp)
7690 	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7691 	    return 1;
7692 
7693 	  /* Otherwise, the only way this can conflict is if we are taking
7694 	     the address of a DECL a that address if part of X, which is
7695 	     very rare.  */
7696 	  exp = TREE_OPERAND (exp, 0);
7697 	  if (DECL_P (exp))
7698 	    {
7699 	      if (!DECL_RTL_SET_P (exp)
7700 		  || !MEM_P (DECL_RTL (exp)))
7701 		return 0;
7702 	      else
7703 		exp_rtl = XEXP (DECL_RTL (exp), 0);
7704 	    }
7705 	  break;
7706 
7707 	case MEM_REF:
7708 	  if (MEM_P (x)
7709 	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7710 					get_alias_set (exp)))
7711 	    return 0;
7712 	  break;
7713 
7714 	case CALL_EXPR:
7715 	  /* Assume that the call will clobber all hard registers and
7716 	     all of memory.  */
7717 	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7718 	      || MEM_P (x))
7719 	    return 0;
7720 	  break;
7721 
7722 	case WITH_CLEANUP_EXPR:
7723 	case CLEANUP_POINT_EXPR:
7724 	  /* Lowered by gimplify.c.  */
7725 	  gcc_unreachable ();
7726 
7727 	case SAVE_EXPR:
7728 	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7729 
7730 	default:
7731 	  break;
7732 	}
7733 
7734       /* If we have an rtx, we do not need to scan our operands.  */
7735       if (exp_rtl)
7736 	break;
7737 
7738       nops = TREE_OPERAND_LENGTH (exp);
7739       for (i = 0; i < nops; i++)
7740 	if (TREE_OPERAND (exp, i) != 0
7741 	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7742 	  return 0;
7743 
7744       break;
7745 
7746     case tcc_type:
7747       /* Should never get a type here.  */
7748       gcc_unreachable ();
7749     }
7750 
7751   /* If we have an rtl, find any enclosed object.  Then see if we conflict
7752      with it.  */
7753   if (exp_rtl)
7754     {
7755       if (GET_CODE (exp_rtl) == SUBREG)
7756 	{
7757 	  exp_rtl = SUBREG_REG (exp_rtl);
7758 	  if (REG_P (exp_rtl)
7759 	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7760 	    return 0;
7761 	}
7762 
7763       /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
7764 	 are memory and they conflict.  */
7765       return ! (rtx_equal_p (x, exp_rtl)
7766 		|| (MEM_P (x) && MEM_P (exp_rtl)
7767 		    && true_dependence (exp_rtl, VOIDmode, x)));
7768     }
7769 
7770   /* If we reach here, it is safe.  */
7771   return 1;
7772 }
7773 
7774 
7775 /* Return the highest power of two that EXP is known to be a multiple of.
7776    This is used in updating alignment of MEMs in array references.  */
7777 
7778 unsigned HOST_WIDE_INT
7779 highest_pow2_factor (const_tree exp)
7780 {
7781   unsigned HOST_WIDE_INT ret;
7782   int trailing_zeros = tree_ctz (exp);
7783   if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7784     return BIGGEST_ALIGNMENT;
7785   ret = HOST_WIDE_INT_1U << trailing_zeros;
7786   if (ret > BIGGEST_ALIGNMENT)
7787     return BIGGEST_ALIGNMENT;
7788   return ret;
7789 }
7790 
7791 /* Similar, except that the alignment requirements of TARGET are
7792    taken into account.  Assume it is at least as aligned as its
7793    type, unless it is a COMPONENT_REF in which case the layout of
7794    the structure gives the alignment.  */
7795 
7796 static unsigned HOST_WIDE_INT
7797 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7798 {
7799   unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7800   unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7801 
7802   return MAX (factor, talign);
7803 }
7804 
7805 /* Convert the tree comparison code TCODE to the rtl one where the
7806    signedness is UNSIGNEDP.  */
7807 
7808 static enum rtx_code
7809 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7810 {
7811   enum rtx_code code;
7812   switch (tcode)
7813     {
7814     case EQ_EXPR:
7815       code = EQ;
7816       break;
7817     case NE_EXPR:
7818       code = NE;
7819       break;
7820     case LT_EXPR:
7821       code = unsignedp ? LTU : LT;
7822       break;
7823     case LE_EXPR:
7824       code = unsignedp ? LEU : LE;
7825       break;
7826     case GT_EXPR:
7827       code = unsignedp ? GTU : GT;
7828       break;
7829     case GE_EXPR:
7830       code = unsignedp ? GEU : GE;
7831       break;
7832     case UNORDERED_EXPR:
7833       code = UNORDERED;
7834       break;
7835     case ORDERED_EXPR:
7836       code = ORDERED;
7837       break;
7838     case UNLT_EXPR:
7839       code = UNLT;
7840       break;
7841     case UNLE_EXPR:
7842       code = UNLE;
7843       break;
7844     case UNGT_EXPR:
7845       code = UNGT;
7846       break;
7847     case UNGE_EXPR:
7848       code = UNGE;
7849       break;
7850     case UNEQ_EXPR:
7851       code = UNEQ;
7852       break;
7853     case LTGT_EXPR:
7854       code = LTGT;
7855       break;
7856 
7857     default:
7858       gcc_unreachable ();
7859     }
7860   return code;
7861 }
7862 
7863 /* Subroutine of expand_expr.  Expand the two operands of a binary
7864    expression EXP0 and EXP1 placing the results in OP0 and OP1.
7865    The value may be stored in TARGET if TARGET is nonzero.  The
7866    MODIFIER argument is as documented by expand_expr.  */
7867 
7868 void
7869 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7870 		 enum expand_modifier modifier)
7871 {
7872   if (! safe_from_p (target, exp1, 1))
7873     target = 0;
7874   if (operand_equal_p (exp0, exp1, 0))
7875     {
7876       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7877       *op1 = copy_rtx (*op0);
7878     }
7879   else
7880     {
7881       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7882       *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7883     }
7884 }
7885 
7886 
7887 /* Return a MEM that contains constant EXP.  DEFER is as for
7888    output_constant_def and MODIFIER is as for expand_expr.  */
7889 
7890 static rtx
7891 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7892 {
7893   rtx mem;
7894 
7895   mem = output_constant_def (exp, defer);
7896   if (modifier != EXPAND_INITIALIZER)
7897     mem = use_anchored_address (mem);
7898   return mem;
7899 }
7900 
7901 /* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
7902    The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
7903 
7904 static rtx
7905 expand_expr_addr_expr_1 (tree exp, rtx target, scalar_int_mode tmode,
7906 		         enum expand_modifier modifier, addr_space_t as)
7907 {
7908   rtx result, subtarget;
7909   tree inner, offset;
7910   poly_int64 bitsize, bitpos;
7911   int unsignedp, reversep, volatilep = 0;
7912   machine_mode mode1;
7913 
7914   /* If we are taking the address of a constant and are at the top level,
7915      we have to use output_constant_def since we can't call force_const_mem
7916      at top level.  */
7917   /* ??? This should be considered a front-end bug.  We should not be
7918      generating ADDR_EXPR of something that isn't an LVALUE.  The only
7919      exception here is STRING_CST.  */
7920   if (CONSTANT_CLASS_P (exp))
7921     {
7922       result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7923       if (modifier < EXPAND_SUM)
7924 	result = force_operand (result, target);
7925       return result;
7926     }
7927 
7928   /* Everything must be something allowed by is_gimple_addressable.  */
7929   switch (TREE_CODE (exp))
7930     {
7931     case INDIRECT_REF:
7932       /* This case will happen via recursion for &a->b.  */
7933       return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7934 
7935     case MEM_REF:
7936       {
7937 	tree tem = TREE_OPERAND (exp, 0);
7938 	if (!integer_zerop (TREE_OPERAND (exp, 1)))
7939 	  tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7940 	return expand_expr (tem, target, tmode, modifier);
7941       }
7942 
7943     case TARGET_MEM_REF:
7944       return addr_for_mem_ref (exp, as, true);
7945 
7946     case CONST_DECL:
7947       /* Expand the initializer like constants above.  */
7948       result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7949 					   0, modifier), 0);
7950       if (modifier < EXPAND_SUM)
7951 	result = force_operand (result, target);
7952       return result;
7953 
7954     case REALPART_EXPR:
7955       /* The real part of the complex number is always first, therefore
7956 	 the address is the same as the address of the parent object.  */
7957       offset = 0;
7958       bitpos = 0;
7959       inner = TREE_OPERAND (exp, 0);
7960       break;
7961 
7962     case IMAGPART_EXPR:
7963       /* The imaginary part of the complex number is always second.
7964 	 The expression is therefore always offset by the size of the
7965 	 scalar type.  */
7966       offset = 0;
7967       bitpos = GET_MODE_BITSIZE (SCALAR_TYPE_MODE (TREE_TYPE (exp)));
7968       inner = TREE_OPERAND (exp, 0);
7969       break;
7970 
7971     case COMPOUND_LITERAL_EXPR:
7972       /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7973 	 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7974 	 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7975 	 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7976 	 the initializers aren't gimplified.  */
7977       if (COMPOUND_LITERAL_EXPR_DECL (exp)
7978 	  && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7979 	return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7980 					target, tmode, modifier, as);
7981       /* FALLTHRU */
7982     default:
7983       /* If the object is a DECL, then expand it for its rtl.  Don't bypass
7984 	 expand_expr, as that can have various side effects; LABEL_DECLs for
7985 	 example, may not have their DECL_RTL set yet.  Expand the rtl of
7986 	 CONSTRUCTORs too, which should yield a memory reference for the
7987 	 constructor's contents.  Assume language specific tree nodes can
7988 	 be expanded in some interesting way.  */
7989       gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7990       if (DECL_P (exp)
7991 	  || TREE_CODE (exp) == CONSTRUCTOR
7992 	  || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7993 	{
7994 	  result = expand_expr (exp, target, tmode,
7995 				modifier == EXPAND_INITIALIZER
7996 				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7997 
7998 	  /* If the DECL isn't in memory, then the DECL wasn't properly
7999 	     marked TREE_ADDRESSABLE, which will be either a front-end
8000 	     or a tree optimizer bug.  */
8001 
8002 	  gcc_assert (MEM_P (result));
8003 	  result = XEXP (result, 0);
8004 
8005 	  /* ??? Is this needed anymore?  */
8006 	  if (DECL_P (exp))
8007 	    TREE_USED (exp) = 1;
8008 
8009 	  if (modifier != EXPAND_INITIALIZER
8010 	      && modifier != EXPAND_CONST_ADDRESS
8011 	      && modifier != EXPAND_SUM)
8012 	    result = force_operand (result, target);
8013 	  return result;
8014 	}
8015 
8016       /* Pass FALSE as the last argument to get_inner_reference although
8017 	 we are expanding to RTL.  The rationale is that we know how to
8018 	 handle "aligning nodes" here: we can just bypass them because
8019 	 they won't change the final object whose address will be returned
8020 	 (they actually exist only for that purpose).  */
8021       inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
8022 				   &unsignedp, &reversep, &volatilep);
8023       break;
8024     }
8025 
8026   /* We must have made progress.  */
8027   gcc_assert (inner != exp);
8028 
8029   subtarget = offset || maybe_ne (bitpos, 0) ? NULL_RTX : target;
8030   /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
8031      inner alignment, force the inner to be sufficiently aligned.  */
8032   if (CONSTANT_CLASS_P (inner)
8033       && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
8034     {
8035       inner = copy_node (inner);
8036       TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
8037       SET_TYPE_ALIGN (TREE_TYPE (inner), TYPE_ALIGN (TREE_TYPE (exp)));
8038       TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
8039     }
8040   result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
8041 
8042   if (offset)
8043     {
8044       rtx tmp;
8045 
8046       if (modifier != EXPAND_NORMAL)
8047 	result = force_operand (result, NULL);
8048       tmp = expand_expr (offset, NULL_RTX, tmode,
8049 			 modifier == EXPAND_INITIALIZER
8050 			  ? EXPAND_INITIALIZER : EXPAND_NORMAL);
8051 
8052       /* expand_expr is allowed to return an object in a mode other
8053 	 than TMODE.  If it did, we need to convert.  */
8054       if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
8055 	tmp = convert_modes (tmode, GET_MODE (tmp),
8056 			     tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
8057       result = convert_memory_address_addr_space (tmode, result, as);
8058       tmp = convert_memory_address_addr_space (tmode, tmp, as);
8059 
8060       if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8061 	result = simplify_gen_binary (PLUS, tmode, result, tmp);
8062       else
8063 	{
8064 	  subtarget = maybe_ne (bitpos, 0) ? NULL_RTX : target;
8065 	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
8066 					1, OPTAB_LIB_WIDEN);
8067 	}
8068     }
8069 
8070   if (maybe_ne (bitpos, 0))
8071     {
8072       /* Someone beforehand should have rejected taking the address
8073 	 of an object that isn't byte-aligned.  */
8074       poly_int64 bytepos = exact_div (bitpos, BITS_PER_UNIT);
8075       result = convert_memory_address_addr_space (tmode, result, as);
8076       result = plus_constant (tmode, result, bytepos);
8077       if (modifier < EXPAND_SUM)
8078 	result = force_operand (result, target);
8079     }
8080 
8081   return result;
8082 }
8083 
8084 /* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
8085    The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
8086 
8087 static rtx
8088 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
8089 		       enum expand_modifier modifier)
8090 {
8091   addr_space_t as = ADDR_SPACE_GENERIC;
8092   scalar_int_mode address_mode = Pmode;
8093   scalar_int_mode pointer_mode = ptr_mode;
8094   machine_mode rmode;
8095   rtx result;
8096 
8097   /* Target mode of VOIDmode says "whatever's natural".  */
8098   if (tmode == VOIDmode)
8099     tmode = TYPE_MODE (TREE_TYPE (exp));
8100 
8101   if (POINTER_TYPE_P (TREE_TYPE (exp)))
8102     {
8103       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
8104       address_mode = targetm.addr_space.address_mode (as);
8105       pointer_mode = targetm.addr_space.pointer_mode (as);
8106     }
8107 
8108   /* We can get called with some Weird Things if the user does silliness
8109      like "(short) &a".  In that case, convert_memory_address won't do
8110      the right thing, so ignore the given target mode.  */
8111   scalar_int_mode new_tmode = (tmode == pointer_mode
8112 			       ? pointer_mode
8113 			       : address_mode);
8114 
8115   result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
8116 				    new_tmode, modifier, as);
8117 
8118   /* Despite expand_expr claims concerning ignoring TMODE when not
8119      strictly convenient, stuff breaks if we don't honor it.  Note
8120      that combined with the above, we only do this for pointer modes.  */
8121   rmode = GET_MODE (result);
8122   if (rmode == VOIDmode)
8123     rmode = new_tmode;
8124   if (rmode != new_tmode)
8125     result = convert_memory_address_addr_space (new_tmode, result, as);
8126 
8127   return result;
8128 }
8129 
8130 /* Generate code for computing CONSTRUCTOR EXP.
8131    An rtx for the computed value is returned.  If AVOID_TEMP_MEM
8132    is TRUE, instead of creating a temporary variable in memory
8133    NULL is returned and the caller needs to handle it differently.  */
8134 
8135 static rtx
8136 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
8137 		    bool avoid_temp_mem)
8138 {
8139   tree type = TREE_TYPE (exp);
8140   machine_mode mode = TYPE_MODE (type);
8141 
8142   /* Try to avoid creating a temporary at all.  This is possible
8143      if all of the initializer is zero.
8144      FIXME: try to handle all [0..255] initializers we can handle
8145      with memset.  */
8146   if (TREE_STATIC (exp)
8147       && !TREE_ADDRESSABLE (exp)
8148       && target != 0 && mode == BLKmode
8149       && all_zeros_p (exp))
8150     {
8151       clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
8152       return target;
8153     }
8154 
8155   /* All elts simple constants => refer to a constant in memory.  But
8156      if this is a non-BLKmode mode, let it store a field at a time
8157      since that should make a CONST_INT, CONST_WIDE_INT or
8158      CONST_DOUBLE when we fold.  Likewise, if we have a target we can
8159      use, it is best to store directly into the target unless the type
8160      is large enough that memcpy will be used.  If we are making an
8161      initializer and all operands are constant, put it in memory as
8162      well.
8163 
8164      FIXME: Avoid trying to fill vector constructors piece-meal.
8165      Output them with output_constant_def below unless we're sure
8166      they're zeros.  This should go away when vector initializers
8167      are treated like VECTOR_CST instead of arrays.  */
8168   if ((TREE_STATIC (exp)
8169        && ((mode == BLKmode
8170 	    && ! (target != 0 && safe_from_p (target, exp, 1)))
8171 		  || TREE_ADDRESSABLE (exp)
8172 		  || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
8173 		      && (! can_move_by_pieces
8174 				     (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
8175 				      TYPE_ALIGN (type)))
8176 		      && ! mostly_zeros_p (exp))))
8177       || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
8178 	  && TREE_CONSTANT (exp)))
8179     {
8180       rtx constructor;
8181 
8182       if (avoid_temp_mem)
8183 	return NULL_RTX;
8184 
8185       constructor = expand_expr_constant (exp, 1, modifier);
8186 
8187       if (modifier != EXPAND_CONST_ADDRESS
8188 	  && modifier != EXPAND_INITIALIZER
8189 	  && modifier != EXPAND_SUM)
8190 	constructor = validize_mem (constructor);
8191 
8192       return constructor;
8193     }
8194 
8195   /* Handle calls that pass values in multiple non-contiguous
8196      locations.  The Irix 6 ABI has examples of this.  */
8197   if (target == 0 || ! safe_from_p (target, exp, 1)
8198       || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
8199     {
8200       if (avoid_temp_mem)
8201 	return NULL_RTX;
8202 
8203       target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
8204     }
8205 
8206   store_constructor (exp, target, 0, int_expr_size (exp), false);
8207   return target;
8208 }
8209 
8210 
8211 /* expand_expr: generate code for computing expression EXP.
8212    An rtx for the computed value is returned.  The value is never null.
8213    In the case of a void EXP, const0_rtx is returned.
8214 
8215    The value may be stored in TARGET if TARGET is nonzero.
8216    TARGET is just a suggestion; callers must assume that
8217    the rtx returned may not be the same as TARGET.
8218 
8219    If TARGET is CONST0_RTX, it means that the value will be ignored.
8220 
8221    If TMODE is not VOIDmode, it suggests generating the
8222    result in mode TMODE.  But this is done only when convenient.
8223    Otherwise, TMODE is ignored and the value generated in its natural mode.
8224    TMODE is just a suggestion; callers must assume that
8225    the rtx returned may not have mode TMODE.
8226 
8227    Note that TARGET may have neither TMODE nor MODE.  In that case, it
8228    probably will not be used.
8229 
8230    If MODIFIER is EXPAND_SUM then when EXP is an addition
8231    we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
8232    or a nest of (PLUS ...) and (MINUS ...) where the terms are
8233    products as above, or REG or MEM, or constant.
8234    Ordinarily in such cases we would output mul or add instructions
8235    and then return a pseudo reg containing the sum.
8236 
8237    EXPAND_INITIALIZER is much like EXPAND_SUM except that
8238    it also marks a label as absolutely required (it can't be dead).
8239    It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
8240    This is used for outputting expressions used in initializers.
8241 
8242    EXPAND_CONST_ADDRESS says that it is okay to return a MEM
8243    with a constant address even if that address is not normally legitimate.
8244    EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
8245 
8246    EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
8247    a call parameter.  Such targets require special care as we haven't yet
8248    marked TARGET so that it's safe from being trashed by libcalls.  We
8249    don't want to use TARGET for anything but the final result;
8250    Intermediate values must go elsewhere.   Additionally, calls to
8251    emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
8252 
8253    If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
8254    address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
8255    DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
8256    COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
8257    recursively.
8258 
8259    If INNER_REFERENCE_P is true, we are expanding an inner reference.
8260    In this case, we don't adjust a returned MEM rtx that wouldn't be
8261    sufficiently aligned for its mode; instead, it's up to the caller
8262    to deal with it afterwards.  This is used to make sure that unaligned
8263    base objects for which out-of-bounds accesses are supported, for
8264    example record types with trailing arrays, aren't realigned behind
8265    the back of the caller.
8266    The normal operating mode is to pass FALSE for this parameter.  */
8267 
8268 rtx
8269 expand_expr_real (tree exp, rtx target, machine_mode tmode,
8270 		  enum expand_modifier modifier, rtx *alt_rtl,
8271 		  bool inner_reference_p)
8272 {
8273   rtx ret;
8274 
8275   /* Handle ERROR_MARK before anybody tries to access its type.  */
8276   if (TREE_CODE (exp) == ERROR_MARK
8277       || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
8278     {
8279       ret = CONST0_RTX (tmode);
8280       return ret ? ret : const0_rtx;
8281     }
8282 
8283   ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
8284 			    inner_reference_p);
8285   return ret;
8286 }
8287 
8288 /* Try to expand the conditional expression which is represented by
8289    TREEOP0 ? TREEOP1 : TREEOP2 using conditonal moves.  If it succeeds
8290    return the rtl reg which represents the result.  Otherwise return
8291    NULL_RTX.  */
8292 
8293 static rtx
8294 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
8295 			      tree treeop1 ATTRIBUTE_UNUSED,
8296 			      tree treeop2 ATTRIBUTE_UNUSED)
8297 {
8298   rtx insn;
8299   rtx op00, op01, op1, op2;
8300   enum rtx_code comparison_code;
8301   machine_mode comparison_mode;
8302   gimple *srcstmt;
8303   rtx temp;
8304   tree type = TREE_TYPE (treeop1);
8305   int unsignedp = TYPE_UNSIGNED (type);
8306   machine_mode mode = TYPE_MODE (type);
8307   machine_mode orig_mode = mode;
8308   static bool expanding_cond_expr_using_cmove = false;
8309 
8310   /* Conditional move expansion can end up TERing two operands which,
8311      when recursively hitting conditional expressions can result in
8312      exponential behavior if the cmove expansion ultimatively fails.
8313      It's hardly profitable to TER a cmove into a cmove so avoid doing
8314      that by failing early if we end up recursing.  */
8315   if (expanding_cond_expr_using_cmove)
8316     return NULL_RTX;
8317 
8318   /* If we cannot do a conditional move on the mode, try doing it
8319      with the promoted mode. */
8320   if (!can_conditionally_move_p (mode))
8321     {
8322       mode = promote_mode (type, mode, &unsignedp);
8323       if (!can_conditionally_move_p (mode))
8324 	return NULL_RTX;
8325       temp = assign_temp (type, 0, 0); /* Use promoted mode for temp.  */
8326     }
8327   else
8328     temp = assign_temp (type, 0, 1);
8329 
8330   expanding_cond_expr_using_cmove = true;
8331   start_sequence ();
8332   expand_operands (treeop1, treeop2,
8333 		   temp, &op1, &op2, EXPAND_NORMAL);
8334 
8335   if (TREE_CODE (treeop0) == SSA_NAME
8336       && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
8337     {
8338       tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
8339       enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
8340       op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
8341       op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
8342       comparison_mode = TYPE_MODE (type);
8343       unsignedp = TYPE_UNSIGNED (type);
8344       comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8345     }
8346   else if (COMPARISON_CLASS_P (treeop0))
8347     {
8348       tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8349       enum tree_code cmpcode = TREE_CODE (treeop0);
8350       op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8351       op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8352       unsignedp = TYPE_UNSIGNED (type);
8353       comparison_mode = TYPE_MODE (type);
8354       comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8355     }
8356   else
8357     {
8358       op00 = expand_normal (treeop0);
8359       op01 = const0_rtx;
8360       comparison_code = NE;
8361       comparison_mode = GET_MODE (op00);
8362       if (comparison_mode == VOIDmode)
8363 	comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8364     }
8365   expanding_cond_expr_using_cmove = false;
8366 
8367   if (GET_MODE (op1) != mode)
8368     op1 = gen_lowpart (mode, op1);
8369 
8370   if (GET_MODE (op2) != mode)
8371     op2 = gen_lowpart (mode, op2);
8372 
8373   /* Try to emit the conditional move.  */
8374   insn = emit_conditional_move (temp, comparison_code,
8375 				op00, op01, comparison_mode,
8376 				op1, op2, mode,
8377 				unsignedp);
8378 
8379   /* If we could do the conditional move, emit the sequence,
8380      and return.  */
8381   if (insn)
8382     {
8383       rtx_insn *seq = get_insns ();
8384       end_sequence ();
8385       emit_insn (seq);
8386       return convert_modes (orig_mode, mode, temp, 0);
8387     }
8388 
8389   /* Otherwise discard the sequence and fall back to code with
8390      branches.  */
8391   end_sequence ();
8392   return NULL_RTX;
8393 }
8394 
8395 rtx
8396 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8397 		    enum expand_modifier modifier)
8398 {
8399   rtx op0, op1, op2, temp;
8400   rtx_code_label *lab;
8401   tree type;
8402   int unsignedp;
8403   machine_mode mode;
8404   scalar_int_mode int_mode;
8405   enum tree_code code = ops->code;
8406   optab this_optab;
8407   rtx subtarget, original_target;
8408   int ignore;
8409   bool reduce_bit_field;
8410   location_t loc = ops->location;
8411   tree treeop0, treeop1, treeop2;
8412 #define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			  \
8413 				 ? reduce_to_bit_field_precision ((expr), \
8414 								  target, \
8415 								  type)	  \
8416 				 : (expr))
8417 
8418   type = ops->type;
8419   mode = TYPE_MODE (type);
8420   unsignedp = TYPE_UNSIGNED (type);
8421 
8422   treeop0 = ops->op0;
8423   treeop1 = ops->op1;
8424   treeop2 = ops->op2;
8425 
8426   /* We should be called only on simple (binary or unary) expressions,
8427      exactly those that are valid in gimple expressions that aren't
8428      GIMPLE_SINGLE_RHS (or invalid).  */
8429   gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8430 	      || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8431 	      || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8432 
8433   ignore = (target == const0_rtx
8434 	    || ((CONVERT_EXPR_CODE_P (code)
8435 		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8436 		&& TREE_CODE (type) == VOID_TYPE));
8437 
8438   /* We should be called only if we need the result.  */
8439   gcc_assert (!ignore);
8440 
8441   /* An operation in what may be a bit-field type needs the
8442      result to be reduced to the precision of the bit-field type,
8443      which is narrower than that of the type's mode.  */
8444   reduce_bit_field = (INTEGRAL_TYPE_P (type)
8445 		      && !type_has_mode_precision_p (type));
8446 
8447   if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8448     target = 0;
8449 
8450   /* Use subtarget as the target for operand 0 of a binary operation.  */
8451   subtarget = get_subtarget (target);
8452   original_target = target;
8453 
8454   switch (code)
8455     {
8456     case NON_LVALUE_EXPR:
8457     case PAREN_EXPR:
8458     CASE_CONVERT:
8459       if (treeop0 == error_mark_node)
8460 	return const0_rtx;
8461 
8462       if (TREE_CODE (type) == UNION_TYPE)
8463 	{
8464 	  tree valtype = TREE_TYPE (treeop0);
8465 
8466 	  /* If both input and output are BLKmode, this conversion isn't doing
8467 	     anything except possibly changing memory attribute.  */
8468 	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8469 	    {
8470 	      rtx result = expand_expr (treeop0, target, tmode,
8471 					modifier);
8472 
8473 	      result = copy_rtx (result);
8474 	      set_mem_attributes (result, type, 0);
8475 	      return result;
8476 	    }
8477 
8478 	  if (target == 0)
8479 	    {
8480 	      if (TYPE_MODE (type) != BLKmode)
8481 		target = gen_reg_rtx (TYPE_MODE (type));
8482 	      else
8483 		target = assign_temp (type, 1, 1);
8484 	    }
8485 
8486 	  if (MEM_P (target))
8487 	    /* Store data into beginning of memory target.  */
8488 	    store_expr (treeop0,
8489 			adjust_address (target, TYPE_MODE (valtype), 0),
8490 			modifier == EXPAND_STACK_PARM,
8491 			false, TYPE_REVERSE_STORAGE_ORDER (type));
8492 
8493 	  else
8494 	    {
8495 	      gcc_assert (REG_P (target)
8496 			  && !TYPE_REVERSE_STORAGE_ORDER (type));
8497 
8498 	      /* Store this field into a union of the proper type.  */
8499 	      poly_uint64 op0_size
8500 		= tree_to_poly_uint64 (TYPE_SIZE (TREE_TYPE (treeop0)));
8501 	      poly_uint64 union_size = GET_MODE_BITSIZE (mode);
8502 	      store_field (target,
8503 			   /* The conversion must be constructed so that
8504 			      we know at compile time how many bits
8505 			      to preserve.  */
8506 			   ordered_min (op0_size, union_size),
8507 			   0, 0, 0, TYPE_MODE (valtype), treeop0, 0,
8508 			   false, false);
8509 	    }
8510 
8511 	  /* Return the entire union.  */
8512 	  return target;
8513 	}
8514 
8515       if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8516 	{
8517 	  op0 = expand_expr (treeop0, target, VOIDmode,
8518 			     modifier);
8519 
8520 	  /* If the signedness of the conversion differs and OP0 is
8521 	     a promoted SUBREG, clear that indication since we now
8522 	     have to do the proper extension.  */
8523 	  if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8524 	      && GET_CODE (op0) == SUBREG)
8525 	    SUBREG_PROMOTED_VAR_P (op0) = 0;
8526 
8527 	  return REDUCE_BIT_FIELD (op0);
8528 	}
8529 
8530       op0 = expand_expr (treeop0, NULL_RTX, mode,
8531 			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8532       if (GET_MODE (op0) == mode)
8533 	;
8534 
8535       /* If OP0 is a constant, just convert it into the proper mode.  */
8536       else if (CONSTANT_P (op0))
8537 	{
8538 	  tree inner_type = TREE_TYPE (treeop0);
8539 	  machine_mode inner_mode = GET_MODE (op0);
8540 
8541 	  if (inner_mode == VOIDmode)
8542 	    inner_mode = TYPE_MODE (inner_type);
8543 
8544 	  if (modifier == EXPAND_INITIALIZER)
8545 	    op0 = lowpart_subreg (mode, op0, inner_mode);
8546 	  else
8547 	    op0=  convert_modes (mode, inner_mode, op0,
8548 				 TYPE_UNSIGNED (inner_type));
8549 	}
8550 
8551       else if (modifier == EXPAND_INITIALIZER)
8552 	op0 = gen_rtx_fmt_e (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8553 			     ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8554 
8555       else if (target == 0)
8556 	op0 = convert_to_mode (mode, op0,
8557 			       TYPE_UNSIGNED (TREE_TYPE
8558 					      (treeop0)));
8559       else
8560 	{
8561 	  convert_move (target, op0,
8562 			TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8563 	  op0 = target;
8564 	}
8565 
8566       return REDUCE_BIT_FIELD (op0);
8567 
8568     case ADDR_SPACE_CONVERT_EXPR:
8569       {
8570 	tree treeop0_type = TREE_TYPE (treeop0);
8571 
8572 	gcc_assert (POINTER_TYPE_P (type));
8573 	gcc_assert (POINTER_TYPE_P (treeop0_type));
8574 
8575 	addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8576 	addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8577 
8578         /* Conversions between pointers to the same address space should
8579 	   have been implemented via CONVERT_EXPR / NOP_EXPR.  */
8580 	gcc_assert (as_to != as_from);
8581 
8582 	op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8583 
8584         /* Ask target code to handle conversion between pointers
8585 	   to overlapping address spaces.  */
8586 	if (targetm.addr_space.subset_p (as_to, as_from)
8587 	    || targetm.addr_space.subset_p (as_from, as_to))
8588 	  {
8589 	    op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8590 	  }
8591         else
8592           {
8593 	    /* For disjoint address spaces, converting anything but a null
8594 	       pointer invokes undefined behavior.  We truncate or extend the
8595 	       value as if we'd converted via integers, which handles 0 as
8596 	       required, and all others as the programmer likely expects.  */
8597 #ifndef POINTERS_EXTEND_UNSIGNED
8598 	    const int POINTERS_EXTEND_UNSIGNED = 1;
8599 #endif
8600 	    op0 = convert_modes (mode, TYPE_MODE (treeop0_type),
8601 				 op0, POINTERS_EXTEND_UNSIGNED);
8602 	  }
8603 	gcc_assert (op0);
8604 	return op0;
8605       }
8606 
8607     case POINTER_PLUS_EXPR:
8608       /* Even though the sizetype mode and the pointer's mode can be different
8609          expand is able to handle this correctly and get the correct result out
8610          of the PLUS_EXPR code.  */
8611       /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8612          if sizetype precision is smaller than pointer precision.  */
8613       if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8614 	treeop1 = fold_convert_loc (loc, type,
8615 				    fold_convert_loc (loc, ssizetype,
8616 						      treeop1));
8617       /* If sizetype precision is larger than pointer precision, truncate the
8618 	 offset to have matching modes.  */
8619       else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8620 	treeop1 = fold_convert_loc (loc, type, treeop1);
8621       /* FALLTHRU */
8622 
8623     case PLUS_EXPR:
8624       /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8625 	 something else, make sure we add the register to the constant and
8626 	 then to the other thing.  This case can occur during strength
8627 	 reduction and doing it this way will produce better code if the
8628 	 frame pointer or argument pointer is eliminated.
8629 
8630 	 fold-const.c will ensure that the constant is always in the inner
8631 	 PLUS_EXPR, so the only case we need to do anything about is if
8632 	 sp, ap, or fp is our second argument, in which case we must swap
8633 	 the innermost first argument and our second argument.  */
8634 
8635       if (TREE_CODE (treeop0) == PLUS_EXPR
8636 	  && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8637 	  && VAR_P (treeop1)
8638 	  && (DECL_RTL (treeop1) == frame_pointer_rtx
8639 	      || DECL_RTL (treeop1) == stack_pointer_rtx
8640 	      || DECL_RTL (treeop1) == arg_pointer_rtx))
8641 	{
8642 	  gcc_unreachable ();
8643 	}
8644 
8645       /* If the result is to be ptr_mode and we are adding an integer to
8646 	 something, we might be forming a constant.  So try to use
8647 	 plus_constant.  If it produces a sum and we can't accept it,
8648 	 use force_operand.  This allows P = &ARR[const] to generate
8649 	 efficient code on machines where a SYMBOL_REF is not a valid
8650 	 address.
8651 
8652 	 If this is an EXPAND_SUM call, always return the sum.  */
8653       if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8654 	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8655 	{
8656 	  if (modifier == EXPAND_STACK_PARM)
8657 	    target = 0;
8658 	  if (TREE_CODE (treeop0) == INTEGER_CST
8659 	      && HWI_COMPUTABLE_MODE_P (mode)
8660 	      && TREE_CONSTANT (treeop1))
8661 	    {
8662 	      rtx constant_part;
8663 	      HOST_WIDE_INT wc;
8664 	      machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8665 
8666 	      op1 = expand_expr (treeop1, subtarget, VOIDmode,
8667 				 EXPAND_SUM);
8668 	      /* Use wi::shwi to ensure that the constant is
8669 		 truncated according to the mode of OP1, then sign extended
8670 		 to a HOST_WIDE_INT.  Using the constant directly can result
8671 		 in non-canonical RTL in a 64x32 cross compile.  */
8672 	      wc = TREE_INT_CST_LOW (treeop0);
8673 	      constant_part =
8674 		immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8675 	      op1 = plus_constant (mode, op1, INTVAL (constant_part));
8676 	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8677 		op1 = force_operand (op1, target);
8678 	      return REDUCE_BIT_FIELD (op1);
8679 	    }
8680 
8681 	  else if (TREE_CODE (treeop1) == INTEGER_CST
8682 		   && HWI_COMPUTABLE_MODE_P (mode)
8683 		   && TREE_CONSTANT (treeop0))
8684 	    {
8685 	      rtx constant_part;
8686 	      HOST_WIDE_INT wc;
8687 	      machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8688 
8689 	      op0 = expand_expr (treeop0, subtarget, VOIDmode,
8690 				 (modifier == EXPAND_INITIALIZER
8691 				 ? EXPAND_INITIALIZER : EXPAND_SUM));
8692 	      if (! CONSTANT_P (op0))
8693 		{
8694 		  op1 = expand_expr (treeop1, NULL_RTX,
8695 				     VOIDmode, modifier);
8696 		  /* Return a PLUS if modifier says it's OK.  */
8697 		  if (modifier == EXPAND_SUM
8698 		      || modifier == EXPAND_INITIALIZER)
8699 		    return simplify_gen_binary (PLUS, mode, op0, op1);
8700 		  goto binop2;
8701 		}
8702 	      /* Use wi::shwi to ensure that the constant is
8703 		 truncated according to the mode of OP1, then sign extended
8704 		 to a HOST_WIDE_INT.  Using the constant directly can result
8705 		 in non-canonical RTL in a 64x32 cross compile.  */
8706 	      wc = TREE_INT_CST_LOW (treeop1);
8707 	      constant_part
8708 		= immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8709 	      op0 = plus_constant (mode, op0, INTVAL (constant_part));
8710 	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8711 		op0 = force_operand (op0, target);
8712 	      return REDUCE_BIT_FIELD (op0);
8713 	    }
8714 	}
8715 
8716       /* Use TER to expand pointer addition of a negated value
8717 	 as pointer subtraction.  */
8718       if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8719 	   || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8720 	       && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8721 	  && TREE_CODE (treeop1) == SSA_NAME
8722 	  && TYPE_MODE (TREE_TYPE (treeop0))
8723 	     == TYPE_MODE (TREE_TYPE (treeop1)))
8724 	{
8725 	  gimple *def = get_def_for_expr (treeop1, NEGATE_EXPR);
8726 	  if (def)
8727 	    {
8728 	      treeop1 = gimple_assign_rhs1 (def);
8729 	      code = MINUS_EXPR;
8730 	      goto do_minus;
8731 	    }
8732 	}
8733 
8734       /* No sense saving up arithmetic to be done
8735 	 if it's all in the wrong mode to form part of an address.
8736 	 And force_operand won't know whether to sign-extend or
8737 	 zero-extend.  */
8738       if (modifier != EXPAND_INITIALIZER
8739 	  && (modifier != EXPAND_SUM || mode != ptr_mode))
8740 	{
8741 	  expand_operands (treeop0, treeop1,
8742 			   subtarget, &op0, &op1, modifier);
8743 	  if (op0 == const0_rtx)
8744 	    return op1;
8745 	  if (op1 == const0_rtx)
8746 	    return op0;
8747 	  goto binop2;
8748 	}
8749 
8750       expand_operands (treeop0, treeop1,
8751 		       subtarget, &op0, &op1, modifier);
8752       return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8753 
8754     case MINUS_EXPR:
8755     case POINTER_DIFF_EXPR:
8756     do_minus:
8757       /* For initializers, we are allowed to return a MINUS of two
8758 	 symbolic constants.  Here we handle all cases when both operands
8759 	 are constant.  */
8760       /* Handle difference of two symbolic constants,
8761 	 for the sake of an initializer.  */
8762       if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8763 	  && really_constant_p (treeop0)
8764 	  && really_constant_p (treeop1))
8765 	{
8766 	  expand_operands (treeop0, treeop1,
8767 			   NULL_RTX, &op0, &op1, modifier);
8768 	  return simplify_gen_binary (MINUS, mode, op0, op1);
8769 	}
8770 
8771       /* No sense saving up arithmetic to be done
8772 	 if it's all in the wrong mode to form part of an address.
8773 	 And force_operand won't know whether to sign-extend or
8774 	 zero-extend.  */
8775       if (modifier != EXPAND_INITIALIZER
8776 	  && (modifier != EXPAND_SUM || mode != ptr_mode))
8777 	goto binop;
8778 
8779       expand_operands (treeop0, treeop1,
8780 		       subtarget, &op0, &op1, modifier);
8781 
8782       /* Convert A - const to A + (-const).  */
8783       if (CONST_INT_P (op1))
8784 	{
8785 	  op1 = negate_rtx (mode, op1);
8786 	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8787 	}
8788 
8789       goto binop2;
8790 
8791     case WIDEN_MULT_PLUS_EXPR:
8792     case WIDEN_MULT_MINUS_EXPR:
8793       expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8794       op2 = expand_normal (treeop2);
8795       target = expand_widen_pattern_expr (ops, op0, op1, op2,
8796 					  target, unsignedp);
8797       return target;
8798 
8799     case WIDEN_MULT_EXPR:
8800       /* If first operand is constant, swap them.
8801 	 Thus the following special case checks need only
8802 	 check the second operand.  */
8803       if (TREE_CODE (treeop0) == INTEGER_CST)
8804 	std::swap (treeop0, treeop1);
8805 
8806       /* First, check if we have a multiplication of one signed and one
8807 	 unsigned operand.  */
8808       if (TREE_CODE (treeop1) != INTEGER_CST
8809 	  && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8810 	      != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8811 	{
8812 	  machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8813 	  this_optab = usmul_widen_optab;
8814 	  if (find_widening_optab_handler (this_optab, mode, innermode)
8815 		!= CODE_FOR_nothing)
8816 	    {
8817 	      if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8818 		expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8819 				 EXPAND_NORMAL);
8820 	      else
8821 		expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8822 				 EXPAND_NORMAL);
8823 	      /* op0 and op1 might still be constant, despite the above
8824 		 != INTEGER_CST check.  Handle it.  */
8825 	      if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8826 		{
8827 		  op0 = convert_modes (mode, innermode, op0, true);
8828 		  op1 = convert_modes (mode, innermode, op1, false);
8829 		  return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8830 							target, unsignedp));
8831 		}
8832 	      goto binop3;
8833 	    }
8834 	}
8835       /* Check for a multiplication with matching signedness.  */
8836       else if ((TREE_CODE (treeop1) == INTEGER_CST
8837 		&& int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8838 	       || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8839 		   == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8840 	{
8841 	  tree op0type = TREE_TYPE (treeop0);
8842 	  machine_mode innermode = TYPE_MODE (op0type);
8843 	  bool zextend_p = TYPE_UNSIGNED (op0type);
8844 	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8845 	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8846 
8847 	  if (TREE_CODE (treeop0) != INTEGER_CST)
8848 	    {
8849 	      if (find_widening_optab_handler (this_optab, mode, innermode)
8850 		  != CODE_FOR_nothing)
8851 		{
8852 		  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8853 				   EXPAND_NORMAL);
8854 		  /* op0 and op1 might still be constant, despite the above
8855 		     != INTEGER_CST check.  Handle it.  */
8856 		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8857 		    {
8858 		     widen_mult_const:
8859 		      op0 = convert_modes (mode, innermode, op0, zextend_p);
8860 		      op1
8861 			= convert_modes (mode, innermode, op1,
8862 					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8863 		      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8864 							    target,
8865 							    unsignedp));
8866 		    }
8867 		  temp = expand_widening_mult (mode, op0, op1, target,
8868 					       unsignedp, this_optab);
8869 		  return REDUCE_BIT_FIELD (temp);
8870 		}
8871 	      if (find_widening_optab_handler (other_optab, mode, innermode)
8872 		  != CODE_FOR_nothing
8873 		  && innermode == word_mode)
8874 		{
8875 		  rtx htem, hipart;
8876 		  op0 = expand_normal (treeop0);
8877 		  op1 = expand_normal (treeop1);
8878 		  /* op0 and op1 might be constants, despite the above
8879 		     != INTEGER_CST check.  Handle it.  */
8880 		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8881 		    goto widen_mult_const;
8882 		  if (TREE_CODE (treeop1) == INTEGER_CST)
8883 		    op1 = convert_modes (mode, word_mode, op1,
8884 					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8885 		  temp = expand_binop (mode, other_optab, op0, op1, target,
8886 				       unsignedp, OPTAB_LIB_WIDEN);
8887 		  hipart = gen_highpart (word_mode, temp);
8888 		  htem = expand_mult_highpart_adjust (word_mode, hipart,
8889 						      op0, op1, hipart,
8890 						      zextend_p);
8891 		  if (htem != hipart)
8892 		    emit_move_insn (hipart, htem);
8893 		  return REDUCE_BIT_FIELD (temp);
8894 		}
8895 	    }
8896 	}
8897       treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8898       treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8899       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8900       return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8901 
8902     case FMA_EXPR:
8903       {
8904 	optab opt = fma_optab;
8905 	gimple *def0, *def2;
8906 
8907 	/* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8908 	   call.  */
8909 	if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8910 	  {
8911 	    tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8912 	    tree call_expr;
8913 
8914 	    gcc_assert (fn != NULL_TREE);
8915 	    call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8916 	    return expand_builtin (call_expr, target, subtarget, mode, false);
8917 	  }
8918 
8919 	def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8920 	/* The multiplication is commutative - look at its 2nd operand
8921 	   if the first isn't fed by a negate.  */
8922 	if (!def0)
8923 	  {
8924 	    def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8925 	    /* Swap operands if the 2nd operand is fed by a negate.  */
8926 	    if (def0)
8927 	      std::swap (treeop0, treeop1);
8928 	  }
8929 	def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8930 
8931 	op0 = op2 = NULL;
8932 
8933 	if (def0 && def2
8934 	    && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8935 	  {
8936 	    opt = fnms_optab;
8937 	    op0 = expand_normal (gimple_assign_rhs1 (def0));
8938 	    op2 = expand_normal (gimple_assign_rhs1 (def2));
8939 	  }
8940 	else if (def0
8941 		 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8942 	  {
8943 	    opt = fnma_optab;
8944 	    op0 = expand_normal (gimple_assign_rhs1 (def0));
8945 	  }
8946 	else if (def2
8947 		 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8948 	  {
8949 	    opt = fms_optab;
8950 	    op2 = expand_normal (gimple_assign_rhs1 (def2));
8951 	  }
8952 
8953 	if (op0 == NULL)
8954 	  op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8955 	if (op2 == NULL)
8956 	  op2 = expand_normal (treeop2);
8957 	op1 = expand_normal (treeop1);
8958 
8959 	return expand_ternary_op (TYPE_MODE (type), opt,
8960 				  op0, op1, op2, target, 0);
8961       }
8962 
8963     case MULT_EXPR:
8964       /* If this is a fixed-point operation, then we cannot use the code
8965 	 below because "expand_mult" doesn't support sat/no-sat fixed-point
8966          multiplications.   */
8967       if (ALL_FIXED_POINT_MODE_P (mode))
8968 	goto binop;
8969 
8970       /* If first operand is constant, swap them.
8971 	 Thus the following special case checks need only
8972 	 check the second operand.  */
8973       if (TREE_CODE (treeop0) == INTEGER_CST)
8974 	std::swap (treeop0, treeop1);
8975 
8976       /* Attempt to return something suitable for generating an
8977 	 indexed address, for machines that support that.  */
8978 
8979       if (modifier == EXPAND_SUM && mode == ptr_mode
8980 	  && tree_fits_shwi_p (treeop1))
8981 	{
8982 	  tree exp1 = treeop1;
8983 
8984 	  op0 = expand_expr (treeop0, subtarget, VOIDmode,
8985 			     EXPAND_SUM);
8986 
8987 	  if (!REG_P (op0))
8988 	    op0 = force_operand (op0, NULL_RTX);
8989 	  if (!REG_P (op0))
8990 	    op0 = copy_to_mode_reg (mode, op0);
8991 
8992 	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8993 			       gen_int_mode (tree_to_shwi (exp1),
8994 					     TYPE_MODE (TREE_TYPE (exp1)))));
8995 	}
8996 
8997       if (modifier == EXPAND_STACK_PARM)
8998 	target = 0;
8999 
9000       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
9001       return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
9002 
9003     case TRUNC_MOD_EXPR:
9004     case FLOOR_MOD_EXPR:
9005     case CEIL_MOD_EXPR:
9006     case ROUND_MOD_EXPR:
9007 
9008     case TRUNC_DIV_EXPR:
9009     case FLOOR_DIV_EXPR:
9010     case CEIL_DIV_EXPR:
9011     case ROUND_DIV_EXPR:
9012     case EXACT_DIV_EXPR:
9013      {
9014        /* If this is a fixed-point operation, then we cannot use the code
9015 	  below because "expand_divmod" doesn't support sat/no-sat fixed-point
9016 	  divisions.   */
9017        if (ALL_FIXED_POINT_MODE_P (mode))
9018 	 goto binop;
9019 
9020        if (modifier == EXPAND_STACK_PARM)
9021 	 target = 0;
9022        /* Possible optimization: compute the dividend with EXPAND_SUM
9023 	  then if the divisor is constant can optimize the case
9024 	  where some terms of the dividend have coeffs divisible by it.  */
9025        expand_operands (treeop0, treeop1,
9026 			subtarget, &op0, &op1, EXPAND_NORMAL);
9027        bool mod_p = code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR
9028 		    || code == CEIL_MOD_EXPR || code == ROUND_MOD_EXPR;
9029        if (SCALAR_INT_MODE_P (mode)
9030 	   && optimize >= 2
9031 	   && get_range_pos_neg (treeop0) == 1
9032 	   && get_range_pos_neg (treeop1) == 1)
9033 	 {
9034 	   /* If both arguments are known to be positive when interpreted
9035 	      as signed, we can expand it as both signed and unsigned
9036 	      division or modulo.  Choose the cheaper sequence in that case.  */
9037 	   bool speed_p = optimize_insn_for_speed_p ();
9038 	   do_pending_stack_adjust ();
9039 	   start_sequence ();
9040 	   rtx uns_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 1);
9041 	   rtx_insn *uns_insns = get_insns ();
9042 	   end_sequence ();
9043 	   start_sequence ();
9044 	   rtx sgn_ret = expand_divmod (mod_p, code, mode, op0, op1, target, 0);
9045 	   rtx_insn *sgn_insns = get_insns ();
9046 	   end_sequence ();
9047 	   unsigned uns_cost = seq_cost (uns_insns, speed_p);
9048 	   unsigned sgn_cost = seq_cost (sgn_insns, speed_p);
9049 
9050 	   /* If costs are the same then use as tie breaker the other
9051 	      other factor.  */
9052 	   if (uns_cost == sgn_cost)
9053 	     {
9054 		uns_cost = seq_cost (uns_insns, !speed_p);
9055 		sgn_cost = seq_cost (sgn_insns, !speed_p);
9056 	     }
9057 
9058 	   if (uns_cost < sgn_cost || (uns_cost == sgn_cost && unsignedp))
9059 	     {
9060 	       emit_insn (uns_insns);
9061 	       return uns_ret;
9062 	     }
9063 	   emit_insn (sgn_insns);
9064 	   return sgn_ret;
9065 	 }
9066        return expand_divmod (mod_p, code, mode, op0, op1, target, unsignedp);
9067      }
9068     case RDIV_EXPR:
9069       goto binop;
9070 
9071     case MULT_HIGHPART_EXPR:
9072       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
9073       temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
9074       gcc_assert (temp);
9075       return temp;
9076 
9077     case FIXED_CONVERT_EXPR:
9078       op0 = expand_normal (treeop0);
9079       if (target == 0 || modifier == EXPAND_STACK_PARM)
9080 	target = gen_reg_rtx (mode);
9081 
9082       if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
9083 	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
9084           || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
9085 	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
9086       else
9087 	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
9088       return target;
9089 
9090     case FIX_TRUNC_EXPR:
9091       op0 = expand_normal (treeop0);
9092       if (target == 0 || modifier == EXPAND_STACK_PARM)
9093 	target = gen_reg_rtx (mode);
9094       expand_fix (target, op0, unsignedp);
9095       return target;
9096 
9097     case FLOAT_EXPR:
9098       op0 = expand_normal (treeop0);
9099       if (target == 0 || modifier == EXPAND_STACK_PARM)
9100 	target = gen_reg_rtx (mode);
9101       /* expand_float can't figure out what to do if FROM has VOIDmode.
9102 	 So give it the correct mode.  With -O, cse will optimize this.  */
9103       if (GET_MODE (op0) == VOIDmode)
9104 	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
9105 				op0);
9106       expand_float (target, op0,
9107 		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9108       return target;
9109 
9110     case NEGATE_EXPR:
9111       op0 = expand_expr (treeop0, subtarget,
9112 			 VOIDmode, EXPAND_NORMAL);
9113       if (modifier == EXPAND_STACK_PARM)
9114 	target = 0;
9115       temp = expand_unop (mode,
9116       			  optab_for_tree_code (NEGATE_EXPR, type,
9117 					       optab_default),
9118 			  op0, target, 0);
9119       gcc_assert (temp);
9120       return REDUCE_BIT_FIELD (temp);
9121 
9122     case ABS_EXPR:
9123       op0 = expand_expr (treeop0, subtarget,
9124 			 VOIDmode, EXPAND_NORMAL);
9125       if (modifier == EXPAND_STACK_PARM)
9126 	target = 0;
9127 
9128       /* ABS_EXPR is not valid for complex arguments.  */
9129       gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9130 		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
9131 
9132       /* Unsigned abs is simply the operand.  Testing here means we don't
9133 	 risk generating incorrect code below.  */
9134       if (TYPE_UNSIGNED (type))
9135 	return op0;
9136 
9137       return expand_abs (mode, op0, target, unsignedp,
9138 			 safe_from_p (target, treeop0, 1));
9139 
9140     case MAX_EXPR:
9141     case MIN_EXPR:
9142       target = original_target;
9143       if (target == 0
9144 	  || modifier == EXPAND_STACK_PARM
9145 	  || (MEM_P (target) && MEM_VOLATILE_P (target))
9146 	  || GET_MODE (target) != mode
9147 	  || (REG_P (target)
9148 	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
9149 	target = gen_reg_rtx (mode);
9150       expand_operands (treeop0, treeop1,
9151 		       target, &op0, &op1, EXPAND_NORMAL);
9152 
9153       /* First try to do it with a special MIN or MAX instruction.
9154 	 If that does not win, use a conditional jump to select the proper
9155 	 value.  */
9156       this_optab = optab_for_tree_code (code, type, optab_default);
9157       temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
9158 			   OPTAB_WIDEN);
9159       if (temp != 0)
9160 	return temp;
9161 
9162       /* For vector MIN <x, y>, expand it a VEC_COND_EXPR <x <= y, x, y>
9163 	 and similarly for MAX <x, y>.  */
9164       if (VECTOR_TYPE_P (type))
9165 	{
9166 	  tree t0 = make_tree (type, op0);
9167 	  tree t1 = make_tree (type, op1);
9168 	  tree comparison = build2 (code == MIN_EXPR ? LE_EXPR : GE_EXPR,
9169 				    type, t0, t1);
9170 	  return expand_vec_cond_expr (type, comparison, t0, t1,
9171 				       original_target);
9172 	}
9173 
9174       /* At this point, a MEM target is no longer useful; we will get better
9175 	 code without it.  */
9176 
9177       if (! REG_P (target))
9178 	target = gen_reg_rtx (mode);
9179 
9180       /* If op1 was placed in target, swap op0 and op1.  */
9181       if (target != op0 && target == op1)
9182 	std::swap (op0, op1);
9183 
9184       /* We generate better code and avoid problems with op1 mentioning
9185 	 target by forcing op1 into a pseudo if it isn't a constant.  */
9186       if (! CONSTANT_P (op1))
9187 	op1 = force_reg (mode, op1);
9188 
9189       {
9190 	enum rtx_code comparison_code;
9191 	rtx cmpop1 = op1;
9192 
9193 	if (code == MAX_EXPR)
9194 	  comparison_code = unsignedp ? GEU : GE;
9195 	else
9196 	  comparison_code = unsignedp ? LEU : LE;
9197 
9198 	/* Canonicalize to comparisons against 0.  */
9199 	if (op1 == const1_rtx)
9200 	  {
9201 	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
9202 	       or (a != 0 ? a : 1) for unsigned.
9203 	       For MIN we are safe converting (a <= 1 ? a : 1)
9204 	       into (a <= 0 ? a : 1)  */
9205 	    cmpop1 = const0_rtx;
9206 	    if (code == MAX_EXPR)
9207 	      comparison_code = unsignedp ? NE : GT;
9208 	  }
9209 	if (op1 == constm1_rtx && !unsignedp)
9210 	  {
9211 	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
9212 	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
9213 	    cmpop1 = const0_rtx;
9214 	    if (code == MIN_EXPR)
9215 	      comparison_code = LT;
9216 	  }
9217 
9218 	/* Use a conditional move if possible.  */
9219 	if (can_conditionally_move_p (mode))
9220 	  {
9221 	    rtx insn;
9222 
9223 	    start_sequence ();
9224 
9225 	    /* Try to emit the conditional move.  */
9226 	    insn = emit_conditional_move (target, comparison_code,
9227 					  op0, cmpop1, mode,
9228 					  op0, op1, mode,
9229 					  unsignedp);
9230 
9231 	    /* If we could do the conditional move, emit the sequence,
9232 	       and return.  */
9233 	    if (insn)
9234 	      {
9235 		rtx_insn *seq = get_insns ();
9236 		end_sequence ();
9237 		emit_insn (seq);
9238 		return target;
9239 	      }
9240 
9241 	    /* Otherwise discard the sequence and fall back to code with
9242 	       branches.  */
9243 	    end_sequence ();
9244 	  }
9245 
9246 	if (target != op0)
9247 	  emit_move_insn (target, op0);
9248 
9249 	lab = gen_label_rtx ();
9250 	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
9251 				 unsignedp, mode, NULL_RTX, NULL, lab,
9252 				 profile_probability::uninitialized ());
9253       }
9254       emit_move_insn (target, op1);
9255       emit_label (lab);
9256       return target;
9257 
9258     case BIT_NOT_EXPR:
9259       op0 = expand_expr (treeop0, subtarget,
9260 			 VOIDmode, EXPAND_NORMAL);
9261       if (modifier == EXPAND_STACK_PARM)
9262 	target = 0;
9263       /* In case we have to reduce the result to bitfield precision
9264 	 for unsigned bitfield expand this as XOR with a proper constant
9265 	 instead.  */
9266       if (reduce_bit_field && TYPE_UNSIGNED (type))
9267 	{
9268 	  int_mode = SCALAR_INT_TYPE_MODE (type);
9269 	  wide_int mask = wi::mask (TYPE_PRECISION (type),
9270 				    false, GET_MODE_PRECISION (int_mode));
9271 
9272 	  temp = expand_binop (int_mode, xor_optab, op0,
9273 			       immed_wide_int_const (mask, int_mode),
9274 			       target, 1, OPTAB_LIB_WIDEN);
9275 	}
9276       else
9277 	temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
9278       gcc_assert (temp);
9279       return temp;
9280 
9281       /* ??? Can optimize bitwise operations with one arg constant.
9282 	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
9283 	 and (a bitwise1 b) bitwise2 b (etc)
9284 	 but that is probably not worth while.  */
9285 
9286     case BIT_AND_EXPR:
9287     case BIT_IOR_EXPR:
9288     case BIT_XOR_EXPR:
9289       goto binop;
9290 
9291     case LROTATE_EXPR:
9292     case RROTATE_EXPR:
9293       gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
9294 		  || type_has_mode_precision_p (type));
9295       /* fall through */
9296 
9297     case LSHIFT_EXPR:
9298     case RSHIFT_EXPR:
9299       {
9300 	/* If this is a fixed-point operation, then we cannot use the code
9301 	   below because "expand_shift" doesn't support sat/no-sat fixed-point
9302 	   shifts.  */
9303 	if (ALL_FIXED_POINT_MODE_P (mode))
9304 	  goto binop;
9305 
9306 	if (! safe_from_p (subtarget, treeop1, 1))
9307 	  subtarget = 0;
9308 	if (modifier == EXPAND_STACK_PARM)
9309 	  target = 0;
9310 	op0 = expand_expr (treeop0, subtarget,
9311 			   VOIDmode, EXPAND_NORMAL);
9312 
9313 	/* Left shift optimization when shifting across word_size boundary.
9314 
9315 	   If mode == GET_MODE_WIDER_MODE (word_mode), then normally
9316 	   there isn't native instruction to support this wide mode
9317 	   left shift.  Given below scenario:
9318 
9319 	    Type A = (Type) B  << C
9320 
9321 	    |<		 T	    >|
9322 	    | dest_high  |  dest_low |
9323 
9324 			 | word_size |
9325 
9326 	   If the shift amount C caused we shift B to across the word
9327 	   size boundary, i.e part of B shifted into high half of
9328 	   destination register, and part of B remains in the low
9329 	   half, then GCC will use the following left shift expand
9330 	   logic:
9331 
9332 	   1. Initialize dest_low to B.
9333 	   2. Initialize every bit of dest_high to the sign bit of B.
9334 	   3. Logic left shift dest_low by C bit to finalize dest_low.
9335 	      The value of dest_low before this shift is kept in a temp D.
9336 	   4. Logic left shift dest_high by C.
9337 	   5. Logic right shift D by (word_size - C).
9338 	   6. Or the result of 4 and 5 to finalize dest_high.
9339 
9340 	   While, by checking gimple statements, if operand B is
9341 	   coming from signed extension, then we can simplify above
9342 	   expand logic into:
9343 
9344 	      1. dest_high = src_low >> (word_size - C).
9345 	      2. dest_low = src_low << C.
9346 
9347 	   We can use one arithmetic right shift to finish all the
9348 	   purpose of steps 2, 4, 5, 6, thus we reduce the steps
9349 	   needed from 6 into 2.
9350 
9351 	   The case is similar for zero extension, except that we
9352 	   initialize dest_high to zero rather than copies of the sign
9353 	   bit from B.  Furthermore, we need to use a logical right shift
9354 	   in this case.
9355 
9356 	   The choice of sign-extension versus zero-extension is
9357 	   determined entirely by whether or not B is signed and is
9358 	   independent of the current setting of unsignedp.  */
9359 
9360 	temp = NULL_RTX;
9361 	if (code == LSHIFT_EXPR
9362 	    && target
9363 	    && REG_P (target)
9364 	    && GET_MODE_2XWIDER_MODE (word_mode).exists (&int_mode)
9365 	    && mode == int_mode
9366 	    && TREE_CONSTANT (treeop1)
9367 	    && TREE_CODE (treeop0) == SSA_NAME)
9368 	  {
9369 	    gimple *def = SSA_NAME_DEF_STMT (treeop0);
9370 	    if (is_gimple_assign (def)
9371 		&& gimple_assign_rhs_code (def) == NOP_EXPR)
9372 	      {
9373 		scalar_int_mode rmode = SCALAR_INT_TYPE_MODE
9374 		  (TREE_TYPE (gimple_assign_rhs1 (def)));
9375 
9376 		if (GET_MODE_SIZE (rmode) < GET_MODE_SIZE (int_mode)
9377 		    && TREE_INT_CST_LOW (treeop1) < GET_MODE_BITSIZE (word_mode)
9378 		    && ((TREE_INT_CST_LOW (treeop1) + GET_MODE_BITSIZE (rmode))
9379 			>= GET_MODE_BITSIZE (word_mode)))
9380 		  {
9381 		    rtx_insn *seq, *seq_old;
9382 		    poly_uint64 high_off = subreg_highpart_offset (word_mode,
9383 								   int_mode);
9384 		    bool extend_unsigned
9385 		      = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def)));
9386 		    rtx low = lowpart_subreg (word_mode, op0, int_mode);
9387 		    rtx dest_low = lowpart_subreg (word_mode, target, int_mode);
9388 		    rtx dest_high = simplify_gen_subreg (word_mode, target,
9389 							 int_mode, high_off);
9390 		    HOST_WIDE_INT ramount = (BITS_PER_WORD
9391 					     - TREE_INT_CST_LOW (treeop1));
9392 		    tree rshift = build_int_cst (TREE_TYPE (treeop1), ramount);
9393 
9394 		    start_sequence ();
9395 		    /* dest_high = src_low >> (word_size - C).  */
9396 		    temp = expand_variable_shift (RSHIFT_EXPR, word_mode, low,
9397 						  rshift, dest_high,
9398 						  extend_unsigned);
9399 		    if (temp != dest_high)
9400 		      emit_move_insn (dest_high, temp);
9401 
9402 		    /* dest_low = src_low << C.  */
9403 		    temp = expand_variable_shift (LSHIFT_EXPR, word_mode, low,
9404 						  treeop1, dest_low, unsignedp);
9405 		    if (temp != dest_low)
9406 		      emit_move_insn (dest_low, temp);
9407 
9408 		    seq = get_insns ();
9409 		    end_sequence ();
9410 		    temp = target ;
9411 
9412 		    if (have_insn_for (ASHIFT, int_mode))
9413 		      {
9414 			bool speed_p = optimize_insn_for_speed_p ();
9415 			start_sequence ();
9416 			rtx ret_old = expand_variable_shift (code, int_mode,
9417 							     op0, treeop1,
9418 							     target,
9419 							     unsignedp);
9420 
9421 			seq_old = get_insns ();
9422 			end_sequence ();
9423 			if (seq_cost (seq, speed_p)
9424 			    >= seq_cost (seq_old, speed_p))
9425 			  {
9426 			    seq = seq_old;
9427 			    temp = ret_old;
9428 			  }
9429 		      }
9430 		      emit_insn (seq);
9431 		  }
9432 	      }
9433 	  }
9434 
9435 	if (temp == NULL_RTX)
9436 	  temp = expand_variable_shift (code, mode, op0, treeop1, target,
9437 					unsignedp);
9438 	if (code == LSHIFT_EXPR)
9439 	  temp = REDUCE_BIT_FIELD (temp);
9440 	return temp;
9441       }
9442 
9443       /* Could determine the answer when only additive constants differ.  Also,
9444 	 the addition of one can be handled by changing the condition.  */
9445     case LT_EXPR:
9446     case LE_EXPR:
9447     case GT_EXPR:
9448     case GE_EXPR:
9449     case EQ_EXPR:
9450     case NE_EXPR:
9451     case UNORDERED_EXPR:
9452     case ORDERED_EXPR:
9453     case UNLT_EXPR:
9454     case UNLE_EXPR:
9455     case UNGT_EXPR:
9456     case UNGE_EXPR:
9457     case UNEQ_EXPR:
9458     case LTGT_EXPR:
9459       {
9460 	temp = do_store_flag (ops,
9461 			      modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9462 			      tmode != VOIDmode ? tmode : mode);
9463 	if (temp)
9464 	  return temp;
9465 
9466 	/* Use a compare and a jump for BLKmode comparisons, or for function
9467 	   type comparisons is have_canonicalize_funcptr_for_compare.  */
9468 
9469 	if ((target == 0
9470 	     || modifier == EXPAND_STACK_PARM
9471 	     || ! safe_from_p (target, treeop0, 1)
9472 	     || ! safe_from_p (target, treeop1, 1)
9473 	     /* Make sure we don't have a hard reg (such as function's return
9474 		value) live across basic blocks, if not optimizing.  */
9475 	     || (!optimize && REG_P (target)
9476 		 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9477 	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9478 
9479 	emit_move_insn (target, const0_rtx);
9480 
9481 	rtx_code_label *lab1 = gen_label_rtx ();
9482 	jumpifnot_1 (code, treeop0, treeop1, lab1,
9483 		     profile_probability::uninitialized ());
9484 
9485 	if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9486 	  emit_move_insn (target, constm1_rtx);
9487 	else
9488 	  emit_move_insn (target, const1_rtx);
9489 
9490 	emit_label (lab1);
9491 	return target;
9492       }
9493     case COMPLEX_EXPR:
9494       /* Get the rtx code of the operands.  */
9495       op0 = expand_normal (treeop0);
9496       op1 = expand_normal (treeop1);
9497 
9498       if (!target)
9499 	target = gen_reg_rtx (TYPE_MODE (type));
9500       else
9501 	/* If target overlaps with op1, then either we need to force
9502 	   op1 into a pseudo (if target also overlaps with op0),
9503 	   or write the complex parts in reverse order.  */
9504 	switch (GET_CODE (target))
9505 	  {
9506 	  case CONCAT:
9507 	    if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9508 	      {
9509 		if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9510 		  {
9511 		  complex_expr_force_op1:
9512 		    temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9513 		    emit_move_insn (temp, op1);
9514 		    op1 = temp;
9515 		    break;
9516 		  }
9517 	      complex_expr_swap_order:
9518 		/* Move the imaginary (op1) and real (op0) parts to their
9519 		   location.  */
9520 		write_complex_part (target, op1, true);
9521 		write_complex_part (target, op0, false);
9522 
9523 		return target;
9524 	      }
9525 	    break;
9526 	  case MEM:
9527 	    temp = adjust_address_nv (target,
9528 				      GET_MODE_INNER (GET_MODE (target)), 0);
9529 	    if (reg_overlap_mentioned_p (temp, op1))
9530 	      {
9531 		scalar_mode imode = GET_MODE_INNER (GET_MODE (target));
9532 		temp = adjust_address_nv (target, imode,
9533 					  GET_MODE_SIZE (imode));
9534 		if (reg_overlap_mentioned_p (temp, op0))
9535 		  goto complex_expr_force_op1;
9536 		goto complex_expr_swap_order;
9537 	      }
9538 	    break;
9539 	  default:
9540 	    if (reg_overlap_mentioned_p (target, op1))
9541 	      {
9542 		if (reg_overlap_mentioned_p (target, op0))
9543 		  goto complex_expr_force_op1;
9544 		goto complex_expr_swap_order;
9545 	      }
9546 	    break;
9547 	  }
9548 
9549       /* Move the real (op0) and imaginary (op1) parts to their location.  */
9550       write_complex_part (target, op0, false);
9551       write_complex_part (target, op1, true);
9552 
9553       return target;
9554 
9555     case WIDEN_SUM_EXPR:
9556       {
9557         tree oprnd0 = treeop0;
9558         tree oprnd1 = treeop1;
9559 
9560         expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9561         target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9562                                             target, unsignedp);
9563         return target;
9564       }
9565 
9566     case VEC_UNPACK_HI_EXPR:
9567     case VEC_UNPACK_LO_EXPR:
9568       {
9569 	op0 = expand_normal (treeop0);
9570 	temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9571 					  target, unsignedp);
9572 	gcc_assert (temp);
9573 	return temp;
9574       }
9575 
9576     case VEC_UNPACK_FLOAT_HI_EXPR:
9577     case VEC_UNPACK_FLOAT_LO_EXPR:
9578       {
9579 	op0 = expand_normal (treeop0);
9580 	/* The signedness is determined from input operand.  */
9581 	temp = expand_widen_pattern_expr
9582 	  (ops, op0, NULL_RTX, NULL_RTX,
9583 	   target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9584 
9585 	gcc_assert (temp);
9586 	return temp;
9587       }
9588 
9589     case VEC_WIDEN_MULT_HI_EXPR:
9590     case VEC_WIDEN_MULT_LO_EXPR:
9591     case VEC_WIDEN_MULT_EVEN_EXPR:
9592     case VEC_WIDEN_MULT_ODD_EXPR:
9593     case VEC_WIDEN_LSHIFT_HI_EXPR:
9594     case VEC_WIDEN_LSHIFT_LO_EXPR:
9595       expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9596       target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9597 					  target, unsignedp);
9598       gcc_assert (target);
9599       return target;
9600 
9601     case VEC_PACK_TRUNC_EXPR:
9602     case VEC_PACK_SAT_EXPR:
9603     case VEC_PACK_FIX_TRUNC_EXPR:
9604       mode = TYPE_MODE (TREE_TYPE (treeop0));
9605       goto binop;
9606 
9607     case VEC_PERM_EXPR:
9608       {
9609 	expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9610 	vec_perm_builder sel;
9611 	if (TREE_CODE (treeop2) == VECTOR_CST
9612 	    && tree_to_vec_perm_builder (&sel, treeop2))
9613 	  {
9614 	    machine_mode sel_mode = TYPE_MODE (TREE_TYPE (treeop2));
9615 	    temp = expand_vec_perm_const (mode, op0, op1, sel,
9616 					  sel_mode, target);
9617 	  }
9618 	else
9619 	  {
9620 	    op2 = expand_normal (treeop2);
9621 	    temp = expand_vec_perm_var (mode, op0, op1, op2, target);
9622 	  }
9623 	gcc_assert (temp);
9624 	return temp;
9625       }
9626 
9627     case DOT_PROD_EXPR:
9628       {
9629 	tree oprnd0 = treeop0;
9630 	tree oprnd1 = treeop1;
9631 	tree oprnd2 = treeop2;
9632 	rtx op2;
9633 
9634 	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9635 	op2 = expand_normal (oprnd2);
9636 	target = expand_widen_pattern_expr (ops, op0, op1, op2,
9637 					    target, unsignedp);
9638 	return target;
9639       }
9640 
9641       case SAD_EXPR:
9642       {
9643 	tree oprnd0 = treeop0;
9644 	tree oprnd1 = treeop1;
9645 	tree oprnd2 = treeop2;
9646 	rtx op2;
9647 
9648 	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9649 	op2 = expand_normal (oprnd2);
9650 	target = expand_widen_pattern_expr (ops, op0, op1, op2,
9651 					    target, unsignedp);
9652 	return target;
9653       }
9654 
9655     case REALIGN_LOAD_EXPR:
9656       {
9657         tree oprnd0 = treeop0;
9658         tree oprnd1 = treeop1;
9659         tree oprnd2 = treeop2;
9660         rtx op2;
9661 
9662         this_optab = optab_for_tree_code (code, type, optab_default);
9663         expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9664         op2 = expand_normal (oprnd2);
9665         temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9666 				  target, unsignedp);
9667         gcc_assert (temp);
9668         return temp;
9669       }
9670 
9671     case COND_EXPR:
9672       {
9673 	/* A COND_EXPR with its type being VOID_TYPE represents a
9674 	   conditional jump and is handled in
9675 	   expand_gimple_cond_expr.  */
9676 	gcc_assert (!VOID_TYPE_P (type));
9677 
9678 	/* Note that COND_EXPRs whose type is a structure or union
9679 	   are required to be constructed to contain assignments of
9680 	   a temporary variable, so that we can evaluate them here
9681 	   for side effect only.  If type is void, we must do likewise.  */
9682 
9683 	gcc_assert (!TREE_ADDRESSABLE (type)
9684 		    && !ignore
9685 		    && TREE_TYPE (treeop1) != void_type_node
9686 		    && TREE_TYPE (treeop2) != void_type_node);
9687 
9688 	temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9689 	if (temp)
9690 	  return temp;
9691 
9692 	/* If we are not to produce a result, we have no target.  Otherwise,
9693 	   if a target was specified use it; it will not be used as an
9694 	   intermediate target unless it is safe.  If no target, use a
9695 	   temporary.  */
9696 
9697 	if (modifier != EXPAND_STACK_PARM
9698 	    && original_target
9699 	    && safe_from_p (original_target, treeop0, 1)
9700 	    && GET_MODE (original_target) == mode
9701 	    && !MEM_P (original_target))
9702 	  temp = original_target;
9703 	else
9704 	  temp = assign_temp (type, 0, 1);
9705 
9706 	do_pending_stack_adjust ();
9707 	NO_DEFER_POP;
9708 	rtx_code_label *lab0 = gen_label_rtx ();
9709 	rtx_code_label *lab1 = gen_label_rtx ();
9710 	jumpifnot (treeop0, lab0,
9711 		   profile_probability::uninitialized ());
9712 	store_expr (treeop1, temp,
9713 		    modifier == EXPAND_STACK_PARM,
9714 		    false, false);
9715 
9716 	emit_jump_insn (targetm.gen_jump (lab1));
9717 	emit_barrier ();
9718 	emit_label (lab0);
9719 	store_expr (treeop2, temp,
9720 		    modifier == EXPAND_STACK_PARM,
9721 		    false, false);
9722 
9723 	emit_label (lab1);
9724 	OK_DEFER_POP;
9725 	return temp;
9726       }
9727 
9728     case VEC_COND_EXPR:
9729       target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9730       return target;
9731 
9732     case VEC_DUPLICATE_EXPR:
9733       op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
9734       target = expand_vector_broadcast (mode, op0);
9735       gcc_assert (target);
9736       return target;
9737 
9738     case VEC_SERIES_EXPR:
9739       expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, modifier);
9740       return expand_vec_series_expr (mode, op0, op1, target);
9741 
9742     case BIT_INSERT_EXPR:
9743       {
9744 	unsigned bitpos = tree_to_uhwi (treeop2);
9745 	unsigned bitsize;
9746 	if (INTEGRAL_TYPE_P (TREE_TYPE (treeop1)))
9747 	  bitsize = TYPE_PRECISION (TREE_TYPE (treeop1));
9748 	else
9749 	  bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (treeop1)));
9750 	rtx op0 = expand_normal (treeop0);
9751 	rtx op1 = expand_normal (treeop1);
9752 	rtx dst = gen_reg_rtx (mode);
9753 	emit_move_insn (dst, op0);
9754 	store_bit_field (dst, bitsize, bitpos, 0, 0,
9755 			 TYPE_MODE (TREE_TYPE (treeop1)), op1, false);
9756 	return dst;
9757       }
9758 
9759     default:
9760       gcc_unreachable ();
9761     }
9762 
9763   /* Here to do an ordinary binary operator.  */
9764  binop:
9765   expand_operands (treeop0, treeop1,
9766 		   subtarget, &op0, &op1, EXPAND_NORMAL);
9767  binop2:
9768   this_optab = optab_for_tree_code (code, type, optab_default);
9769  binop3:
9770   if (modifier == EXPAND_STACK_PARM)
9771     target = 0;
9772   temp = expand_binop (mode, this_optab, op0, op1, target,
9773 		       unsignedp, OPTAB_LIB_WIDEN);
9774   gcc_assert (temp);
9775   /* Bitwise operations do not need bitfield reduction as we expect their
9776      operands being properly truncated.  */
9777   if (code == BIT_XOR_EXPR
9778       || code == BIT_AND_EXPR
9779       || code == BIT_IOR_EXPR)
9780     return temp;
9781   return REDUCE_BIT_FIELD (temp);
9782 }
9783 #undef REDUCE_BIT_FIELD
9784 
9785 
9786 /* Return TRUE if expression STMT is suitable for replacement.
9787    Never consider memory loads as replaceable, because those don't ever lead
9788    into constant expressions.  */
9789 
9790 static bool
9791 stmt_is_replaceable_p (gimple *stmt)
9792 {
9793   if (ssa_is_replaceable_p (stmt))
9794     {
9795       /* Don't move around loads.  */
9796       if (!gimple_assign_single_p (stmt)
9797 	  || is_gimple_val (gimple_assign_rhs1 (stmt)))
9798 	return true;
9799     }
9800   return false;
9801 }
9802 
9803 rtx
9804 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9805 		    enum expand_modifier modifier, rtx *alt_rtl,
9806 		    bool inner_reference_p)
9807 {
9808   rtx op0, op1, temp, decl_rtl;
9809   tree type;
9810   int unsignedp;
9811   machine_mode mode, dmode;
9812   enum tree_code code = TREE_CODE (exp);
9813   rtx subtarget, original_target;
9814   int ignore;
9815   tree context;
9816   bool reduce_bit_field;
9817   location_t loc = EXPR_LOCATION (exp);
9818   struct separate_ops ops;
9819   tree treeop0, treeop1, treeop2;
9820   tree ssa_name = NULL_TREE;
9821   gimple *g;
9822 
9823   type = TREE_TYPE (exp);
9824   mode = TYPE_MODE (type);
9825   unsignedp = TYPE_UNSIGNED (type);
9826 
9827   treeop0 = treeop1 = treeop2 = NULL_TREE;
9828   if (!VL_EXP_CLASS_P (exp))
9829     switch (TREE_CODE_LENGTH (code))
9830       {
9831 	default:
9832 	case 3: treeop2 = TREE_OPERAND (exp, 2); /* FALLTHRU */
9833 	case 2: treeop1 = TREE_OPERAND (exp, 1); /* FALLTHRU */
9834 	case 1: treeop0 = TREE_OPERAND (exp, 0); /* FALLTHRU */
9835 	case 0: break;
9836       }
9837   ops.code = code;
9838   ops.type = type;
9839   ops.op0 = treeop0;
9840   ops.op1 = treeop1;
9841   ops.op2 = treeop2;
9842   ops.location = loc;
9843 
9844   ignore = (target == const0_rtx
9845 	    || ((CONVERT_EXPR_CODE_P (code)
9846 		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9847 		&& TREE_CODE (type) == VOID_TYPE));
9848 
9849   /* An operation in what may be a bit-field type needs the
9850      result to be reduced to the precision of the bit-field type,
9851      which is narrower than that of the type's mode.  */
9852   reduce_bit_field = (!ignore
9853 		      && INTEGRAL_TYPE_P (type)
9854 		      && !type_has_mode_precision_p (type));
9855 
9856   /* If we are going to ignore this result, we need only do something
9857      if there is a side-effect somewhere in the expression.  If there
9858      is, short-circuit the most common cases here.  Note that we must
9859      not call expand_expr with anything but const0_rtx in case this
9860      is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
9861 
9862   if (ignore)
9863     {
9864       if (! TREE_SIDE_EFFECTS (exp))
9865 	return const0_rtx;
9866 
9867       /* Ensure we reference a volatile object even if value is ignored, but
9868 	 don't do this if all we are doing is taking its address.  */
9869       if (TREE_THIS_VOLATILE (exp)
9870 	  && TREE_CODE (exp) != FUNCTION_DECL
9871 	  && mode != VOIDmode && mode != BLKmode
9872 	  && modifier != EXPAND_CONST_ADDRESS)
9873 	{
9874 	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9875 	  if (MEM_P (temp))
9876 	    copy_to_reg (temp);
9877 	  return const0_rtx;
9878 	}
9879 
9880       if (TREE_CODE_CLASS (code) == tcc_unary
9881 	  || code == BIT_FIELD_REF
9882 	  || code == COMPONENT_REF
9883 	  || code == INDIRECT_REF)
9884 	return expand_expr (treeop0, const0_rtx, VOIDmode,
9885 			    modifier);
9886 
9887       else if (TREE_CODE_CLASS (code) == tcc_binary
9888 	       || TREE_CODE_CLASS (code) == tcc_comparison
9889 	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9890 	{
9891 	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9892 	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9893 	  return const0_rtx;
9894 	}
9895 
9896       target = 0;
9897     }
9898 
9899   if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9900     target = 0;
9901 
9902   /* Use subtarget as the target for operand 0 of a binary operation.  */
9903   subtarget = get_subtarget (target);
9904   original_target = target;
9905 
9906   switch (code)
9907     {
9908     case LABEL_DECL:
9909       {
9910 	tree function = decl_function_context (exp);
9911 
9912 	temp = label_rtx (exp);
9913 	temp = gen_rtx_LABEL_REF (Pmode, temp);
9914 
9915 	if (function != current_function_decl
9916 	    && function != 0)
9917 	  LABEL_REF_NONLOCAL_P (temp) = 1;
9918 
9919 	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9920 	return temp;
9921       }
9922 
9923     case SSA_NAME:
9924       /* ??? ivopts calls expander, without any preparation from
9925          out-of-ssa.  So fake instructions as if this was an access to the
9926 	 base variable.  This unnecessarily allocates a pseudo, see how we can
9927 	 reuse it, if partition base vars have it set already.  */
9928       if (!currently_expanding_to_rtl)
9929 	{
9930 	  tree var = SSA_NAME_VAR (exp);
9931 	  if (var && DECL_RTL_SET_P (var))
9932 	    return DECL_RTL (var);
9933 	  return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9934 			      LAST_VIRTUAL_REGISTER + 1);
9935 	}
9936 
9937       g = get_gimple_for_ssa_name (exp);
9938       /* For EXPAND_INITIALIZER try harder to get something simpler.  */
9939       if (g == NULL
9940 	  && modifier == EXPAND_INITIALIZER
9941 	  && !SSA_NAME_IS_DEFAULT_DEF (exp)
9942 	  && (optimize || !SSA_NAME_VAR (exp)
9943 	      || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9944 	  && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9945 	g = SSA_NAME_DEF_STMT (exp);
9946       if (g)
9947 	{
9948 	  rtx r;
9949 	  location_t saved_loc = curr_insn_location ();
9950 	  location_t loc = gimple_location (g);
9951 	  if (loc != UNKNOWN_LOCATION)
9952 	    set_curr_insn_location (loc);
9953 	  ops.code = gimple_assign_rhs_code (g);
9954           switch (get_gimple_rhs_class (ops.code))
9955 	    {
9956 	    case GIMPLE_TERNARY_RHS:
9957 	      ops.op2 = gimple_assign_rhs3 (g);
9958 	      /* Fallthru */
9959 	    case GIMPLE_BINARY_RHS:
9960 	      ops.op1 = gimple_assign_rhs2 (g);
9961 
9962 	      /* Try to expand conditonal compare.  */
9963 	      if (targetm.gen_ccmp_first)
9964 		{
9965 		  gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9966 		  r = expand_ccmp_expr (g, mode);
9967 		  if (r)
9968 		    break;
9969 		}
9970 	      /* Fallthru */
9971 	    case GIMPLE_UNARY_RHS:
9972 	      ops.op0 = gimple_assign_rhs1 (g);
9973 	      ops.type = TREE_TYPE (gimple_assign_lhs (g));
9974 	      ops.location = loc;
9975 	      r = expand_expr_real_2 (&ops, target, tmode, modifier);
9976 	      break;
9977 	    case GIMPLE_SINGLE_RHS:
9978 	      {
9979 		r = expand_expr_real (gimple_assign_rhs1 (g), target,
9980 				      tmode, modifier, alt_rtl,
9981 				      inner_reference_p);
9982 		break;
9983 	      }
9984 	    default:
9985 	      gcc_unreachable ();
9986 	    }
9987 	  set_curr_insn_location (saved_loc);
9988 	  if (REG_P (r) && !REG_EXPR (r))
9989 	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9990 	  return r;
9991 	}
9992 
9993       ssa_name = exp;
9994       decl_rtl = get_rtx_for_ssa_name (ssa_name);
9995       exp = SSA_NAME_VAR (ssa_name);
9996       goto expand_decl_rtl;
9997 
9998     case PARM_DECL:
9999     case VAR_DECL:
10000       /* If a static var's type was incomplete when the decl was written,
10001 	 but the type is complete now, lay out the decl now.  */
10002       if (DECL_SIZE (exp) == 0
10003 	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
10004 	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
10005 	layout_decl (exp, 0);
10006 
10007       /* fall through */
10008 
10009     case FUNCTION_DECL:
10010     case RESULT_DECL:
10011       decl_rtl = DECL_RTL (exp);
10012     expand_decl_rtl:
10013       gcc_assert (decl_rtl);
10014 
10015       /* DECL_MODE might change when TYPE_MODE depends on attribute target
10016 	 settings for VECTOR_TYPE_P that might switch for the function.  */
10017       if (currently_expanding_to_rtl
10018 	  && code == VAR_DECL && MEM_P (decl_rtl)
10019 	  && VECTOR_TYPE_P (type) && exp && DECL_MODE (exp) != mode)
10020 	decl_rtl = change_address (decl_rtl, TYPE_MODE (type), 0);
10021       else
10022 	decl_rtl = copy_rtx (decl_rtl);
10023 
10024       /* Record writes to register variables.  */
10025       if (modifier == EXPAND_WRITE
10026 	  && REG_P (decl_rtl)
10027 	  && HARD_REGISTER_P (decl_rtl))
10028         add_to_hard_reg_set (&crtl->asm_clobbers,
10029 			     GET_MODE (decl_rtl), REGNO (decl_rtl));
10030 
10031       /* Ensure variable marked as used even if it doesn't go through
10032 	 a parser.  If it hasn't be used yet, write out an external
10033 	 definition.  */
10034       if (exp)
10035 	TREE_USED (exp) = 1;
10036 
10037       /* Show we haven't gotten RTL for this yet.  */
10038       temp = 0;
10039 
10040       /* Variables inherited from containing functions should have
10041 	 been lowered by this point.  */
10042       if (exp)
10043 	context = decl_function_context (exp);
10044       gcc_assert (!exp
10045 		  || SCOPE_FILE_SCOPE_P (context)
10046 		  || context == current_function_decl
10047 		  || TREE_STATIC (exp)
10048 		  || DECL_EXTERNAL (exp)
10049 		  /* ??? C++ creates functions that are not TREE_STATIC.  */
10050 		  || TREE_CODE (exp) == FUNCTION_DECL);
10051 
10052       /* This is the case of an array whose size is to be determined
10053 	 from its initializer, while the initializer is still being parsed.
10054 	 ??? We aren't parsing while expanding anymore.  */
10055 
10056       if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
10057 	temp = validize_mem (decl_rtl);
10058 
10059       /* If DECL_RTL is memory, we are in the normal case and the
10060 	 address is not valid, get the address into a register.  */
10061 
10062       else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
10063 	{
10064 	  if (alt_rtl)
10065 	    *alt_rtl = decl_rtl;
10066 	  decl_rtl = use_anchored_address (decl_rtl);
10067 	  if (modifier != EXPAND_CONST_ADDRESS
10068 	      && modifier != EXPAND_SUM
10069 	      && !memory_address_addr_space_p (exp ? DECL_MODE (exp)
10070 					       : GET_MODE (decl_rtl),
10071 					       XEXP (decl_rtl, 0),
10072 					       MEM_ADDR_SPACE (decl_rtl)))
10073 	    temp = replace_equiv_address (decl_rtl,
10074 					  copy_rtx (XEXP (decl_rtl, 0)));
10075 	}
10076 
10077       /* If we got something, return it.  But first, set the alignment
10078 	 if the address is a register.  */
10079       if (temp != 0)
10080 	{
10081 	  if (exp && MEM_P (temp) && REG_P (XEXP (temp, 0)))
10082 	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
10083 
10084 	  return temp;
10085 	}
10086 
10087       if (exp)
10088 	dmode = DECL_MODE (exp);
10089       else
10090 	dmode = TYPE_MODE (TREE_TYPE (ssa_name));
10091 
10092       /* If the mode of DECL_RTL does not match that of the decl,
10093 	 there are two cases: we are dealing with a BLKmode value
10094 	 that is returned in a register, or we are dealing with
10095 	 a promoted value.  In the latter case, return a SUBREG
10096 	 of the wanted mode, but mark it so that we know that it
10097 	 was already extended.  */
10098       if (REG_P (decl_rtl)
10099 	  && dmode != BLKmode
10100 	  && GET_MODE (decl_rtl) != dmode)
10101 	{
10102 	  machine_mode pmode;
10103 
10104 	  /* Get the signedness to be used for this variable.  Ensure we get
10105 	     the same mode we got when the variable was declared.  */
10106 	  if (code != SSA_NAME)
10107 	    pmode = promote_decl_mode (exp, &unsignedp);
10108 	  else if ((g = SSA_NAME_DEF_STMT (ssa_name))
10109 		   && gimple_code (g) == GIMPLE_CALL
10110 		   && !gimple_call_internal_p (g))
10111 	    pmode = promote_function_mode (type, mode, &unsignedp,
10112 					   gimple_call_fntype (g),
10113 					   2);
10114 	  else
10115 	    pmode = promote_ssa_mode (ssa_name, &unsignedp);
10116 	  gcc_assert (GET_MODE (decl_rtl) == pmode);
10117 
10118 	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
10119 	  SUBREG_PROMOTED_VAR_P (temp) = 1;
10120 	  SUBREG_PROMOTED_SET (temp, unsignedp);
10121 	  return temp;
10122 	}
10123 
10124       return decl_rtl;
10125 
10126     case INTEGER_CST:
10127       {
10128 	/* Given that TYPE_PRECISION (type) is not always equal to
10129 	   GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
10130 	   the former to the latter according to the signedness of the
10131 	   type.  */
10132 	scalar_int_mode mode = SCALAR_INT_TYPE_MODE (type);
10133 	temp = immed_wide_int_const
10134 	  (wi::to_wide (exp, GET_MODE_PRECISION (mode)), mode);
10135 	return temp;
10136       }
10137 
10138     case VECTOR_CST:
10139       {
10140 	tree tmp = NULL_TREE;
10141 	if (VECTOR_MODE_P (mode))
10142 	  return const_vector_from_tree (exp);
10143 	scalar_int_mode int_mode;
10144 	if (is_int_mode (mode, &int_mode))
10145 	  {
10146 	    if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
10147 	      return const_scalar_mask_from_tree (int_mode, exp);
10148 	    else
10149 	      {
10150 		tree type_for_mode
10151 		  = lang_hooks.types.type_for_mode (int_mode, 1);
10152 		if (type_for_mode)
10153 		  tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR,
10154 					type_for_mode, exp);
10155 	      }
10156 	  }
10157 	if (!tmp)
10158 	  {
10159 	    vec<constructor_elt, va_gc> *v;
10160 	    /* Constructors need to be fixed-length.  FIXME.  */
10161 	    unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
10162 	    vec_alloc (v, nunits);
10163 	    for (unsigned int i = 0; i < nunits; ++i)
10164 	      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
10165 	    tmp = build_constructor (type, v);
10166 	  }
10167 	return expand_expr (tmp, ignore ? const0_rtx : target,
10168 			    tmode, modifier);
10169       }
10170 
10171     case CONST_DECL:
10172       if (modifier == EXPAND_WRITE)
10173 	{
10174 	  /* Writing into CONST_DECL is always invalid, but handle it
10175 	     gracefully.  */
10176 	  addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
10177 	  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
10178 	  op0 = expand_expr_addr_expr_1 (exp, NULL_RTX, address_mode,
10179 					 EXPAND_NORMAL, as);
10180 	  op0 = memory_address_addr_space (mode, op0, as);
10181 	  temp = gen_rtx_MEM (mode, op0);
10182 	  set_mem_addr_space (temp, as);
10183 	  return temp;
10184 	}
10185       return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
10186 
10187     case REAL_CST:
10188       /* If optimized, generate immediate CONST_DOUBLE
10189 	 which will be turned into memory by reload if necessary.
10190 
10191 	 We used to force a register so that loop.c could see it.  But
10192 	 this does not allow gen_* patterns to perform optimizations with
10193 	 the constants.  It also produces two insns in cases like "x = 1.0;".
10194 	 On most machines, floating-point constants are not permitted in
10195 	 many insns, so we'd end up copying it to a register in any case.
10196 
10197 	 Now, we do the copying in expand_binop, if appropriate.  */
10198       return const_double_from_real_value (TREE_REAL_CST (exp),
10199 					   TYPE_MODE (TREE_TYPE (exp)));
10200 
10201     case FIXED_CST:
10202       return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
10203 					   TYPE_MODE (TREE_TYPE (exp)));
10204 
10205     case COMPLEX_CST:
10206       /* Handle evaluating a complex constant in a CONCAT target.  */
10207       if (original_target && GET_CODE (original_target) == CONCAT)
10208 	{
10209 	  machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
10210 	  rtx rtarg, itarg;
10211 
10212 	  rtarg = XEXP (original_target, 0);
10213 	  itarg = XEXP (original_target, 1);
10214 
10215 	  /* Move the real and imaginary parts separately.  */
10216 	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
10217 	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
10218 
10219 	  if (op0 != rtarg)
10220 	    emit_move_insn (rtarg, op0);
10221 	  if (op1 != itarg)
10222 	    emit_move_insn (itarg, op1);
10223 
10224 	  return original_target;
10225 	}
10226 
10227       /* fall through */
10228 
10229     case STRING_CST:
10230       temp = expand_expr_constant (exp, 1, modifier);
10231 
10232       /* temp contains a constant address.
10233 	 On RISC machines where a constant address isn't valid,
10234 	 make some insns to get that address into a register.  */
10235       if (modifier != EXPAND_CONST_ADDRESS
10236 	  && modifier != EXPAND_INITIALIZER
10237 	  && modifier != EXPAND_SUM
10238 	  && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
10239 					    MEM_ADDR_SPACE (temp)))
10240 	return replace_equiv_address (temp,
10241 				      copy_rtx (XEXP (temp, 0)));
10242       return temp;
10243 
10244     case POLY_INT_CST:
10245       return immed_wide_int_const (poly_int_cst_value (exp), mode);
10246 
10247     case SAVE_EXPR:
10248       {
10249 	tree val = treeop0;
10250 	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
10251 				      inner_reference_p);
10252 
10253 	if (!SAVE_EXPR_RESOLVED_P (exp))
10254 	  {
10255 	    /* We can indeed still hit this case, typically via builtin
10256 	       expanders calling save_expr immediately before expanding
10257 	       something.  Assume this means that we only have to deal
10258 	       with non-BLKmode values.  */
10259 	    gcc_assert (GET_MODE (ret) != BLKmode);
10260 
10261 	    val = build_decl (curr_insn_location (),
10262 			      VAR_DECL, NULL, TREE_TYPE (exp));
10263 	    DECL_ARTIFICIAL (val) = 1;
10264 	    DECL_IGNORED_P (val) = 1;
10265 	    treeop0 = val;
10266 	    TREE_OPERAND (exp, 0) = treeop0;
10267 	    SAVE_EXPR_RESOLVED_P (exp) = 1;
10268 
10269 	    if (!CONSTANT_P (ret))
10270 	      ret = copy_to_reg (ret);
10271 	    SET_DECL_RTL (val, ret);
10272 	  }
10273 
10274         return ret;
10275       }
10276 
10277 
10278     case CONSTRUCTOR:
10279       /* If we don't need the result, just ensure we evaluate any
10280 	 subexpressions.  */
10281       if (ignore)
10282 	{
10283 	  unsigned HOST_WIDE_INT idx;
10284 	  tree value;
10285 
10286 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
10287 	    expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
10288 
10289 	  return const0_rtx;
10290 	}
10291 
10292       return expand_constructor (exp, target, modifier, false);
10293 
10294     case TARGET_MEM_REF:
10295       {
10296 	addr_space_t as
10297 	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
10298 	enum insn_code icode;
10299 	unsigned int align;
10300 
10301 	op0 = addr_for_mem_ref (exp, as, true);
10302 	op0 = memory_address_addr_space (mode, op0, as);
10303 	temp = gen_rtx_MEM (mode, op0);
10304 	set_mem_attributes (temp, exp, 0);
10305 	set_mem_addr_space (temp, as);
10306 	align = get_object_alignment (exp);
10307 	if (modifier != EXPAND_WRITE
10308 	    && modifier != EXPAND_MEMORY
10309 	    && mode != BLKmode
10310 	    && align < GET_MODE_ALIGNMENT (mode)
10311 	    /* If the target does not have special handling for unaligned
10312 	       loads of mode then it can use regular moves for them.  */
10313 	    && ((icode = optab_handler (movmisalign_optab, mode))
10314 		!= CODE_FOR_nothing))
10315 	  {
10316 	    struct expand_operand ops[2];
10317 
10318 	    /* We've already validated the memory, and we're creating a
10319 	       new pseudo destination.  The predicates really can't fail,
10320 	       nor can the generator.  */
10321 	    create_output_operand (&ops[0], NULL_RTX, mode);
10322 	    create_fixed_operand (&ops[1], temp);
10323 	    expand_insn (icode, 2, ops);
10324 	    temp = ops[0].value;
10325 	  }
10326 	return temp;
10327       }
10328 
10329     case MEM_REF:
10330       {
10331 	const bool reverse = REF_REVERSE_STORAGE_ORDER (exp);
10332 	addr_space_t as
10333 	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
10334 	machine_mode address_mode;
10335 	tree base = TREE_OPERAND (exp, 0);
10336 	gimple *def_stmt;
10337 	enum insn_code icode;
10338 	unsigned align;
10339 	/* Handle expansion of non-aliased memory with non-BLKmode.  That
10340 	   might end up in a register.  */
10341 	if (mem_ref_refers_to_non_mem_p (exp))
10342 	  {
10343 	    poly_int64 offset = mem_ref_offset (exp).force_shwi ();
10344 	    base = TREE_OPERAND (base, 0);
10345 	    if (known_eq (offset, 0)
10346 	        && !reverse
10347 		&& tree_fits_uhwi_p (TYPE_SIZE (type))
10348 		&& known_eq (GET_MODE_BITSIZE (DECL_MODE (base)),
10349 			     tree_to_uhwi (TYPE_SIZE (type))))
10350 	      return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
10351 				  target, tmode, modifier);
10352 	    if (TYPE_MODE (type) == BLKmode)
10353 	      {
10354 		temp = assign_stack_temp (DECL_MODE (base),
10355 					  GET_MODE_SIZE (DECL_MODE (base)));
10356 		store_expr (base, temp, 0, false, false);
10357 		temp = adjust_address (temp, BLKmode, offset);
10358 		set_mem_size (temp, int_size_in_bytes (type));
10359 		return temp;
10360 	      }
10361 	    exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
10362 			  bitsize_int (offset * BITS_PER_UNIT));
10363 	    REF_REVERSE_STORAGE_ORDER (exp) = reverse;
10364 	    return expand_expr (exp, target, tmode, modifier);
10365 	  }
10366 	address_mode = targetm.addr_space.address_mode (as);
10367 	base = TREE_OPERAND (exp, 0);
10368 	if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
10369 	  {
10370 	    tree mask = gimple_assign_rhs2 (def_stmt);
10371 	    base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
10372 			   gimple_assign_rhs1 (def_stmt), mask);
10373 	    TREE_OPERAND (exp, 0) = base;
10374 	  }
10375 	align = get_object_alignment (exp);
10376 	op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
10377 	op0 = memory_address_addr_space (mode, op0, as);
10378 	if (!integer_zerop (TREE_OPERAND (exp, 1)))
10379 	  {
10380 	    rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
10381 	    op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
10382 	    op0 = memory_address_addr_space (mode, op0, as);
10383 	  }
10384 	temp = gen_rtx_MEM (mode, op0);
10385 	set_mem_attributes (temp, exp, 0);
10386 	set_mem_addr_space (temp, as);
10387 	if (TREE_THIS_VOLATILE (exp))
10388 	  MEM_VOLATILE_P (temp) = 1;
10389 	if (modifier != EXPAND_WRITE
10390 	    && modifier != EXPAND_MEMORY
10391 	    && !inner_reference_p
10392 	    && mode != BLKmode
10393 	    && align < GET_MODE_ALIGNMENT (mode))
10394 	  {
10395 	    if ((icode = optab_handler (movmisalign_optab, mode))
10396 		!= CODE_FOR_nothing)
10397 	      {
10398 		struct expand_operand ops[2];
10399 
10400 		/* We've already validated the memory, and we're creating a
10401 		   new pseudo destination.  The predicates really can't fail,
10402 		   nor can the generator.  */
10403 		create_output_operand (&ops[0], NULL_RTX, mode);
10404 		create_fixed_operand (&ops[1], temp);
10405 		expand_insn (icode, 2, ops);
10406 		temp = ops[0].value;
10407 	      }
10408 	    else if (targetm.slow_unaligned_access (mode, align))
10409 	      temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
10410 					0, TYPE_UNSIGNED (TREE_TYPE (exp)),
10411 					(modifier == EXPAND_STACK_PARM
10412 					 ? NULL_RTX : target),
10413 					mode, mode, false, alt_rtl);
10414 	  }
10415 	if (reverse
10416 	    && modifier != EXPAND_MEMORY
10417 	    && modifier != EXPAND_WRITE)
10418 	  temp = flip_storage_order (mode, temp);
10419 	return temp;
10420       }
10421 
10422     case ARRAY_REF:
10423 
10424       {
10425 	tree array = treeop0;
10426 	tree index = treeop1;
10427 	tree init;
10428 
10429 	/* Fold an expression like: "foo"[2].
10430 	   This is not done in fold so it won't happen inside &.
10431 	   Don't fold if this is for wide characters since it's too
10432 	   difficult to do correctly and this is a very rare case.  */
10433 
10434 	if (modifier != EXPAND_CONST_ADDRESS
10435 	    && modifier != EXPAND_INITIALIZER
10436 	    && modifier != EXPAND_MEMORY)
10437 	  {
10438 	    tree t = fold_read_from_constant_string (exp);
10439 
10440 	    if (t)
10441 	      return expand_expr (t, target, tmode, modifier);
10442 	  }
10443 
10444 	/* If this is a constant index into a constant array,
10445 	   just get the value from the array.  Handle both the cases when
10446 	   we have an explicit constructor and when our operand is a variable
10447 	   that was declared const.  */
10448 
10449 	if (modifier != EXPAND_CONST_ADDRESS
10450 	    && modifier != EXPAND_INITIALIZER
10451 	    && modifier != EXPAND_MEMORY
10452 	    && TREE_CODE (array) == CONSTRUCTOR
10453 	    && ! TREE_SIDE_EFFECTS (array)
10454 	    && TREE_CODE (index) == INTEGER_CST)
10455 	  {
10456 	    unsigned HOST_WIDE_INT ix;
10457 	    tree field, value;
10458 
10459 	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
10460 				      field, value)
10461 	      if (tree_int_cst_equal (field, index))
10462 		{
10463 		  if (!TREE_SIDE_EFFECTS (value))
10464 		    return expand_expr (fold (value), target, tmode, modifier);
10465 		  break;
10466 		}
10467 	  }
10468 
10469 	else if (optimize >= 1
10470 		 && modifier != EXPAND_CONST_ADDRESS
10471 		 && modifier != EXPAND_INITIALIZER
10472 		 && modifier != EXPAND_MEMORY
10473 		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
10474 		 && TREE_CODE (index) == INTEGER_CST
10475 		 && (VAR_P (array) || TREE_CODE (array) == CONST_DECL)
10476 		 && (init = ctor_for_folding (array)) != error_mark_node)
10477 	  {
10478 	    if (init == NULL_TREE)
10479 	      {
10480 		tree value = build_zero_cst (type);
10481 		if (TREE_CODE (value) == CONSTRUCTOR)
10482 		  {
10483 		    /* If VALUE is a CONSTRUCTOR, this optimization is only
10484 		       useful if this doesn't store the CONSTRUCTOR into
10485 		       memory.  If it does, it is more efficient to just
10486 		       load the data from the array directly.  */
10487 		    rtx ret = expand_constructor (value, target,
10488 						  modifier, true);
10489 		    if (ret == NULL_RTX)
10490 		      value = NULL_TREE;
10491 		  }
10492 
10493 		if (value)
10494 		  return expand_expr (value, target, tmode, modifier);
10495 	      }
10496 	    else if (TREE_CODE (init) == CONSTRUCTOR)
10497 	      {
10498 		unsigned HOST_WIDE_INT ix;
10499 		tree field, value;
10500 
10501 		FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
10502 					  field, value)
10503 		  if (tree_int_cst_equal (field, index))
10504 		    {
10505 		      if (TREE_SIDE_EFFECTS (value))
10506 			break;
10507 
10508 		      if (TREE_CODE (value) == CONSTRUCTOR)
10509 			{
10510 			  /* If VALUE is a CONSTRUCTOR, this
10511 			     optimization is only useful if
10512 			     this doesn't store the CONSTRUCTOR
10513 			     into memory.  If it does, it is more
10514 			     efficient to just load the data from
10515 			     the array directly.  */
10516 			  rtx ret = expand_constructor (value, target,
10517 							modifier, true);
10518 			  if (ret == NULL_RTX)
10519 			    break;
10520 			}
10521 
10522 		      return
10523 		        expand_expr (fold (value), target, tmode, modifier);
10524 		    }
10525 	      }
10526 	    else if (TREE_CODE (init) == STRING_CST)
10527 	      {
10528 		tree low_bound = array_ref_low_bound (exp);
10529 		tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10530 
10531 		/* Optimize the special case of a zero lower bound.
10532 
10533 		   We convert the lower bound to sizetype to avoid problems
10534 		   with constant folding.  E.g. suppose the lower bound is
10535 		   1 and its mode is QI.  Without the conversion
10536 		      (ARRAY + (INDEX - (unsigned char)1))
10537 		   becomes
10538 		      (ARRAY + (-(unsigned char)1) + INDEX)
10539 		   which becomes
10540 		      (ARRAY + 255 + INDEX).  Oops!  */
10541 		if (!integer_zerop (low_bound))
10542 		  index1 = size_diffop_loc (loc, index1,
10543 					    fold_convert_loc (loc, sizetype,
10544 							      low_bound));
10545 
10546 		if (tree_fits_uhwi_p (index1)
10547 		    && compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10548 		  {
10549 		    tree type = TREE_TYPE (TREE_TYPE (init));
10550 		    scalar_int_mode mode;
10551 
10552 		    if (is_int_mode (TYPE_MODE (type), &mode)
10553 			&& GET_MODE_SIZE (mode) == 1)
10554 		      return gen_int_mode (TREE_STRING_POINTER (init)
10555 					   [TREE_INT_CST_LOW (index1)],
10556 					   mode);
10557 		  }
10558 	      }
10559 	  }
10560       }
10561       goto normal_inner_ref;
10562 
10563     case COMPONENT_REF:
10564       /* If the operand is a CONSTRUCTOR, we can just extract the
10565 	 appropriate field if it is present.  */
10566       if (TREE_CODE (treeop0) == CONSTRUCTOR)
10567 	{
10568 	  unsigned HOST_WIDE_INT idx;
10569 	  tree field, value;
10570 	  scalar_int_mode field_mode;
10571 
10572 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10573 				    idx, field, value)
10574 	    if (field == treeop1
10575 		/* We can normally use the value of the field in the
10576 		   CONSTRUCTOR.  However, if this is a bitfield in
10577 		   an integral mode that we can fit in a HOST_WIDE_INT,
10578 		   we must mask only the number of bits in the bitfield,
10579 		   since this is done implicitly by the constructor.  If
10580 		   the bitfield does not meet either of those conditions,
10581 		   we can't do this optimization.  */
10582 		&& (! DECL_BIT_FIELD (field)
10583 		    || (is_int_mode (DECL_MODE (field), &field_mode)
10584 			&& (GET_MODE_PRECISION (field_mode)
10585 			    <= HOST_BITS_PER_WIDE_INT))))
10586 	      {
10587 		if (DECL_BIT_FIELD (field)
10588 		    && modifier == EXPAND_STACK_PARM)
10589 		  target = 0;
10590 		op0 = expand_expr (value, target, tmode, modifier);
10591 		if (DECL_BIT_FIELD (field))
10592 		  {
10593 		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10594 		    scalar_int_mode imode
10595 		      = SCALAR_INT_TYPE_MODE (TREE_TYPE (field));
10596 
10597 		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
10598 		      {
10599 			op1 = gen_int_mode ((HOST_WIDE_INT_1 << bitsize) - 1,
10600 					    imode);
10601 			op0 = expand_and (imode, op0, op1, target);
10602 		      }
10603 		    else
10604 		      {
10605 			int count = GET_MODE_PRECISION (imode) - bitsize;
10606 
10607 			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10608 					    target, 0);
10609 			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10610 					    target, 0);
10611 		      }
10612 		  }
10613 
10614 		return op0;
10615 	      }
10616 	}
10617       goto normal_inner_ref;
10618 
10619     case BIT_FIELD_REF:
10620     case ARRAY_RANGE_REF:
10621     normal_inner_ref:
10622       {
10623 	machine_mode mode1, mode2;
10624 	poly_int64 bitsize, bitpos, bytepos;
10625 	tree offset;
10626 	int reversep, volatilep = 0, must_force_mem;
10627 	tree tem
10628 	  = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
10629 				 &unsignedp, &reversep, &volatilep);
10630 	rtx orig_op0, memloc;
10631 	bool clear_mem_expr = false;
10632 
10633 	/* If we got back the original object, something is wrong.  Perhaps
10634 	   we are evaluating an expression too early.  In any event, don't
10635 	   infinitely recurse.  */
10636 	gcc_assert (tem != exp);
10637 
10638 	/* If TEM's type is a union of variable size, pass TARGET to the inner
10639 	   computation, since it will need a temporary and TARGET is known
10640 	   to have to do.  This occurs in unchecked conversion in Ada.  */
10641 	orig_op0 = op0
10642 	  = expand_expr_real (tem,
10643 			      (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10644 			       && COMPLETE_TYPE_P (TREE_TYPE (tem))
10645 			       && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10646 				   != INTEGER_CST)
10647 			       && modifier != EXPAND_STACK_PARM
10648 			       ? target : NULL_RTX),
10649 			      VOIDmode,
10650 			      modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10651 			      NULL, true);
10652 
10653 	/* If the field has a mode, we want to access it in the
10654 	   field's mode, not the computed mode.
10655 	   If a MEM has VOIDmode (external with incomplete type),
10656 	   use BLKmode for it instead.  */
10657 	if (MEM_P (op0))
10658 	  {
10659 	    if (mode1 != VOIDmode)
10660 	      op0 = adjust_address (op0, mode1, 0);
10661 	    else if (GET_MODE (op0) == VOIDmode)
10662 	      op0 = adjust_address (op0, BLKmode, 0);
10663 	  }
10664 
10665 	mode2
10666 	  = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10667 
10668 	/* Make sure bitpos is not negative, it can wreak havoc later.  */
10669 	if (maybe_lt (bitpos, 0))
10670 	  {
10671 	    gcc_checking_assert (offset == NULL_TREE);
10672 	    offset = size_int (bits_to_bytes_round_down (bitpos));
10673 	    bitpos = num_trailing_bits (bitpos);
10674 	  }
10675 
10676 	/* If we have either an offset, a BLKmode result, or a reference
10677 	   outside the underlying object, we must force it to memory.
10678 	   Such a case can occur in Ada if we have unchecked conversion
10679 	   of an expression from a scalar type to an aggregate type or
10680 	   for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10681 	   passed a partially uninitialized object or a view-conversion
10682 	   to a larger size.  */
10683 	must_force_mem = (offset
10684 			  || mode1 == BLKmode
10685 			  || maybe_gt (bitpos + bitsize,
10686 				       GET_MODE_BITSIZE (mode2)));
10687 
10688 	/* Handle CONCAT first.  */
10689 	if (GET_CODE (op0) == CONCAT && !must_force_mem)
10690 	  {
10691 	    if (known_eq (bitpos, 0)
10692 		&& known_eq (bitsize, GET_MODE_BITSIZE (GET_MODE (op0)))
10693 		&& COMPLEX_MODE_P (mode1)
10694 		&& COMPLEX_MODE_P (GET_MODE (op0))
10695 		&& (GET_MODE_PRECISION (GET_MODE_INNER (mode1))
10696 		    == GET_MODE_PRECISION (GET_MODE_INNER (GET_MODE (op0)))))
10697 	      {
10698 		if (reversep)
10699 		  op0 = flip_storage_order (GET_MODE (op0), op0);
10700 		if (mode1 != GET_MODE (op0))
10701 		  {
10702 		    rtx parts[2];
10703 		    for (int i = 0; i < 2; i++)
10704 		      {
10705 			rtx op = read_complex_part (op0, i != 0);
10706 			if (GET_CODE (op) == SUBREG)
10707 			  op = force_reg (GET_MODE (op), op);
10708 			rtx temp = gen_lowpart_common (GET_MODE_INNER (mode1),
10709 						       op);
10710 			if (temp)
10711 			  op = temp;
10712 			else
10713 			  {
10714 			    if (!REG_P (op) && !MEM_P (op))
10715 			      op = force_reg (GET_MODE (op), op);
10716 			    op = gen_lowpart (GET_MODE_INNER (mode1), op);
10717 			  }
10718 			parts[i] = op;
10719 		      }
10720 		    op0 = gen_rtx_CONCAT (mode1, parts[0], parts[1]);
10721 		  }
10722 		return op0;
10723 	      }
10724 	    if (known_eq (bitpos, 0)
10725 		&& known_eq (bitsize,
10726 			     GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))))
10727 		&& maybe_ne (bitsize, 0))
10728 	      {
10729 		op0 = XEXP (op0, 0);
10730 		mode2 = GET_MODE (op0);
10731 	      }
10732 	    else if (known_eq (bitpos,
10733 			       GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))))
10734 		     && known_eq (bitsize,
10735 				  GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1))))
10736 		     && maybe_ne (bitpos, 0)
10737 		     && maybe_ne (bitsize, 0))
10738 	      {
10739 		op0 = XEXP (op0, 1);
10740 		bitpos = 0;
10741 		mode2 = GET_MODE (op0);
10742 	      }
10743 	    else
10744 	      /* Otherwise force into memory.  */
10745 	      must_force_mem = 1;
10746 	  }
10747 
10748 	/* If this is a constant, put it in a register if it is a legitimate
10749 	   constant and we don't need a memory reference.  */
10750 	if (CONSTANT_P (op0)
10751 	    && mode2 != BLKmode
10752 	    && targetm.legitimate_constant_p (mode2, op0)
10753 	    && !must_force_mem)
10754 	  op0 = force_reg (mode2, op0);
10755 
10756 	/* Otherwise, if this is a constant, try to force it to the constant
10757 	   pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
10758 	   is a legitimate constant.  */
10759 	else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10760 	  op0 = validize_mem (memloc);
10761 
10762 	/* Otherwise, if this is a constant or the object is not in memory
10763 	   and need be, put it there.  */
10764 	else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10765 	  {
10766 	    memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10767 	    emit_move_insn (memloc, op0);
10768 	    op0 = memloc;
10769 	    clear_mem_expr = true;
10770 	  }
10771 
10772 	if (offset)
10773 	  {
10774 	    machine_mode address_mode;
10775 	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10776 					  EXPAND_SUM);
10777 
10778 	    gcc_assert (MEM_P (op0));
10779 
10780 	    address_mode = get_address_mode (op0);
10781 	    if (GET_MODE (offset_rtx) != address_mode)
10782 	      {
10783 		/* We cannot be sure that the RTL in offset_rtx is valid outside
10784 		   of a memory address context, so force it into a register
10785 		   before attempting to convert it to the desired mode.  */
10786 		offset_rtx = force_operand (offset_rtx, NULL_RTX);
10787 		offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10788 	      }
10789 
10790 	    /* See the comment in expand_assignment for the rationale.  */
10791 	    if (mode1 != VOIDmode
10792 		&& maybe_ne (bitpos, 0)
10793 		&& maybe_gt (bitsize, 0)
10794 		&& multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
10795 		&& multiple_p (bitpos, bitsize)
10796 		&& multiple_p (bitsize, GET_MODE_ALIGNMENT (mode1))
10797 		&& MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10798 	      {
10799 		op0 = adjust_address (op0, mode1, bytepos);
10800 		bitpos = 0;
10801 	      }
10802 
10803 	    op0 = offset_address (op0, offset_rtx,
10804 				  highest_pow2_factor (offset));
10805 	  }
10806 
10807 	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10808 	   record its alignment as BIGGEST_ALIGNMENT.  */
10809 	if (MEM_P (op0)
10810 	    && known_eq (bitpos, 0)
10811 	    && offset != 0
10812 	    && is_aligning_offset (offset, tem))
10813 	  set_mem_align (op0, BIGGEST_ALIGNMENT);
10814 
10815 	/* Don't forget about volatility even if this is a bitfield.  */
10816 	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10817 	  {
10818 	    if (op0 == orig_op0)
10819 	      op0 = copy_rtx (op0);
10820 
10821 	    MEM_VOLATILE_P (op0) = 1;
10822 	  }
10823 
10824 	/* In cases where an aligned union has an unaligned object
10825 	   as a field, we might be extracting a BLKmode value from
10826 	   an integer-mode (e.g., SImode) object.  Handle this case
10827 	   by doing the extract into an object as wide as the field
10828 	   (which we know to be the width of a basic mode), then
10829 	   storing into memory, and changing the mode to BLKmode.  */
10830 	if (mode1 == VOIDmode
10831 	    || REG_P (op0) || GET_CODE (op0) == SUBREG
10832 	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
10833 		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10834 		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10835 		&& modifier != EXPAND_CONST_ADDRESS
10836 		&& modifier != EXPAND_INITIALIZER
10837 		&& modifier != EXPAND_MEMORY)
10838 	    /* If the bitfield is volatile and the bitsize
10839 	       is narrower than the access size of the bitfield,
10840 	       we need to extract bitfields from the access.  */
10841 	    || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10842 		&& DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10843 		&& mode1 != BLKmode
10844 		&& maybe_lt (bitsize, GET_MODE_SIZE (mode1) * BITS_PER_UNIT))
10845 	    /* If the field isn't aligned enough to fetch as a memref,
10846 	       fetch it as a bit field.  */
10847 	    || (mode1 != BLKmode
10848 		&& (((MEM_P (op0)
10849 		      ? MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10850 			|| !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode1))
10851 		      : TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10852 			|| !multiple_p (bitpos, GET_MODE_ALIGNMENT (mode)))
10853 		     && modifier != EXPAND_MEMORY
10854 		     && ((modifier == EXPAND_CONST_ADDRESS
10855 			  || modifier == EXPAND_INITIALIZER)
10856 			 ? STRICT_ALIGNMENT
10857 			 : targetm.slow_unaligned_access (mode1,
10858 							  MEM_ALIGN (op0))))
10859 		    || !multiple_p (bitpos, BITS_PER_UNIT)))
10860 	    /* If the type and the field are a constant size and the
10861 	       size of the type isn't the same size as the bitfield,
10862 	       we must use bitfield operations.  */
10863 	    || (known_size_p (bitsize)
10864 		&& TYPE_SIZE (TREE_TYPE (exp))
10865 		&& poly_int_tree_p (TYPE_SIZE (TREE_TYPE (exp)))
10866 		&& maybe_ne (wi::to_poly_offset (TYPE_SIZE (TREE_TYPE (exp))),
10867 			     bitsize)))
10868 	  {
10869 	    machine_mode ext_mode = mode;
10870 
10871 	    if (ext_mode == BLKmode
10872 		&& ! (target != 0 && MEM_P (op0)
10873 		      && MEM_P (target)
10874 		      && multiple_p (bitpos, BITS_PER_UNIT)))
10875 	      ext_mode = int_mode_for_size (bitsize, 1).else_blk ();
10876 
10877 	    if (ext_mode == BLKmode)
10878 	      {
10879 		if (target == 0)
10880 		  target = assign_temp (type, 1, 1);
10881 
10882 		/* ??? Unlike the similar test a few lines below, this one is
10883 		   very likely obsolete.  */
10884 		if (known_eq (bitsize, 0))
10885 		  return target;
10886 
10887 		/* In this case, BITPOS must start at a byte boundary and
10888 		   TARGET, if specified, must be a MEM.  */
10889 		gcc_assert (MEM_P (op0)
10890 			    && (!target || MEM_P (target)));
10891 
10892 		bytepos = exact_div (bitpos, BITS_PER_UNIT);
10893 		poly_int64 bytesize = bits_to_bytes_round_up (bitsize);
10894 		emit_block_move (target,
10895 				 adjust_address (op0, VOIDmode, bytepos),
10896 				 gen_int_mode (bytesize, Pmode),
10897 				 (modifier == EXPAND_STACK_PARM
10898 				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10899 
10900 		return target;
10901 	      }
10902 
10903 	    /* If we have nothing to extract, the result will be 0 for targets
10904 	       with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise.  Always
10905 	       return 0 for the sake of consistency, as reading a zero-sized
10906 	       bitfield is valid in Ada and the value is fully specified.  */
10907 	    if (known_eq (bitsize, 0))
10908 	      return const0_rtx;
10909 
10910 	    op0 = validize_mem (op0);
10911 
10912 	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10913 	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10914 
10915 	    /* If the result has a record type and the extraction is done in
10916 	       an integral mode, then the field may be not aligned on a byte
10917 	       boundary; in this case, if it has reverse storage order, it
10918 	       needs to be extracted as a scalar field with reverse storage
10919 	       order and put back into memory order afterwards.  */
10920 	    if (TREE_CODE (type) == RECORD_TYPE
10921 		&& GET_MODE_CLASS (ext_mode) == MODE_INT)
10922 	      reversep = TYPE_REVERSE_STORAGE_ORDER (type);
10923 
10924 	    gcc_checking_assert (known_ge (bitpos, 0));
10925 	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10926 				     (modifier == EXPAND_STACK_PARM
10927 				      ? NULL_RTX : target),
10928 				     ext_mode, ext_mode, reversep, alt_rtl);
10929 
10930 	    /* If the result has a record type and the mode of OP0 is an
10931 	       integral mode then, if BITSIZE is narrower than this mode
10932 	       and this is for big-endian data, we must put the field
10933 	       into the high-order bits.  And we must also put it back
10934 	       into memory order if it has been previously reversed.  */
10935 	    scalar_int_mode op0_mode;
10936 	    if (TREE_CODE (type) == RECORD_TYPE
10937 		&& is_int_mode (GET_MODE (op0), &op0_mode))
10938 	      {
10939 		HOST_WIDE_INT size = GET_MODE_BITSIZE (op0_mode);
10940 
10941 		gcc_checking_assert (known_le (bitsize, size));
10942 		if (maybe_lt (bitsize, size)
10943 		    && reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
10944 		  op0 = expand_shift (LSHIFT_EXPR, op0_mode, op0,
10945 				      size - bitsize, op0, 1);
10946 
10947 		if (reversep)
10948 		  op0 = flip_storage_order (op0_mode, op0);
10949 	      }
10950 
10951 	    /* If the result type is BLKmode, store the data into a temporary
10952 	       of the appropriate type, but with the mode corresponding to the
10953 	       mode for the data we have (op0's mode).  */
10954 	    if (mode == BLKmode)
10955 	      {
10956 		rtx new_rtx
10957 		  = assign_stack_temp_for_type (ext_mode,
10958 						GET_MODE_BITSIZE (ext_mode),
10959 						type);
10960 		emit_move_insn (new_rtx, op0);
10961 		op0 = copy_rtx (new_rtx);
10962 		PUT_MODE (op0, BLKmode);
10963 	      }
10964 
10965 	    return op0;
10966 	  }
10967 
10968 	/* If the result is BLKmode, use that to access the object
10969 	   now as well.  */
10970 	if (mode == BLKmode)
10971 	  mode1 = BLKmode;
10972 
10973 	/* Get a reference to just this component.  */
10974 	bytepos = bits_to_bytes_round_down (bitpos);
10975 	if (modifier == EXPAND_CONST_ADDRESS
10976 	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10977 	  op0 = adjust_address_nv (op0, mode1, bytepos);
10978 	else
10979 	  op0 = adjust_address (op0, mode1, bytepos);
10980 
10981 	if (op0 == orig_op0)
10982 	  op0 = copy_rtx (op0);
10983 
10984 	/* Don't set memory attributes if the base expression is
10985 	   SSA_NAME that got expanded as a MEM.  In that case, we should
10986 	   just honor its original memory attributes.  */
10987 	if (TREE_CODE (tem) != SSA_NAME || !MEM_P (orig_op0))
10988 	  set_mem_attributes (op0, exp, 0);
10989 
10990 	if (REG_P (XEXP (op0, 0)))
10991 	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10992 
10993 	/* If op0 is a temporary because the original expressions was forced
10994 	   to memory, clear MEM_EXPR so that the original expression cannot
10995 	   be marked as addressable through MEM_EXPR of the temporary.  */
10996 	if (clear_mem_expr)
10997 	  set_mem_expr (op0, NULL_TREE);
10998 
10999 	MEM_VOLATILE_P (op0) |= volatilep;
11000 
11001         if (reversep
11002 	    && modifier != EXPAND_MEMORY
11003 	    && modifier != EXPAND_WRITE)
11004 	  op0 = flip_storage_order (mode1, op0);
11005 
11006 	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
11007 	    || modifier == EXPAND_CONST_ADDRESS
11008 	    || modifier == EXPAND_INITIALIZER)
11009 	  return op0;
11010 
11011 	if (target == 0)
11012 	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
11013 
11014 	convert_move (target, op0, unsignedp);
11015 	return target;
11016       }
11017 
11018     case OBJ_TYPE_REF:
11019       return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
11020 
11021     case CALL_EXPR:
11022       /* All valid uses of __builtin_va_arg_pack () are removed during
11023 	 inlining.  */
11024       if (CALL_EXPR_VA_ARG_PACK (exp))
11025 	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
11026       {
11027 	tree fndecl = get_callee_fndecl (exp), attr;
11028 
11029 	if (fndecl
11030 	    /* Don't diagnose the error attribute in thunks, those are
11031 	       artificially created.  */
11032 	    && !CALL_FROM_THUNK_P (exp)
11033 	    && (attr = lookup_attribute ("error",
11034 					 DECL_ATTRIBUTES (fndecl))) != NULL)
11035 	  {
11036 	    const char *ident = lang_hooks.decl_printable_name (fndecl, 1);
11037 	    error ("%Kcall to %qs declared with attribute error: %s", exp,
11038 		   identifier_to_locale (ident),
11039 		   TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
11040 	  }
11041 	if (fndecl
11042 	    /* Don't diagnose the warning attribute in thunks, those are
11043 	       artificially created.  */
11044 	    && !CALL_FROM_THUNK_P (exp)
11045 	    && (attr = lookup_attribute ("warning",
11046 					 DECL_ATTRIBUTES (fndecl))) != NULL)
11047 	  {
11048 	    const char *ident = lang_hooks.decl_printable_name (fndecl, 1);
11049 	    warning_at (tree_nonartificial_location (exp), 0,
11050 			"%Kcall to %qs declared with attribute warning: %s",
11051 			exp, identifier_to_locale (ident),
11052 			TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
11053 	  }
11054 
11055 	/* Check for a built-in function.  */
11056 	if (fndecl && DECL_BUILT_IN (fndecl))
11057 	  {
11058 	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
11059 	    if (CALL_WITH_BOUNDS_P (exp))
11060 	      return expand_builtin_with_bounds (exp, target, subtarget,
11061 						 tmode, ignore);
11062 	    else
11063 	      return expand_builtin (exp, target, subtarget, tmode, ignore);
11064 	  }
11065       }
11066       return expand_call (exp, target, ignore);
11067 
11068     case VIEW_CONVERT_EXPR:
11069       op0 = NULL_RTX;
11070 
11071       /* If we are converting to BLKmode, try to avoid an intermediate
11072 	 temporary by fetching an inner memory reference.  */
11073       if (mode == BLKmode
11074 	  && poly_int_tree_p (TYPE_SIZE (type))
11075 	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
11076 	  && handled_component_p (treeop0))
11077       {
11078 	machine_mode mode1;
11079 	poly_int64 bitsize, bitpos, bytepos;
11080 	tree offset;
11081 	int unsignedp, reversep, volatilep = 0;
11082 	tree tem
11083 	  = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1,
11084 				 &unsignedp, &reversep, &volatilep);
11085 	rtx orig_op0;
11086 
11087 	/* ??? We should work harder and deal with non-zero offsets.  */
11088 	if (!offset
11089 	    && multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
11090 	    && !reversep
11091 	    && known_size_p (bitsize)
11092 	    && known_eq (wi::to_poly_offset (TYPE_SIZE (type)), bitsize))
11093 	  {
11094 	    /* See the normal_inner_ref case for the rationale.  */
11095 	    orig_op0
11096 	      = expand_expr_real (tem,
11097 				  (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
11098 				   && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
11099 				       != INTEGER_CST)
11100 				   && modifier != EXPAND_STACK_PARM
11101 				   ? target : NULL_RTX),
11102 				  VOIDmode,
11103 				  modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
11104 				  NULL, true);
11105 
11106 	    if (MEM_P (orig_op0))
11107 	      {
11108 		op0 = orig_op0;
11109 
11110 		/* Get a reference to just this component.  */
11111 		if (modifier == EXPAND_CONST_ADDRESS
11112 		    || modifier == EXPAND_SUM
11113 		    || modifier == EXPAND_INITIALIZER)
11114 		  op0 = adjust_address_nv (op0, mode, bytepos);
11115 		else
11116 		  op0 = adjust_address (op0, mode, bytepos);
11117 
11118 		if (op0 == orig_op0)
11119 		  op0 = copy_rtx (op0);
11120 
11121 		set_mem_attributes (op0, treeop0, 0);
11122 		if (REG_P (XEXP (op0, 0)))
11123 		  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
11124 
11125 		MEM_VOLATILE_P (op0) |= volatilep;
11126 	      }
11127 	  }
11128       }
11129 
11130       if (!op0)
11131 	op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
11132 				NULL, inner_reference_p);
11133 
11134       /* If the input and output modes are both the same, we are done.  */
11135       if (mode == GET_MODE (op0))
11136 	;
11137       /* If neither mode is BLKmode, and both modes are the same size
11138 	 then we can use gen_lowpart.  */
11139       else if (mode != BLKmode
11140 	       && GET_MODE (op0) != BLKmode
11141 	       && known_eq (GET_MODE_PRECISION (mode),
11142 			    GET_MODE_PRECISION (GET_MODE (op0)))
11143 	       && !COMPLEX_MODE_P (GET_MODE (op0)))
11144 	{
11145 	  if (GET_CODE (op0) == SUBREG)
11146 	    op0 = force_reg (GET_MODE (op0), op0);
11147 	  temp = gen_lowpart_common (mode, op0);
11148 	  if (temp)
11149 	    op0 = temp;
11150 	  else
11151 	    {
11152 	      if (!REG_P (op0) && !MEM_P (op0))
11153 		op0 = force_reg (GET_MODE (op0), op0);
11154 	      op0 = gen_lowpart (mode, op0);
11155 	    }
11156 	}
11157       /* If both types are integral, convert from one mode to the other.  */
11158       else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
11159 	op0 = convert_modes (mode, GET_MODE (op0), op0,
11160 			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
11161       /* If the output type is a bit-field type, do an extraction.  */
11162       else if (reduce_bit_field)
11163 	return extract_bit_field (op0, TYPE_PRECISION (type), 0,
11164 				  TYPE_UNSIGNED (type), NULL_RTX,
11165 				  mode, mode, false, NULL);
11166       /* As a last resort, spill op0 to memory, and reload it in a
11167 	 different mode.  */
11168       else if (!MEM_P (op0))
11169 	{
11170 	  /* If the operand is not a MEM, force it into memory.  Since we
11171 	     are going to be changing the mode of the MEM, don't call
11172 	     force_const_mem for constants because we don't allow pool
11173 	     constants to change mode.  */
11174 	  tree inner_type = TREE_TYPE (treeop0);
11175 
11176 	  gcc_assert (!TREE_ADDRESSABLE (exp));
11177 
11178 	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
11179 	    target
11180 	      = assign_stack_temp_for_type
11181 		(TYPE_MODE (inner_type),
11182 		 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
11183 
11184 	  emit_move_insn (target, op0);
11185 	  op0 = target;
11186 	}
11187 
11188       /* If OP0 is (now) a MEM, we need to deal with alignment issues.  If the
11189 	 output type is such that the operand is known to be aligned, indicate
11190 	 that it is.  Otherwise, we need only be concerned about alignment for
11191 	 non-BLKmode results.  */
11192       if (MEM_P (op0))
11193 	{
11194 	  enum insn_code icode;
11195 
11196 	  if (modifier != EXPAND_WRITE
11197 	      && modifier != EXPAND_MEMORY
11198 	      && !inner_reference_p
11199 	      && mode != BLKmode
11200 	      && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
11201 	    {
11202 	      /* If the target does have special handling for unaligned
11203 		 loads of mode then use them.  */
11204 	      if ((icode = optab_handler (movmisalign_optab, mode))
11205 		  != CODE_FOR_nothing)
11206 		{
11207 		  rtx reg;
11208 
11209 		  op0 = adjust_address (op0, mode, 0);
11210 		  /* We've already validated the memory, and we're creating a
11211 		     new pseudo destination.  The predicates really can't
11212 		     fail.  */
11213 		  reg = gen_reg_rtx (mode);
11214 
11215 		  /* Nor can the insn generator.  */
11216 		  rtx_insn *insn = GEN_FCN (icode) (reg, op0);
11217 		  emit_insn (insn);
11218 		  return reg;
11219 		}
11220 	      else if (STRICT_ALIGNMENT)
11221 		{
11222 		  poly_uint64 mode_size = GET_MODE_SIZE (mode);
11223 		  poly_uint64 temp_size = mode_size;
11224 		  if (GET_MODE (op0) != BLKmode)
11225 		    temp_size = upper_bound (temp_size,
11226 					     GET_MODE_SIZE (GET_MODE (op0)));
11227 		  rtx new_rtx
11228 		    = assign_stack_temp_for_type (mode, temp_size, type);
11229 		  rtx new_with_op0_mode
11230 		    = adjust_address (new_rtx, GET_MODE (op0), 0);
11231 
11232 		  gcc_assert (!TREE_ADDRESSABLE (exp));
11233 
11234 		  if (GET_MODE (op0) == BLKmode)
11235 		    {
11236 		      rtx size_rtx = gen_int_mode (mode_size, Pmode);
11237 		      emit_block_move (new_with_op0_mode, op0, size_rtx,
11238 				       (modifier == EXPAND_STACK_PARM
11239 					? BLOCK_OP_CALL_PARM
11240 					: BLOCK_OP_NORMAL));
11241 		    }
11242 		  else
11243 		    emit_move_insn (new_with_op0_mode, op0);
11244 
11245 		  op0 = new_rtx;
11246 		}
11247 	    }
11248 
11249 	  op0 = adjust_address (op0, mode, 0);
11250 	}
11251 
11252       return op0;
11253 
11254     case MODIFY_EXPR:
11255       {
11256 	tree lhs = treeop0;
11257 	tree rhs = treeop1;
11258 	gcc_assert (ignore);
11259 
11260 	/* Check for |= or &= of a bitfield of size one into another bitfield
11261 	   of size 1.  In this case, (unless we need the result of the
11262 	   assignment) we can do this more efficiently with a
11263 	   test followed by an assignment, if necessary.
11264 
11265 	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
11266 	   things change so we do, this code should be enhanced to
11267 	   support it.  */
11268 	if (TREE_CODE (lhs) == COMPONENT_REF
11269 	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
11270 		|| TREE_CODE (rhs) == BIT_AND_EXPR)
11271 	    && TREE_OPERAND (rhs, 0) == lhs
11272 	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
11273 	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
11274 	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
11275 	  {
11276 	    rtx_code_label *label = gen_label_rtx ();
11277 	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
11278 	    do_jump (TREE_OPERAND (rhs, 1),
11279 		     value ? label : 0,
11280 		     value ? 0 : label,
11281 		     profile_probability::uninitialized ());
11282 	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
11283 			       false);
11284 	    do_pending_stack_adjust ();
11285 	    emit_label (label);
11286 	    return const0_rtx;
11287 	  }
11288 
11289 	expand_assignment (lhs, rhs, false);
11290 	return const0_rtx;
11291       }
11292 
11293     case ADDR_EXPR:
11294       return expand_expr_addr_expr (exp, target, tmode, modifier);
11295 
11296     case REALPART_EXPR:
11297       op0 = expand_normal (treeop0);
11298       return read_complex_part (op0, false);
11299 
11300     case IMAGPART_EXPR:
11301       op0 = expand_normal (treeop0);
11302       return read_complex_part (op0, true);
11303 
11304     case RETURN_EXPR:
11305     case LABEL_EXPR:
11306     case GOTO_EXPR:
11307     case SWITCH_EXPR:
11308     case ASM_EXPR:
11309       /* Expanded in cfgexpand.c.  */
11310       gcc_unreachable ();
11311 
11312     case TRY_CATCH_EXPR:
11313     case CATCH_EXPR:
11314     case EH_FILTER_EXPR:
11315     case TRY_FINALLY_EXPR:
11316       /* Lowered by tree-eh.c.  */
11317       gcc_unreachable ();
11318 
11319     case WITH_CLEANUP_EXPR:
11320     case CLEANUP_POINT_EXPR:
11321     case TARGET_EXPR:
11322     case CASE_LABEL_EXPR:
11323     case VA_ARG_EXPR:
11324     case BIND_EXPR:
11325     case INIT_EXPR:
11326     case CONJ_EXPR:
11327     case COMPOUND_EXPR:
11328     case PREINCREMENT_EXPR:
11329     case PREDECREMENT_EXPR:
11330     case POSTINCREMENT_EXPR:
11331     case POSTDECREMENT_EXPR:
11332     case LOOP_EXPR:
11333     case EXIT_EXPR:
11334     case COMPOUND_LITERAL_EXPR:
11335       /* Lowered by gimplify.c.  */
11336       gcc_unreachable ();
11337 
11338     case FDESC_EXPR:
11339       /* Function descriptors are not valid except for as
11340 	 initialization constants, and should not be expanded.  */
11341       gcc_unreachable ();
11342 
11343     case WITH_SIZE_EXPR:
11344       /* WITH_SIZE_EXPR expands to its first argument.  The caller should
11345 	 have pulled out the size to use in whatever context it needed.  */
11346       return expand_expr_real (treeop0, original_target, tmode,
11347 			       modifier, alt_rtl, inner_reference_p);
11348 
11349     default:
11350       return expand_expr_real_2 (&ops, target, tmode, modifier);
11351     }
11352 }
11353 
11354 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
11355    signedness of TYPE), possibly returning the result in TARGET.
11356    TYPE is known to be a partial integer type.  */
11357 static rtx
11358 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
11359 {
11360   HOST_WIDE_INT prec = TYPE_PRECISION (type);
11361   if (target && GET_MODE (target) != GET_MODE (exp))
11362     target = 0;
11363   /* For constant values, reduce using build_int_cst_type. */
11364   if (CONST_INT_P (exp))
11365     {
11366       HOST_WIDE_INT value = INTVAL (exp);
11367       tree t = build_int_cst_type (type, value);
11368       return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
11369     }
11370   else if (TYPE_UNSIGNED (type))
11371     {
11372       scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp));
11373       rtx mask = immed_wide_int_const
11374 	(wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
11375       return expand_and (mode, exp, mask, target);
11376     }
11377   else
11378     {
11379       scalar_int_mode mode = as_a <scalar_int_mode> (GET_MODE (exp));
11380       int count = GET_MODE_PRECISION (mode) - prec;
11381       exp = expand_shift (LSHIFT_EXPR, mode, exp, count, target, 0);
11382       return expand_shift (RSHIFT_EXPR, mode, exp, count, target, 0);
11383     }
11384 }
11385 
11386 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
11387    when applied to the address of EXP produces an address known to be
11388    aligned more than BIGGEST_ALIGNMENT.  */
11389 
11390 static int
11391 is_aligning_offset (const_tree offset, const_tree exp)
11392 {
11393   /* Strip off any conversions.  */
11394   while (CONVERT_EXPR_P (offset))
11395     offset = TREE_OPERAND (offset, 0);
11396 
11397   /* We must now have a BIT_AND_EXPR with a constant that is one less than
11398      power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
11399   if (TREE_CODE (offset) != BIT_AND_EXPR
11400       || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
11401       || compare_tree_int (TREE_OPERAND (offset, 1),
11402 			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
11403       || !pow2p_hwi (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1))
11404     return 0;
11405 
11406   /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
11407      It must be NEGATE_EXPR.  Then strip any more conversions.  */
11408   offset = TREE_OPERAND (offset, 0);
11409   while (CONVERT_EXPR_P (offset))
11410     offset = TREE_OPERAND (offset, 0);
11411 
11412   if (TREE_CODE (offset) != NEGATE_EXPR)
11413     return 0;
11414 
11415   offset = TREE_OPERAND (offset, 0);
11416   while (CONVERT_EXPR_P (offset))
11417     offset = TREE_OPERAND (offset, 0);
11418 
11419   /* This must now be the address of EXP.  */
11420   return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
11421 }
11422 
11423 /* Return the tree node if an ARG corresponds to a string constant or zero
11424    if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
11425    in bytes within the string that ARG is accessing.  The type of the
11426    offset will be `sizetype'.  */
11427 
11428 tree
11429 string_constant (tree arg, tree *ptr_offset)
11430 {
11431   tree array, offset, lower_bound;
11432   STRIP_NOPS (arg);
11433 
11434   if (TREE_CODE (arg) == ADDR_EXPR)
11435     {
11436       if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
11437 	{
11438 	  *ptr_offset = size_zero_node;
11439 	  return TREE_OPERAND (arg, 0);
11440 	}
11441       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
11442 	{
11443 	  array = TREE_OPERAND (arg, 0);
11444 	  offset = size_zero_node;
11445 	}
11446       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
11447 	{
11448 	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
11449 	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
11450 	  if (TREE_CODE (array) != STRING_CST && !VAR_P (array))
11451 	    return 0;
11452 
11453 	  /* Check if the array has a nonzero lower bound.  */
11454 	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
11455 	  if (!integer_zerop (lower_bound))
11456 	    {
11457 	      /* If the offset and base aren't both constants, return 0.  */
11458 	      if (TREE_CODE (lower_bound) != INTEGER_CST)
11459 	        return 0;
11460 	      if (TREE_CODE (offset) != INTEGER_CST)
11461 		return 0;
11462 	      /* Adjust offset by the lower bound.  */
11463 	      offset = size_diffop (fold_convert (sizetype, offset),
11464 				    fold_convert (sizetype, lower_bound));
11465 	    }
11466 	}
11467       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
11468 	{
11469 	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
11470 	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
11471 	  if (TREE_CODE (array) != ADDR_EXPR)
11472 	    return 0;
11473 	  array = TREE_OPERAND (array, 0);
11474 	  if (TREE_CODE (array) != STRING_CST && !VAR_P (array))
11475 	    return 0;
11476 	}
11477       else
11478 	return 0;
11479     }
11480   else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
11481     {
11482       tree arg0 = TREE_OPERAND (arg, 0);
11483       tree arg1 = TREE_OPERAND (arg, 1);
11484 
11485       STRIP_NOPS (arg0);
11486       STRIP_NOPS (arg1);
11487 
11488       if (TREE_CODE (arg0) == ADDR_EXPR
11489 	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
11490 	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
11491 	{
11492 	  array = TREE_OPERAND (arg0, 0);
11493 	  offset = arg1;
11494 	}
11495       else if (TREE_CODE (arg1) == ADDR_EXPR
11496 	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
11497 		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
11498 	{
11499 	  array = TREE_OPERAND (arg1, 0);
11500 	  offset = arg0;
11501 	}
11502       else
11503 	return 0;
11504     }
11505   else
11506     return 0;
11507 
11508   if (TREE_CODE (array) == STRING_CST)
11509     {
11510       *ptr_offset = fold_convert (sizetype, offset);
11511       return array;
11512     }
11513   else if (VAR_P (array) || TREE_CODE (array) == CONST_DECL)
11514     {
11515       int length;
11516       tree init = ctor_for_folding (array);
11517 
11518       /* Variables initialized to string literals can be handled too.  */
11519       if (init == error_mark_node
11520 	  || !init
11521 	  || TREE_CODE (init) != STRING_CST)
11522 	return 0;
11523 
11524       /* Avoid const char foo[4] = "abcde";  */
11525       if (DECL_SIZE_UNIT (array) == NULL_TREE
11526 	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
11527 	  || (length = TREE_STRING_LENGTH (init)) <= 0
11528 	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
11529 	return 0;
11530 
11531       /* If variable is bigger than the string literal, OFFSET must be constant
11532 	 and inside of the bounds of the string literal.  */
11533       offset = fold_convert (sizetype, offset);
11534       if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
11535 	  && (! tree_fits_uhwi_p (offset)
11536 	      || compare_tree_int (offset, length) >= 0))
11537 	return 0;
11538 
11539       *ptr_offset = offset;
11540       return init;
11541     }
11542 
11543   return 0;
11544 }
11545 
11546 /* Generate code to calculate OPS, and exploded expression
11547    using a store-flag instruction and return an rtx for the result.
11548    OPS reflects a comparison.
11549 
11550    If TARGET is nonzero, store the result there if convenient.
11551 
11552    Return zero if there is no suitable set-flag instruction
11553    available on this machine.
11554 
11555    Once expand_expr has been called on the arguments of the comparison,
11556    we are committed to doing the store flag, since it is not safe to
11557    re-evaluate the expression.  We emit the store-flag insn by calling
11558    emit_store_flag, but only expand the arguments if we have a reason
11559    to believe that emit_store_flag will be successful.  If we think that
11560    it will, but it isn't, we have to simulate the store-flag with a
11561    set/jump/set sequence.  */
11562 
11563 static rtx
11564 do_store_flag (sepops ops, rtx target, machine_mode mode)
11565 {
11566   enum rtx_code code;
11567   tree arg0, arg1, type;
11568   machine_mode operand_mode;
11569   int unsignedp;
11570   rtx op0, op1;
11571   rtx subtarget = target;
11572   location_t loc = ops->location;
11573 
11574   arg0 = ops->op0;
11575   arg1 = ops->op1;
11576 
11577   /* Don't crash if the comparison was erroneous.  */
11578   if (arg0 == error_mark_node || arg1 == error_mark_node)
11579     return const0_rtx;
11580 
11581   type = TREE_TYPE (arg0);
11582   operand_mode = TYPE_MODE (type);
11583   unsignedp = TYPE_UNSIGNED (type);
11584 
11585   /* We won't bother with BLKmode store-flag operations because it would mean
11586      passing a lot of information to emit_store_flag.  */
11587   if (operand_mode == BLKmode)
11588     return 0;
11589 
11590   /* We won't bother with store-flag operations involving function pointers
11591      when function pointers must be canonicalized before comparisons.  */
11592   if (targetm.have_canonicalize_funcptr_for_compare ()
11593       && ((POINTER_TYPE_P (TREE_TYPE (arg0))
11594 	   && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (arg0))))
11595 	  || (POINTER_TYPE_P (TREE_TYPE (arg1))
11596 	      && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (arg1))))))
11597     return 0;
11598 
11599   STRIP_NOPS (arg0);
11600   STRIP_NOPS (arg1);
11601 
11602   /* For vector typed comparisons emit code to generate the desired
11603      all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
11604      expander for this.  */
11605   if (TREE_CODE (ops->type) == VECTOR_TYPE)
11606     {
11607       tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
11608       if (VECTOR_BOOLEAN_TYPE_P (ops->type)
11609 	  && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type, ops->code))
11610 	return expand_vec_cmp_expr (ops->type, ifexp, target);
11611       else
11612 	{
11613 	  tree if_true = constant_boolean_node (true, ops->type);
11614 	  tree if_false = constant_boolean_node (false, ops->type);
11615 	  return expand_vec_cond_expr (ops->type, ifexp, if_true,
11616 				       if_false, target);
11617 	}
11618     }
11619 
11620   /* Get the rtx comparison code to use.  We know that EXP is a comparison
11621      operation of some type.  Some comparisons against 1 and -1 can be
11622      converted to comparisons with zero.  Do so here so that the tests
11623      below will be aware that we have a comparison with zero.   These
11624      tests will not catch constants in the first operand, but constants
11625      are rarely passed as the first operand.  */
11626 
11627   switch (ops->code)
11628     {
11629     case EQ_EXPR:
11630       code = EQ;
11631       break;
11632     case NE_EXPR:
11633       code = NE;
11634       break;
11635     case LT_EXPR:
11636       if (integer_onep (arg1))
11637 	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11638       else
11639 	code = unsignedp ? LTU : LT;
11640       break;
11641     case LE_EXPR:
11642       if (! unsignedp && integer_all_onesp (arg1))
11643 	arg1 = integer_zero_node, code = LT;
11644       else
11645 	code = unsignedp ? LEU : LE;
11646       break;
11647     case GT_EXPR:
11648       if (! unsignedp && integer_all_onesp (arg1))
11649 	arg1 = integer_zero_node, code = GE;
11650       else
11651 	code = unsignedp ? GTU : GT;
11652       break;
11653     case GE_EXPR:
11654       if (integer_onep (arg1))
11655 	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11656       else
11657 	code = unsignedp ? GEU : GE;
11658       break;
11659 
11660     case UNORDERED_EXPR:
11661       code = UNORDERED;
11662       break;
11663     case ORDERED_EXPR:
11664       code = ORDERED;
11665       break;
11666     case UNLT_EXPR:
11667       code = UNLT;
11668       break;
11669     case UNLE_EXPR:
11670       code = UNLE;
11671       break;
11672     case UNGT_EXPR:
11673       code = UNGT;
11674       break;
11675     case UNGE_EXPR:
11676       code = UNGE;
11677       break;
11678     case UNEQ_EXPR:
11679       code = UNEQ;
11680       break;
11681     case LTGT_EXPR:
11682       code = LTGT;
11683       break;
11684 
11685     default:
11686       gcc_unreachable ();
11687     }
11688 
11689   /* Put a constant second.  */
11690   if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11691       || TREE_CODE (arg0) == FIXED_CST)
11692     {
11693       std::swap (arg0, arg1);
11694       code = swap_condition (code);
11695     }
11696 
11697   /* If this is an equality or inequality test of a single bit, we can
11698      do this by shifting the bit being tested to the low-order bit and
11699      masking the result with the constant 1.  If the condition was EQ,
11700      we xor it with 1.  This does not require an scc insn and is faster
11701      than an scc insn even if we have it.
11702 
11703      The code to make this transformation was moved into fold_single_bit_test,
11704      so we just call into the folder and expand its result.  */
11705 
11706   if ((code == NE || code == EQ)
11707       && integer_zerop (arg1)
11708       && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11709     {
11710       gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11711       if (srcstmt
11712 	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11713 	{
11714 	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11715 	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11716 	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11717 				       gimple_assign_rhs1 (srcstmt),
11718 				       gimple_assign_rhs2 (srcstmt));
11719 	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11720 	  if (temp)
11721 	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11722 	}
11723     }
11724 
11725   if (! get_subtarget (target)
11726       || GET_MODE (subtarget) != operand_mode)
11727     subtarget = 0;
11728 
11729   expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11730 
11731   if (target == 0)
11732     target = gen_reg_rtx (mode);
11733 
11734   /* Try a cstore if possible.  */
11735   return emit_store_flag_force (target, code, op0, op1,
11736 				operand_mode, unsignedp,
11737 				(TYPE_PRECISION (ops->type) == 1
11738 				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11739 }
11740 
11741 /* Attempt to generate a casesi instruction.  Returns 1 if successful,
11742    0 otherwise (i.e. if there is no casesi instruction).
11743 
11744    DEFAULT_PROBABILITY is the probability of jumping to the default
11745    label.  */
11746 int
11747 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11748 	    rtx table_label, rtx default_label, rtx fallback_label,
11749             profile_probability default_probability)
11750 {
11751   struct expand_operand ops[5];
11752   scalar_int_mode index_mode = SImode;
11753   rtx op1, op2, index;
11754 
11755   if (! targetm.have_casesi ())
11756     return 0;
11757 
11758   /* The index must be some form of integer.  Convert it to SImode.  */
11759   scalar_int_mode omode = SCALAR_INT_TYPE_MODE (index_type);
11760   if (GET_MODE_BITSIZE (omode) > GET_MODE_BITSIZE (index_mode))
11761     {
11762       rtx rangertx = expand_normal (range);
11763 
11764       /* We must handle the endpoints in the original mode.  */
11765       index_expr = build2 (MINUS_EXPR, index_type,
11766 			   index_expr, minval);
11767       minval = integer_zero_node;
11768       index = expand_normal (index_expr);
11769       if (default_label)
11770         emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11771 				 omode, 1, default_label,
11772                                  default_probability);
11773       /* Now we can safely truncate.  */
11774       index = convert_to_mode (index_mode, index, 0);
11775     }
11776   else
11777     {
11778       if (omode != index_mode)
11779 	{
11780 	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11781 	  index_expr = fold_convert (index_type, index_expr);
11782 	}
11783 
11784       index = expand_normal (index_expr);
11785     }
11786 
11787   do_pending_stack_adjust ();
11788 
11789   op1 = expand_normal (minval);
11790   op2 = expand_normal (range);
11791 
11792   create_input_operand (&ops[0], index, index_mode);
11793   create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11794   create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11795   create_fixed_operand (&ops[3], table_label);
11796   create_fixed_operand (&ops[4], (default_label
11797 				  ? default_label
11798 				  : fallback_label));
11799   expand_jump_insn (targetm.code_for_casesi, 5, ops);
11800   return 1;
11801 }
11802 
11803 /* Attempt to generate a tablejump instruction; same concept.  */
11804 /* Subroutine of the next function.
11805 
11806    INDEX is the value being switched on, with the lowest value
11807    in the table already subtracted.
11808    MODE is its expected mode (needed if INDEX is constant).
11809    RANGE is the length of the jump table.
11810    TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11811 
11812    DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11813    index value is out of range.
11814    DEFAULT_PROBABILITY is the probability of jumping to
11815    the default label.  */
11816 
11817 static void
11818 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11819 	      rtx default_label, profile_probability default_probability)
11820 {
11821   rtx temp, vector;
11822 
11823   if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11824     cfun->cfg->max_jumptable_ents = INTVAL (range);
11825 
11826   /* Do an unsigned comparison (in the proper mode) between the index
11827      expression and the value which represents the length of the range.
11828      Since we just finished subtracting the lower bound of the range
11829      from the index expression, this comparison allows us to simultaneously
11830      check that the original index expression value is both greater than
11831      or equal to the minimum value of the range and less than or equal to
11832      the maximum value of the range.  */
11833 
11834   if (default_label)
11835     emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11836 			     default_label, default_probability);
11837 
11838 
11839   /* If index is in range, it must fit in Pmode.
11840      Convert to Pmode so we can index with it.  */
11841   if (mode != Pmode)
11842     index = convert_to_mode (Pmode, index, 1);
11843 
11844   /* Don't let a MEM slip through, because then INDEX that comes
11845      out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11846      and break_out_memory_refs will go to work on it and mess it up.  */
11847 #ifdef PIC_CASE_VECTOR_ADDRESS
11848   if (flag_pic && !REG_P (index))
11849     index = copy_to_mode_reg (Pmode, index);
11850 #endif
11851 
11852   /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11853      GET_MODE_SIZE, because this indicates how large insns are.  The other
11854      uses should all be Pmode, because they are addresses.  This code
11855      could fail if addresses and insns are not the same size.  */
11856   index = simplify_gen_binary (MULT, Pmode, index,
11857 			       gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11858 					     Pmode));
11859   index = simplify_gen_binary (PLUS, Pmode, index,
11860 			       gen_rtx_LABEL_REF (Pmode, table_label));
11861 
11862 #ifdef PIC_CASE_VECTOR_ADDRESS
11863   if (flag_pic)
11864     index = PIC_CASE_VECTOR_ADDRESS (index);
11865   else
11866 #endif
11867     index = memory_address (CASE_VECTOR_MODE, index);
11868   temp = gen_reg_rtx (CASE_VECTOR_MODE);
11869   vector = gen_const_mem (CASE_VECTOR_MODE, index);
11870   convert_move (temp, vector, 0);
11871 
11872   emit_jump_insn (targetm.gen_tablejump (temp, table_label));
11873 
11874   /* If we are generating PIC code or if the table is PC-relative, the
11875      table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
11876   if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11877     emit_barrier ();
11878 }
11879 
11880 int
11881 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11882 	       rtx table_label, rtx default_label,
11883 	       profile_probability default_probability)
11884 {
11885   rtx index;
11886 
11887   if (! targetm.have_tablejump ())
11888     return 0;
11889 
11890   index_expr = fold_build2 (MINUS_EXPR, index_type,
11891 			    fold_convert (index_type, index_expr),
11892 			    fold_convert (index_type, minval));
11893   index = expand_normal (index_expr);
11894   do_pending_stack_adjust ();
11895 
11896   do_tablejump (index, TYPE_MODE (index_type),
11897 		convert_modes (TYPE_MODE (index_type),
11898 			       TYPE_MODE (TREE_TYPE (range)),
11899 			       expand_normal (range),
11900 			       TYPE_UNSIGNED (TREE_TYPE (range))),
11901 		table_label, default_label, default_probability);
11902   return 1;
11903 }
11904 
11905 /* Return a CONST_VECTOR rtx representing vector mask for
11906    a VECTOR_CST of booleans.  */
11907 static rtx
11908 const_vector_mask_from_tree (tree exp)
11909 {
11910   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
11911   machine_mode inner = GET_MODE_INNER (mode);
11912 
11913   rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp),
11914 			      VECTOR_CST_NELTS_PER_PATTERN (exp));
11915   unsigned int count = builder.encoded_nelts ();
11916   for (unsigned int i = 0; i < count; ++i)
11917     {
11918       tree elt = VECTOR_CST_ELT (exp, i);
11919       gcc_assert (TREE_CODE (elt) == INTEGER_CST);
11920       if (integer_zerop (elt))
11921 	builder.quick_push (CONST0_RTX (inner));
11922       else if (integer_onep (elt)
11923 	       || integer_minus_onep (elt))
11924 	builder.quick_push (CONSTM1_RTX (inner));
11925       else
11926 	gcc_unreachable ();
11927     }
11928   return builder.build ();
11929 }
11930 
11931 /* EXP is a VECTOR_CST in which each element is either all-zeros or all-ones.
11932    Return a constant scalar rtx of mode MODE in which bit X is set if element
11933    X of EXP is nonzero.  */
11934 static rtx
11935 const_scalar_mask_from_tree (scalar_int_mode mode, tree exp)
11936 {
11937   wide_int res = wi::zero (GET_MODE_PRECISION (mode));
11938   tree elt;
11939 
11940   /* The result has a fixed number of bits so the input must too.  */
11941   unsigned int nunits = VECTOR_CST_NELTS (exp).to_constant ();
11942   for (unsigned int i = 0; i < nunits; ++i)
11943     {
11944       elt = VECTOR_CST_ELT (exp, i);
11945       gcc_assert (TREE_CODE (elt) == INTEGER_CST);
11946       if (integer_all_onesp (elt))
11947 	res = wi::set_bit (res, i);
11948       else
11949 	gcc_assert (integer_zerop (elt));
11950     }
11951 
11952   return immed_wide_int_const (res, mode);
11953 }
11954 
11955 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
11956 static rtx
11957 const_vector_from_tree (tree exp)
11958 {
11959   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
11960 
11961   if (initializer_zerop (exp))
11962     return CONST0_RTX (mode);
11963 
11964   if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
11965     return const_vector_mask_from_tree (exp);
11966 
11967   machine_mode inner = GET_MODE_INNER (mode);
11968 
11969   rtx_vector_builder builder (mode, VECTOR_CST_NPATTERNS (exp),
11970 			      VECTOR_CST_NELTS_PER_PATTERN (exp));
11971   unsigned int count = builder.encoded_nelts ();
11972   for (unsigned int i = 0; i < count; ++i)
11973     {
11974       tree elt = VECTOR_CST_ELT (exp, i);
11975       if (TREE_CODE (elt) == REAL_CST)
11976 	builder.quick_push (const_double_from_real_value (TREE_REAL_CST (elt),
11977 							  inner));
11978       else if (TREE_CODE (elt) == FIXED_CST)
11979 	builder.quick_push (CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11980 							  inner));
11981       else
11982 	builder.quick_push (immed_wide_int_const (wi::to_poly_wide (elt),
11983 						  inner));
11984     }
11985   return builder.build ();
11986 }
11987 
11988 /* Build a decl for a personality function given a language prefix.  */
11989 
11990 tree
11991 build_personality_function (const char *lang)
11992 {
11993   const char *unwind_and_version;
11994   tree decl, type;
11995   char *name;
11996 
11997   switch (targetm_common.except_unwind_info (&global_options))
11998     {
11999     case UI_NONE:
12000       return NULL;
12001     case UI_SJLJ:
12002       unwind_and_version = "_sj0";
12003       break;
12004     case UI_DWARF2:
12005     case UI_TARGET:
12006       unwind_and_version = "_v0";
12007       break;
12008     case UI_SEH:
12009       unwind_and_version = "_seh0";
12010       break;
12011     default:
12012       gcc_unreachable ();
12013     }
12014 
12015   name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
12016 
12017   type = build_function_type_list (integer_type_node, integer_type_node,
12018 				   long_long_unsigned_type_node,
12019 				   ptr_type_node, ptr_type_node, NULL_TREE);
12020   decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
12021 		     get_identifier (name), type);
12022   DECL_ARTIFICIAL (decl) = 1;
12023   DECL_EXTERNAL (decl) = 1;
12024   TREE_PUBLIC (decl) = 1;
12025 
12026   /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
12027      are the flags assigned by targetm.encode_section_info.  */
12028   SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
12029 
12030   return decl;
12031 }
12032 
12033 /* Extracts the personality function of DECL and returns the corresponding
12034    libfunc.  */
12035 
12036 rtx
12037 get_personality_function (tree decl)
12038 {
12039   tree personality = DECL_FUNCTION_PERSONALITY (decl);
12040   enum eh_personality_kind pk;
12041 
12042   pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
12043   if (pk == eh_personality_none)
12044     return NULL;
12045 
12046   if (!personality
12047       && pk == eh_personality_any)
12048     personality = lang_hooks.eh_personality ();
12049 
12050   if (pk == eh_personality_lang)
12051     gcc_assert (personality != NULL_TREE);
12052 
12053   return XEXP (DECL_RTL (personality), 0);
12054 }
12055 
12056 /* Returns a tree for the size of EXP in bytes.  */
12057 
12058 static tree
12059 tree_expr_size (const_tree exp)
12060 {
12061   if (DECL_P (exp)
12062       && DECL_SIZE_UNIT (exp) != 0)
12063     return DECL_SIZE_UNIT (exp);
12064   else
12065     return size_in_bytes (TREE_TYPE (exp));
12066 }
12067 
12068 /* Return an rtx for the size in bytes of the value of EXP.  */
12069 
12070 rtx
12071 expr_size (tree exp)
12072 {
12073   tree size;
12074 
12075   if (TREE_CODE (exp) == WITH_SIZE_EXPR)
12076     size = TREE_OPERAND (exp, 1);
12077   else
12078     {
12079       size = tree_expr_size (exp);
12080       gcc_assert (size);
12081       gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
12082     }
12083 
12084   return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
12085 }
12086 
12087 /* Return a wide integer for the size in bytes of the value of EXP, or -1
12088    if the size can vary or is larger than an integer.  */
12089 
12090 static HOST_WIDE_INT
12091 int_expr_size (tree exp)
12092 {
12093   tree size;
12094 
12095   if (TREE_CODE (exp) == WITH_SIZE_EXPR)
12096     size = TREE_OPERAND (exp, 1);
12097   else
12098     {
12099       size = tree_expr_size (exp);
12100       gcc_assert (size);
12101     }
12102 
12103   if (size == 0 || !tree_fits_shwi_p (size))
12104     return -1;
12105 
12106   return tree_to_shwi (size);
12107 }
12108