xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/expr.c (revision cb0339e943798efefc7358cd1ebad87bda2ecd6e)
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2    Copyright (C) 1988-2016 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "target.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "predict.h"
29 #include "tm_p.h"
30 #include "ssa.h"
31 #include "expmed.h"
32 #include "optabs.h"
33 #include "regs.h"
34 #include "emit-rtl.h"
35 #include "recog.h"
36 #include "cgraph.h"
37 #include "diagnostic.h"
38 #include "alias.h"
39 #include "fold-const.h"
40 #include "stor-layout.h"
41 #include "attribs.h"
42 #include "varasm.h"
43 #include "except.h"
44 #include "insn-attr.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "stmt.h"
49 /* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
50 #include "expr.h"
51 #include "optabs-tree.h"
52 #include "libfuncs.h"
53 #include "reload.h"
54 #include "langhooks.h"
55 #include "common/common-target.h"
56 #include "tree-ssa-live.h"
57 #include "tree-outof-ssa.h"
58 #include "tree-ssa-address.h"
59 #include "builtins.h"
60 #include "tree-chkp.h"
61 #include "rtl-chkp.h"
62 #include "ccmp.h"
63 
64 
65 /* If this is nonzero, we do not bother generating VOLATILE
66    around volatile memory references, and we are willing to
67    output indirect addresses.  If cse is to follow, we reject
68    indirect addresses so a useful potential cse is generated;
69    if it is used only once, instruction combination will produce
70    the same indirect address eventually.  */
71 int cse_not_expected;
72 
73 /* This structure is used by move_by_pieces to describe the move to
74    be performed.  */
75 struct move_by_pieces_d
76 {
77   rtx to;
78   rtx to_addr;
79   int autinc_to;
80   int explicit_inc_to;
81   rtx from;
82   rtx from_addr;
83   int autinc_from;
84   int explicit_inc_from;
85   unsigned HOST_WIDE_INT len;
86   HOST_WIDE_INT offset;
87   int reverse;
88 };
89 
90 /* This structure is used by store_by_pieces to describe the clear to
91    be performed.  */
92 
93 struct store_by_pieces_d
94 {
95   rtx to;
96   rtx to_addr;
97   int autinc_to;
98   int explicit_inc_to;
99   unsigned HOST_WIDE_INT len;
100   HOST_WIDE_INT offset;
101   rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
102   void *constfundata;
103   int reverse;
104 };
105 
106 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
107 			      struct move_by_pieces_d *);
108 static bool block_move_libcall_safe_for_call_parm (void);
109 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
110 					unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
111 					unsigned HOST_WIDE_INT);
112 static tree emit_block_move_libcall_fn (int);
113 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
114 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
115 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
116 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
117 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
118 			       struct store_by_pieces_d *);
119 static tree clear_storage_libcall_fn (int);
120 static rtx_insn *compress_float_constant (rtx, rtx);
121 static rtx get_subtarget (rtx);
122 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
123 				     HOST_WIDE_INT, unsigned HOST_WIDE_INT,
124 				     unsigned HOST_WIDE_INT, machine_mode,
125 				     tree, int, alias_set_type, bool);
126 static void store_constructor (tree, rtx, int, HOST_WIDE_INT, bool);
127 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
128 			unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
129 			machine_mode, tree, alias_set_type, bool, bool);
130 
131 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
132 
133 static int is_aligning_offset (const_tree, const_tree);
134 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
135 static rtx do_store_flag (sepops, rtx, machine_mode);
136 #ifdef PUSH_ROUNDING
137 static void emit_single_push_insn (machine_mode, rtx, tree);
138 #endif
139 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
140 static rtx const_vector_from_tree (tree);
141 static rtx const_scalar_mask_from_tree (tree);
142 static tree tree_expr_size (const_tree);
143 static HOST_WIDE_INT int_expr_size (tree);
144 
145 
146 /* This is run to set up which modes can be used
147    directly in memory and to initialize the block move optab.  It is run
148    at the beginning of compilation and when the target is reinitialized.  */
149 
150 void
151 init_expr_target (void)
152 {
153   rtx insn, pat;
154   machine_mode mode;
155   int num_clobbers;
156   rtx mem, mem1;
157   rtx reg;
158 
159   /* Try indexing by frame ptr and try by stack ptr.
160      It is known that on the Convex the stack ptr isn't a valid index.
161      With luck, one or the other is valid on any machine.  */
162   mem = gen_rtx_MEM (word_mode, stack_pointer_rtx);
163   mem1 = gen_rtx_MEM (word_mode, frame_pointer_rtx);
164 
165   /* A scratch register we can modify in-place below to avoid
166      useless RTL allocations.  */
167   reg = gen_rtx_REG (word_mode, LAST_VIRTUAL_REGISTER + 1);
168 
169   insn = rtx_alloc (INSN);
170   pat = gen_rtx_SET (NULL_RTX, NULL_RTX);
171   PATTERN (insn) = pat;
172 
173   for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
174        mode = (machine_mode) ((int) mode + 1))
175     {
176       int regno;
177 
178       direct_load[(int) mode] = direct_store[(int) mode] = 0;
179       PUT_MODE (mem, mode);
180       PUT_MODE (mem1, mode);
181 
182       /* See if there is some register that can be used in this mode and
183 	 directly loaded or stored from memory.  */
184 
185       if (mode != VOIDmode && mode != BLKmode)
186 	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
187 	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
188 	     regno++)
189 	  {
190 	    if (! HARD_REGNO_MODE_OK (regno, mode))
191 	      continue;
192 
193 	    set_mode_and_regno (reg, mode, regno);
194 
195 	    SET_SRC (pat) = mem;
196 	    SET_DEST (pat) = reg;
197 	    if (recog (pat, insn, &num_clobbers) >= 0)
198 	      direct_load[(int) mode] = 1;
199 
200 	    SET_SRC (pat) = mem1;
201 	    SET_DEST (pat) = reg;
202 	    if (recog (pat, insn, &num_clobbers) >= 0)
203 	      direct_load[(int) mode] = 1;
204 
205 	    SET_SRC (pat) = reg;
206 	    SET_DEST (pat) = mem;
207 	    if (recog (pat, insn, &num_clobbers) >= 0)
208 	      direct_store[(int) mode] = 1;
209 
210 	    SET_SRC (pat) = reg;
211 	    SET_DEST (pat) = mem1;
212 	    if (recog (pat, insn, &num_clobbers) >= 0)
213 	      direct_store[(int) mode] = 1;
214 	  }
215     }
216 
217   mem = gen_rtx_MEM (VOIDmode, gen_raw_REG (Pmode, LAST_VIRTUAL_REGISTER + 1));
218 
219   for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
220        mode = GET_MODE_WIDER_MODE (mode))
221     {
222       machine_mode srcmode;
223       for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
224 	   srcmode = GET_MODE_WIDER_MODE (srcmode))
225 	{
226 	  enum insn_code ic;
227 
228 	  ic = can_extend_p (mode, srcmode, 0);
229 	  if (ic == CODE_FOR_nothing)
230 	    continue;
231 
232 	  PUT_MODE (mem, srcmode);
233 
234 	  if (insn_operand_matches (ic, 1, mem))
235 	    float_extend_from_mem[mode][srcmode] = true;
236 	}
237     }
238 }
239 
240 /* This is run at the start of compiling a function.  */
241 
242 void
243 init_expr (void)
244 {
245   memset (&crtl->expr, 0, sizeof (crtl->expr));
246 }
247 
248 /* Copy data from FROM to TO, where the machine modes are not the same.
249    Both modes may be integer, or both may be floating, or both may be
250    fixed-point.
251    UNSIGNEDP should be nonzero if FROM is an unsigned type.
252    This causes zero-extension instead of sign-extension.  */
253 
254 void
255 convert_move (rtx to, rtx from, int unsignedp)
256 {
257   machine_mode to_mode = GET_MODE (to);
258   machine_mode from_mode = GET_MODE (from);
259   int to_real = SCALAR_FLOAT_MODE_P (to_mode);
260   int from_real = SCALAR_FLOAT_MODE_P (from_mode);
261   enum insn_code code;
262   rtx libcall;
263 
264   /* rtx code for making an equivalent value.  */
265   enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
266 			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
267 
268 
269   gcc_assert (to_real == from_real);
270   gcc_assert (to_mode != BLKmode);
271   gcc_assert (from_mode != BLKmode);
272 
273   /* If the source and destination are already the same, then there's
274      nothing to do.  */
275   if (to == from)
276     return;
277 
278   /* If FROM is a SUBREG that indicates that we have already done at least
279      the required extension, strip it.  We don't handle such SUBREGs as
280      TO here.  */
281 
282   if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
283       && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
284 	  >= GET_MODE_PRECISION (to_mode))
285       && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
286     from = gen_lowpart (to_mode, from), from_mode = to_mode;
287 
288   gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
289 
290   if (to_mode == from_mode
291       || (from_mode == VOIDmode && CONSTANT_P (from)))
292     {
293       emit_move_insn (to, from);
294       return;
295     }
296 
297   if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
298     {
299       gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
300 
301       if (VECTOR_MODE_P (to_mode))
302 	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
303       else
304 	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
305 
306       emit_move_insn (to, from);
307       return;
308     }
309 
310   if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
311     {
312       convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
313       convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
314       return;
315     }
316 
317   if (to_real)
318     {
319       rtx value;
320       rtx_insn *insns;
321       convert_optab tab;
322 
323       gcc_assert ((GET_MODE_PRECISION (from_mode)
324 		   != GET_MODE_PRECISION (to_mode))
325 		  || (DECIMAL_FLOAT_MODE_P (from_mode)
326 		      != DECIMAL_FLOAT_MODE_P (to_mode)));
327 
328       if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
329 	/* Conversion between decimal float and binary float, same size.  */
330 	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
331       else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
332 	tab = sext_optab;
333       else
334 	tab = trunc_optab;
335 
336       /* Try converting directly if the insn is supported.  */
337 
338       code = convert_optab_handler (tab, to_mode, from_mode);
339       if (code != CODE_FOR_nothing)
340 	{
341 	  emit_unop_insn (code, to, from,
342 			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
343 	  return;
344 	}
345 
346       /* Otherwise use a libcall.  */
347       libcall = convert_optab_libfunc (tab, to_mode, from_mode);
348 
349       /* Is this conversion implemented yet?  */
350       gcc_assert (libcall);
351 
352       start_sequence ();
353       value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
354 				       1, from, from_mode);
355       insns = get_insns ();
356       end_sequence ();
357       emit_libcall_block (insns, to, value,
358 			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
359 								       from)
360 			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
361       return;
362     }
363 
364   /* Handle pointer conversion.  */			/* SPEE 900220.  */
365   /* If the target has a converter from FROM_MODE to TO_MODE, use it.  */
366   {
367     convert_optab ctab;
368 
369     if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
370       ctab = trunc_optab;
371     else if (unsignedp)
372       ctab = zext_optab;
373     else
374       ctab = sext_optab;
375 
376     if (convert_optab_handler (ctab, to_mode, from_mode)
377 	!= CODE_FOR_nothing)
378       {
379 	emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
380 			to, from, UNKNOWN);
381 	return;
382       }
383   }
384 
385   /* Targets are expected to provide conversion insns between PxImode and
386      xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
387   if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
388     {
389       machine_mode full_mode
390 	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
391 
392       gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
393 		  != CODE_FOR_nothing);
394 
395       if (full_mode != from_mode)
396 	from = convert_to_mode (full_mode, from, unsignedp);
397       emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
398 		      to, from, UNKNOWN);
399       return;
400     }
401   if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
402     {
403       rtx new_from;
404       machine_mode full_mode
405 	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
406       convert_optab ctab = unsignedp ? zext_optab : sext_optab;
407       enum insn_code icode;
408 
409       icode = convert_optab_handler (ctab, full_mode, from_mode);
410       gcc_assert (icode != CODE_FOR_nothing);
411 
412       if (to_mode == full_mode)
413 	{
414 	  emit_unop_insn (icode, to, from, UNKNOWN);
415 	  return;
416 	}
417 
418       new_from = gen_reg_rtx (full_mode);
419       emit_unop_insn (icode, new_from, from, UNKNOWN);
420 
421       /* else proceed to integer conversions below.  */
422       from_mode = full_mode;
423       from = new_from;
424     }
425 
426    /* Make sure both are fixed-point modes or both are not.  */
427    gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
428 	       ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
429    if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
430     {
431       /* If we widen from_mode to to_mode and they are in the same class,
432 	 we won't saturate the result.
433 	 Otherwise, always saturate the result to play safe.  */
434       if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
435 	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
436 	expand_fixed_convert (to, from, 0, 0);
437       else
438 	expand_fixed_convert (to, from, 0, 1);
439       return;
440     }
441 
442   /* Now both modes are integers.  */
443 
444   /* Handle expanding beyond a word.  */
445   if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
446       && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
447     {
448       rtx_insn *insns;
449       rtx lowpart;
450       rtx fill_value;
451       rtx lowfrom;
452       int i;
453       machine_mode lowpart_mode;
454       int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
455 
456       /* Try converting directly if the insn is supported.  */
457       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
458 	  != CODE_FOR_nothing)
459 	{
460 	  /* If FROM is a SUBREG, put it into a register.  Do this
461 	     so that we always generate the same set of insns for
462 	     better cse'ing; if an intermediate assignment occurred,
463 	     we won't be doing the operation directly on the SUBREG.  */
464 	  if (optimize > 0 && GET_CODE (from) == SUBREG)
465 	    from = force_reg (from_mode, from);
466 	  emit_unop_insn (code, to, from, equiv_code);
467 	  return;
468 	}
469       /* Next, try converting via full word.  */
470       else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
471 	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
472 		   != CODE_FOR_nothing))
473 	{
474 	  rtx word_to = gen_reg_rtx (word_mode);
475 	  if (REG_P (to))
476 	    {
477 	      if (reg_overlap_mentioned_p (to, from))
478 		from = force_reg (from_mode, from);
479 	      emit_clobber (to);
480 	    }
481 	  convert_move (word_to, from, unsignedp);
482 	  emit_unop_insn (code, to, word_to, equiv_code);
483 	  return;
484 	}
485 
486       /* No special multiword conversion insn; do it by hand.  */
487       start_sequence ();
488 
489       /* Since we will turn this into a no conflict block, we must ensure
490          the source does not overlap the target so force it into an isolated
491          register when maybe so.  Likewise for any MEM input, since the
492          conversion sequence might require several references to it and we
493          must ensure we're getting the same value every time.  */
494 
495       if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
496 	from = force_reg (from_mode, from);
497 
498       /* Get a copy of FROM widened to a word, if necessary.  */
499       if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
500 	lowpart_mode = word_mode;
501       else
502 	lowpart_mode = from_mode;
503 
504       lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
505 
506       lowpart = gen_lowpart (lowpart_mode, to);
507       emit_move_insn (lowpart, lowfrom);
508 
509       /* Compute the value to put in each remaining word.  */
510       if (unsignedp)
511 	fill_value = const0_rtx;
512       else
513 	fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
514 					    LT, lowfrom, const0_rtx,
515 					    lowpart_mode, 0, -1);
516 
517       /* Fill the remaining words.  */
518       for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
519 	{
520 	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
521 	  rtx subword = operand_subword (to, index, 1, to_mode);
522 
523 	  gcc_assert (subword);
524 
525 	  if (fill_value != subword)
526 	    emit_move_insn (subword, fill_value);
527 	}
528 
529       insns = get_insns ();
530       end_sequence ();
531 
532       emit_insn (insns);
533       return;
534     }
535 
536   /* Truncating multi-word to a word or less.  */
537   if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
538       && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
539     {
540       if (!((MEM_P (from)
541 	     && ! MEM_VOLATILE_P (from)
542 	     && direct_load[(int) to_mode]
543 	     && ! mode_dependent_address_p (XEXP (from, 0),
544 					    MEM_ADDR_SPACE (from)))
545 	    || REG_P (from)
546 	    || GET_CODE (from) == SUBREG))
547 	from = force_reg (from_mode, from);
548       convert_move (to, gen_lowpart (word_mode, from), 0);
549       return;
550     }
551 
552   /* Now follow all the conversions between integers
553      no more than a word long.  */
554 
555   /* For truncation, usually we can just refer to FROM in a narrower mode.  */
556   if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
557       && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
558     {
559       if (!((MEM_P (from)
560 	     && ! MEM_VOLATILE_P (from)
561 	     && direct_load[(int) to_mode]
562 	     && ! mode_dependent_address_p (XEXP (from, 0),
563 					    MEM_ADDR_SPACE (from)))
564 	    || REG_P (from)
565 	    || GET_CODE (from) == SUBREG))
566 	from = force_reg (from_mode, from);
567       if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
568 	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
569 	from = copy_to_reg (from);
570       emit_move_insn (to, gen_lowpart (to_mode, from));
571       return;
572     }
573 
574   /* Handle extension.  */
575   if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
576     {
577       /* Convert directly if that works.  */
578       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
579 	  != CODE_FOR_nothing)
580 	{
581 	  emit_unop_insn (code, to, from, equiv_code);
582 	  return;
583 	}
584       else
585 	{
586 	  machine_mode intermediate;
587 	  rtx tmp;
588 	  int shift_amount;
589 
590 	  /* Search for a mode to convert via.  */
591 	  for (intermediate = from_mode; intermediate != VOIDmode;
592 	       intermediate = GET_MODE_WIDER_MODE (intermediate))
593 	    if (((can_extend_p (to_mode, intermediate, unsignedp)
594 		  != CODE_FOR_nothing)
595 		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
596 		     && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
597 		&& (can_extend_p (intermediate, from_mode, unsignedp)
598 		    != CODE_FOR_nothing))
599 	      {
600 		convert_move (to, convert_to_mode (intermediate, from,
601 						   unsignedp), unsignedp);
602 		return;
603 	      }
604 
605 	  /* No suitable intermediate mode.
606 	     Generate what we need with	shifts.  */
607 	  shift_amount = (GET_MODE_PRECISION (to_mode)
608 			  - GET_MODE_PRECISION (from_mode));
609 	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
610 	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
611 			      to, unsignedp);
612 	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
613 			      to, unsignedp);
614 	  if (tmp != to)
615 	    emit_move_insn (to, tmp);
616 	  return;
617 	}
618     }
619 
620   /* Support special truncate insns for certain modes.  */
621   if (convert_optab_handler (trunc_optab, to_mode,
622 			     from_mode) != CODE_FOR_nothing)
623     {
624       emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
625 		      to, from, UNKNOWN);
626       return;
627     }
628 
629   /* Handle truncation of volatile memrefs, and so on;
630      the things that couldn't be truncated directly,
631      and for which there was no special instruction.
632 
633      ??? Code above formerly short-circuited this, for most integer
634      mode pairs, with a force_reg in from_mode followed by a recursive
635      call to this routine.  Appears always to have been wrong.  */
636   if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
637     {
638       rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
639       emit_move_insn (to, temp);
640       return;
641     }
642 
643   /* Mode combination is not recognized.  */
644   gcc_unreachable ();
645 }
646 
647 /* Return an rtx for a value that would result
648    from converting X to mode MODE.
649    Both X and MODE may be floating, or both integer.
650    UNSIGNEDP is nonzero if X is an unsigned value.
651    This can be done by referring to a part of X in place
652    or by copying to a new temporary with conversion.  */
653 
654 rtx
655 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
656 {
657   return convert_modes (mode, VOIDmode, x, unsignedp);
658 }
659 
660 /* Return an rtx for a value that would result
661    from converting X from mode OLDMODE to mode MODE.
662    Both modes may be floating, or both integer.
663    UNSIGNEDP is nonzero if X is an unsigned value.
664 
665    This can be done by referring to a part of X in place
666    or by copying to a new temporary with conversion.
667 
668    You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
669 
670 rtx
671 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
672 {
673   rtx temp;
674 
675   /* If FROM is a SUBREG that indicates that we have already done at least
676      the required extension, strip it.  */
677 
678   if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
679       && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
680       && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
681     x = gen_lowpart (mode, SUBREG_REG (x));
682 
683   if (GET_MODE (x) != VOIDmode)
684     oldmode = GET_MODE (x);
685 
686   if (mode == oldmode)
687     return x;
688 
689   if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
690     {
691       /* If the caller did not tell us the old mode, then there is not
692 	 much to do with respect to canonicalization.  We have to
693 	 assume that all the bits are significant.  */
694       if (GET_MODE_CLASS (oldmode) != MODE_INT)
695 	oldmode = MAX_MODE_INT;
696       wide_int w = wide_int::from (std::make_pair (x, oldmode),
697 				   GET_MODE_PRECISION (mode),
698 				   unsignedp ? UNSIGNED : SIGNED);
699       return immed_wide_int_const (w, mode);
700     }
701 
702   /* We can do this with a gen_lowpart if both desired and current modes
703      are integer, and this is either a constant integer, a register, or a
704      non-volatile MEM. */
705   if (GET_MODE_CLASS (mode) == MODE_INT
706       && GET_MODE_CLASS (oldmode) == MODE_INT
707       && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
708       && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
709           || (REG_P (x)
710               && (!HARD_REGISTER_P (x)
711                   || HARD_REGNO_MODE_OK (REGNO (x), mode))
712               && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
713 
714    return gen_lowpart (mode, x);
715 
716   /* Converting from integer constant into mode is always equivalent to an
717      subreg operation.  */
718   if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
719     {
720       gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
721       return simplify_gen_subreg (mode, x, oldmode, 0);
722     }
723 
724   temp = gen_reg_rtx (mode);
725   convert_move (temp, x, unsignedp);
726   return temp;
727 }
728 
729 /* Return the largest alignment we can use for doing a move (or store)
730    of MAX_PIECES.  ALIGN is the largest alignment we could use.  */
731 
732 static unsigned int
733 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
734 {
735   machine_mode tmode;
736 
737   tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
738   if (align >= GET_MODE_ALIGNMENT (tmode))
739     align = GET_MODE_ALIGNMENT (tmode);
740   else
741     {
742       machine_mode tmode, xmode;
743 
744       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
745 	   tmode != VOIDmode;
746 	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
747 	if (GET_MODE_SIZE (tmode) > max_pieces
748 	    || SLOW_UNALIGNED_ACCESS (tmode, align))
749 	  break;
750 
751       align = MAX (align, GET_MODE_ALIGNMENT (xmode));
752     }
753 
754   return align;
755 }
756 
757 /* Return the widest integer mode no wider than SIZE.  If no such mode
758    can be found, return VOIDmode.  */
759 
760 static machine_mode
761 widest_int_mode_for_size (unsigned int size)
762 {
763   machine_mode tmode, mode = VOIDmode;
764 
765   for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
766        tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
767     if (GET_MODE_SIZE (tmode) < size)
768       mode = tmode;
769 
770   return mode;
771 }
772 
773 /* Determine whether the LEN bytes can be moved by using several move
774    instructions.  Return nonzero if a call to move_by_pieces should
775    succeed.  */
776 
777 int
778 can_move_by_pieces (unsigned HOST_WIDE_INT len,
779 		    unsigned int align)
780 {
781   return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
782 						 optimize_insn_for_speed_p ());
783 }
784 
785 /* Generate several move instructions to copy LEN bytes from block FROM to
786    block TO.  (These are MEM rtx's with BLKmode).
787 
788    If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
789    used to push FROM to the stack.
790 
791    ALIGN is maximum stack alignment we can assume.
792 
793    If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
794    mempcpy, and if ENDP is 2 return memory the end minus one byte ala
795    stpcpy.  */
796 
797 rtx
798 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
799 		unsigned int align, int endp)
800 {
801   struct move_by_pieces_d data;
802   machine_mode to_addr_mode;
803   machine_mode from_addr_mode = get_address_mode (from);
804   rtx to_addr, from_addr = XEXP (from, 0);
805   unsigned int max_size = MOVE_MAX_PIECES + 1;
806   enum insn_code icode;
807 
808   align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
809 
810   data.offset = 0;
811   data.from_addr = from_addr;
812   if (to)
813     {
814       to_addr_mode = get_address_mode (to);
815       to_addr = XEXP (to, 0);
816       data.to = to;
817       data.autinc_to
818 	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
819 	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
820       data.reverse
821 	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
822     }
823   else
824     {
825       to_addr_mode = VOIDmode;
826       to_addr = NULL_RTX;
827       data.to = NULL_RTX;
828       data.autinc_to = 1;
829       if (STACK_GROWS_DOWNWARD)
830 	data.reverse = 1;
831       else
832 	data.reverse = 0;
833     }
834   data.to_addr = to_addr;
835   data.from = from;
836   data.autinc_from
837     = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
838        || GET_CODE (from_addr) == POST_INC
839        || GET_CODE (from_addr) == POST_DEC);
840 
841   data.explicit_inc_from = 0;
842   data.explicit_inc_to = 0;
843   if (data.reverse) data.offset = len;
844   data.len = len;
845 
846   /* If copying requires more than two move insns,
847      copy addresses to registers (to make displacements shorter)
848      and use post-increment if available.  */
849   if (!(data.autinc_from && data.autinc_to)
850       && move_by_pieces_ninsns (len, align, max_size) > 2)
851     {
852       /* Find the mode of the largest move...
853 	 MODE might not be used depending on the definitions of the
854 	 USE_* macros below.  */
855       machine_mode mode ATTRIBUTE_UNUSED
856 	= widest_int_mode_for_size (max_size);
857 
858       if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
859 	{
860 	  data.from_addr = copy_to_mode_reg (from_addr_mode,
861 					     plus_constant (from_addr_mode,
862 							    from_addr, len));
863 	  data.autinc_from = 1;
864 	  data.explicit_inc_from = -1;
865 	}
866       if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
867 	{
868 	  data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
869 	  data.autinc_from = 1;
870 	  data.explicit_inc_from = 1;
871 	}
872       if (!data.autinc_from && CONSTANT_P (from_addr))
873 	data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
874       if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
875 	{
876 	  data.to_addr = copy_to_mode_reg (to_addr_mode,
877 					   plus_constant (to_addr_mode,
878 							  to_addr, len));
879 	  data.autinc_to = 1;
880 	  data.explicit_inc_to = -1;
881 	}
882       if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
883 	{
884 	  data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
885 	  data.autinc_to = 1;
886 	  data.explicit_inc_to = 1;
887 	}
888       if (!data.autinc_to && CONSTANT_P (to_addr))
889 	data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
890     }
891 
892   align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
893 
894   /* First move what we can in the largest integer mode, then go to
895      successively smaller modes.  */
896 
897   while (max_size > 1 && data.len > 0)
898     {
899       machine_mode mode = widest_int_mode_for_size (max_size);
900 
901       if (mode == VOIDmode)
902 	break;
903 
904       icode = optab_handler (mov_optab, mode);
905       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
906 	move_by_pieces_1 (GEN_FCN (icode), mode, &data);
907 
908       max_size = GET_MODE_SIZE (mode);
909     }
910 
911   /* The code above should have handled everything.  */
912   gcc_assert (!data.len);
913 
914   if (endp)
915     {
916       rtx to1;
917 
918       gcc_assert (!data.reverse);
919       if (data.autinc_to)
920 	{
921 	  if (endp == 2)
922 	    {
923 	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
924 		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
925 	      else
926 		data.to_addr = copy_to_mode_reg (to_addr_mode,
927 						 plus_constant (to_addr_mode,
928 								data.to_addr,
929 								-1));
930 	    }
931 	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
932 					   data.offset);
933 	}
934       else
935 	{
936 	  if (endp == 2)
937 	    --data.offset;
938 	  to1 = adjust_address (data.to, QImode, data.offset);
939 	}
940       return to1;
941     }
942   else
943     return data.to;
944 }
945 
946 /* Return number of insns required to move L bytes by pieces.
947    ALIGN (in bits) is maximum alignment we can assume.  */
948 
949 unsigned HOST_WIDE_INT
950 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
951 		       unsigned int max_size)
952 {
953   unsigned HOST_WIDE_INT n_insns = 0;
954 
955   align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
956 
957   while (max_size > 1 && l > 0)
958     {
959       machine_mode mode;
960       enum insn_code icode;
961 
962       mode = widest_int_mode_for_size (max_size);
963 
964       if (mode == VOIDmode)
965 	break;
966 
967       icode = optab_handler (mov_optab, mode);
968       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
969 	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
970 
971       max_size = GET_MODE_SIZE (mode);
972     }
973 
974   gcc_assert (!l);
975   return n_insns;
976 }
977 
978 /* Subroutine of move_by_pieces.  Move as many bytes as appropriate
979    with move instructions for mode MODE.  GENFUN is the gen_... function
980    to make a move insn for that mode.  DATA has all the other info.  */
981 
982 static void
983 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
984 		  struct move_by_pieces_d *data)
985 {
986   unsigned int size = GET_MODE_SIZE (mode);
987   rtx to1 = NULL_RTX, from1;
988 
989   while (data->len >= size)
990     {
991       if (data->reverse)
992 	data->offset -= size;
993 
994       if (data->to)
995 	{
996 	  if (data->autinc_to)
997 	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
998 					     data->offset);
999 	  else
1000 	    to1 = adjust_address (data->to, mode, data->offset);
1001 	}
1002 
1003       if (data->autinc_from)
1004 	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1005 					   data->offset);
1006       else
1007 	from1 = adjust_address (data->from, mode, data->offset);
1008 
1009       if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1010 	emit_insn (gen_add2_insn (data->to_addr,
1011 				  gen_int_mode (-(HOST_WIDE_INT) size,
1012 						GET_MODE (data->to_addr))));
1013       if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1014 	emit_insn (gen_add2_insn (data->from_addr,
1015 				  gen_int_mode (-(HOST_WIDE_INT) size,
1016 						GET_MODE (data->from_addr))));
1017 
1018       if (data->to)
1019 	emit_insn ((*genfun) (to1, from1));
1020       else
1021 	{
1022 #ifdef PUSH_ROUNDING
1023 	  emit_single_push_insn (mode, from1, NULL);
1024 #else
1025 	  gcc_unreachable ();
1026 #endif
1027 	}
1028 
1029       if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1030 	emit_insn (gen_add2_insn (data->to_addr,
1031 				  gen_int_mode (size,
1032 						GET_MODE (data->to_addr))));
1033       if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1034 	emit_insn (gen_add2_insn (data->from_addr,
1035 				  gen_int_mode (size,
1036 						GET_MODE (data->from_addr))));
1037 
1038       if (! data->reverse)
1039 	data->offset += size;
1040 
1041       data->len -= size;
1042     }
1043 }
1044 
1045 /* Emit code to move a block Y to a block X.  This may be done with
1046    string-move instructions, with multiple scalar move instructions,
1047    or with a library call.
1048 
1049    Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1050    SIZE is an rtx that says how long they are.
1051    ALIGN is the maximum alignment we can assume they have.
1052    METHOD describes what kind of copy this is, and what mechanisms may be used.
1053    MIN_SIZE is the minimal size of block to move
1054    MAX_SIZE is the maximal size of block to move, if it can not be represented
1055    in unsigned HOST_WIDE_INT, than it is mask of all ones.
1056 
1057    Return the address of the new block, if memcpy is called and returns it,
1058    0 otherwise.  */
1059 
1060 rtx
1061 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1062 		       unsigned int expected_align, HOST_WIDE_INT expected_size,
1063 		       unsigned HOST_WIDE_INT min_size,
1064 		       unsigned HOST_WIDE_INT max_size,
1065 		       unsigned HOST_WIDE_INT probable_max_size)
1066 {
1067   bool may_use_call;
1068   rtx retval = 0;
1069   unsigned int align;
1070 
1071   gcc_assert (size);
1072   if (CONST_INT_P (size)
1073       && INTVAL (size) == 0)
1074     return 0;
1075 
1076   switch (method)
1077     {
1078     case BLOCK_OP_NORMAL:
1079     case BLOCK_OP_TAILCALL:
1080       may_use_call = true;
1081       break;
1082 
1083     case BLOCK_OP_CALL_PARM:
1084       may_use_call = block_move_libcall_safe_for_call_parm ();
1085 
1086       /* Make inhibit_defer_pop nonzero around the library call
1087 	 to force it to pop the arguments right away.  */
1088       NO_DEFER_POP;
1089       break;
1090 
1091     case BLOCK_OP_NO_LIBCALL:
1092       may_use_call = false;
1093       break;
1094 
1095     default:
1096       gcc_unreachable ();
1097     }
1098 
1099   gcc_assert (MEM_P (x) && MEM_P (y));
1100   align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1101   gcc_assert (align >= BITS_PER_UNIT);
1102 
1103   /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1104      block copy is more efficient for other large modes, e.g. DCmode.  */
1105   x = adjust_address (x, BLKmode, 0);
1106   y = adjust_address (y, BLKmode, 0);
1107 
1108   /* Set MEM_SIZE as appropriate for this block copy.  The main place this
1109      can be incorrect is coming from __builtin_memcpy.  */
1110   if (CONST_INT_P (size))
1111     {
1112       x = shallow_copy_rtx (x);
1113       y = shallow_copy_rtx (y);
1114       set_mem_size (x, INTVAL (size));
1115       set_mem_size (y, INTVAL (size));
1116     }
1117 
1118   if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
1119     move_by_pieces (x, y, INTVAL (size), align, 0);
1120   else if (emit_block_move_via_movmem (x, y, size, align,
1121 				       expected_align, expected_size,
1122 				       min_size, max_size, probable_max_size))
1123     ;
1124   else if (may_use_call
1125 	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1126 	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1127     {
1128       /* Since x and y are passed to a libcall, mark the corresponding
1129 	 tree EXPR as addressable.  */
1130       tree y_expr = MEM_EXPR (y);
1131       tree x_expr = MEM_EXPR (x);
1132       if (y_expr)
1133 	mark_addressable (y_expr);
1134       if (x_expr)
1135 	mark_addressable (x_expr);
1136       retval = emit_block_move_via_libcall (x, y, size,
1137 					    method == BLOCK_OP_TAILCALL);
1138     }
1139 
1140   else
1141     emit_block_move_via_loop (x, y, size, align);
1142 
1143   if (method == BLOCK_OP_CALL_PARM)
1144     OK_DEFER_POP;
1145 
1146   return retval;
1147 }
1148 
1149 rtx
1150 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1151 {
1152   unsigned HOST_WIDE_INT max, min = 0;
1153   if (GET_CODE (size) == CONST_INT)
1154     min = max = UINTVAL (size);
1155   else
1156     max = GET_MODE_MASK (GET_MODE (size));
1157   return emit_block_move_hints (x, y, size, method, 0, -1,
1158 				min, max, max);
1159 }
1160 
1161 /* A subroutine of emit_block_move.  Returns true if calling the
1162    block move libcall will not clobber any parameters which may have
1163    already been placed on the stack.  */
1164 
1165 static bool
1166 block_move_libcall_safe_for_call_parm (void)
1167 {
1168 #if defined (REG_PARM_STACK_SPACE)
1169   tree fn;
1170 #endif
1171 
1172   /* If arguments are pushed on the stack, then they're safe.  */
1173   if (PUSH_ARGS)
1174     return true;
1175 
1176   /* If registers go on the stack anyway, any argument is sure to clobber
1177      an outgoing argument.  */
1178 #if defined (REG_PARM_STACK_SPACE)
1179   fn = emit_block_move_libcall_fn (false);
1180   /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1181      depend on its argument.  */
1182   (void) fn;
1183   if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1184       && REG_PARM_STACK_SPACE (fn) != 0)
1185     return false;
1186 #endif
1187 
1188   /* If any argument goes in memory, then it might clobber an outgoing
1189      argument.  */
1190   {
1191     CUMULATIVE_ARGS args_so_far_v;
1192     cumulative_args_t args_so_far;
1193     tree fn, arg;
1194 
1195     fn = emit_block_move_libcall_fn (false);
1196     INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1197     args_so_far = pack_cumulative_args (&args_so_far_v);
1198 
1199     arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1200     for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1201       {
1202 	machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1203 	rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1204 					      NULL_TREE, true);
1205 	if (!tmp || !REG_P (tmp))
1206 	  return false;
1207 	if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1208 	  return false;
1209 	targetm.calls.function_arg_advance (args_so_far, mode,
1210 					    NULL_TREE, true);
1211       }
1212   }
1213   return true;
1214 }
1215 
1216 /* A subroutine of emit_block_move.  Expand a movmem pattern;
1217    return true if successful.  */
1218 
1219 static bool
1220 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1221 			    unsigned int expected_align, HOST_WIDE_INT expected_size,
1222 			    unsigned HOST_WIDE_INT min_size,
1223 			    unsigned HOST_WIDE_INT max_size,
1224 			    unsigned HOST_WIDE_INT probable_max_size)
1225 {
1226   int save_volatile_ok = volatile_ok;
1227   machine_mode mode;
1228 
1229   if (expected_align < align)
1230     expected_align = align;
1231   if (expected_size != -1)
1232     {
1233       if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1234 	expected_size = probable_max_size;
1235       if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1236 	expected_size = min_size;
1237     }
1238 
1239   /* Since this is a move insn, we don't care about volatility.  */
1240   volatile_ok = 1;
1241 
1242   /* Try the most limited insn first, because there's no point
1243      including more than one in the machine description unless
1244      the more limited one has some advantage.  */
1245 
1246   for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1247        mode = GET_MODE_WIDER_MODE (mode))
1248     {
1249       enum insn_code code = direct_optab_handler (movmem_optab, mode);
1250 
1251       if (code != CODE_FOR_nothing
1252 	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1253 	     here because if SIZE is less than the mode mask, as it is
1254 	     returned by the macro, it will definitely be less than the
1255 	     actual mode mask.  Since SIZE is within the Pmode address
1256 	     space, we limit MODE to Pmode.  */
1257 	  && ((CONST_INT_P (size)
1258 	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
1259 		   <= (GET_MODE_MASK (mode) >> 1)))
1260 	      || max_size <= (GET_MODE_MASK (mode) >> 1)
1261 	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1262 	{
1263 	  struct expand_operand ops[9];
1264 	  unsigned int nops;
1265 
1266 	  /* ??? When called via emit_block_move_for_call, it'd be
1267 	     nice if there were some way to inform the backend, so
1268 	     that it doesn't fail the expansion because it thinks
1269 	     emitting the libcall would be more efficient.  */
1270 	  nops = insn_data[(int) code].n_generator_args;
1271 	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1272 
1273 	  create_fixed_operand (&ops[0], x);
1274 	  create_fixed_operand (&ops[1], y);
1275 	  /* The check above guarantees that this size conversion is valid.  */
1276 	  create_convert_operand_to (&ops[2], size, mode, true);
1277 	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1278 	  if (nops >= 6)
1279 	    {
1280 	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1281 	      create_integer_operand (&ops[5], expected_size);
1282 	    }
1283 	  if (nops >= 8)
1284 	    {
1285 	      create_integer_operand (&ops[6], min_size);
1286 	      /* If we can not represent the maximal size,
1287 		 make parameter NULL.  */
1288 	      if ((HOST_WIDE_INT) max_size != -1)
1289 	        create_integer_operand (&ops[7], max_size);
1290 	      else
1291 		create_fixed_operand (&ops[7], NULL);
1292 	    }
1293 	  if (nops == 9)
1294 	    {
1295 	      /* If we can not represent the maximal size,
1296 		 make parameter NULL.  */
1297 	      if ((HOST_WIDE_INT) probable_max_size != -1)
1298 	        create_integer_operand (&ops[8], probable_max_size);
1299 	      else
1300 		create_fixed_operand (&ops[8], NULL);
1301 	    }
1302 	  if (maybe_expand_insn (code, nops, ops))
1303 	    {
1304 	      volatile_ok = save_volatile_ok;
1305 	      return true;
1306 	    }
1307 	}
1308     }
1309 
1310   volatile_ok = save_volatile_ok;
1311   return false;
1312 }
1313 
1314 /* A subroutine of emit_block_move.  Expand a call to memcpy.
1315    Return the return value from memcpy, 0 otherwise.  */
1316 
1317 rtx
1318 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1319 {
1320   rtx dst_addr, src_addr;
1321   tree call_expr, fn, src_tree, dst_tree, size_tree;
1322   machine_mode size_mode;
1323   rtx retval;
1324 
1325   /* Emit code to copy the addresses of DST and SRC and SIZE into new
1326      pseudos.  We can then place those new pseudos into a VAR_DECL and
1327      use them later.  */
1328 
1329   dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1330   src_addr = copy_addr_to_reg (XEXP (src, 0));
1331 
1332   dst_addr = convert_memory_address (ptr_mode, dst_addr);
1333   src_addr = convert_memory_address (ptr_mode, src_addr);
1334 
1335   dst_tree = make_tree (ptr_type_node, dst_addr);
1336   src_tree = make_tree (ptr_type_node, src_addr);
1337 
1338   size_mode = TYPE_MODE (sizetype);
1339 
1340   size = convert_to_mode (size_mode, size, 1);
1341   size = copy_to_mode_reg (size_mode, size);
1342 
1343   /* It is incorrect to use the libcall calling conventions to call
1344      memcpy in this context.  This could be a user call to memcpy and
1345      the user may wish to examine the return value from memcpy.  For
1346      targets where libcalls and normal calls have different conventions
1347      for returning pointers, we could end up generating incorrect code.  */
1348 
1349   size_tree = make_tree (sizetype, size);
1350 
1351   fn = emit_block_move_libcall_fn (true);
1352   call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1353   CALL_EXPR_TAILCALL (call_expr) = tailcall;
1354 
1355   retval = expand_normal (call_expr);
1356 
1357   return retval;
1358 }
1359 
1360 /* A subroutine of emit_block_move_via_libcall.  Create the tree node
1361    for the function we use for block copies.  */
1362 
1363 static GTY(()) tree block_move_fn;
1364 
1365 void
1366 init_block_move_fn (const char *asmspec)
1367 {
1368   if (!block_move_fn)
1369     {
1370       tree args, fn, attrs, attr_args;
1371 
1372       fn = get_identifier ("memcpy");
1373       args = build_function_type_list (ptr_type_node, ptr_type_node,
1374 				       const_ptr_type_node, sizetype,
1375 				       NULL_TREE);
1376 
1377       fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1378       DECL_EXTERNAL (fn) = 1;
1379       TREE_PUBLIC (fn) = 1;
1380       DECL_ARTIFICIAL (fn) = 1;
1381       TREE_NOTHROW (fn) = 1;
1382       DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1383       DECL_VISIBILITY_SPECIFIED (fn) = 1;
1384 
1385       attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1386       attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1387 
1388       decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1389 
1390       block_move_fn = fn;
1391     }
1392 
1393   if (asmspec)
1394     set_user_assembler_name (block_move_fn, asmspec);
1395 }
1396 
1397 static tree
1398 emit_block_move_libcall_fn (int for_call)
1399 {
1400   static bool emitted_extern;
1401 
1402   if (!block_move_fn)
1403     init_block_move_fn (NULL);
1404 
1405   if (for_call && !emitted_extern)
1406     {
1407       emitted_extern = true;
1408       make_decl_rtl (block_move_fn);
1409     }
1410 
1411   return block_move_fn;
1412 }
1413 
1414 /* A subroutine of emit_block_move.  Copy the data via an explicit
1415    loop.  This is used only when libcalls are forbidden.  */
1416 /* ??? It'd be nice to copy in hunks larger than QImode.  */
1417 
1418 static void
1419 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1420 			  unsigned int align ATTRIBUTE_UNUSED)
1421 {
1422   rtx_code_label *cmp_label, *top_label;
1423   rtx iter, x_addr, y_addr, tmp;
1424   machine_mode x_addr_mode = get_address_mode (x);
1425   machine_mode y_addr_mode = get_address_mode (y);
1426   machine_mode iter_mode;
1427 
1428   iter_mode = GET_MODE (size);
1429   if (iter_mode == VOIDmode)
1430     iter_mode = word_mode;
1431 
1432   top_label = gen_label_rtx ();
1433   cmp_label = gen_label_rtx ();
1434   iter = gen_reg_rtx (iter_mode);
1435 
1436   emit_move_insn (iter, const0_rtx);
1437 
1438   x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1439   y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1440   do_pending_stack_adjust ();
1441 
1442   emit_jump (cmp_label);
1443   emit_label (top_label);
1444 
1445   tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1446   x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1447 
1448   if (x_addr_mode != y_addr_mode)
1449     tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1450   y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1451 
1452   x = change_address (x, QImode, x_addr);
1453   y = change_address (y, QImode, y_addr);
1454 
1455   emit_move_insn (x, y);
1456 
1457   tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1458 			     true, OPTAB_LIB_WIDEN);
1459   if (tmp != iter)
1460     emit_move_insn (iter, tmp);
1461 
1462   emit_label (cmp_label);
1463 
1464   emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1465 			   true, top_label, REG_BR_PROB_BASE * 90 / 100);
1466 }
1467 
1468 /* Copy all or part of a value X into registers starting at REGNO.
1469    The number of registers to be filled is NREGS.  */
1470 
1471 void
1472 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
1473 {
1474   if (nregs == 0)
1475     return;
1476 
1477   if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1478     x = validize_mem (force_const_mem (mode, x));
1479 
1480   /* See if the machine can do this with a load multiple insn.  */
1481   if (targetm.have_load_multiple ())
1482     {
1483       rtx_insn *last = get_last_insn ();
1484       rtx first = gen_rtx_REG (word_mode, regno);
1485       if (rtx_insn *pat = targetm.gen_load_multiple (first, x,
1486 						     GEN_INT (nregs)))
1487 	{
1488 	  emit_insn (pat);
1489 	  return;
1490 	}
1491       else
1492 	delete_insns_since (last);
1493     }
1494 
1495   for (int i = 0; i < nregs; i++)
1496     emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1497 		    operand_subword_force (x, i, mode));
1498 }
1499 
1500 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1501    The number of registers to be filled is NREGS.  */
1502 
1503 void
1504 move_block_from_reg (int regno, rtx x, int nregs)
1505 {
1506   if (nregs == 0)
1507     return;
1508 
1509   /* See if the machine can do this with a store multiple insn.  */
1510   if (targetm.have_store_multiple ())
1511     {
1512       rtx_insn *last = get_last_insn ();
1513       rtx first = gen_rtx_REG (word_mode, regno);
1514       if (rtx_insn *pat = targetm.gen_store_multiple (x, first,
1515 						      GEN_INT (nregs)))
1516 	{
1517 	  emit_insn (pat);
1518 	  return;
1519 	}
1520       else
1521 	delete_insns_since (last);
1522     }
1523 
1524   for (int i = 0; i < nregs; i++)
1525     {
1526       rtx tem = operand_subword (x, i, 1, BLKmode);
1527 
1528       gcc_assert (tem);
1529 
1530       emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1531     }
1532 }
1533 
1534 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1535    ORIG, where ORIG is a non-consecutive group of registers represented by
1536    a PARALLEL.  The clone is identical to the original except in that the
1537    original set of registers is replaced by a new set of pseudo registers.
1538    The new set has the same modes as the original set.  */
1539 
1540 rtx
1541 gen_group_rtx (rtx orig)
1542 {
1543   int i, length;
1544   rtx *tmps;
1545 
1546   gcc_assert (GET_CODE (orig) == PARALLEL);
1547 
1548   length = XVECLEN (orig, 0);
1549   tmps = XALLOCAVEC (rtx, length);
1550 
1551   /* Skip a NULL entry in first slot.  */
1552   i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1553 
1554   if (i)
1555     tmps[0] = 0;
1556 
1557   for (; i < length; i++)
1558     {
1559       machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1560       rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1561 
1562       tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1563     }
1564 
1565   return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1566 }
1567 
1568 /* A subroutine of emit_group_load.  Arguments as for emit_group_load,
1569    except that values are placed in TMPS[i], and must later be moved
1570    into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */
1571 
1572 static void
1573 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1574 {
1575   rtx src;
1576   int start, i;
1577   machine_mode m = GET_MODE (orig_src);
1578 
1579   gcc_assert (GET_CODE (dst) == PARALLEL);
1580 
1581   if (m != VOIDmode
1582       && !SCALAR_INT_MODE_P (m)
1583       && !MEM_P (orig_src)
1584       && GET_CODE (orig_src) != CONCAT)
1585     {
1586       machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1587       if (imode == BLKmode)
1588 	src = assign_stack_temp (GET_MODE (orig_src), ssize);
1589       else
1590 	src = gen_reg_rtx (imode);
1591       if (imode != BLKmode)
1592 	src = gen_lowpart (GET_MODE (orig_src), src);
1593       emit_move_insn (src, orig_src);
1594       /* ...and back again.  */
1595       if (imode != BLKmode)
1596 	src = gen_lowpart (imode, src);
1597       emit_group_load_1 (tmps, dst, src, type, ssize);
1598       return;
1599     }
1600 
1601   /* Check for a NULL entry, used to indicate that the parameter goes
1602      both on the stack and in registers.  */
1603   if (XEXP (XVECEXP (dst, 0, 0), 0))
1604     start = 0;
1605   else
1606     start = 1;
1607 
1608   /* Process the pieces.  */
1609   for (i = start; i < XVECLEN (dst, 0); i++)
1610     {
1611       machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1612       HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1613       unsigned int bytelen = GET_MODE_SIZE (mode);
1614       int shift = 0;
1615 
1616       /* Handle trailing fragments that run over the size of the struct.  */
1617       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1618 	{
1619 	  /* Arrange to shift the fragment to where it belongs.
1620 	     extract_bit_field loads to the lsb of the reg.  */
1621 	  if (
1622 #ifdef BLOCK_REG_PADDING
1623 	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1624 	      == (BYTES_BIG_ENDIAN ? upward : downward)
1625 #else
1626 	      BYTES_BIG_ENDIAN
1627 #endif
1628 	      )
1629 	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1630 	  bytelen = ssize - bytepos;
1631 	  gcc_assert (bytelen > 0);
1632 	}
1633 
1634       /* If we won't be loading directly from memory, protect the real source
1635 	 from strange tricks we might play; but make sure that the source can
1636 	 be loaded directly into the destination.  */
1637       src = orig_src;
1638       if (!MEM_P (orig_src)
1639 	  && (!CONSTANT_P (orig_src)
1640 	      || (GET_MODE (orig_src) != mode
1641 		  && GET_MODE (orig_src) != VOIDmode)))
1642 	{
1643 	  if (GET_MODE (orig_src) == VOIDmode)
1644 	    src = gen_reg_rtx (mode);
1645 	  else
1646 	    src = gen_reg_rtx (GET_MODE (orig_src));
1647 
1648 	  emit_move_insn (src, orig_src);
1649 	}
1650 
1651       /* Optimize the access just a bit.  */
1652       if (MEM_P (src)
1653 	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1654 	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1655 	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1656 	  && bytelen == GET_MODE_SIZE (mode))
1657 	{
1658 	  tmps[i] = gen_reg_rtx (mode);
1659 	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1660 	}
1661       else if (COMPLEX_MODE_P (mode)
1662 	       && GET_MODE (src) == mode
1663 	       && bytelen == GET_MODE_SIZE (mode))
1664 	/* Let emit_move_complex do the bulk of the work.  */
1665 	tmps[i] = src;
1666       else if (GET_CODE (src) == CONCAT)
1667 	{
1668 	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1669 	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1670 
1671 	  if ((bytepos == 0 && bytelen == slen0)
1672 	      || (bytepos != 0 && bytepos + bytelen <= slen))
1673 	    {
1674 	      /* The following assumes that the concatenated objects all
1675 		 have the same size.  In this case, a simple calculation
1676 		 can be used to determine the object and the bit field
1677 		 to be extracted.  */
1678 	      tmps[i] = XEXP (src, bytepos / slen0);
1679 	      if (! CONSTANT_P (tmps[i])
1680 		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1681 		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1682 					     (bytepos % slen0) * BITS_PER_UNIT,
1683 					     1, NULL_RTX, mode, mode, false);
1684 	    }
1685 	  else
1686 	    {
1687 	      rtx mem;
1688 
1689 	      gcc_assert (!bytepos);
1690 	      mem = assign_stack_temp (GET_MODE (src), slen);
1691 	      emit_move_insn (mem, src);
1692 	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1693 					   0, 1, NULL_RTX, mode, mode, false);
1694 	    }
1695 	}
1696       /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1697 	 SIMD register, which is currently broken.  While we get GCC
1698 	 to emit proper RTL for these cases, let's dump to memory.  */
1699       else if (VECTOR_MODE_P (GET_MODE (dst))
1700 	       && REG_P (src))
1701 	{
1702 	  int slen = GET_MODE_SIZE (GET_MODE (src));
1703 	  rtx mem;
1704 
1705 	  mem = assign_stack_temp (GET_MODE (src), slen);
1706 	  emit_move_insn (mem, src);
1707 	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
1708 	}
1709       else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1710                && XVECLEN (dst, 0) > 1)
1711         tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1712       else if (CONSTANT_P (src))
1713 	{
1714 	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1715 
1716 	  if (len == ssize)
1717 	    tmps[i] = src;
1718 	  else
1719 	    {
1720 	      rtx first, second;
1721 
1722 	      /* TODO: const_wide_int can have sizes other than this...  */
1723 	      gcc_assert (2 * len == ssize);
1724 	      split_double (src, &first, &second);
1725 	      if (i)
1726 		tmps[i] = second;
1727 	      else
1728 		tmps[i] = first;
1729 	    }
1730 	}
1731       else if (REG_P (src) && GET_MODE (src) == mode)
1732 	tmps[i] = src;
1733       else
1734 	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1735 				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1736 				     mode, mode, false);
1737 
1738       if (shift)
1739 	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1740 				shift, tmps[i], 0);
1741     }
1742 }
1743 
1744 /* Emit code to move a block SRC of type TYPE to a block DST,
1745    where DST is non-consecutive registers represented by a PARALLEL.
1746    SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1747    if not known.  */
1748 
1749 void
1750 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1751 {
1752   rtx *tmps;
1753   int i;
1754 
1755   tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1756   emit_group_load_1 (tmps, dst, src, type, ssize);
1757 
1758   /* Copy the extracted pieces into the proper (probable) hard regs.  */
1759   for (i = 0; i < XVECLEN (dst, 0); i++)
1760     {
1761       rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1762       if (d == NULL)
1763 	continue;
1764       emit_move_insn (d, tmps[i]);
1765     }
1766 }
1767 
1768 /* Similar, but load SRC into new pseudos in a format that looks like
1769    PARALLEL.  This can later be fed to emit_group_move to get things
1770    in the right place.  */
1771 
1772 rtx
1773 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1774 {
1775   rtvec vec;
1776   int i;
1777 
1778   vec = rtvec_alloc (XVECLEN (parallel, 0));
1779   emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1780 
1781   /* Convert the vector to look just like the original PARALLEL, except
1782      with the computed values.  */
1783   for (i = 0; i < XVECLEN (parallel, 0); i++)
1784     {
1785       rtx e = XVECEXP (parallel, 0, i);
1786       rtx d = XEXP (e, 0);
1787 
1788       if (d)
1789 	{
1790 	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1791 	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1792 	}
1793       RTVEC_ELT (vec, i) = e;
1794     }
1795 
1796   return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1797 }
1798 
1799 /* Emit code to move a block SRC to block DST, where SRC and DST are
1800    non-consecutive groups of registers, each represented by a PARALLEL.  */
1801 
1802 void
1803 emit_group_move (rtx dst, rtx src)
1804 {
1805   int i;
1806 
1807   gcc_assert (GET_CODE (src) == PARALLEL
1808 	      && GET_CODE (dst) == PARALLEL
1809 	      && XVECLEN (src, 0) == XVECLEN (dst, 0));
1810 
1811   /* Skip first entry if NULL.  */
1812   for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1813     emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1814 		    XEXP (XVECEXP (src, 0, i), 0));
1815 }
1816 
1817 /* Move a group of registers represented by a PARALLEL into pseudos.  */
1818 
1819 rtx
1820 emit_group_move_into_temps (rtx src)
1821 {
1822   rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1823   int i;
1824 
1825   for (i = 0; i < XVECLEN (src, 0); i++)
1826     {
1827       rtx e = XVECEXP (src, 0, i);
1828       rtx d = XEXP (e, 0);
1829 
1830       if (d)
1831 	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1832       RTVEC_ELT (vec, i) = e;
1833     }
1834 
1835   return gen_rtx_PARALLEL (GET_MODE (src), vec);
1836 }
1837 
1838 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1839    where SRC is non-consecutive registers represented by a PARALLEL.
1840    SSIZE represents the total size of block ORIG_DST, or -1 if not
1841    known.  */
1842 
1843 void
1844 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1845 {
1846   rtx *tmps, dst;
1847   int start, finish, i;
1848   machine_mode m = GET_MODE (orig_dst);
1849 
1850   gcc_assert (GET_CODE (src) == PARALLEL);
1851 
1852   if (!SCALAR_INT_MODE_P (m)
1853       && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1854     {
1855       machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1856       if (imode == BLKmode)
1857         dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1858       else
1859         dst = gen_reg_rtx (imode);
1860       emit_group_store (dst, src, type, ssize);
1861       if (imode != BLKmode)
1862         dst = gen_lowpart (GET_MODE (orig_dst), dst);
1863       emit_move_insn (orig_dst, dst);
1864       return;
1865     }
1866 
1867   /* Check for a NULL entry, used to indicate that the parameter goes
1868      both on the stack and in registers.  */
1869   if (XEXP (XVECEXP (src, 0, 0), 0))
1870     start = 0;
1871   else
1872     start = 1;
1873   finish = XVECLEN (src, 0);
1874 
1875   tmps = XALLOCAVEC (rtx, finish);
1876 
1877   /* Copy the (probable) hard regs into pseudos.  */
1878   for (i = start; i < finish; i++)
1879     {
1880       rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1881       if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1882 	{
1883 	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
1884 	  emit_move_insn (tmps[i], reg);
1885 	}
1886       else
1887 	tmps[i] = reg;
1888     }
1889 
1890   /* If we won't be storing directly into memory, protect the real destination
1891      from strange tricks we might play.  */
1892   dst = orig_dst;
1893   if (GET_CODE (dst) == PARALLEL)
1894     {
1895       rtx temp;
1896 
1897       /* We can get a PARALLEL dst if there is a conditional expression in
1898 	 a return statement.  In that case, the dst and src are the same,
1899 	 so no action is necessary.  */
1900       if (rtx_equal_p (dst, src))
1901 	return;
1902 
1903       /* It is unclear if we can ever reach here, but we may as well handle
1904 	 it.  Allocate a temporary, and split this into a store/load to/from
1905 	 the temporary.  */
1906       temp = assign_stack_temp (GET_MODE (dst), ssize);
1907       emit_group_store (temp, src, type, ssize);
1908       emit_group_load (dst, temp, type, ssize);
1909       return;
1910     }
1911   else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1912     {
1913       machine_mode outer = GET_MODE (dst);
1914       machine_mode inner;
1915       HOST_WIDE_INT bytepos;
1916       bool done = false;
1917       rtx temp;
1918 
1919       if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1920 	dst = gen_reg_rtx (outer);
1921 
1922       /* Make life a bit easier for combine.  */
1923       /* If the first element of the vector is the low part
1924 	 of the destination mode, use a paradoxical subreg to
1925 	 initialize the destination.  */
1926       if (start < finish)
1927 	{
1928 	  inner = GET_MODE (tmps[start]);
1929 	  bytepos = subreg_lowpart_offset (inner, outer);
1930 	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1931 	    {
1932 	      temp = simplify_gen_subreg (outer, tmps[start],
1933 					  inner, 0);
1934 	      if (temp)
1935 		{
1936 		  emit_move_insn (dst, temp);
1937 		  done = true;
1938 		  start++;
1939 		}
1940 	    }
1941 	}
1942 
1943       /* If the first element wasn't the low part, try the last.  */
1944       if (!done
1945 	  && start < finish - 1)
1946 	{
1947 	  inner = GET_MODE (tmps[finish - 1]);
1948 	  bytepos = subreg_lowpart_offset (inner, outer);
1949 	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1950 	    {
1951 	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
1952 					  inner, 0);
1953 	      if (temp)
1954 		{
1955 		  emit_move_insn (dst, temp);
1956 		  done = true;
1957 		  finish--;
1958 		}
1959 	    }
1960 	}
1961 
1962       /* Otherwise, simply initialize the result to zero.  */
1963       if (!done)
1964         emit_move_insn (dst, CONST0_RTX (outer));
1965     }
1966 
1967   /* Process the pieces.  */
1968   for (i = start; i < finish; i++)
1969     {
1970       HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1971       machine_mode mode = GET_MODE (tmps[i]);
1972       unsigned int bytelen = GET_MODE_SIZE (mode);
1973       unsigned int adj_bytelen;
1974       rtx dest = dst;
1975 
1976       /* Handle trailing fragments that run over the size of the struct.  */
1977       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1978 	adj_bytelen = ssize - bytepos;
1979       else
1980 	adj_bytelen = bytelen;
1981 
1982       if (GET_CODE (dst) == CONCAT)
1983 	{
1984 	  if (bytepos + adj_bytelen
1985 	      <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1986 	    dest = XEXP (dst, 0);
1987 	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
1988 	    {
1989 	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
1990 	      dest = XEXP (dst, 1);
1991 	    }
1992 	  else
1993 	    {
1994 	      machine_mode dest_mode = GET_MODE (dest);
1995 	      machine_mode tmp_mode = GET_MODE (tmps[i]);
1996 
1997 	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
1998 
1999 	      if (GET_MODE_ALIGNMENT (dest_mode)
2000 		  >= GET_MODE_ALIGNMENT (tmp_mode))
2001 		{
2002 		  dest = assign_stack_temp (dest_mode,
2003 					    GET_MODE_SIZE (dest_mode));
2004 		  emit_move_insn (adjust_address (dest,
2005 						  tmp_mode,
2006 						  bytepos),
2007 				  tmps[i]);
2008 		  dst = dest;
2009 		}
2010 	      else
2011 		{
2012 		  dest = assign_stack_temp (tmp_mode,
2013 					    GET_MODE_SIZE (tmp_mode));
2014 		  emit_move_insn (dest, tmps[i]);
2015 		  dst = adjust_address (dest, dest_mode, bytepos);
2016 		}
2017 	      break;
2018 	    }
2019 	}
2020 
2021       /* Handle trailing fragments that run over the size of the struct.  */
2022       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2023 	{
2024 	  /* store_bit_field always takes its value from the lsb.
2025 	     Move the fragment to the lsb if it's not already there.  */
2026 	  if (
2027 #ifdef BLOCK_REG_PADDING
2028 	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2029 	      == (BYTES_BIG_ENDIAN ? upward : downward)
2030 #else
2031 	      BYTES_BIG_ENDIAN
2032 #endif
2033 	      )
2034 	    {
2035 	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2036 	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2037 				      shift, tmps[i], 0);
2038 	    }
2039 
2040 	  /* Make sure not to write past the end of the struct.  */
2041 	  store_bit_field (dest,
2042 			   adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2043 			   bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2044 			   VOIDmode, tmps[i], false);
2045 	}
2046 
2047       /* Optimize the access just a bit.  */
2048       else if (MEM_P (dest)
2049 	       && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2050 		   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2051 	       && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2052 	       && bytelen == GET_MODE_SIZE (mode))
2053 	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2054 
2055       else
2056 	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2057 			 0, 0, mode, tmps[i], false);
2058     }
2059 
2060   /* Copy from the pseudo into the (probable) hard reg.  */
2061   if (orig_dst != dst)
2062     emit_move_insn (orig_dst, dst);
2063 }
2064 
2065 /* Return a form of X that does not use a PARALLEL.  TYPE is the type
2066    of the value stored in X.  */
2067 
2068 rtx
2069 maybe_emit_group_store (rtx x, tree type)
2070 {
2071   machine_mode mode = TYPE_MODE (type);
2072   gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2073   if (GET_CODE (x) == PARALLEL)
2074     {
2075       rtx result = gen_reg_rtx (mode);
2076       emit_group_store (result, x, type, int_size_in_bytes (type));
2077       return result;
2078     }
2079   return x;
2080 }
2081 
2082 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2083 
2084    This is used on targets that return BLKmode values in registers.  */
2085 
2086 void
2087 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2088 {
2089   unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2090   rtx src = NULL, dst = NULL;
2091   unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2092   unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2093   machine_mode mode = GET_MODE (srcreg);
2094   machine_mode tmode = GET_MODE (target);
2095   machine_mode copy_mode;
2096 
2097   /* BLKmode registers created in the back-end shouldn't have survived.  */
2098   gcc_assert (mode != BLKmode);
2099 
2100   /* If the structure doesn't take up a whole number of words, see whether
2101      SRCREG is padded on the left or on the right.  If it's on the left,
2102      set PADDING_CORRECTION to the number of bits to skip.
2103 
2104      In most ABIs, the structure will be returned at the least end of
2105      the register, which translates to right padding on little-endian
2106      targets and left padding on big-endian targets.  The opposite
2107      holds if the structure is returned at the most significant
2108      end of the register.  */
2109   if (bytes % UNITS_PER_WORD != 0
2110       && (targetm.calls.return_in_msb (type)
2111 	  ? !BYTES_BIG_ENDIAN
2112 	  : BYTES_BIG_ENDIAN))
2113     padding_correction
2114       = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2115 
2116   /* We can use a single move if we have an exact mode for the size.  */
2117   else if (MEM_P (target)
2118 	   && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2119 	       || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2120 	   && bytes == GET_MODE_SIZE (mode))
2121   {
2122     emit_move_insn (adjust_address (target, mode, 0), srcreg);
2123     return;
2124   }
2125 
2126   /* And if we additionally have the same mode for a register.  */
2127   else if (REG_P (target)
2128 	   && GET_MODE (target) == mode
2129 	   && bytes == GET_MODE_SIZE (mode))
2130   {
2131     emit_move_insn (target, srcreg);
2132     return;
2133   }
2134 
2135   /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2136      into a new pseudo which is a full word.  */
2137   if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2138     {
2139       srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2140       mode = word_mode;
2141     }
2142 
2143   /* Copy the structure BITSIZE bits at a time.  If the target lives in
2144      memory, take care of not reading/writing past its end by selecting
2145      a copy mode suited to BITSIZE.  This should always be possible given
2146      how it is computed.
2147 
2148      If the target lives in register, make sure not to select a copy mode
2149      larger than the mode of the register.
2150 
2151      We could probably emit more efficient code for machines which do not use
2152      strict alignment, but it doesn't seem worth the effort at the current
2153      time.  */
2154 
2155   copy_mode = word_mode;
2156   if (MEM_P (target))
2157     {
2158       machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2159       if (mem_mode != BLKmode)
2160 	copy_mode = mem_mode;
2161     }
2162   else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2163     copy_mode = tmode;
2164 
2165   for (bitpos = 0, xbitpos = padding_correction;
2166        bitpos < bytes * BITS_PER_UNIT;
2167        bitpos += bitsize, xbitpos += bitsize)
2168     {
2169       /* We need a new source operand each time xbitpos is on a
2170 	 word boundary and when xbitpos == padding_correction
2171 	 (the first time through).  */
2172       if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2173 	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2174 
2175       /* We need a new destination operand each time bitpos is on
2176 	 a word boundary.  */
2177       if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2178 	dst = target;
2179       else if (bitpos % BITS_PER_WORD == 0)
2180 	dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2181 
2182       /* Use xbitpos for the source extraction (right justified) and
2183 	 bitpos for the destination store (left justified).  */
2184       store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2185 		       extract_bit_field (src, bitsize,
2186 					  xbitpos % BITS_PER_WORD, 1,
2187 					  NULL_RTX, copy_mode, copy_mode,
2188 					  false),
2189 		       false);
2190     }
2191 }
2192 
2193 /* Copy BLKmode value SRC into a register of mode MODE.  Return the
2194    register if it contains any data, otherwise return null.
2195 
2196    This is used on targets that return BLKmode values in registers.  */
2197 
2198 rtx
2199 copy_blkmode_to_reg (machine_mode mode, tree src)
2200 {
2201   int i, n_regs;
2202   unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2203   unsigned int bitsize;
2204   rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2205   machine_mode dst_mode;
2206 
2207   gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2208 
2209   x = expand_normal (src);
2210 
2211   bytes = int_size_in_bytes (TREE_TYPE (src));
2212   if (bytes == 0)
2213     return NULL_RTX;
2214 
2215   /* If the structure doesn't take up a whole number of words, see
2216      whether the register value should be padded on the left or on
2217      the right.  Set PADDING_CORRECTION to the number of padding
2218      bits needed on the left side.
2219 
2220      In most ABIs, the structure will be returned at the least end of
2221      the register, which translates to right padding on little-endian
2222      targets and left padding on big-endian targets.  The opposite
2223      holds if the structure is returned at the most significant
2224      end of the register.  */
2225   if (bytes % UNITS_PER_WORD != 0
2226       && (targetm.calls.return_in_msb (TREE_TYPE (src))
2227 	  ? !BYTES_BIG_ENDIAN
2228 	  : BYTES_BIG_ENDIAN))
2229     padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2230 					   * BITS_PER_UNIT));
2231 
2232   n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2233   dst_words = XALLOCAVEC (rtx, n_regs);
2234   bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2235 
2236   /* Copy the structure BITSIZE bits at a time.  */
2237   for (bitpos = 0, xbitpos = padding_correction;
2238        bitpos < bytes * BITS_PER_UNIT;
2239        bitpos += bitsize, xbitpos += bitsize)
2240     {
2241       /* We need a new destination pseudo each time xbitpos is
2242 	 on a word boundary and when xbitpos == padding_correction
2243 	 (the first time through).  */
2244       if (xbitpos % BITS_PER_WORD == 0
2245 	  || xbitpos == padding_correction)
2246 	{
2247 	  /* Generate an appropriate register.  */
2248 	  dst_word = gen_reg_rtx (word_mode);
2249 	  dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2250 
2251 	  /* Clear the destination before we move anything into it.  */
2252 	  emit_move_insn (dst_word, CONST0_RTX (word_mode));
2253 	}
2254 
2255       /* We need a new source operand each time bitpos is on a word
2256 	 boundary.  */
2257       if (bitpos % BITS_PER_WORD == 0)
2258 	src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2259 
2260       /* Use bitpos for the source extraction (left justified) and
2261 	 xbitpos for the destination store (right justified).  */
2262       store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2263 		       0, 0, word_mode,
2264 		       extract_bit_field (src_word, bitsize,
2265 					  bitpos % BITS_PER_WORD, 1,
2266 					  NULL_RTX, word_mode, word_mode,
2267 					  false),
2268 		       false);
2269     }
2270 
2271   if (mode == BLKmode)
2272     {
2273       /* Find the smallest integer mode large enough to hold the
2274 	 entire structure.  */
2275       for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2276 	   mode != VOIDmode;
2277 	   mode = GET_MODE_WIDER_MODE (mode))
2278 	/* Have we found a large enough mode?  */
2279 	if (GET_MODE_SIZE (mode) >= bytes)
2280 	  break;
2281 
2282       /* A suitable mode should have been found.  */
2283       gcc_assert (mode != VOIDmode);
2284     }
2285 
2286   if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2287     dst_mode = word_mode;
2288   else
2289     dst_mode = mode;
2290   dst = gen_reg_rtx (dst_mode);
2291 
2292   for (i = 0; i < n_regs; i++)
2293     emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2294 
2295   if (mode != dst_mode)
2296     dst = gen_lowpart (mode, dst);
2297 
2298   return dst;
2299 }
2300 
2301 /* Add a USE expression for REG to the (possibly empty) list pointed
2302    to by CALL_FUSAGE.  REG must denote a hard register.  */
2303 
2304 void
2305 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2306 {
2307   gcc_assert (REG_P (reg));
2308 
2309   if (!HARD_REGISTER_P (reg))
2310     return;
2311 
2312   *call_fusage
2313     = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2314 }
2315 
2316 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2317    to by CALL_FUSAGE.  REG must denote a hard register.  */
2318 
2319 void
2320 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2321 {
2322   gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2323 
2324   *call_fusage
2325     = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2326 }
2327 
2328 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2329    starting at REGNO.  All of these registers must be hard registers.  */
2330 
2331 void
2332 use_regs (rtx *call_fusage, int regno, int nregs)
2333 {
2334   int i;
2335 
2336   gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2337 
2338   for (i = 0; i < nregs; i++)
2339     use_reg (call_fusage, regno_reg_rtx[regno + i]);
2340 }
2341 
2342 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2343    PARALLEL REGS.  This is for calls that pass values in multiple
2344    non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2345 
2346 void
2347 use_group_regs (rtx *call_fusage, rtx regs)
2348 {
2349   int i;
2350 
2351   for (i = 0; i < XVECLEN (regs, 0); i++)
2352     {
2353       rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2354 
2355       /* A NULL entry means the parameter goes both on the stack and in
2356 	 registers.  This can also be a MEM for targets that pass values
2357 	 partially on the stack and partially in registers.  */
2358       if (reg != 0 && REG_P (reg))
2359 	use_reg (call_fusage, reg);
2360     }
2361 }
2362 
2363 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2364    assigment and the code of the expresion on the RHS is CODE.  Return
2365    NULL otherwise.  */
2366 
2367 static gimple *
2368 get_def_for_expr (tree name, enum tree_code code)
2369 {
2370   gimple *def_stmt;
2371 
2372   if (TREE_CODE (name) != SSA_NAME)
2373     return NULL;
2374 
2375   def_stmt = get_gimple_for_ssa_name (name);
2376   if (!def_stmt
2377       || gimple_assign_rhs_code (def_stmt) != code)
2378     return NULL;
2379 
2380   return def_stmt;
2381 }
2382 
2383 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2384    assigment and the class of the expresion on the RHS is CLASS.  Return
2385    NULL otherwise.  */
2386 
2387 static gimple *
2388 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2389 {
2390   gimple *def_stmt;
2391 
2392   if (TREE_CODE (name) != SSA_NAME)
2393     return NULL;
2394 
2395   def_stmt = get_gimple_for_ssa_name (name);
2396   if (!def_stmt
2397       || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2398     return NULL;
2399 
2400   return def_stmt;
2401 }
2402 
2403 
2404 /* Determine whether the LEN bytes generated by CONSTFUN can be
2405    stored to memory using several move instructions.  CONSTFUNDATA is
2406    a pointer which will be passed as argument in every CONSTFUN call.
2407    ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
2408    a memset operation and false if it's a copy of a constant string.
2409    Return nonzero if a call to store_by_pieces should succeed.  */
2410 
2411 int
2412 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2413 		     rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2414 		     void *constfundata, unsigned int align, bool memsetp)
2415 {
2416   unsigned HOST_WIDE_INT l;
2417   unsigned int max_size;
2418   HOST_WIDE_INT offset = 0;
2419   machine_mode mode;
2420   enum insn_code icode;
2421   int reverse;
2422   /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
2423   rtx cst ATTRIBUTE_UNUSED;
2424 
2425   if (len == 0)
2426     return 1;
2427 
2428   if (!targetm.use_by_pieces_infrastructure_p (len, align,
2429 					       memsetp
2430 						 ? SET_BY_PIECES
2431 						 : STORE_BY_PIECES,
2432 					       optimize_insn_for_speed_p ()))
2433     return 0;
2434 
2435   align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2436 
2437   /* We would first store what we can in the largest integer mode, then go to
2438      successively smaller modes.  */
2439 
2440   for (reverse = 0;
2441        reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2442        reverse++)
2443     {
2444       l = len;
2445       max_size = STORE_MAX_PIECES + 1;
2446       while (max_size > 1 && l > 0)
2447 	{
2448 	  mode = widest_int_mode_for_size (max_size);
2449 
2450 	  if (mode == VOIDmode)
2451 	    break;
2452 
2453 	  icode = optab_handler (mov_optab, mode);
2454 	  if (icode != CODE_FOR_nothing
2455 	      && align >= GET_MODE_ALIGNMENT (mode))
2456 	    {
2457 	      unsigned int size = GET_MODE_SIZE (mode);
2458 
2459 	      while (l >= size)
2460 		{
2461 		  if (reverse)
2462 		    offset -= size;
2463 
2464 		  cst = (*constfun) (constfundata, offset, mode);
2465 		  if (!targetm.legitimate_constant_p (mode, cst))
2466 		    return 0;
2467 
2468 		  if (!reverse)
2469 		    offset += size;
2470 
2471 		  l -= size;
2472 		}
2473 	    }
2474 
2475 	  max_size = GET_MODE_SIZE (mode);
2476 	}
2477 
2478       /* The code above should have handled everything.  */
2479       gcc_assert (!l);
2480     }
2481 
2482   return 1;
2483 }
2484 
2485 /* Generate several move instructions to store LEN bytes generated by
2486    CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
2487    pointer which will be passed as argument in every CONSTFUN call.
2488    ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
2489    a memset operation and false if it's a copy of a constant string.
2490    If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2491    mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2492    stpcpy.  */
2493 
2494 rtx
2495 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2496 		 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2497 		 void *constfundata, unsigned int align, bool memsetp, int endp)
2498 {
2499   machine_mode to_addr_mode = get_address_mode (to);
2500   struct store_by_pieces_d data;
2501 
2502   if (len == 0)
2503     {
2504       gcc_assert (endp != 2);
2505       return to;
2506     }
2507 
2508   gcc_assert (targetm.use_by_pieces_infrastructure_p
2509 		(len, align,
2510 		 memsetp
2511 		   ? SET_BY_PIECES
2512 		   : STORE_BY_PIECES,
2513 		 optimize_insn_for_speed_p ()));
2514 
2515   data.constfun = constfun;
2516   data.constfundata = constfundata;
2517   data.len = len;
2518   data.to = to;
2519   store_by_pieces_1 (&data, align);
2520   if (endp)
2521     {
2522       rtx to1;
2523 
2524       gcc_assert (!data.reverse);
2525       if (data.autinc_to)
2526 	{
2527 	  if (endp == 2)
2528 	    {
2529 	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2530 		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2531 	      else
2532 		data.to_addr = copy_to_mode_reg (to_addr_mode,
2533 						 plus_constant (to_addr_mode,
2534 								data.to_addr,
2535 								-1));
2536 	    }
2537 	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2538 					   data.offset);
2539 	}
2540       else
2541 	{
2542 	  if (endp == 2)
2543 	    --data.offset;
2544 	  to1 = adjust_address (data.to, QImode, data.offset);
2545 	}
2546       return to1;
2547     }
2548   else
2549     return data.to;
2550 }
2551 
2552 /* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
2553    rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2554 
2555 static void
2556 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2557 {
2558   struct store_by_pieces_d data;
2559 
2560   if (len == 0)
2561     return;
2562 
2563   data.constfun = clear_by_pieces_1;
2564   data.constfundata = NULL;
2565   data.len = len;
2566   data.to = to;
2567   store_by_pieces_1 (&data, align);
2568 }
2569 
2570 /* Callback routine for clear_by_pieces.
2571    Return const0_rtx unconditionally.  */
2572 
2573 static rtx
2574 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2575 		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2576 		   machine_mode mode ATTRIBUTE_UNUSED)
2577 {
2578   return const0_rtx;
2579 }
2580 
2581 /* Subroutine of clear_by_pieces and store_by_pieces.
2582    Generate several move instructions to store LEN bytes of block TO.  (A MEM
2583    rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2584 
2585 static void
2586 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2587 		   unsigned int align ATTRIBUTE_UNUSED)
2588 {
2589   machine_mode to_addr_mode = get_address_mode (data->to);
2590   rtx to_addr = XEXP (data->to, 0);
2591   unsigned int max_size = STORE_MAX_PIECES + 1;
2592   enum insn_code icode;
2593 
2594   data->offset = 0;
2595   data->to_addr = to_addr;
2596   data->autinc_to
2597     = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2598        || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2599 
2600   data->explicit_inc_to = 0;
2601   data->reverse
2602     = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2603   if (data->reverse)
2604     data->offset = data->len;
2605 
2606   /* If storing requires more than two move insns,
2607      copy addresses to registers (to make displacements shorter)
2608      and use post-increment if available.  */
2609   if (!data->autinc_to
2610       && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2611     {
2612       /* Determine the main mode we'll be using.
2613 	 MODE might not be used depending on the definitions of the
2614 	 USE_* macros below.  */
2615       machine_mode mode ATTRIBUTE_UNUSED
2616 	= widest_int_mode_for_size (max_size);
2617 
2618       if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2619 	{
2620 	  data->to_addr = copy_to_mode_reg (to_addr_mode,
2621 					    plus_constant (to_addr_mode,
2622 							   to_addr,
2623 							   data->len));
2624 	  data->autinc_to = 1;
2625 	  data->explicit_inc_to = -1;
2626 	}
2627 
2628       if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2629 	  && ! data->autinc_to)
2630 	{
2631 	  data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2632 	  data->autinc_to = 1;
2633 	  data->explicit_inc_to = 1;
2634 	}
2635 
2636       if ( !data->autinc_to && CONSTANT_P (to_addr))
2637 	data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2638     }
2639 
2640   align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2641 
2642   /* First store what we can in the largest integer mode, then go to
2643      successively smaller modes.  */
2644 
2645   while (max_size > 1 && data->len > 0)
2646     {
2647       machine_mode mode = widest_int_mode_for_size (max_size);
2648 
2649       if (mode == VOIDmode)
2650 	break;
2651 
2652       icode = optab_handler (mov_optab, mode);
2653       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2654 	store_by_pieces_2 (GEN_FCN (icode), mode, data);
2655 
2656       max_size = GET_MODE_SIZE (mode);
2657     }
2658 
2659   /* The code above should have handled everything.  */
2660   gcc_assert (!data->len);
2661 }
2662 
2663 /* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
2664    with move instructions for mode MODE.  GENFUN is the gen_... function
2665    to make a move insn for that mode.  DATA has all the other info.  */
2666 
2667 static void
2668 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2669 		   struct store_by_pieces_d *data)
2670 {
2671   unsigned int size = GET_MODE_SIZE (mode);
2672   rtx to1, cst;
2673 
2674   while (data->len >= size)
2675     {
2676       if (data->reverse)
2677 	data->offset -= size;
2678 
2679       if (data->autinc_to)
2680 	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2681 					 data->offset);
2682       else
2683 	to1 = adjust_address (data->to, mode, data->offset);
2684 
2685       if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2686 	emit_insn (gen_add2_insn (data->to_addr,
2687 				  gen_int_mode (-(HOST_WIDE_INT) size,
2688 						GET_MODE (data->to_addr))));
2689 
2690       cst = (*data->constfun) (data->constfundata, data->offset, mode);
2691       emit_insn ((*genfun) (to1, cst));
2692 
2693       if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2694 	emit_insn (gen_add2_insn (data->to_addr,
2695 				  gen_int_mode (size,
2696 						GET_MODE (data->to_addr))));
2697 
2698       if (! data->reverse)
2699 	data->offset += size;
2700 
2701       data->len -= size;
2702     }
2703 }
2704 
2705 /* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2706    its length in bytes.  */
2707 
2708 rtx
2709 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2710 		     unsigned int expected_align, HOST_WIDE_INT expected_size,
2711 		     unsigned HOST_WIDE_INT min_size,
2712 		     unsigned HOST_WIDE_INT max_size,
2713 		     unsigned HOST_WIDE_INT probable_max_size)
2714 {
2715   machine_mode mode = GET_MODE (object);
2716   unsigned int align;
2717 
2718   gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2719 
2720   /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2721      just move a zero.  Otherwise, do this a piece at a time.  */
2722   if (mode != BLKmode
2723       && CONST_INT_P (size)
2724       && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2725     {
2726       rtx zero = CONST0_RTX (mode);
2727       if (zero != NULL)
2728 	{
2729 	  emit_move_insn (object, zero);
2730 	  return NULL;
2731 	}
2732 
2733       if (COMPLEX_MODE_P (mode))
2734 	{
2735 	  zero = CONST0_RTX (GET_MODE_INNER (mode));
2736 	  if (zero != NULL)
2737 	    {
2738 	      write_complex_part (object, zero, 0);
2739 	      write_complex_part (object, zero, 1);
2740 	      return NULL;
2741 	    }
2742 	}
2743     }
2744 
2745   if (size == const0_rtx)
2746     return NULL;
2747 
2748   align = MEM_ALIGN (object);
2749 
2750   if (CONST_INT_P (size)
2751       && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2752 						 CLEAR_BY_PIECES,
2753 						 optimize_insn_for_speed_p ()))
2754     clear_by_pieces (object, INTVAL (size), align);
2755   else if (set_storage_via_setmem (object, size, const0_rtx, align,
2756 				   expected_align, expected_size,
2757 				   min_size, max_size, probable_max_size))
2758     ;
2759   else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2760     return set_storage_via_libcall (object, size, const0_rtx,
2761 				    method == BLOCK_OP_TAILCALL);
2762   else
2763     gcc_unreachable ();
2764 
2765   return NULL;
2766 }
2767 
2768 rtx
2769 clear_storage (rtx object, rtx size, enum block_op_methods method)
2770 {
2771   unsigned HOST_WIDE_INT max, min = 0;
2772   if (GET_CODE (size) == CONST_INT)
2773     min = max = UINTVAL (size);
2774   else
2775     max = GET_MODE_MASK (GET_MODE (size));
2776   return clear_storage_hints (object, size, method, 0, -1, min, max, max);
2777 }
2778 
2779 
2780 /* A subroutine of clear_storage.  Expand a call to memset.
2781    Return the return value of memset, 0 otherwise.  */
2782 
2783 rtx
2784 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2785 {
2786   tree call_expr, fn, object_tree, size_tree, val_tree;
2787   machine_mode size_mode;
2788   rtx retval;
2789 
2790   /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
2791      place those into new pseudos into a VAR_DECL and use them later.  */
2792 
2793   object = copy_addr_to_reg (XEXP (object, 0));
2794 
2795   size_mode = TYPE_MODE (sizetype);
2796   size = convert_to_mode (size_mode, size, 1);
2797   size = copy_to_mode_reg (size_mode, size);
2798 
2799   /* It is incorrect to use the libcall calling conventions to call
2800      memset in this context.  This could be a user call to memset and
2801      the user may wish to examine the return value from memset.  For
2802      targets where libcalls and normal calls have different conventions
2803      for returning pointers, we could end up generating incorrect code.  */
2804 
2805   object_tree = make_tree (ptr_type_node, object);
2806   if (!CONST_INT_P (val))
2807     val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2808   size_tree = make_tree (sizetype, size);
2809   val_tree = make_tree (integer_type_node, val);
2810 
2811   fn = clear_storage_libcall_fn (true);
2812   call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2813   CALL_EXPR_TAILCALL (call_expr) = tailcall;
2814 
2815   retval = expand_normal (call_expr);
2816 
2817   return retval;
2818 }
2819 
2820 /* A subroutine of set_storage_via_libcall.  Create the tree node
2821    for the function we use for block clears.  */
2822 
2823 tree block_clear_fn;
2824 
2825 void
2826 init_block_clear_fn (const char *asmspec)
2827 {
2828   if (!block_clear_fn)
2829     {
2830       tree fn, args;
2831 
2832       fn = get_identifier ("memset");
2833       args = build_function_type_list (ptr_type_node, ptr_type_node,
2834 				       integer_type_node, sizetype,
2835 				       NULL_TREE);
2836 
2837       fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2838       DECL_EXTERNAL (fn) = 1;
2839       TREE_PUBLIC (fn) = 1;
2840       DECL_ARTIFICIAL (fn) = 1;
2841       TREE_NOTHROW (fn) = 1;
2842       DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2843       DECL_VISIBILITY_SPECIFIED (fn) = 1;
2844 
2845       block_clear_fn = fn;
2846     }
2847 
2848   if (asmspec)
2849     set_user_assembler_name (block_clear_fn, asmspec);
2850 }
2851 
2852 static tree
2853 clear_storage_libcall_fn (int for_call)
2854 {
2855   static bool emitted_extern;
2856 
2857   if (!block_clear_fn)
2858     init_block_clear_fn (NULL);
2859 
2860   if (for_call && !emitted_extern)
2861     {
2862       emitted_extern = true;
2863       make_decl_rtl (block_clear_fn);
2864     }
2865 
2866   return block_clear_fn;
2867 }
2868 
2869 /* Expand a setmem pattern; return true if successful.  */
2870 
2871 bool
2872 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2873 			unsigned int expected_align, HOST_WIDE_INT expected_size,
2874 			unsigned HOST_WIDE_INT min_size,
2875 			unsigned HOST_WIDE_INT max_size,
2876 			unsigned HOST_WIDE_INT probable_max_size)
2877 {
2878   /* Try the most limited insn first, because there's no point
2879      including more than one in the machine description unless
2880      the more limited one has some advantage.  */
2881 
2882   machine_mode mode;
2883 
2884   if (expected_align < align)
2885     expected_align = align;
2886   if (expected_size != -1)
2887     {
2888       if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2889 	expected_size = max_size;
2890       if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2891 	expected_size = min_size;
2892     }
2893 
2894   for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2895        mode = GET_MODE_WIDER_MODE (mode))
2896     {
2897       enum insn_code code = direct_optab_handler (setmem_optab, mode);
2898 
2899       if (code != CODE_FOR_nothing
2900 	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2901 	     here because if SIZE is less than the mode mask, as it is
2902 	     returned by the macro, it will definitely be less than the
2903 	     actual mode mask.  Since SIZE is within the Pmode address
2904 	     space, we limit MODE to Pmode.  */
2905 	  && ((CONST_INT_P (size)
2906 	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
2907 		   <= (GET_MODE_MASK (mode) >> 1)))
2908 	      || max_size <= (GET_MODE_MASK (mode) >> 1)
2909 	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2910 	{
2911 	  struct expand_operand ops[9];
2912 	  unsigned int nops;
2913 
2914 	  nops = insn_data[(int) code].n_generator_args;
2915 	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2916 
2917 	  create_fixed_operand (&ops[0], object);
2918 	  /* The check above guarantees that this size conversion is valid.  */
2919 	  create_convert_operand_to (&ops[1], size, mode, true);
2920 	  create_convert_operand_from (&ops[2], val, byte_mode, true);
2921 	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2922 	  if (nops >= 6)
2923 	    {
2924 	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2925 	      create_integer_operand (&ops[5], expected_size);
2926 	    }
2927 	  if (nops >= 8)
2928 	    {
2929 	      create_integer_operand (&ops[6], min_size);
2930 	      /* If we can not represent the maximal size,
2931 		 make parameter NULL.  */
2932 	      if ((HOST_WIDE_INT) max_size != -1)
2933 	        create_integer_operand (&ops[7], max_size);
2934 	      else
2935 		create_fixed_operand (&ops[7], NULL);
2936 	    }
2937 	  if (nops == 9)
2938 	    {
2939 	      /* If we can not represent the maximal size,
2940 		 make parameter NULL.  */
2941 	      if ((HOST_WIDE_INT) probable_max_size != -1)
2942 	        create_integer_operand (&ops[8], probable_max_size);
2943 	      else
2944 		create_fixed_operand (&ops[8], NULL);
2945 	    }
2946 	  if (maybe_expand_insn (code, nops, ops))
2947 	    return true;
2948 	}
2949     }
2950 
2951   return false;
2952 }
2953 
2954 
2955 /* Write to one of the components of the complex value CPLX.  Write VAL to
2956    the real part if IMAG_P is false, and the imaginary part if its true.  */
2957 
2958 void
2959 write_complex_part (rtx cplx, rtx val, bool imag_p)
2960 {
2961   machine_mode cmode;
2962   machine_mode imode;
2963   unsigned ibitsize;
2964 
2965   if (GET_CODE (cplx) == CONCAT)
2966     {
2967       emit_move_insn (XEXP (cplx, imag_p), val);
2968       return;
2969     }
2970 
2971   cmode = GET_MODE (cplx);
2972   imode = GET_MODE_INNER (cmode);
2973   ibitsize = GET_MODE_BITSIZE (imode);
2974 
2975   /* For MEMs simplify_gen_subreg may generate an invalid new address
2976      because, e.g., the original address is considered mode-dependent
2977      by the target, which restricts simplify_subreg from invoking
2978      adjust_address_nv.  Instead of preparing fallback support for an
2979      invalid address, we call adjust_address_nv directly.  */
2980   if (MEM_P (cplx))
2981     {
2982       emit_move_insn (adjust_address_nv (cplx, imode,
2983 					 imag_p ? GET_MODE_SIZE (imode) : 0),
2984 		      val);
2985       return;
2986     }
2987 
2988   /* If the sub-object is at least word sized, then we know that subregging
2989      will work.  This special case is important, since store_bit_field
2990      wants to operate on integer modes, and there's rarely an OImode to
2991      correspond to TCmode.  */
2992   if (ibitsize >= BITS_PER_WORD
2993       /* For hard regs we have exact predicates.  Assume we can split
2994 	 the original object if it spans an even number of hard regs.
2995 	 This special case is important for SCmode on 64-bit platforms
2996 	 where the natural size of floating-point regs is 32-bit.  */
2997       || (REG_P (cplx)
2998 	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2999 	  && REG_NREGS (cplx) % 2 == 0))
3000     {
3001       rtx part = simplify_gen_subreg (imode, cplx, cmode,
3002 				      imag_p ? GET_MODE_SIZE (imode) : 0);
3003       if (part)
3004         {
3005 	  emit_move_insn (part, val);
3006 	  return;
3007 	}
3008       else
3009 	/* simplify_gen_subreg may fail for sub-word MEMs.  */
3010 	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3011     }
3012 
3013   store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val,
3014 		   false);
3015 }
3016 
3017 /* Extract one of the components of the complex value CPLX.  Extract the
3018    real part if IMAG_P is false, and the imaginary part if it's true.  */
3019 
3020 rtx
3021 read_complex_part (rtx cplx, bool imag_p)
3022 {
3023   machine_mode cmode, imode;
3024   unsigned ibitsize;
3025 
3026   if (GET_CODE (cplx) == CONCAT)
3027     return XEXP (cplx, imag_p);
3028 
3029   cmode = GET_MODE (cplx);
3030   imode = GET_MODE_INNER (cmode);
3031   ibitsize = GET_MODE_BITSIZE (imode);
3032 
3033   /* Special case reads from complex constants that got spilled to memory.  */
3034   if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3035     {
3036       tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3037       if (decl && TREE_CODE (decl) == COMPLEX_CST)
3038 	{
3039 	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3040 	  if (CONSTANT_CLASS_P (part))
3041 	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3042 	}
3043     }
3044 
3045   /* For MEMs simplify_gen_subreg may generate an invalid new address
3046      because, e.g., the original address is considered mode-dependent
3047      by the target, which restricts simplify_subreg from invoking
3048      adjust_address_nv.  Instead of preparing fallback support for an
3049      invalid address, we call adjust_address_nv directly.  */
3050   if (MEM_P (cplx))
3051     return adjust_address_nv (cplx, imode,
3052 			      imag_p ? GET_MODE_SIZE (imode) : 0);
3053 
3054   /* If the sub-object is at least word sized, then we know that subregging
3055      will work.  This special case is important, since extract_bit_field
3056      wants to operate on integer modes, and there's rarely an OImode to
3057      correspond to TCmode.  */
3058   if (ibitsize >= BITS_PER_WORD
3059       /* For hard regs we have exact predicates.  Assume we can split
3060 	 the original object if it spans an even number of hard regs.
3061 	 This special case is important for SCmode on 64-bit platforms
3062 	 where the natural size of floating-point regs is 32-bit.  */
3063       || (REG_P (cplx)
3064 	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3065 	  && REG_NREGS (cplx) % 2 == 0))
3066     {
3067       rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3068 				     imag_p ? GET_MODE_SIZE (imode) : 0);
3069       if (ret)
3070         return ret;
3071       else
3072 	/* simplify_gen_subreg may fail for sub-word MEMs.  */
3073 	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3074     }
3075 
3076   return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3077 			    true, NULL_RTX, imode, imode, false);
3078 }
3079 
3080 /* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
3081    NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
3082    represented in NEW_MODE.  If FORCE is true, this will never happen, as
3083    we'll force-create a SUBREG if needed.  */
3084 
3085 static rtx
3086 emit_move_change_mode (machine_mode new_mode,
3087 		       machine_mode old_mode, rtx x, bool force)
3088 {
3089   rtx ret;
3090 
3091   if (push_operand (x, GET_MODE (x)))
3092     {
3093       ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3094       MEM_COPY_ATTRIBUTES (ret, x);
3095     }
3096   else if (MEM_P (x))
3097     {
3098       /* We don't have to worry about changing the address since the
3099 	 size in bytes is supposed to be the same.  */
3100       if (reload_in_progress)
3101 	{
3102 	  /* Copy the MEM to change the mode and move any
3103 	     substitutions from the old MEM to the new one.  */
3104 	  ret = adjust_address_nv (x, new_mode, 0);
3105 	  copy_replacements (x, ret);
3106 	}
3107       else
3108 	ret = adjust_address (x, new_mode, 0);
3109     }
3110   else
3111     {
3112       /* Note that we do want simplify_subreg's behavior of validating
3113 	 that the new mode is ok for a hard register.  If we were to use
3114 	 simplify_gen_subreg, we would create the subreg, but would
3115 	 probably run into the target not being able to implement it.  */
3116       /* Except, of course, when FORCE is true, when this is exactly what
3117 	 we want.  Which is needed for CCmodes on some targets.  */
3118       if (force)
3119 	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3120       else
3121 	ret = simplify_subreg (new_mode, x, old_mode, 0);
3122     }
3123 
3124   return ret;
3125 }
3126 
3127 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
3128    an integer mode of the same size as MODE.  Returns the instruction
3129    emitted, or NULL if such a move could not be generated.  */
3130 
3131 static rtx_insn *
3132 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3133 {
3134   machine_mode imode;
3135   enum insn_code code;
3136 
3137   /* There must exist a mode of the exact size we require.  */
3138   imode = int_mode_for_mode (mode);
3139   if (imode == BLKmode)
3140     return NULL;
3141 
3142   /* The target must support moves in this mode.  */
3143   code = optab_handler (mov_optab, imode);
3144   if (code == CODE_FOR_nothing)
3145     return NULL;
3146 
3147   x = emit_move_change_mode (imode, mode, x, force);
3148   if (x == NULL_RTX)
3149     return NULL;
3150   y = emit_move_change_mode (imode, mode, y, force);
3151   if (y == NULL_RTX)
3152     return NULL;
3153   return emit_insn (GEN_FCN (code) (x, y));
3154 }
3155 
3156 /* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
3157    Return an equivalent MEM that does not use an auto-increment.  */
3158 
3159 rtx
3160 emit_move_resolve_push (machine_mode mode, rtx x)
3161 {
3162   enum rtx_code code = GET_CODE (XEXP (x, 0));
3163   HOST_WIDE_INT adjust;
3164   rtx temp;
3165 
3166   adjust = GET_MODE_SIZE (mode);
3167 #ifdef PUSH_ROUNDING
3168   adjust = PUSH_ROUNDING (adjust);
3169 #endif
3170   if (code == PRE_DEC || code == POST_DEC)
3171     adjust = -adjust;
3172   else if (code == PRE_MODIFY || code == POST_MODIFY)
3173     {
3174       rtx expr = XEXP (XEXP (x, 0), 1);
3175       HOST_WIDE_INT val;
3176 
3177       gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3178       gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3179       val = INTVAL (XEXP (expr, 1));
3180       if (GET_CODE (expr) == MINUS)
3181 	val = -val;
3182       gcc_assert (adjust == val || adjust == -val);
3183       adjust = val;
3184     }
3185 
3186   /* Do not use anti_adjust_stack, since we don't want to update
3187      stack_pointer_delta.  */
3188   temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3189 			      gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3190 			      0, OPTAB_LIB_WIDEN);
3191   if (temp != stack_pointer_rtx)
3192     emit_move_insn (stack_pointer_rtx, temp);
3193 
3194   switch (code)
3195     {
3196     case PRE_INC:
3197     case PRE_DEC:
3198     case PRE_MODIFY:
3199       temp = stack_pointer_rtx;
3200       break;
3201     case POST_INC:
3202     case POST_DEC:
3203     case POST_MODIFY:
3204       temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3205       break;
3206     default:
3207       gcc_unreachable ();
3208     }
3209 
3210   return replace_equiv_address (x, temp);
3211 }
3212 
3213 /* A subroutine of emit_move_complex.  Generate a move from Y into X.
3214    X is known to satisfy push_operand, and MODE is known to be complex.
3215    Returns the last instruction emitted.  */
3216 
3217 rtx_insn *
3218 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3219 {
3220   machine_mode submode = GET_MODE_INNER (mode);
3221   bool imag_first;
3222 
3223 #ifdef PUSH_ROUNDING
3224   unsigned int submodesize = GET_MODE_SIZE (submode);
3225 
3226   /* In case we output to the stack, but the size is smaller than the
3227      machine can push exactly, we need to use move instructions.  */
3228   if (PUSH_ROUNDING (submodesize) != submodesize)
3229     {
3230       x = emit_move_resolve_push (mode, x);
3231       return emit_move_insn (x, y);
3232     }
3233 #endif
3234 
3235   /* Note that the real part always precedes the imag part in memory
3236      regardless of machine's endianness.  */
3237   switch (GET_CODE (XEXP (x, 0)))
3238     {
3239     case PRE_DEC:
3240     case POST_DEC:
3241       imag_first = true;
3242       break;
3243     case PRE_INC:
3244     case POST_INC:
3245       imag_first = false;
3246       break;
3247     default:
3248       gcc_unreachable ();
3249     }
3250 
3251   emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3252 		  read_complex_part (y, imag_first));
3253   return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3254 			 read_complex_part (y, !imag_first));
3255 }
3256 
3257 /* A subroutine of emit_move_complex.  Perform the move from Y to X
3258    via two moves of the parts.  Returns the last instruction emitted.  */
3259 
3260 rtx_insn *
3261 emit_move_complex_parts (rtx x, rtx y)
3262 {
3263   /* Show the output dies here.  This is necessary for SUBREGs
3264      of pseudos since we cannot track their lifetimes correctly;
3265      hard regs shouldn't appear here except as return values.  */
3266   if (!reload_completed && !reload_in_progress
3267       && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3268     emit_clobber (x);
3269 
3270   write_complex_part (x, read_complex_part (y, false), false);
3271   write_complex_part (x, read_complex_part (y, true), true);
3272 
3273   return get_last_insn ();
3274 }
3275 
3276 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3277    MODE is known to be complex.  Returns the last instruction emitted.  */
3278 
3279 static rtx_insn *
3280 emit_move_complex (machine_mode mode, rtx x, rtx y)
3281 {
3282   bool try_int;
3283 
3284   /* Need to take special care for pushes, to maintain proper ordering
3285      of the data, and possibly extra padding.  */
3286   if (push_operand (x, mode))
3287     return emit_move_complex_push (mode, x, y);
3288 
3289   /* See if we can coerce the target into moving both values at once, except
3290      for floating point where we favor moving as parts if this is easy.  */
3291   if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3292       && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3293       && !(REG_P (x)
3294 	   && HARD_REGISTER_P (x)
3295 	   && REG_NREGS (x) == 1)
3296       && !(REG_P (y)
3297 	   && HARD_REGISTER_P (y)
3298 	   && REG_NREGS (y) == 1))
3299     try_int = false;
3300   /* Not possible if the values are inherently not adjacent.  */
3301   else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3302     try_int = false;
3303   /* Is possible if both are registers (or subregs of registers).  */
3304   else if (register_operand (x, mode) && register_operand (y, mode))
3305     try_int = true;
3306   /* If one of the operands is a memory, and alignment constraints
3307      are friendly enough, we may be able to do combined memory operations.
3308      We do not attempt this if Y is a constant because that combination is
3309      usually better with the by-parts thing below.  */
3310   else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3311 	   && (!STRICT_ALIGNMENT
3312 	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3313     try_int = true;
3314   else
3315     try_int = false;
3316 
3317   if (try_int)
3318     {
3319       rtx_insn *ret;
3320 
3321       /* For memory to memory moves, optimal behavior can be had with the
3322 	 existing block move logic.  */
3323       if (MEM_P (x) && MEM_P (y))
3324 	{
3325 	  emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3326 			   BLOCK_OP_NO_LIBCALL);
3327 	  return get_last_insn ();
3328 	}
3329 
3330       ret = emit_move_via_integer (mode, x, y, true);
3331       if (ret)
3332 	return ret;
3333     }
3334 
3335   return emit_move_complex_parts (x, y);
3336 }
3337 
3338 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3339    MODE is known to be MODE_CC.  Returns the last instruction emitted.  */
3340 
3341 static rtx_insn *
3342 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3343 {
3344   rtx_insn *ret;
3345 
3346   /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
3347   if (mode != CCmode)
3348     {
3349       enum insn_code code = optab_handler (mov_optab, CCmode);
3350       if (code != CODE_FOR_nothing)
3351 	{
3352 	  x = emit_move_change_mode (CCmode, mode, x, true);
3353 	  y = emit_move_change_mode (CCmode, mode, y, true);
3354 	  return emit_insn (GEN_FCN (code) (x, y));
3355 	}
3356     }
3357 
3358   /* Otherwise, find the MODE_INT mode of the same width.  */
3359   ret = emit_move_via_integer (mode, x, y, false);
3360   gcc_assert (ret != NULL);
3361   return ret;
3362 }
3363 
3364 /* Return true if word I of OP lies entirely in the
3365    undefined bits of a paradoxical subreg.  */
3366 
3367 static bool
3368 undefined_operand_subword_p (const_rtx op, int i)
3369 {
3370   machine_mode innermode, innermostmode;
3371   int offset;
3372   if (GET_CODE (op) != SUBREG)
3373     return false;
3374   innermode = GET_MODE (op);
3375   innermostmode = GET_MODE (SUBREG_REG (op));
3376   offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3377   /* The SUBREG_BYTE represents offset, as if the value were stored in
3378      memory, except for a paradoxical subreg where we define
3379      SUBREG_BYTE to be 0; undo this exception as in
3380      simplify_subreg.  */
3381   if (SUBREG_BYTE (op) == 0
3382       && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3383     {
3384       int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3385       if (WORDS_BIG_ENDIAN)
3386 	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3387       if (BYTES_BIG_ENDIAN)
3388 	offset += difference % UNITS_PER_WORD;
3389     }
3390   if (offset >= GET_MODE_SIZE (innermostmode)
3391       || offset <= -GET_MODE_SIZE (word_mode))
3392     return true;
3393   return false;
3394 }
3395 
3396 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3397    MODE is any multi-word or full-word mode that lacks a move_insn
3398    pattern.  Note that you will get better code if you define such
3399    patterns, even if they must turn into multiple assembler instructions.  */
3400 
3401 static rtx_insn *
3402 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3403 {
3404   rtx_insn *last_insn = 0;
3405   rtx_insn *seq;
3406   rtx inner;
3407   bool need_clobber;
3408   int i;
3409 
3410   gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3411 
3412   /* If X is a push on the stack, do the push now and replace
3413      X with a reference to the stack pointer.  */
3414   if (push_operand (x, mode))
3415     x = emit_move_resolve_push (mode, x);
3416 
3417   /* If we are in reload, see if either operand is a MEM whose address
3418      is scheduled for replacement.  */
3419   if (reload_in_progress && MEM_P (x)
3420       && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3421     x = replace_equiv_address_nv (x, inner);
3422   if (reload_in_progress && MEM_P (y)
3423       && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3424     y = replace_equiv_address_nv (y, inner);
3425 
3426   start_sequence ();
3427 
3428   need_clobber = false;
3429   for (i = 0;
3430        i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3431        i++)
3432     {
3433       rtx xpart = operand_subword (x, i, 1, mode);
3434       rtx ypart;
3435 
3436       /* Do not generate code for a move if it would come entirely
3437 	 from the undefined bits of a paradoxical subreg.  */
3438       if (undefined_operand_subword_p (y, i))
3439 	continue;
3440 
3441       ypart = operand_subword (y, i, 1, mode);
3442 
3443       /* If we can't get a part of Y, put Y into memory if it is a
3444 	 constant.  Otherwise, force it into a register.  Then we must
3445 	 be able to get a part of Y.  */
3446       if (ypart == 0 && CONSTANT_P (y))
3447 	{
3448 	  y = use_anchored_address (force_const_mem (mode, y));
3449 	  ypart = operand_subword (y, i, 1, mode);
3450 	}
3451       else if (ypart == 0)
3452 	ypart = operand_subword_force (y, i, mode);
3453 
3454       gcc_assert (xpart && ypart);
3455 
3456       need_clobber |= (GET_CODE (xpart) == SUBREG);
3457 
3458       last_insn = emit_move_insn (xpart, ypart);
3459     }
3460 
3461   seq = get_insns ();
3462   end_sequence ();
3463 
3464   /* Show the output dies here.  This is necessary for SUBREGs
3465      of pseudos since we cannot track their lifetimes correctly;
3466      hard regs shouldn't appear here except as return values.
3467      We never want to emit such a clobber after reload.  */
3468   if (x != y
3469       && ! (reload_in_progress || reload_completed)
3470       && need_clobber != 0)
3471     emit_clobber (x);
3472 
3473   emit_insn (seq);
3474 
3475   return last_insn;
3476 }
3477 
3478 /* Low level part of emit_move_insn.
3479    Called just like emit_move_insn, but assumes X and Y
3480    are basically valid.  */
3481 
3482 rtx_insn *
3483 emit_move_insn_1 (rtx x, rtx y)
3484 {
3485   machine_mode mode = GET_MODE (x);
3486   enum insn_code code;
3487 
3488   gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3489 
3490   code = optab_handler (mov_optab, mode);
3491   if (code != CODE_FOR_nothing)
3492     return emit_insn (GEN_FCN (code) (x, y));
3493 
3494   /* Expand complex moves by moving real part and imag part.  */
3495   if (COMPLEX_MODE_P (mode))
3496     return emit_move_complex (mode, x, y);
3497 
3498   if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3499       || ALL_FIXED_POINT_MODE_P (mode))
3500     {
3501       rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3502 
3503       /* If we can't find an integer mode, use multi words.  */
3504       if (result)
3505 	return result;
3506       else
3507 	return emit_move_multi_word (mode, x, y);
3508     }
3509 
3510   if (GET_MODE_CLASS (mode) == MODE_CC)
3511     return emit_move_ccmode (mode, x, y);
3512 
3513   /* Try using a move pattern for the corresponding integer mode.  This is
3514      only safe when simplify_subreg can convert MODE constants into integer
3515      constants.  At present, it can only do this reliably if the value
3516      fits within a HOST_WIDE_INT.  */
3517   if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3518     {
3519       rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3520 
3521       if (ret)
3522 	{
3523 	  if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3524 	    return ret;
3525 	}
3526     }
3527 
3528   return emit_move_multi_word (mode, x, y);
3529 }
3530 
3531 /* Generate code to copy Y into X.
3532    Both Y and X must have the same mode, except that
3533    Y can be a constant with VOIDmode.
3534    This mode cannot be BLKmode; use emit_block_move for that.
3535 
3536    Return the last instruction emitted.  */
3537 
3538 rtx_insn *
3539 emit_move_insn (rtx x, rtx y)
3540 {
3541   machine_mode mode = GET_MODE (x);
3542   rtx y_cst = NULL_RTX;
3543   rtx_insn *last_insn;
3544   rtx set;
3545 
3546   gcc_assert (mode != BLKmode
3547 	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3548 
3549   if (CONSTANT_P (y))
3550     {
3551       if (optimize
3552 	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3553 	  && (last_insn = compress_float_constant (x, y)))
3554 	return last_insn;
3555 
3556       y_cst = y;
3557 
3558       if (!targetm.legitimate_constant_p (mode, y))
3559 	{
3560 	  y = force_const_mem (mode, y);
3561 
3562 	  /* If the target's cannot_force_const_mem prevented the spill,
3563 	     assume that the target's move expanders will also take care
3564 	     of the non-legitimate constant.  */
3565 	  if (!y)
3566 	    y = y_cst;
3567 	  else
3568 	    y = use_anchored_address (y);
3569 	}
3570     }
3571 
3572   /* If X or Y are memory references, verify that their addresses are valid
3573      for the machine.  */
3574   if (MEM_P (x)
3575       && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3576 					 MEM_ADDR_SPACE (x))
3577 	  && ! push_operand (x, GET_MODE (x))))
3578     x = validize_mem (x);
3579 
3580   if (MEM_P (y)
3581       && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3582 					MEM_ADDR_SPACE (y)))
3583     y = validize_mem (y);
3584 
3585   gcc_assert (mode != BLKmode);
3586 
3587   last_insn = emit_move_insn_1 (x, y);
3588 
3589   if (y_cst && REG_P (x)
3590       && (set = single_set (last_insn)) != NULL_RTX
3591       && SET_DEST (set) == x
3592       && ! rtx_equal_p (y_cst, SET_SRC (set)))
3593     set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3594 
3595   return last_insn;
3596 }
3597 
3598 /* Generate the body of an instruction to copy Y into X.
3599    It may be a list of insns, if one insn isn't enough.  */
3600 
3601 rtx_insn *
3602 gen_move_insn (rtx x, rtx y)
3603 {
3604   rtx_insn *seq;
3605 
3606   start_sequence ();
3607   emit_move_insn_1 (x, y);
3608   seq = get_insns ();
3609   end_sequence ();
3610   return seq;
3611 }
3612 
3613 /* If Y is representable exactly in a narrower mode, and the target can
3614    perform the extension directly from constant or memory, then emit the
3615    move as an extension.  */
3616 
3617 static rtx_insn *
3618 compress_float_constant (rtx x, rtx y)
3619 {
3620   machine_mode dstmode = GET_MODE (x);
3621   machine_mode orig_srcmode = GET_MODE (y);
3622   machine_mode srcmode;
3623   const REAL_VALUE_TYPE *r;
3624   int oldcost, newcost;
3625   bool speed = optimize_insn_for_speed_p ();
3626 
3627   r = CONST_DOUBLE_REAL_VALUE (y);
3628 
3629   if (targetm.legitimate_constant_p (dstmode, y))
3630     oldcost = set_src_cost (y, orig_srcmode, speed);
3631   else
3632     oldcost = set_src_cost (force_const_mem (dstmode, y), dstmode, speed);
3633 
3634   for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3635        srcmode != orig_srcmode;
3636        srcmode = GET_MODE_WIDER_MODE (srcmode))
3637     {
3638       enum insn_code ic;
3639       rtx trunc_y;
3640       rtx_insn *last_insn;
3641 
3642       /* Skip if the target can't extend this way.  */
3643       ic = can_extend_p (dstmode, srcmode, 0);
3644       if (ic == CODE_FOR_nothing)
3645 	continue;
3646 
3647       /* Skip if the narrowed value isn't exact.  */
3648       if (! exact_real_truncate (srcmode, r))
3649 	continue;
3650 
3651       trunc_y = const_double_from_real_value (*r, srcmode);
3652 
3653       if (targetm.legitimate_constant_p (srcmode, trunc_y))
3654 	{
3655 	  /* Skip if the target needs extra instructions to perform
3656 	     the extension.  */
3657 	  if (!insn_operand_matches (ic, 1, trunc_y))
3658 	    continue;
3659 	  /* This is valid, but may not be cheaper than the original. */
3660 	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3661 				  dstmode, speed);
3662 	  if (oldcost < newcost)
3663 	    continue;
3664 	}
3665       else if (float_extend_from_mem[dstmode][srcmode])
3666 	{
3667 	  trunc_y = force_const_mem (srcmode, trunc_y);
3668 	  /* This is valid, but may not be cheaper than the original. */
3669 	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3670 				  dstmode, speed);
3671 	  if (oldcost < newcost)
3672 	    continue;
3673 	  trunc_y = validize_mem (trunc_y);
3674 	}
3675       else
3676 	continue;
3677 
3678       /* For CSE's benefit, force the compressed constant pool entry
3679 	 into a new pseudo.  This constant may be used in different modes,
3680 	 and if not, combine will put things back together for us.  */
3681       trunc_y = force_reg (srcmode, trunc_y);
3682 
3683       /* If x is a hard register, perform the extension into a pseudo,
3684 	 so that e.g. stack realignment code is aware of it.  */
3685       rtx target = x;
3686       if (REG_P (x) && HARD_REGISTER_P (x))
3687 	target = gen_reg_rtx (dstmode);
3688 
3689       emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3690       last_insn = get_last_insn ();
3691 
3692       if (REG_P (target))
3693 	set_unique_reg_note (last_insn, REG_EQUAL, y);
3694 
3695       if (target != x)
3696 	return emit_move_insn (x, target);
3697       return last_insn;
3698     }
3699 
3700   return NULL;
3701 }
3702 
3703 /* Pushing data onto the stack.  */
3704 
3705 /* Push a block of length SIZE (perhaps variable)
3706    and return an rtx to address the beginning of the block.
3707    The value may be virtual_outgoing_args_rtx.
3708 
3709    EXTRA is the number of bytes of padding to push in addition to SIZE.
3710    BELOW nonzero means this padding comes at low addresses;
3711    otherwise, the padding comes at high addresses.  */
3712 
3713 rtx
3714 push_block (rtx size, int extra, int below)
3715 {
3716   rtx temp;
3717 
3718   size = convert_modes (Pmode, ptr_mode, size, 1);
3719   if (CONSTANT_P (size))
3720     anti_adjust_stack (plus_constant (Pmode, size, extra));
3721   else if (REG_P (size) && extra == 0)
3722     anti_adjust_stack (size);
3723   else
3724     {
3725       temp = copy_to_mode_reg (Pmode, size);
3726       if (extra != 0)
3727 	temp = expand_binop (Pmode, add_optab, temp,
3728 			     gen_int_mode (extra, Pmode),
3729 			     temp, 0, OPTAB_LIB_WIDEN);
3730       anti_adjust_stack (temp);
3731     }
3732 
3733   if (STACK_GROWS_DOWNWARD)
3734     {
3735       temp = virtual_outgoing_args_rtx;
3736       if (extra != 0 && below)
3737 	temp = plus_constant (Pmode, temp, extra);
3738     }
3739   else
3740     {
3741       if (CONST_INT_P (size))
3742 	temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3743 			      -INTVAL (size) - (below ? 0 : extra));
3744       else if (extra != 0 && !below)
3745 	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3746 			     negate_rtx (Pmode, plus_constant (Pmode, size,
3747 							       extra)));
3748       else
3749 	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3750 			     negate_rtx (Pmode, size));
3751     }
3752 
3753   return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3754 }
3755 
3756 /* A utility routine that returns the base of an auto-inc memory, or NULL.  */
3757 
3758 static rtx
3759 mem_autoinc_base (rtx mem)
3760 {
3761   if (MEM_P (mem))
3762     {
3763       rtx addr = XEXP (mem, 0);
3764       if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3765 	return XEXP (addr, 0);
3766     }
3767   return NULL;
3768 }
3769 
3770 /* A utility routine used here, in reload, and in try_split.  The insns
3771    after PREV up to and including LAST are known to adjust the stack,
3772    with a final value of END_ARGS_SIZE.  Iterate backward from LAST
3773    placing notes as appropriate.  PREV may be NULL, indicating the
3774    entire insn sequence prior to LAST should be scanned.
3775 
3776    The set of allowed stack pointer modifications is small:
3777      (1) One or more auto-inc style memory references (aka pushes),
3778      (2) One or more addition/subtraction with the SP as destination,
3779      (3) A single move insn with the SP as destination,
3780      (4) A call_pop insn,
3781      (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3782 
3783    Insns in the sequence that do not modify the SP are ignored,
3784    except for noreturn calls.
3785 
3786    The return value is the amount of adjustment that can be trivially
3787    verified, via immediate operand or auto-inc.  If the adjustment
3788    cannot be trivially extracted, the return value is INT_MIN.  */
3789 
3790 HOST_WIDE_INT
3791 find_args_size_adjust (rtx_insn *insn)
3792 {
3793   rtx dest, set, pat;
3794   int i;
3795 
3796   pat = PATTERN (insn);
3797   set = NULL;
3798 
3799   /* Look for a call_pop pattern.  */
3800   if (CALL_P (insn))
3801     {
3802       /* We have to allow non-call_pop patterns for the case
3803 	 of emit_single_push_insn of a TLS address.  */
3804       if (GET_CODE (pat) != PARALLEL)
3805 	return 0;
3806 
3807       /* All call_pop have a stack pointer adjust in the parallel.
3808 	 The call itself is always first, and the stack adjust is
3809 	 usually last, so search from the end.  */
3810       for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3811 	{
3812 	  set = XVECEXP (pat, 0, i);
3813 	  if (GET_CODE (set) != SET)
3814 	    continue;
3815 	  dest = SET_DEST (set);
3816 	  if (dest == stack_pointer_rtx)
3817 	    break;
3818 	}
3819       /* We'd better have found the stack pointer adjust.  */
3820       if (i == 0)
3821 	return 0;
3822       /* Fall through to process the extracted SET and DEST
3823 	 as if it was a standalone insn.  */
3824     }
3825   else if (GET_CODE (pat) == SET)
3826     set = pat;
3827   else if ((set = single_set (insn)) != NULL)
3828     ;
3829   else if (GET_CODE (pat) == PARALLEL)
3830     {
3831       /* ??? Some older ports use a parallel with a stack adjust
3832 	 and a store for a PUSH_ROUNDING pattern, rather than a
3833 	 PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
3834       /* ??? See h8300 and m68k, pushqi1.  */
3835       for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3836 	{
3837 	  set = XVECEXP (pat, 0, i);
3838 	  if (GET_CODE (set) != SET)
3839 	    continue;
3840 	  dest = SET_DEST (set);
3841 	  if (dest == stack_pointer_rtx)
3842 	    break;
3843 
3844 	  /* We do not expect an auto-inc of the sp in the parallel.  */
3845 	  gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3846 	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3847 			       != stack_pointer_rtx);
3848 	}
3849       if (i < 0)
3850 	return 0;
3851     }
3852   else
3853     return 0;
3854 
3855   dest = SET_DEST (set);
3856 
3857   /* Look for direct modifications of the stack pointer.  */
3858   if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3859     {
3860       /* Look for a trivial adjustment, otherwise assume nothing.  */
3861       /* Note that the SPU restore_stack_block pattern refers to
3862 	 the stack pointer in V4SImode.  Consider that non-trivial.  */
3863       if (SCALAR_INT_MODE_P (GET_MODE (dest))
3864 	  && GET_CODE (SET_SRC (set)) == PLUS
3865 	  && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3866 	  && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3867 	return INTVAL (XEXP (SET_SRC (set), 1));
3868       /* ??? Reload can generate no-op moves, which will be cleaned
3869 	 up later.  Recognize it and continue searching.  */
3870       else if (rtx_equal_p (dest, SET_SRC (set)))
3871 	return 0;
3872       else
3873 	return HOST_WIDE_INT_MIN;
3874     }
3875   else
3876     {
3877       rtx mem, addr;
3878 
3879       /* Otherwise only think about autoinc patterns.  */
3880       if (mem_autoinc_base (dest) == stack_pointer_rtx)
3881 	{
3882 	  mem = dest;
3883 	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3884 			       != stack_pointer_rtx);
3885 	}
3886       else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3887 	mem = SET_SRC (set);
3888       else
3889 	return 0;
3890 
3891       addr = XEXP (mem, 0);
3892       switch (GET_CODE (addr))
3893 	{
3894 	case PRE_INC:
3895 	case POST_INC:
3896 	  return GET_MODE_SIZE (GET_MODE (mem));
3897 	case PRE_DEC:
3898 	case POST_DEC:
3899 	  return -GET_MODE_SIZE (GET_MODE (mem));
3900 	case PRE_MODIFY:
3901 	case POST_MODIFY:
3902 	  addr = XEXP (addr, 1);
3903 	  gcc_assert (GET_CODE (addr) == PLUS);
3904 	  gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3905 	  gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3906 	  return INTVAL (XEXP (addr, 1));
3907 	default:
3908 	  gcc_unreachable ();
3909 	}
3910     }
3911 }
3912 
3913 int
3914 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
3915 {
3916   int args_size = end_args_size;
3917   bool saw_unknown = false;
3918   rtx_insn *insn;
3919 
3920   for (insn = last; insn != prev; insn = PREV_INSN (insn))
3921     {
3922       HOST_WIDE_INT this_delta;
3923 
3924       if (!NONDEBUG_INSN_P (insn))
3925 	continue;
3926 
3927       this_delta = find_args_size_adjust (insn);
3928       if (this_delta == 0)
3929 	{
3930 	  if (!CALL_P (insn)
3931 	      || ACCUMULATE_OUTGOING_ARGS
3932 	      || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3933 	    continue;
3934 	}
3935 
3936       gcc_assert (!saw_unknown);
3937       if (this_delta == HOST_WIDE_INT_MIN)
3938 	saw_unknown = true;
3939 
3940       add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3941       if (STACK_GROWS_DOWNWARD)
3942 	this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3943 
3944       args_size -= this_delta;
3945     }
3946 
3947   return saw_unknown ? INT_MIN : args_size;
3948 }
3949 
3950 #ifdef PUSH_ROUNDING
3951 /* Emit single push insn.  */
3952 
3953 static void
3954 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
3955 {
3956   rtx dest_addr;
3957   unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3958   rtx dest;
3959   enum insn_code icode;
3960 
3961   stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3962   /* If there is push pattern, use it.  Otherwise try old way of throwing
3963      MEM representing push operation to move expander.  */
3964   icode = optab_handler (push_optab, mode);
3965   if (icode != CODE_FOR_nothing)
3966     {
3967       struct expand_operand ops[1];
3968 
3969       create_input_operand (&ops[0], x, mode);
3970       if (maybe_expand_insn (icode, 1, ops))
3971 	return;
3972     }
3973   if (GET_MODE_SIZE (mode) == rounded_size)
3974     dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3975   /* If we are to pad downward, adjust the stack pointer first and
3976      then store X into the stack location using an offset.  This is
3977      because emit_move_insn does not know how to pad; it does not have
3978      access to type.  */
3979   else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3980     {
3981       unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3982       HOST_WIDE_INT offset;
3983 
3984       emit_move_insn (stack_pointer_rtx,
3985 		      expand_binop (Pmode,
3986 				    STACK_GROWS_DOWNWARD ? sub_optab
3987 				    : add_optab,
3988 				    stack_pointer_rtx,
3989 				    gen_int_mode (rounded_size, Pmode),
3990 				    NULL_RTX, 0, OPTAB_LIB_WIDEN));
3991 
3992       offset = (HOST_WIDE_INT) padding_size;
3993       if (STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_DEC)
3994 	/* We have already decremented the stack pointer, so get the
3995 	   previous value.  */
3996 	offset += (HOST_WIDE_INT) rounded_size;
3997 
3998       if (!STACK_GROWS_DOWNWARD && STACK_PUSH_CODE == POST_INC)
3999 	/* We have already incremented the stack pointer, so get the
4000 	   previous value.  */
4001 	offset -= (HOST_WIDE_INT) rounded_size;
4002 
4003       dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4004 				gen_int_mode (offset, Pmode));
4005     }
4006   else
4007     {
4008       if (STACK_GROWS_DOWNWARD)
4009 	/* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
4010 	dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4011 				  gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4012 						Pmode));
4013       else
4014 	/* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
4015 	dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4016 				  gen_int_mode (rounded_size, Pmode));
4017 
4018       dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4019     }
4020 
4021   dest = gen_rtx_MEM (mode, dest_addr);
4022 
4023   if (type != 0)
4024     {
4025       set_mem_attributes (dest, type, 1);
4026 
4027       if (cfun->tail_call_marked)
4028 	/* Function incoming arguments may overlap with sibling call
4029 	   outgoing arguments and we cannot allow reordering of reads
4030 	   from function arguments with stores to outgoing arguments
4031 	   of sibling calls.  */
4032 	set_mem_alias_set (dest, 0);
4033     }
4034   emit_move_insn (dest, x);
4035 }
4036 
4037 /* Emit and annotate a single push insn.  */
4038 
4039 static void
4040 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4041 {
4042   int delta, old_delta = stack_pointer_delta;
4043   rtx_insn *prev = get_last_insn ();
4044   rtx_insn *last;
4045 
4046   emit_single_push_insn_1 (mode, x, type);
4047 
4048   last = get_last_insn ();
4049 
4050   /* Notice the common case where we emitted exactly one insn.  */
4051   if (PREV_INSN (last) == prev)
4052     {
4053       add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4054       return;
4055     }
4056 
4057   delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4058   gcc_assert (delta == INT_MIN || delta == old_delta);
4059 }
4060 #endif
4061 
4062 /* If reading SIZE bytes from X will end up reading from
4063    Y return the number of bytes that overlap.  Return -1
4064    if there is no overlap or -2 if we can't determine
4065    (for example when X and Y have different base registers).  */
4066 
4067 static int
4068 memory_load_overlap (rtx x, rtx y, HOST_WIDE_INT size)
4069 {
4070   rtx tmp = plus_constant (Pmode, x, size);
4071   rtx sub = simplify_gen_binary (MINUS, Pmode, tmp, y);
4072 
4073   if (!CONST_INT_P (sub))
4074     return -2;
4075 
4076   HOST_WIDE_INT val = INTVAL (sub);
4077 
4078   return IN_RANGE (val, 1, size) ? val : -1;
4079 }
4080 
4081 /* Generate code to push X onto the stack, assuming it has mode MODE and
4082    type TYPE.
4083    MODE is redundant except when X is a CONST_INT (since they don't
4084    carry mode info).
4085    SIZE is an rtx for the size of data to be copied (in bytes),
4086    needed only if X is BLKmode.
4087    Return true if successful.  May return false if asked to push a
4088    partial argument during a sibcall optimization (as specified by
4089    SIBCALL_P) and the incoming and outgoing pointers cannot be shown
4090    to not overlap.
4091 
4092    ALIGN (in bits) is maximum alignment we can assume.
4093 
4094    If PARTIAL and REG are both nonzero, then copy that many of the first
4095    bytes of X into registers starting with REG, and push the rest of X.
4096    The amount of space pushed is decreased by PARTIAL bytes.
4097    REG must be a hard register in this case.
4098    If REG is zero but PARTIAL is not, take any all others actions for an
4099    argument partially in registers, but do not actually load any
4100    registers.
4101 
4102    EXTRA is the amount in bytes of extra space to leave next to this arg.
4103    This is ignored if an argument block has already been allocated.
4104 
4105    On a machine that lacks real push insns, ARGS_ADDR is the address of
4106    the bottom of the argument block for this call.  We use indexing off there
4107    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when a
4108    argument block has not been preallocated.
4109 
4110    ARGS_SO_FAR is the size of args previously pushed for this call.
4111 
4112    REG_PARM_STACK_SPACE is nonzero if functions require stack space
4113    for arguments passed in registers.  If nonzero, it will be the number
4114    of bytes required.  */
4115 
4116 bool
4117 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4118 		unsigned int align, int partial, rtx reg, int extra,
4119 		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4120 		rtx alignment_pad, bool sibcall_p)
4121 {
4122   rtx xinner;
4123   enum direction stack_direction = STACK_GROWS_DOWNWARD ? downward : upward;
4124 
4125   /* Decide where to pad the argument: `downward' for below,
4126      `upward' for above, or `none' for don't pad it.
4127      Default is below for small data on big-endian machines; else above.  */
4128   enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4129 
4130   /* Invert direction if stack is post-decrement.
4131      FIXME: why?  */
4132   if (STACK_PUSH_CODE == POST_DEC)
4133     if (where_pad != none)
4134       where_pad = (where_pad == downward ? upward : downward);
4135 
4136   xinner = x;
4137 
4138   int nregs = partial / UNITS_PER_WORD;
4139   rtx *tmp_regs = NULL;
4140   int overlapping = 0;
4141 
4142   if (mode == BLKmode
4143       || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)
4144 	  && type != NULL_TREE))
4145     {
4146       /* Copy a block into the stack, entirely or partially.  */
4147 
4148       rtx temp;
4149       int used;
4150       int offset;
4151       int skip;
4152 
4153       offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4154       used = partial - offset;
4155 
4156       if (mode != BLKmode)
4157 	{
4158 	  /* A value is to be stored in an insufficiently aligned
4159 	     stack slot; copy via a suitably aligned slot if
4160 	     necessary.  */
4161 	  size = GEN_INT (GET_MODE_SIZE (mode));
4162 	  if (!MEM_P (xinner))
4163 	    {
4164 	      temp = assign_temp (type, 1, 1);
4165 	      emit_move_insn (temp, xinner);
4166 	      xinner = temp;
4167 	    }
4168 	}
4169 
4170       gcc_assert (size);
4171 
4172       /* USED is now the # of bytes we need not copy to the stack
4173 	 because registers will take care of them.  */
4174 
4175       if (partial != 0)
4176 	xinner = adjust_address (xinner, BLKmode, used);
4177 
4178       /* If the partial register-part of the arg counts in its stack size,
4179 	 skip the part of stack space corresponding to the registers.
4180 	 Otherwise, start copying to the beginning of the stack space,
4181 	 by setting SKIP to 0.  */
4182       skip = (reg_parm_stack_space == 0) ? 0 : used;
4183 
4184 #ifdef PUSH_ROUNDING
4185       /* Do it with several push insns if that doesn't take lots of insns
4186 	 and if there is no difficulty with push insns that skip bytes
4187 	 on the stack for alignment purposes.  */
4188       if (args_addr == 0
4189 	  && PUSH_ARGS
4190 	  && CONST_INT_P (size)
4191 	  && skip == 0
4192 	  && MEM_ALIGN (xinner) >= align
4193 	  && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4194 	  /* Here we avoid the case of a structure whose weak alignment
4195 	     forces many pushes of a small amount of data,
4196 	     and such small pushes do rounding that causes trouble.  */
4197 	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4198 	      || align >= BIGGEST_ALIGNMENT
4199 	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4200 		  == (align / BITS_PER_UNIT)))
4201 	  && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4202 	{
4203 	  /* Push padding now if padding above and stack grows down,
4204 	     or if padding below and stack grows up.
4205 	     But if space already allocated, this has already been done.  */
4206 	  if (extra && args_addr == 0
4207 	      && where_pad != none && where_pad != stack_direction)
4208 	    anti_adjust_stack (GEN_INT (extra));
4209 
4210 	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4211 	}
4212       else
4213 #endif /* PUSH_ROUNDING  */
4214 	{
4215 	  rtx target;
4216 
4217 	  /* Otherwise make space on the stack and copy the data
4218 	     to the address of that space.  */
4219 
4220 	  /* Deduct words put into registers from the size we must copy.  */
4221 	  if (partial != 0)
4222 	    {
4223 	      if (CONST_INT_P (size))
4224 		size = GEN_INT (INTVAL (size) - used);
4225 	      else
4226 		size = expand_binop (GET_MODE (size), sub_optab, size,
4227 				     gen_int_mode (used, GET_MODE (size)),
4228 				     NULL_RTX, 0, OPTAB_LIB_WIDEN);
4229 	    }
4230 
4231 	  /* Get the address of the stack space.
4232 	     In this case, we do not deal with EXTRA separately.
4233 	     A single stack adjust will do.  */
4234 	  if (! args_addr)
4235 	    {
4236 	      temp = push_block (size, extra, where_pad == downward);
4237 	      extra = 0;
4238 	    }
4239 	  else if (CONST_INT_P (args_so_far))
4240 	    temp = memory_address (BLKmode,
4241 				   plus_constant (Pmode, args_addr,
4242 						  skip + INTVAL (args_so_far)));
4243 	  else
4244 	    temp = memory_address (BLKmode,
4245 				   plus_constant (Pmode,
4246 						  gen_rtx_PLUS (Pmode,
4247 								args_addr,
4248 								args_so_far),
4249 						  skip));
4250 
4251 	  if (!ACCUMULATE_OUTGOING_ARGS)
4252 	    {
4253 	      /* If the source is referenced relative to the stack pointer,
4254 		 copy it to another register to stabilize it.  We do not need
4255 		 to do this if we know that we won't be changing sp.  */
4256 
4257 	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4258 		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4259 		temp = copy_to_reg (temp);
4260 	    }
4261 
4262 	  target = gen_rtx_MEM (BLKmode, temp);
4263 
4264 	  /* We do *not* set_mem_attributes here, because incoming arguments
4265 	     may overlap with sibling call outgoing arguments and we cannot
4266 	     allow reordering of reads from function arguments with stores
4267 	     to outgoing arguments of sibling calls.  We do, however, want
4268 	     to record the alignment of the stack slot.  */
4269 	  /* ALIGN may well be better aligned than TYPE, e.g. due to
4270 	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
4271 	  set_mem_align (target, align);
4272 
4273 	  /* If part should go in registers and pushing to that part would
4274 	     overwrite some of the values that need to go into regs, load the
4275 	     overlapping values into temporary pseudos to be moved into the hard
4276 	     regs at the end after the stack pushing has completed.
4277 	     We cannot load them directly into the hard regs here because
4278 	     they can be clobbered by the block move expansions.
4279 	     See PR 65358.  */
4280 
4281 	  if (partial > 0 && reg != 0 && mode == BLKmode
4282 	      && GET_CODE (reg) != PARALLEL)
4283 	    {
4284 	      overlapping = memory_load_overlap (XEXP (x, 0), temp, partial);
4285 	      if (overlapping > 0)
4286 	        {
4287 		  gcc_assert (overlapping % UNITS_PER_WORD == 0);
4288 		  overlapping /= UNITS_PER_WORD;
4289 
4290 		  tmp_regs = XALLOCAVEC (rtx, overlapping);
4291 
4292 		  for (int i = 0; i < overlapping; i++)
4293 		    tmp_regs[i] = gen_reg_rtx (word_mode);
4294 
4295 		  for (int i = 0; i < overlapping; i++)
4296 		    emit_move_insn (tmp_regs[i],
4297 				    operand_subword_force (target, i, mode));
4298 	        }
4299 	      else if (overlapping == -1)
4300 		overlapping = 0;
4301 	      /* Could not determine whether there is overlap.
4302 	         Fail the sibcall.  */
4303 	      else
4304 		{
4305 		  overlapping = 0;
4306 		  if (sibcall_p)
4307 		    return false;
4308 		}
4309 	    }
4310 	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4311 	}
4312     }
4313   else if (partial > 0)
4314     {
4315       /* Scalar partly in registers.  */
4316 
4317       int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4318       int i;
4319       int not_stack;
4320       /* # bytes of start of argument
4321 	 that we must make space for but need not store.  */
4322       int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4323       int args_offset = INTVAL (args_so_far);
4324       int skip;
4325 
4326       /* Push padding now if padding above and stack grows down,
4327 	 or if padding below and stack grows up.
4328 	 But if space already allocated, this has already been done.  */
4329       if (extra && args_addr == 0
4330 	  && where_pad != none && where_pad != stack_direction)
4331 	anti_adjust_stack (GEN_INT (extra));
4332 
4333       /* If we make space by pushing it, we might as well push
4334 	 the real data.  Otherwise, we can leave OFFSET nonzero
4335 	 and leave the space uninitialized.  */
4336       if (args_addr == 0)
4337 	offset = 0;
4338 
4339       /* Now NOT_STACK gets the number of words that we don't need to
4340 	 allocate on the stack.  Convert OFFSET to words too.  */
4341       not_stack = (partial - offset) / UNITS_PER_WORD;
4342       offset /= UNITS_PER_WORD;
4343 
4344       /* If the partial register-part of the arg counts in its stack size,
4345 	 skip the part of stack space corresponding to the registers.
4346 	 Otherwise, start copying to the beginning of the stack space,
4347 	 by setting SKIP to 0.  */
4348       skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4349 
4350       if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4351 	x = validize_mem (force_const_mem (mode, x));
4352 
4353       /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4354 	 SUBREGs of such registers are not allowed.  */
4355       if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4356 	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4357 	x = copy_to_reg (x);
4358 
4359       /* Loop over all the words allocated on the stack for this arg.  */
4360       /* We can do it by words, because any scalar bigger than a word
4361 	 has a size a multiple of a word.  */
4362       for (i = size - 1; i >= not_stack; i--)
4363 	if (i >= not_stack + offset)
4364 	  if (!emit_push_insn (operand_subword_force (x, i, mode),
4365 			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4366 			  0, args_addr,
4367 			  GEN_INT (args_offset + ((i - not_stack + skip)
4368 						  * UNITS_PER_WORD)),
4369 			  reg_parm_stack_space, alignment_pad, sibcall_p))
4370 	    return false;
4371     }
4372   else
4373     {
4374       rtx addr;
4375       rtx dest;
4376 
4377       /* Push padding now if padding above and stack grows down,
4378 	 or if padding below and stack grows up.
4379 	 But if space already allocated, this has already been done.  */
4380       if (extra && args_addr == 0
4381 	  && where_pad != none && where_pad != stack_direction)
4382 	anti_adjust_stack (GEN_INT (extra));
4383 
4384 #ifdef PUSH_ROUNDING
4385       if (args_addr == 0 && PUSH_ARGS)
4386 	emit_single_push_insn (mode, x, type);
4387       else
4388 #endif
4389 	{
4390 	  if (CONST_INT_P (args_so_far))
4391 	    addr
4392 	      = memory_address (mode,
4393 				plus_constant (Pmode, args_addr,
4394 					       INTVAL (args_so_far)));
4395 	  else
4396 	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4397 						       args_so_far));
4398 	  dest = gen_rtx_MEM (mode, addr);
4399 
4400 	  /* We do *not* set_mem_attributes here, because incoming arguments
4401 	     may overlap with sibling call outgoing arguments and we cannot
4402 	     allow reordering of reads from function arguments with stores
4403 	     to outgoing arguments of sibling calls.  We do, however, want
4404 	     to record the alignment of the stack slot.  */
4405 	  /* ALIGN may well be better aligned than TYPE, e.g. due to
4406 	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
4407 	  set_mem_align (dest, align);
4408 
4409 	  emit_move_insn (dest, x);
4410 	}
4411     }
4412 
4413   /* Move the partial arguments into the registers and any overlapping
4414      values that we moved into the pseudos in tmp_regs.  */
4415   if (partial > 0 && reg != 0)
4416     {
4417       /* Handle calls that pass values in multiple non-contiguous locations.
4418 	 The Irix 6 ABI has examples of this.  */
4419       if (GET_CODE (reg) == PARALLEL)
4420 	emit_group_load (reg, x, type, -1);
4421       else
4422         {
4423 	  gcc_assert (partial % UNITS_PER_WORD == 0);
4424 	  move_block_to_reg (REGNO (reg), x, nregs - overlapping, mode);
4425 
4426 	  for (int i = 0; i < overlapping; i++)
4427 	    emit_move_insn (gen_rtx_REG (word_mode, REGNO (reg)
4428 						    + nregs - overlapping + i),
4429 			    tmp_regs[i]);
4430 
4431 	}
4432     }
4433 
4434   if (extra && args_addr == 0 && where_pad == stack_direction)
4435     anti_adjust_stack (GEN_INT (extra));
4436 
4437   if (alignment_pad && args_addr == 0)
4438     anti_adjust_stack (alignment_pad);
4439 
4440   return true;
4441 }
4442 
4443 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4444    operations.  */
4445 
4446 static rtx
4447 get_subtarget (rtx x)
4448 {
4449   return (optimize
4450           || x == 0
4451 	   /* Only registers can be subtargets.  */
4452 	   || !REG_P (x)
4453 	   /* Don't use hard regs to avoid extending their life.  */
4454 	   || REGNO (x) < FIRST_PSEUDO_REGISTER
4455 	  ? 0 : x);
4456 }
4457 
4458 /* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
4459    FIELD is a bitfield.  Returns true if the optimization was successful,
4460    and there's nothing else to do.  */
4461 
4462 static bool
4463 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4464 				 unsigned HOST_WIDE_INT bitpos,
4465 				 unsigned HOST_WIDE_INT bitregion_start,
4466 				 unsigned HOST_WIDE_INT bitregion_end,
4467 				 machine_mode mode1, rtx str_rtx,
4468 				 tree to, tree src, bool reverse)
4469 {
4470   machine_mode str_mode = GET_MODE (str_rtx);
4471   unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4472   tree op0, op1;
4473   rtx value, result;
4474   optab binop;
4475   gimple *srcstmt;
4476   enum tree_code code;
4477 
4478   if (mode1 != VOIDmode
4479       || bitsize >= BITS_PER_WORD
4480       || str_bitsize > BITS_PER_WORD
4481       || TREE_SIDE_EFFECTS (to)
4482       || TREE_THIS_VOLATILE (to))
4483     return false;
4484 
4485   STRIP_NOPS (src);
4486   if (TREE_CODE (src) != SSA_NAME)
4487     return false;
4488   if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4489     return false;
4490 
4491   srcstmt = get_gimple_for_ssa_name (src);
4492   if (!srcstmt
4493       || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4494     return false;
4495 
4496   code = gimple_assign_rhs_code (srcstmt);
4497 
4498   op0 = gimple_assign_rhs1 (srcstmt);
4499 
4500   /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4501      to find its initialization.  Hopefully the initialization will
4502      be from a bitfield load.  */
4503   if (TREE_CODE (op0) == SSA_NAME)
4504     {
4505       gimple *op0stmt = get_gimple_for_ssa_name (op0);
4506 
4507       /* We want to eventually have OP0 be the same as TO, which
4508 	 should be a bitfield.  */
4509       if (!op0stmt
4510 	  || !is_gimple_assign (op0stmt)
4511 	  || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4512 	return false;
4513       op0 = gimple_assign_rhs1 (op0stmt);
4514     }
4515 
4516   op1 = gimple_assign_rhs2 (srcstmt);
4517 
4518   if (!operand_equal_p (to, op0, 0))
4519     return false;
4520 
4521   if (MEM_P (str_rtx))
4522     {
4523       unsigned HOST_WIDE_INT offset1;
4524 
4525       if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4526 	str_mode = word_mode;
4527       str_mode = get_best_mode (bitsize, bitpos,
4528 				bitregion_start, bitregion_end,
4529 				MEM_ALIGN (str_rtx), str_mode, 0);
4530       if (str_mode == VOIDmode)
4531 	return false;
4532       str_bitsize = GET_MODE_BITSIZE (str_mode);
4533 
4534       offset1 = bitpos;
4535       bitpos %= str_bitsize;
4536       offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4537       str_rtx = adjust_address (str_rtx, str_mode, offset1);
4538     }
4539   else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4540     return false;
4541   else
4542     gcc_assert (!reverse);
4543 
4544   /* If the bit field covers the whole REG/MEM, store_field
4545      will likely generate better code.  */
4546   if (bitsize >= str_bitsize)
4547     return false;
4548 
4549   /* We can't handle fields split across multiple entities.  */
4550   if (bitpos + bitsize > str_bitsize)
4551     return false;
4552 
4553   if (reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
4554     bitpos = str_bitsize - bitpos - bitsize;
4555 
4556   switch (code)
4557     {
4558     case PLUS_EXPR:
4559     case MINUS_EXPR:
4560       /* For now, just optimize the case of the topmost bitfield
4561 	 where we don't need to do any masking and also
4562 	 1 bit bitfields where xor can be used.
4563 	 We might win by one instruction for the other bitfields
4564 	 too if insv/extv instructions aren't used, so that
4565 	 can be added later.  */
4566       if ((reverse || bitpos + bitsize != str_bitsize)
4567 	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4568 	break;
4569 
4570       value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4571       value = convert_modes (str_mode,
4572 			     TYPE_MODE (TREE_TYPE (op1)), value,
4573 			     TYPE_UNSIGNED (TREE_TYPE (op1)));
4574 
4575       /* We may be accessing data outside the field, which means
4576 	 we can alias adjacent data.  */
4577       if (MEM_P (str_rtx))
4578 	{
4579 	  str_rtx = shallow_copy_rtx (str_rtx);
4580 	  set_mem_alias_set (str_rtx, 0);
4581 	  set_mem_expr (str_rtx, 0);
4582 	}
4583 
4584       if (bitsize == 1 && (reverse || bitpos + bitsize != str_bitsize))
4585 	{
4586 	  value = expand_and (str_mode, value, const1_rtx, NULL);
4587 	  binop = xor_optab;
4588 	}
4589       else
4590 	binop = code == PLUS_EXPR ? add_optab : sub_optab;
4591 
4592       value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4593       if (reverse)
4594 	value = flip_storage_order (str_mode, value);
4595       result = expand_binop (str_mode, binop, str_rtx,
4596 			     value, str_rtx, 1, OPTAB_WIDEN);
4597       if (result != str_rtx)
4598 	emit_move_insn (str_rtx, result);
4599       return true;
4600 
4601     case BIT_IOR_EXPR:
4602     case BIT_XOR_EXPR:
4603       if (TREE_CODE (op1) != INTEGER_CST)
4604 	break;
4605       value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4606       value = convert_modes (str_mode,
4607 			     TYPE_MODE (TREE_TYPE (op1)), value,
4608 			     TYPE_UNSIGNED (TREE_TYPE (op1)));
4609 
4610       /* We may be accessing data outside the field, which means
4611 	 we can alias adjacent data.  */
4612       if (MEM_P (str_rtx))
4613 	{
4614 	  str_rtx = shallow_copy_rtx (str_rtx);
4615 	  set_mem_alias_set (str_rtx, 0);
4616 	  set_mem_expr (str_rtx, 0);
4617 	}
4618 
4619       binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4620       if (bitpos + bitsize != str_bitsize)
4621 	{
4622 	  rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4623 				   str_mode);
4624 	  value = expand_and (str_mode, value, mask, NULL_RTX);
4625 	}
4626       value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4627       if (reverse)
4628 	value = flip_storage_order (str_mode, value);
4629       result = expand_binop (str_mode, binop, str_rtx,
4630 			     value, str_rtx, 1, OPTAB_WIDEN);
4631       if (result != str_rtx)
4632 	emit_move_insn (str_rtx, result);
4633       return true;
4634 
4635     default:
4636       break;
4637     }
4638 
4639   return false;
4640 }
4641 
4642 /* In the C++ memory model, consecutive bit fields in a structure are
4643    considered one memory location.
4644 
4645    Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4646    returns the bit range of consecutive bits in which this COMPONENT_REF
4647    belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
4648    and *OFFSET may be adjusted in the process.
4649 
4650    If the access does not need to be restricted, 0 is returned in both
4651    *BITSTART and *BITEND.  */
4652 
4653 void
4654 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4655 	       unsigned HOST_WIDE_INT *bitend,
4656 	       tree exp,
4657 	       HOST_WIDE_INT *bitpos,
4658 	       tree *offset)
4659 {
4660   HOST_WIDE_INT bitoffset;
4661   tree field, repr;
4662 
4663   gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4664 
4665   field = TREE_OPERAND (exp, 1);
4666   repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4667   /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4668      need to limit the range we can access.  */
4669   if (!repr)
4670     {
4671       *bitstart = *bitend = 0;
4672       return;
4673     }
4674 
4675   /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4676      part of a larger bit field, then the representative does not serve any
4677      useful purpose.  This can occur in Ada.  */
4678   if (handled_component_p (TREE_OPERAND (exp, 0)))
4679     {
4680       machine_mode rmode;
4681       HOST_WIDE_INT rbitsize, rbitpos;
4682       tree roffset;
4683       int unsignedp, reversep, volatilep = 0;
4684       get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4685 			   &roffset, &rmode, &unsignedp, &reversep,
4686 			   &volatilep, false);
4687       if ((rbitpos % BITS_PER_UNIT) != 0)
4688 	{
4689 	  *bitstart = *bitend = 0;
4690 	  return;
4691 	}
4692     }
4693 
4694   /* Compute the adjustment to bitpos from the offset of the field
4695      relative to the representative.  DECL_FIELD_OFFSET of field and
4696      repr are the same by construction if they are not constants,
4697      see finish_bitfield_layout.  */
4698   if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4699       && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4700     bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4701 		 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4702   else
4703     bitoffset = 0;
4704   bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4705 		- tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4706 
4707   /* If the adjustment is larger than bitpos, we would have a negative bit
4708      position for the lower bound and this may wreak havoc later.  Adjust
4709      offset and bitpos to make the lower bound non-negative in that case.  */
4710   if (bitoffset > *bitpos)
4711     {
4712       HOST_WIDE_INT adjust = bitoffset - *bitpos;
4713       gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4714 
4715       *bitpos += adjust;
4716       if (*offset == NULL_TREE)
4717 	*offset = size_int (-adjust / BITS_PER_UNIT);
4718       else
4719 	*offset
4720 	  = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4721       *bitstart = 0;
4722     }
4723   else
4724     *bitstart = *bitpos - bitoffset;
4725 
4726   *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4727 }
4728 
4729 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4730    in memory and has non-BLKmode.  DECL_RTL must not be a MEM; if
4731    DECL_RTL was not set yet, return NORTL.  */
4732 
4733 static inline bool
4734 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4735 {
4736   if (TREE_CODE (addr) != ADDR_EXPR)
4737     return false;
4738 
4739   tree base = TREE_OPERAND (addr, 0);
4740 
4741   if (!DECL_P (base)
4742       || TREE_ADDRESSABLE (base)
4743       || DECL_MODE (base) == BLKmode)
4744     return false;
4745 
4746   if (!DECL_RTL_SET_P (base))
4747     return nortl;
4748 
4749   return (!MEM_P (DECL_RTL (base)));
4750 }
4751 
4752 /* Returns true if the MEM_REF REF refers to an object that does not
4753    reside in memory and has non-BLKmode.  */
4754 
4755 static inline bool
4756 mem_ref_refers_to_non_mem_p (tree ref)
4757 {
4758   tree base = TREE_OPERAND (ref, 0);
4759   return addr_expr_of_non_mem_decl_p_1 (base, false);
4760 }
4761 
4762 /* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
4763    is true, try generating a nontemporal store.  */
4764 
4765 void
4766 expand_assignment (tree to, tree from, bool nontemporal)
4767 {
4768   rtx to_rtx = 0;
4769   rtx result;
4770   machine_mode mode;
4771   unsigned int align;
4772   enum insn_code icode;
4773 
4774   /* Don't crash if the lhs of the assignment was erroneous.  */
4775   if (TREE_CODE (to) == ERROR_MARK)
4776     {
4777       expand_normal (from);
4778       return;
4779     }
4780 
4781   /* Optimize away no-op moves without side-effects.  */
4782   if (operand_equal_p (to, from, 0))
4783     return;
4784 
4785   /* Handle misaligned stores.  */
4786   mode = TYPE_MODE (TREE_TYPE (to));
4787   if ((TREE_CODE (to) == MEM_REF
4788        || TREE_CODE (to) == TARGET_MEM_REF)
4789       && mode != BLKmode
4790       && !mem_ref_refers_to_non_mem_p (to)
4791       && ((align = get_object_alignment (to))
4792 	  < GET_MODE_ALIGNMENT (mode))
4793       && (((icode = optab_handler (movmisalign_optab, mode))
4794 	   != CODE_FOR_nothing)
4795 	  || SLOW_UNALIGNED_ACCESS (mode, align)))
4796     {
4797       rtx reg, mem;
4798 
4799       reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4800       reg = force_not_mem (reg);
4801       mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4802       if (TREE_CODE (to) == MEM_REF && REF_REVERSE_STORAGE_ORDER (to))
4803 	reg = flip_storage_order (mode, reg);
4804 
4805       if (icode != CODE_FOR_nothing)
4806 	{
4807 	  struct expand_operand ops[2];
4808 
4809 	  create_fixed_operand (&ops[0], mem);
4810 	  create_input_operand (&ops[1], reg, mode);
4811 	  /* The movmisalign<mode> pattern cannot fail, else the assignment
4812 	     would silently be omitted.  */
4813 	  expand_insn (icode, 2, ops);
4814 	}
4815       else
4816 	store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg,
4817 			 false);
4818       return;
4819     }
4820 
4821   /* Assignment of a structure component needs special treatment
4822      if the structure component's rtx is not simply a MEM.
4823      Assignment of an array element at a constant index, and assignment of
4824      an array element in an unaligned packed structure field, has the same
4825      problem.  Same for (partially) storing into a non-memory object.  */
4826   if (handled_component_p (to)
4827       || (TREE_CODE (to) == MEM_REF
4828 	  && (REF_REVERSE_STORAGE_ORDER (to)
4829 	      || mem_ref_refers_to_non_mem_p (to)))
4830       || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4831     {
4832       machine_mode mode1;
4833       HOST_WIDE_INT bitsize, bitpos;
4834       unsigned HOST_WIDE_INT bitregion_start = 0;
4835       unsigned HOST_WIDE_INT bitregion_end = 0;
4836       tree offset;
4837       int unsignedp, reversep, volatilep = 0;
4838       tree tem;
4839 
4840       push_temp_slots ();
4841       tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4842 				 &unsignedp, &reversep, &volatilep, true);
4843 
4844       /* Make sure bitpos is not negative, it can wreak havoc later.  */
4845       if (bitpos < 0)
4846 	{
4847 	  gcc_assert (offset == NULL_TREE);
4848 	  offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4849 					? 3 : exact_log2 (BITS_PER_UNIT)));
4850 	  bitpos &= BITS_PER_UNIT - 1;
4851 	}
4852 
4853       if (TREE_CODE (to) == COMPONENT_REF
4854 	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4855 	get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4856       /* The C++ memory model naturally applies to byte-aligned fields.
4857 	 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4858 	 BITSIZE are not byte-aligned, there is no need to limit the range
4859 	 we can access.  This can occur with packed structures in Ada.  */
4860       else if (bitsize > 0
4861 	       && bitsize % BITS_PER_UNIT == 0
4862 	       && bitpos % BITS_PER_UNIT == 0)
4863 	{
4864 	  bitregion_start = bitpos;
4865 	  bitregion_end = bitpos + bitsize - 1;
4866 	}
4867 
4868       to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4869 
4870       /* If the field has a mode, we want to access it in the
4871 	 field's mode, not the computed mode.
4872 	 If a MEM has VOIDmode (external with incomplete type),
4873 	 use BLKmode for it instead.  */
4874       if (MEM_P (to_rtx))
4875 	{
4876 	  if (mode1 != VOIDmode)
4877 	    to_rtx = adjust_address (to_rtx, mode1, 0);
4878 	  else if (GET_MODE (to_rtx) == VOIDmode)
4879 	    to_rtx = adjust_address (to_rtx, BLKmode, 0);
4880 	}
4881 
4882       if (offset != 0)
4883 	{
4884 	  machine_mode address_mode;
4885 	  rtx offset_rtx;
4886 
4887 	  if (!MEM_P (to_rtx))
4888 	    {
4889 	      /* We can get constant negative offsets into arrays with broken
4890 		 user code.  Translate this to a trap instead of ICEing.  */
4891 	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4892 	      expand_builtin_trap ();
4893 	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4894 	    }
4895 
4896 	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4897 	  address_mode = get_address_mode (to_rtx);
4898 	  if (GET_MODE (offset_rtx) != address_mode)
4899 	    {
4900 		/* We cannot be sure that the RTL in offset_rtx is valid outside
4901 		   of a memory address context, so force it into a register
4902 		   before attempting to convert it to the desired mode.  */
4903 	      offset_rtx = force_operand (offset_rtx, NULL_RTX);
4904 	      offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4905 	    }
4906 
4907 	  /* If we have an expression in OFFSET_RTX and a non-zero
4908 	     byte offset in BITPOS, adding the byte offset before the
4909 	     OFFSET_RTX results in better intermediate code, which makes
4910 	     later rtl optimization passes perform better.
4911 
4912 	     We prefer intermediate code like this:
4913 
4914 	     r124:DI=r123:DI+0x18
4915 	     [r124:DI]=r121:DI
4916 
4917 	     ... instead of ...
4918 
4919 	     r124:DI=r123:DI+0x10
4920 	     [r124:DI+0x8]=r121:DI
4921 
4922 	     This is only done for aligned data values, as these can
4923 	     be expected to result in single move instructions.  */
4924 	  if (mode1 != VOIDmode
4925 	      && bitpos != 0
4926 	      && bitsize > 0
4927 	      && (bitpos % bitsize) == 0
4928 	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4929 	      && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4930 	    {
4931 	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4932 	      bitregion_start = 0;
4933 	      if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4934 		bitregion_end -= bitpos;
4935 	      bitpos = 0;
4936 	    }
4937 
4938 	  to_rtx = offset_address (to_rtx, offset_rtx,
4939 				   highest_pow2_factor_for_target (to,
4940 				   				   offset));
4941 	}
4942 
4943       /* No action is needed if the target is not a memory and the field
4944 	 lies completely outside that target.  This can occur if the source
4945 	 code contains an out-of-bounds access to a small array.  */
4946       if (!MEM_P (to_rtx)
4947 	  && GET_MODE (to_rtx) != BLKmode
4948 	  && (unsigned HOST_WIDE_INT) bitpos
4949 	     >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4950 	{
4951 	  expand_normal (from);
4952 	  result = NULL;
4953 	}
4954       /* Handle expand_expr of a complex value returning a CONCAT.  */
4955       else if (GET_CODE (to_rtx) == CONCAT)
4956 	{
4957 	  unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4958 	  if (TYPE_MODE (TREE_TYPE (from)) == GET_MODE (to_rtx)
4959 	      && COMPLEX_MODE_P (GET_MODE (to_rtx))
4960 	      && bitpos == 0
4961 	      && bitsize == mode_bitsize)
4962 	    result = store_expr (from, to_rtx, false, nontemporal, reversep);
4963 	  else if (COMPLEX_MODE_P (GET_MODE (to_rtx))
4964 		   && (TYPE_MODE (TREE_TYPE (from))
4965 		       == GET_MODE_INNER (GET_MODE (to_rtx)))
4966 		   && bitsize == mode_bitsize / 2
4967 		   && (bitpos == 0 || bitpos == mode_bitsize / 2))
4968 	    result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4969 				 nontemporal, reversep);
4970 	  else if (bitpos + bitsize <= mode_bitsize / 2)
4971 	    result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4972 				  bitregion_start, bitregion_end,
4973 				  mode1, from, get_alias_set (to),
4974 				  nontemporal, reversep);
4975 	  else if (bitpos >= mode_bitsize / 2)
4976 	    result = store_field (XEXP (to_rtx, 1), bitsize,
4977 				  bitpos - mode_bitsize / 2,
4978 				  bitregion_start, bitregion_end,
4979 				  mode1, from, get_alias_set (to),
4980 				  nontemporal, reversep);
4981 	  else if (bitpos == 0 && bitsize == mode_bitsize)
4982 	    {
4983 	      result = expand_normal (from);
4984 	      if (GET_CODE (result) == CONCAT)
4985 		{
4986 		  machine_mode to_mode = GET_MODE_INNER (GET_MODE (to_rtx));
4987 		  machine_mode from_mode = GET_MODE_INNER (GET_MODE (result));
4988 		  rtx from_real
4989 		    = simplify_gen_subreg (to_mode, XEXP (result, 0),
4990 					   from_mode, 0);
4991 		  rtx from_imag
4992 		    = simplify_gen_subreg (to_mode, XEXP (result, 1),
4993 					   from_mode, 1);
4994 		  emit_move_insn (XEXP (to_rtx, 0), from_real);
4995 		  emit_move_insn (XEXP (to_rtx, 1), from_imag);
4996 		}
4997 	      else
4998 		{
4999 		  rtx from_rtx
5000 		    = simplify_gen_subreg (GET_MODE (to_rtx), result,
5001 					   TYPE_MODE (TREE_TYPE (from)), 0);
5002 		  emit_move_insn (XEXP (to_rtx, 0),
5003 				  read_complex_part (from_rtx, false));
5004 		  emit_move_insn (XEXP (to_rtx, 1),
5005 				  read_complex_part (from_rtx, true));
5006 		}
5007 	    }
5008 	  else
5009 	    {
5010 	      rtx temp = assign_stack_temp (GET_MODE (to_rtx),
5011 					    GET_MODE_SIZE (GET_MODE (to_rtx)));
5012 	      write_complex_part (temp, XEXP (to_rtx, 0), false);
5013 	      write_complex_part (temp, XEXP (to_rtx, 1), true);
5014 	      result = store_field (temp, bitsize, bitpos,
5015 				    bitregion_start, bitregion_end,
5016 				    mode1, from, get_alias_set (to),
5017 				    nontemporal, reversep);
5018 	      emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
5019 	      emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
5020 	    }
5021 	}
5022       else
5023 	{
5024 	  if (MEM_P (to_rtx))
5025 	    {
5026 	      /* If the field is at offset zero, we could have been given the
5027 		 DECL_RTX of the parent struct.  Don't munge it.  */
5028 	      to_rtx = shallow_copy_rtx (to_rtx);
5029 	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
5030 	      if (volatilep)
5031 		MEM_VOLATILE_P (to_rtx) = 1;
5032 	    }
5033 
5034 	  if (optimize_bitfield_assignment_op (bitsize, bitpos,
5035 					       bitregion_start, bitregion_end,
5036 					       mode1, to_rtx, to, from,
5037 					       reversep))
5038 	    result = NULL;
5039 	  else
5040 	    result = store_field (to_rtx, bitsize, bitpos,
5041 				  bitregion_start, bitregion_end,
5042 				  mode1, from, get_alias_set (to),
5043 				  nontemporal, reversep);
5044 	}
5045 
5046       if (result)
5047 	preserve_temp_slots (result);
5048       pop_temp_slots ();
5049       return;
5050     }
5051 
5052   /* If the rhs is a function call and its value is not an aggregate,
5053      call the function before we start to compute the lhs.
5054      This is needed for correct code for cases such as
5055      val = setjmp (buf) on machines where reference to val
5056      requires loading up part of an address in a separate insn.
5057 
5058      Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
5059      since it might be a promoted variable where the zero- or sign- extension
5060      needs to be done.  Handling this in the normal way is safe because no
5061      computation is done before the call.  The same is true for SSA names.  */
5062   if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5063       && COMPLETE_TYPE_P (TREE_TYPE (from))
5064       && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5065       && ! (((TREE_CODE (to) == VAR_DECL
5066 	      || TREE_CODE (to) == PARM_DECL
5067 	      || TREE_CODE (to) == RESULT_DECL)
5068 	     && REG_P (DECL_RTL (to)))
5069 	    || TREE_CODE (to) == SSA_NAME))
5070     {
5071       rtx value;
5072       rtx bounds;
5073 
5074       push_temp_slots ();
5075       value = expand_normal (from);
5076 
5077       /* Split value and bounds to store them separately.  */
5078       chkp_split_slot (value, &value, &bounds);
5079 
5080       if (to_rtx == 0)
5081 	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5082 
5083       /* Handle calls that return values in multiple non-contiguous locations.
5084 	 The Irix 6 ABI has examples of this.  */
5085       if (GET_CODE (to_rtx) == PARALLEL)
5086 	{
5087 	  if (GET_CODE (value) == PARALLEL)
5088 	    emit_group_move (to_rtx, value);
5089 	  else
5090 	    emit_group_load (to_rtx, value, TREE_TYPE (from),
5091 			     int_size_in_bytes (TREE_TYPE (from)));
5092 	}
5093       else if (GET_CODE (value) == PARALLEL)
5094 	emit_group_store (to_rtx, value, TREE_TYPE (from),
5095 			  int_size_in_bytes (TREE_TYPE (from)));
5096       else if (GET_MODE (to_rtx) == BLKmode)
5097 	{
5098 	  /* Handle calls that return BLKmode values in registers.  */
5099 	  if (REG_P (value))
5100 	    copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5101 	  else
5102 	    emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5103 	}
5104       else
5105 	{
5106 	  if (POINTER_TYPE_P (TREE_TYPE (to)))
5107 	    value = convert_memory_address_addr_space
5108 		      (GET_MODE (to_rtx), value,
5109 		       TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5110 
5111 	  emit_move_insn (to_rtx, value);
5112 	}
5113 
5114       /* Store bounds if required.  */
5115       if (bounds
5116 	  && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5117 	{
5118 	  gcc_assert (MEM_P (to_rtx));
5119 	  chkp_emit_bounds_store (bounds, value, to_rtx);
5120 	}
5121 
5122       preserve_temp_slots (to_rtx);
5123       pop_temp_slots ();
5124       return;
5125     }
5126 
5127   /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.  */
5128   to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5129 
5130   /* Don't move directly into a return register.  */
5131   if (TREE_CODE (to) == RESULT_DECL
5132       && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5133     {
5134       rtx temp;
5135 
5136       push_temp_slots ();
5137 
5138       /* If the source is itself a return value, it still is in a pseudo at
5139 	 this point so we can move it back to the return register directly.  */
5140       if (REG_P (to_rtx)
5141 	  && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5142 	  && TREE_CODE (from) != CALL_EXPR)
5143 	temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5144       else
5145 	temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5146 
5147       /* Handle calls that return values in multiple non-contiguous locations.
5148 	 The Irix 6 ABI has examples of this.  */
5149       if (GET_CODE (to_rtx) == PARALLEL)
5150 	{
5151 	  if (GET_CODE (temp) == PARALLEL)
5152 	    emit_group_move (to_rtx, temp);
5153 	  else
5154 	    emit_group_load (to_rtx, temp, TREE_TYPE (from),
5155 			     int_size_in_bytes (TREE_TYPE (from)));
5156 	}
5157       else if (temp)
5158 	emit_move_insn (to_rtx, temp);
5159 
5160       preserve_temp_slots (to_rtx);
5161       pop_temp_slots ();
5162       return;
5163     }
5164 
5165   /* In case we are returning the contents of an object which overlaps
5166      the place the value is being stored, use a safe function when copying
5167      a value through a pointer into a structure value return block.  */
5168   if (TREE_CODE (to) == RESULT_DECL
5169       && TREE_CODE (from) == INDIRECT_REF
5170       && ADDR_SPACE_GENERIC_P
5171 	   (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5172       && refs_may_alias_p (to, from)
5173       && cfun->returns_struct
5174       && !cfun->returns_pcc_struct)
5175     {
5176       rtx from_rtx, size;
5177 
5178       push_temp_slots ();
5179       size = expr_size (from);
5180       from_rtx = expand_normal (from);
5181 
5182       emit_library_call (memmove_libfunc, LCT_NORMAL,
5183 			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5184 			 XEXP (from_rtx, 0), Pmode,
5185 			 convert_to_mode (TYPE_MODE (sizetype),
5186 					  size, TYPE_UNSIGNED (sizetype)),
5187 			 TYPE_MODE (sizetype));
5188 
5189       preserve_temp_slots (to_rtx);
5190       pop_temp_slots ();
5191       return;
5192     }
5193 
5194   /* Compute FROM and store the value in the rtx we got.  */
5195 
5196   push_temp_slots ();
5197   result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, false, to);
5198   preserve_temp_slots (result);
5199   pop_temp_slots ();
5200   return;
5201 }
5202 
5203 /* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
5204    succeeded, false otherwise.  */
5205 
5206 bool
5207 emit_storent_insn (rtx to, rtx from)
5208 {
5209   struct expand_operand ops[2];
5210   machine_mode mode = GET_MODE (to);
5211   enum insn_code code = optab_handler (storent_optab, mode);
5212 
5213   if (code == CODE_FOR_nothing)
5214     return false;
5215 
5216   create_fixed_operand (&ops[0], to);
5217   create_input_operand (&ops[1], from, mode);
5218   return maybe_expand_insn (code, 2, ops);
5219 }
5220 
5221 /* Generate code for computing expression EXP,
5222    and storing the value into TARGET.
5223 
5224    If the mode is BLKmode then we may return TARGET itself.
5225    It turns out that in BLKmode it doesn't cause a problem.
5226    because C has no operators that could combine two different
5227    assignments into the same BLKmode object with different values
5228    with no sequence point.  Will other languages need this to
5229    be more thorough?
5230 
5231    If CALL_PARAM_P is nonzero, this is a store into a call param on the
5232    stack, and block moves may need to be treated specially.
5233 
5234    If NONTEMPORAL is true, try using a nontemporal store instruction.
5235 
5236    If REVERSE is true, the store is to be done in reverse order.
5237 
5238    If BTARGET is not NULL then computed bounds of EXP are
5239    associated with BTARGET.  */
5240 
5241 rtx
5242 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5243 			bool nontemporal, bool reverse, tree btarget)
5244 {
5245   rtx temp;
5246   rtx alt_rtl = NULL_RTX;
5247   location_t loc = curr_insn_location ();
5248 
5249   if (VOID_TYPE_P (TREE_TYPE (exp)))
5250     {
5251       /* C++ can generate ?: expressions with a throw expression in one
5252 	 branch and an rvalue in the other. Here, we resolve attempts to
5253 	 store the throw expression's nonexistent result.  */
5254       gcc_assert (!call_param_p);
5255       expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5256       return NULL_RTX;
5257     }
5258   if (TREE_CODE (exp) == COMPOUND_EXPR)
5259     {
5260       /* Perform first part of compound expression, then assign from second
5261 	 part.  */
5262       expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5263 		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5264       return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5265 				     call_param_p, nontemporal, reverse,
5266 				     btarget);
5267     }
5268   else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5269     {
5270       /* For conditional expression, get safe form of the target.  Then
5271 	 test the condition, doing the appropriate assignment on either
5272 	 side.  This avoids the creation of unnecessary temporaries.
5273 	 For non-BLKmode, it is more efficient not to do this.  */
5274 
5275       rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5276 
5277       do_pending_stack_adjust ();
5278       NO_DEFER_POP;
5279       jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5280       store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5281 			      nontemporal, reverse, btarget);
5282       emit_jump_insn (targetm.gen_jump (lab2));
5283       emit_barrier ();
5284       emit_label (lab1);
5285       store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5286 			      nontemporal, reverse, btarget);
5287       emit_label (lab2);
5288       OK_DEFER_POP;
5289 
5290       return NULL_RTX;
5291     }
5292   else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5293     /* If this is a scalar in a register that is stored in a wider mode
5294        than the declared mode, compute the result into its declared mode
5295        and then convert to the wider mode.  Our value is the computed
5296        expression.  */
5297     {
5298       rtx inner_target = 0;
5299 
5300       /* We can do the conversion inside EXP, which will often result
5301 	 in some optimizations.  Do the conversion in two steps: first
5302 	 change the signedness, if needed, then the extend.  But don't
5303 	 do this if the type of EXP is a subtype of something else
5304 	 since then the conversion might involve more than just
5305 	 converting modes.  */
5306       if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5307 	  && TREE_TYPE (TREE_TYPE (exp)) == 0
5308 	  && GET_MODE_PRECISION (GET_MODE (target))
5309 	     == TYPE_PRECISION (TREE_TYPE (exp)))
5310 	{
5311 	  if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5312 					  TYPE_UNSIGNED (TREE_TYPE (exp))))
5313 	    {
5314 	      /* Some types, e.g. Fortran's logical*4, won't have a signed
5315 		 version, so use the mode instead.  */
5316 	      tree ntype
5317 		= (signed_or_unsigned_type_for
5318 		   (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5319 	      if (ntype == NULL)
5320 		ntype = lang_hooks.types.type_for_mode
5321 		  (TYPE_MODE (TREE_TYPE (exp)),
5322 		   SUBREG_PROMOTED_SIGN (target));
5323 
5324 	      exp = fold_convert_loc (loc, ntype, exp);
5325 	    }
5326 
5327 	  exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5328 				  (GET_MODE (SUBREG_REG (target)),
5329 				   SUBREG_PROMOTED_SIGN (target)),
5330 				  exp);
5331 
5332 	  inner_target = SUBREG_REG (target);
5333 	}
5334 
5335       temp = expand_expr (exp, inner_target, VOIDmode,
5336 			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5337 
5338       /* Handle bounds returned by call.  */
5339       if (TREE_CODE (exp) == CALL_EXPR)
5340 	{
5341 	  rtx bounds;
5342 	  chkp_split_slot (temp, &temp, &bounds);
5343 	  if (bounds && btarget)
5344 	    {
5345 	      gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5346 	      rtx tmp = targetm.calls.load_returned_bounds (bounds);
5347 	      chkp_set_rtl_bounds (btarget, tmp);
5348 	    }
5349 	}
5350 
5351       /* If TEMP is a VOIDmode constant, use convert_modes to make
5352 	 sure that we properly convert it.  */
5353       if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5354 	{
5355 	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5356 				temp, SUBREG_PROMOTED_SIGN (target));
5357 	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5358 			        GET_MODE (target), temp,
5359 				SUBREG_PROMOTED_SIGN (target));
5360 	}
5361 
5362       convert_move (SUBREG_REG (target), temp,
5363 		    SUBREG_PROMOTED_SIGN (target));
5364 
5365       return NULL_RTX;
5366     }
5367   else if ((TREE_CODE (exp) == STRING_CST
5368 	    || (TREE_CODE (exp) == MEM_REF
5369 		&& TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5370 		&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5371 		   == STRING_CST
5372 		&& integer_zerop (TREE_OPERAND (exp, 1))))
5373 	   && !nontemporal && !call_param_p
5374 	   && MEM_P (target))
5375     {
5376       /* Optimize initialization of an array with a STRING_CST.  */
5377       HOST_WIDE_INT exp_len, str_copy_len;
5378       rtx dest_mem;
5379       tree str = TREE_CODE (exp) == STRING_CST
5380 		 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5381 
5382       exp_len = int_expr_size (exp);
5383       if (exp_len <= 0)
5384 	goto normal_expr;
5385 
5386       if (TREE_STRING_LENGTH (str) <= 0)
5387 	goto normal_expr;
5388 
5389       str_copy_len = strlen (TREE_STRING_POINTER (str));
5390       if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5391 	goto normal_expr;
5392 
5393       str_copy_len = TREE_STRING_LENGTH (str);
5394       if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5395 	  && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5396 	{
5397 	  str_copy_len += STORE_MAX_PIECES - 1;
5398 	  str_copy_len &= ~(STORE_MAX_PIECES - 1);
5399 	}
5400       str_copy_len = MIN (str_copy_len, exp_len);
5401       if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5402 				CONST_CAST (char *, TREE_STRING_POINTER (str)),
5403 				MEM_ALIGN (target), false))
5404 	goto normal_expr;
5405 
5406       dest_mem = target;
5407 
5408       dest_mem = store_by_pieces (dest_mem,
5409 				  str_copy_len, builtin_strncpy_read_str,
5410 				  CONST_CAST (char *,
5411 					      TREE_STRING_POINTER (str)),
5412 				  MEM_ALIGN (target), false,
5413 				  exp_len > str_copy_len ? 1 : 0);
5414       if (exp_len > str_copy_len)
5415 	clear_storage (adjust_address (dest_mem, BLKmode, 0),
5416 		       GEN_INT (exp_len - str_copy_len),
5417 		       BLOCK_OP_NORMAL);
5418       return NULL_RTX;
5419     }
5420   else
5421     {
5422       rtx tmp_target;
5423 
5424   normal_expr:
5425       /* If we want to use a nontemporal or a reverse order store, force the
5426 	 value into a register first.  */
5427       tmp_target = nontemporal || reverse ? NULL_RTX : target;
5428       temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5429 			       (call_param_p
5430 				? EXPAND_STACK_PARM : EXPAND_NORMAL),
5431 			       &alt_rtl, false);
5432 
5433       /* Handle bounds returned by call.  */
5434       if (TREE_CODE (exp) == CALL_EXPR)
5435 	{
5436 	  rtx bounds;
5437 	  chkp_split_slot (temp, &temp, &bounds);
5438 	  if (bounds && btarget)
5439 	    {
5440 	      gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5441 	      rtx tmp = targetm.calls.load_returned_bounds (bounds);
5442 	      chkp_set_rtl_bounds (btarget, tmp);
5443 	    }
5444 	}
5445     }
5446 
5447   /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5448      the same as that of TARGET, adjust the constant.  This is needed, for
5449      example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5450      only a word-sized value.  */
5451   if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5452       && TREE_CODE (exp) != ERROR_MARK
5453       && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5454     temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5455 			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5456 
5457   /* If value was not generated in the target, store it there.
5458      Convert the value to TARGET's type first if necessary and emit the
5459      pending incrementations that have been queued when expanding EXP.
5460      Note that we cannot emit the whole queue blindly because this will
5461      effectively disable the POST_INC optimization later.
5462 
5463      If TEMP and TARGET compare equal according to rtx_equal_p, but
5464      one or both of them are volatile memory refs, we have to distinguish
5465      two cases:
5466      - expand_expr has used TARGET.  In this case, we must not generate
5467        another copy.  This can be detected by TARGET being equal according
5468        to == .
5469      - expand_expr has not used TARGET - that means that the source just
5470        happens to have the same RTX form.  Since temp will have been created
5471        by expand_expr, it will compare unequal according to == .
5472        We must generate a copy in this case, to reach the correct number
5473        of volatile memory references.  */
5474 
5475   if ((! rtx_equal_p (temp, target)
5476        || (temp != target && (side_effects_p (temp)
5477 			      || side_effects_p (target))))
5478       && TREE_CODE (exp) != ERROR_MARK
5479       /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5480 	 but TARGET is not valid memory reference, TEMP will differ
5481 	 from TARGET although it is really the same location.  */
5482       && !(alt_rtl
5483 	   && rtx_equal_p (alt_rtl, target)
5484 	   && !side_effects_p (alt_rtl)
5485 	   && !side_effects_p (target))
5486       /* If there's nothing to copy, don't bother.  Don't call
5487 	 expr_size unless necessary, because some front-ends (C++)
5488 	 expr_size-hook must not be given objects that are not
5489 	 supposed to be bit-copied or bit-initialized.  */
5490       && expr_size (exp) != const0_rtx)
5491     {
5492       if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5493 	{
5494 	  if (GET_MODE (target) == BLKmode)
5495 	    {
5496 	      /* Handle calls that return BLKmode values in registers.  */
5497 	      if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5498 		copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5499 	      else
5500 		store_bit_field (target,
5501 				 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5502 				 0, 0, 0, GET_MODE (temp), temp, reverse);
5503 	    }
5504 	  else
5505 	    convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5506 	}
5507 
5508       else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5509 	{
5510 	  /* Handle copying a string constant into an array.  The string
5511 	     constant may be shorter than the array.  So copy just the string's
5512 	     actual length, and clear the rest.  First get the size of the data
5513 	     type of the string, which is actually the size of the target.  */
5514 	  rtx size = expr_size (exp);
5515 
5516 	  if (CONST_INT_P (size)
5517 	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
5518 	    emit_block_move (target, temp, size,
5519 			     (call_param_p
5520 			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5521 	  else
5522 	    {
5523 	      machine_mode pointer_mode
5524 		= targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5525 	      machine_mode address_mode = get_address_mode (target);
5526 
5527 	      /* Compute the size of the data to copy from the string.  */
5528 	      tree copy_size
5529 		= size_binop_loc (loc, MIN_EXPR,
5530 				  make_tree (sizetype, size),
5531 				  size_int (TREE_STRING_LENGTH (exp)));
5532 	      rtx copy_size_rtx
5533 		= expand_expr (copy_size, NULL_RTX, VOIDmode,
5534 			       (call_param_p
5535 				? EXPAND_STACK_PARM : EXPAND_NORMAL));
5536 	      rtx_code_label *label = 0;
5537 
5538 	      /* Copy that much.  */
5539 	      copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5540 					       TYPE_UNSIGNED (sizetype));
5541 	      emit_block_move (target, temp, copy_size_rtx,
5542 			       (call_param_p
5543 				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5544 
5545 	      /* Figure out how much is left in TARGET that we have to clear.
5546 		 Do all calculations in pointer_mode.  */
5547 	      if (CONST_INT_P (copy_size_rtx))
5548 		{
5549 		  size = plus_constant (address_mode, size,
5550 					-INTVAL (copy_size_rtx));
5551 		  target = adjust_address (target, BLKmode,
5552 					   INTVAL (copy_size_rtx));
5553 		}
5554 	      else
5555 		{
5556 		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5557 				       copy_size_rtx, NULL_RTX, 0,
5558 				       OPTAB_LIB_WIDEN);
5559 
5560 		  if (GET_MODE (copy_size_rtx) != address_mode)
5561 		    copy_size_rtx = convert_to_mode (address_mode,
5562 						     copy_size_rtx,
5563 						     TYPE_UNSIGNED (sizetype));
5564 
5565 		  target = offset_address (target, copy_size_rtx,
5566 					   highest_pow2_factor (copy_size));
5567 		  label = gen_label_rtx ();
5568 		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5569 					   GET_MODE (size), 0, label);
5570 		}
5571 
5572 	      if (size != const0_rtx)
5573 		clear_storage (target, size, BLOCK_OP_NORMAL);
5574 
5575 	      if (label)
5576 		emit_label (label);
5577 	    }
5578 	}
5579       /* Handle calls that return values in multiple non-contiguous locations.
5580 	 The Irix 6 ABI has examples of this.  */
5581       else if (GET_CODE (target) == PARALLEL)
5582 	{
5583 	  if (GET_CODE (temp) == PARALLEL)
5584 	    emit_group_move (target, temp);
5585 	  else
5586 	    emit_group_load (target, temp, TREE_TYPE (exp),
5587 			     int_size_in_bytes (TREE_TYPE (exp)));
5588 	}
5589       else if (GET_CODE (temp) == PARALLEL)
5590 	emit_group_store (target, temp, TREE_TYPE (exp),
5591 			  int_size_in_bytes (TREE_TYPE (exp)));
5592       else if (GET_MODE (temp) == BLKmode)
5593 	emit_block_move (target, temp, expr_size (exp),
5594 			 (call_param_p
5595 			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5596       /* If we emit a nontemporal store, there is nothing else to do.  */
5597       else if (nontemporal && emit_storent_insn (target, temp))
5598 	;
5599       else
5600 	{
5601 	  if (reverse)
5602 	    temp = flip_storage_order (GET_MODE (target), temp);
5603 	  temp = force_operand (temp, target);
5604 	  if (temp != target)
5605 	    emit_move_insn (target, temp);
5606 	}
5607     }
5608 
5609   return NULL_RTX;
5610 }
5611 
5612 /* Same as store_expr_with_bounds but ignoring bounds of EXP.  */
5613 rtx
5614 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal,
5615 	    bool reverse)
5616 {
5617   return store_expr_with_bounds (exp, target, call_param_p, nontemporal,
5618 				 reverse, NULL);
5619 }
5620 
5621 /* Return true if field F of structure TYPE is a flexible array.  */
5622 
5623 static bool
5624 flexible_array_member_p (const_tree f, const_tree type)
5625 {
5626   const_tree tf;
5627 
5628   tf = TREE_TYPE (f);
5629   return (DECL_CHAIN (f) == NULL
5630 	  && TREE_CODE (tf) == ARRAY_TYPE
5631 	  && TYPE_DOMAIN (tf)
5632 	  && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5633 	  && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5634 	  && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5635 	  && int_size_in_bytes (type) >= 0);
5636 }
5637 
5638 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5639    must have in order for it to completely initialize a value of type TYPE.
5640    Return -1 if the number isn't known.
5641 
5642    If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */
5643 
5644 static HOST_WIDE_INT
5645 count_type_elements (const_tree type, bool for_ctor_p)
5646 {
5647   switch (TREE_CODE (type))
5648     {
5649     case ARRAY_TYPE:
5650       {
5651 	tree nelts;
5652 
5653 	nelts = array_type_nelts (type);
5654 	if (nelts && tree_fits_uhwi_p (nelts))
5655 	  {
5656 	    unsigned HOST_WIDE_INT n;
5657 
5658 	    n = tree_to_uhwi (nelts) + 1;
5659 	    if (n == 0 || for_ctor_p)
5660 	      return n;
5661 	    else
5662 	      return n * count_type_elements (TREE_TYPE (type), false);
5663 	  }
5664 	return for_ctor_p ? -1 : 1;
5665       }
5666 
5667     case RECORD_TYPE:
5668       {
5669 	unsigned HOST_WIDE_INT n;
5670 	tree f;
5671 
5672 	n = 0;
5673 	for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5674 	  if (TREE_CODE (f) == FIELD_DECL)
5675 	    {
5676 	      if (!for_ctor_p)
5677 		n += count_type_elements (TREE_TYPE (f), false);
5678 	      else if (!flexible_array_member_p (f, type))
5679 		/* Don't count flexible arrays, which are not supposed
5680 		   to be initialized.  */
5681 		n += 1;
5682 	    }
5683 
5684 	return n;
5685       }
5686 
5687     case UNION_TYPE:
5688     case QUAL_UNION_TYPE:
5689       {
5690 	tree f;
5691 	HOST_WIDE_INT n, m;
5692 
5693 	gcc_assert (!for_ctor_p);
5694 	/* Estimate the number of scalars in each field and pick the
5695 	   maximum.  Other estimates would do instead; the idea is simply
5696 	   to make sure that the estimate is not sensitive to the ordering
5697 	   of the fields.  */
5698 	n = 1;
5699 	for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5700 	  if (TREE_CODE (f) == FIELD_DECL)
5701 	    {
5702 	      m = count_type_elements (TREE_TYPE (f), false);
5703 	      /* If the field doesn't span the whole union, add an extra
5704 		 scalar for the rest.  */
5705 	      if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5706 				    TYPE_SIZE (type)) != 1)
5707 		m++;
5708 	      if (n < m)
5709 		n = m;
5710 	    }
5711 	return n;
5712       }
5713 
5714     case COMPLEX_TYPE:
5715       return 2;
5716 
5717     case VECTOR_TYPE:
5718       return TYPE_VECTOR_SUBPARTS (type);
5719 
5720     case INTEGER_TYPE:
5721     case REAL_TYPE:
5722     case FIXED_POINT_TYPE:
5723     case ENUMERAL_TYPE:
5724     case BOOLEAN_TYPE:
5725     case POINTER_TYPE:
5726     case OFFSET_TYPE:
5727     case REFERENCE_TYPE:
5728     case NULLPTR_TYPE:
5729       return 1;
5730 
5731     case ERROR_MARK:
5732       return 0;
5733 
5734     case VOID_TYPE:
5735     case METHOD_TYPE:
5736     case FUNCTION_TYPE:
5737     case LANG_TYPE:
5738     default:
5739       gcc_unreachable ();
5740     }
5741 }
5742 
5743 /* Helper for categorize_ctor_elements.  Identical interface.  */
5744 
5745 static bool
5746 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5747 			    HOST_WIDE_INT *p_init_elts, bool *p_complete)
5748 {
5749   unsigned HOST_WIDE_INT idx;
5750   HOST_WIDE_INT nz_elts, init_elts, num_fields;
5751   tree value, purpose, elt_type;
5752 
5753   /* Whether CTOR is a valid constant initializer, in accordance with what
5754      initializer_constant_valid_p does.  If inferred from the constructor
5755      elements, true until proven otherwise.  */
5756   bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5757   bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5758 
5759   nz_elts = 0;
5760   init_elts = 0;
5761   num_fields = 0;
5762   elt_type = NULL_TREE;
5763 
5764   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5765     {
5766       HOST_WIDE_INT mult = 1;
5767 
5768       if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5769 	{
5770 	  tree lo_index = TREE_OPERAND (purpose, 0);
5771 	  tree hi_index = TREE_OPERAND (purpose, 1);
5772 
5773 	  if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5774 	    mult = (tree_to_uhwi (hi_index)
5775 		    - tree_to_uhwi (lo_index) + 1);
5776 	}
5777       num_fields += mult;
5778       elt_type = TREE_TYPE (value);
5779 
5780       switch (TREE_CODE (value))
5781 	{
5782 	case CONSTRUCTOR:
5783 	  {
5784 	    HOST_WIDE_INT nz = 0, ic = 0;
5785 
5786 	    bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5787 							   p_complete);
5788 
5789 	    nz_elts += mult * nz;
5790  	    init_elts += mult * ic;
5791 
5792 	    if (const_from_elts_p && const_p)
5793 	      const_p = const_elt_p;
5794 	  }
5795 	  break;
5796 
5797 	case INTEGER_CST:
5798 	case REAL_CST:
5799 	case FIXED_CST:
5800 	  if (!initializer_zerop (value))
5801 	    nz_elts += mult;
5802 	  init_elts += mult;
5803 	  break;
5804 
5805 	case STRING_CST:
5806 	  nz_elts += mult * TREE_STRING_LENGTH (value);
5807 	  init_elts += mult * TREE_STRING_LENGTH (value);
5808 	  break;
5809 
5810 	case COMPLEX_CST:
5811 	  if (!initializer_zerop (TREE_REALPART (value)))
5812 	    nz_elts += mult;
5813 	  if (!initializer_zerop (TREE_IMAGPART (value)))
5814 	    nz_elts += mult;
5815 	  init_elts += mult;
5816 	  break;
5817 
5818 	case VECTOR_CST:
5819 	  {
5820 	    unsigned i;
5821 	    for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5822 	      {
5823 		tree v = VECTOR_CST_ELT (value, i);
5824 		if (!initializer_zerop (v))
5825 		  nz_elts += mult;
5826 		init_elts += mult;
5827 	      }
5828 	  }
5829 	  break;
5830 
5831 	default:
5832 	  {
5833 	    HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5834 	    nz_elts += mult * tc;
5835 	    init_elts += mult * tc;
5836 
5837 	    if (const_from_elts_p && const_p)
5838 	      const_p
5839 		= initializer_constant_valid_p (value,
5840 						elt_type,
5841 						TYPE_REVERSE_STORAGE_ORDER
5842 						(TREE_TYPE (ctor)))
5843 		  != NULL_TREE;
5844 	  }
5845 	  break;
5846 	}
5847     }
5848 
5849   if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5850 						num_fields, elt_type))
5851     *p_complete = false;
5852 
5853   *p_nz_elts += nz_elts;
5854   *p_init_elts += init_elts;
5855 
5856   return const_p;
5857 }
5858 
5859 /* Examine CTOR to discover:
5860    * how many scalar fields are set to nonzero values,
5861      and place it in *P_NZ_ELTS;
5862    * how many scalar fields in total are in CTOR,
5863      and place it in *P_ELT_COUNT.
5864    * whether the constructor is complete -- in the sense that every
5865      meaningful byte is explicitly given a value --
5866      and place it in *P_COMPLETE.
5867 
5868    Return whether or not CTOR is a valid static constant initializer, the same
5869    as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */
5870 
5871 bool
5872 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5873 			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
5874 {
5875   *p_nz_elts = 0;
5876   *p_init_elts = 0;
5877   *p_complete = true;
5878 
5879   return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5880 }
5881 
5882 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5883    of which had type LAST_TYPE.  Each element was itself a complete
5884    initializer, in the sense that every meaningful byte was explicitly
5885    given a value.  Return true if the same is true for the constructor
5886    as a whole.  */
5887 
5888 bool
5889 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5890 			  const_tree last_type)
5891 {
5892   if (TREE_CODE (type) == UNION_TYPE
5893       || TREE_CODE (type) == QUAL_UNION_TYPE)
5894     {
5895       if (num_elts == 0)
5896 	return false;
5897 
5898       gcc_assert (num_elts == 1 && last_type);
5899 
5900       /* ??? We could look at each element of the union, and find the
5901 	 largest element.  Which would avoid comparing the size of the
5902 	 initialized element against any tail padding in the union.
5903 	 Doesn't seem worth the effort...  */
5904       return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5905     }
5906 
5907   return count_type_elements (type, true) == num_elts;
5908 }
5909 
5910 /* Return 1 if EXP contains mostly (3/4)  zeros.  */
5911 
5912 static int
5913 mostly_zeros_p (const_tree exp)
5914 {
5915   if (TREE_CODE (exp) == CONSTRUCTOR)
5916     {
5917       HOST_WIDE_INT nz_elts, init_elts;
5918       bool complete_p;
5919 
5920       categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5921       return !complete_p || nz_elts < init_elts / 4;
5922     }
5923 
5924   return initializer_zerop (exp);
5925 }
5926 
5927 /* Return 1 if EXP contains all zeros.  */
5928 
5929 static int
5930 all_zeros_p (const_tree exp)
5931 {
5932   if (TREE_CODE (exp) == CONSTRUCTOR)
5933     {
5934       HOST_WIDE_INT nz_elts, init_elts;
5935       bool complete_p;
5936 
5937       categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5938       return nz_elts == 0;
5939     }
5940 
5941   return initializer_zerop (exp);
5942 }
5943 
5944 /* Helper function for store_constructor.
5945    TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5946    CLEARED is as for store_constructor.
5947    ALIAS_SET is the alias set to use for any stores.
5948    If REVERSE is true, the store is to be done in reverse order.
5949 
5950    This provides a recursive shortcut back to store_constructor when it isn't
5951    necessary to go through store_field.  This is so that we can pass through
5952    the cleared field to let store_constructor know that we may not have to
5953    clear a substructure if the outer structure has already been cleared.  */
5954 
5955 static void
5956 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5957 			 HOST_WIDE_INT bitpos,
5958 			 unsigned HOST_WIDE_INT bitregion_start,
5959 			 unsigned HOST_WIDE_INT bitregion_end,
5960 			 machine_mode mode,
5961 			 tree exp, int cleared,
5962 			 alias_set_type alias_set, bool reverse)
5963 {
5964   if (TREE_CODE (exp) == CONSTRUCTOR
5965       /* We can only call store_constructor recursively if the size and
5966 	 bit position are on a byte boundary.  */
5967       && bitpos % BITS_PER_UNIT == 0
5968       && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5969       /* If we have a nonzero bitpos for a register target, then we just
5970 	 let store_field do the bitfield handling.  This is unlikely to
5971 	 generate unnecessary clear instructions anyways.  */
5972       && (bitpos == 0 || MEM_P (target)))
5973     {
5974       if (MEM_P (target))
5975 	target
5976 	  = adjust_address (target,
5977 			    GET_MODE (target) == BLKmode
5978 			    || 0 != (bitpos
5979 				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
5980 			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5981 
5982 
5983       /* Update the alias set, if required.  */
5984       if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5985 	  && MEM_ALIAS_SET (target) != 0)
5986 	{
5987 	  target = copy_rtx (target);
5988 	  set_mem_alias_set (target, alias_set);
5989 	}
5990 
5991       store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT,
5992 			 reverse);
5993     }
5994   else
5995     store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode,
5996 		 exp, alias_set, false, reverse);
5997 }
5998 
5999 
6000 /* Returns the number of FIELD_DECLs in TYPE.  */
6001 
6002 static int
6003 fields_length (const_tree type)
6004 {
6005   tree t = TYPE_FIELDS (type);
6006   int count = 0;
6007 
6008   for (; t; t = DECL_CHAIN (t))
6009     if (TREE_CODE (t) == FIELD_DECL)
6010       ++count;
6011 
6012   return count;
6013 }
6014 
6015 
6016 /* Store the value of constructor EXP into the rtx TARGET.
6017    TARGET is either a REG or a MEM; we know it cannot conflict, since
6018    safe_from_p has been called.
6019    CLEARED is true if TARGET is known to have been zero'd.
6020    SIZE is the number of bytes of TARGET we are allowed to modify: this
6021    may not be the same as the size of EXP if we are assigning to a field
6022    which has been packed to exclude padding bits.
6023    If REVERSE is true, the store is to be done in reverse order.  */
6024 
6025 static void
6026 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size,
6027 		   bool reverse)
6028 {
6029   tree type = TREE_TYPE (exp);
6030   HOST_WIDE_INT exp_size = int_size_in_bytes (type);
6031   HOST_WIDE_INT bitregion_end = size > 0 ? size * BITS_PER_UNIT - 1 : 0;
6032 
6033   switch (TREE_CODE (type))
6034     {
6035     case RECORD_TYPE:
6036     case UNION_TYPE:
6037     case QUAL_UNION_TYPE:
6038       {
6039 	unsigned HOST_WIDE_INT idx;
6040 	tree field, value;
6041 
6042 	/* The storage order is specified for every aggregate type.  */
6043 	reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6044 
6045 	/* If size is zero or the target is already cleared, do nothing.  */
6046 	if (size == 0 || cleared)
6047 	  cleared = 1;
6048 	/* We either clear the aggregate or indicate the value is dead.  */
6049 	else if ((TREE_CODE (type) == UNION_TYPE
6050 		  || TREE_CODE (type) == QUAL_UNION_TYPE)
6051 		 && ! CONSTRUCTOR_ELTS (exp))
6052 	  /* If the constructor is empty, clear the union.  */
6053 	  {
6054 	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
6055 	    cleared = 1;
6056 	  }
6057 
6058 	/* If we are building a static constructor into a register,
6059 	   set the initial value as zero so we can fold the value into
6060 	   a constant.  But if more than one register is involved,
6061 	   this probably loses.  */
6062 	else if (REG_P (target) && TREE_STATIC (exp)
6063 		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
6064 	  {
6065 	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6066 	    cleared = 1;
6067 	  }
6068 
6069         /* If the constructor has fewer fields than the structure or
6070 	   if we are initializing the structure to mostly zeros, clear
6071 	   the whole structure first.  Don't do this if TARGET is a
6072 	   register whose mode size isn't equal to SIZE since
6073 	   clear_storage can't handle this case.  */
6074 	else if (size > 0
6075 		 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
6076 		      != fields_length (type))
6077 		     || mostly_zeros_p (exp))
6078 		 && (!REG_P (target)
6079 		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
6080 			 == size)))
6081 	  {
6082 	    clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6083 	    cleared = 1;
6084 	  }
6085 
6086 	if (REG_P (target) && !cleared)
6087 	  emit_clobber (target);
6088 
6089 	/* Store each element of the constructor into the
6090 	   corresponding field of TARGET.  */
6091 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6092 	  {
6093 	    machine_mode mode;
6094 	    HOST_WIDE_INT bitsize;
6095 	    HOST_WIDE_INT bitpos = 0;
6096 	    tree offset;
6097 	    rtx to_rtx = target;
6098 
6099 	    /* Just ignore missing fields.  We cleared the whole
6100 	       structure, above, if any fields are missing.  */
6101 	    if (field == 0)
6102 	      continue;
6103 
6104 	    if (cleared && initializer_zerop (value))
6105 	      continue;
6106 
6107 	    if (tree_fits_uhwi_p (DECL_SIZE (field)))
6108 	      bitsize = tree_to_uhwi (DECL_SIZE (field));
6109 	    else
6110 	      gcc_unreachable ();
6111 
6112 	    mode = DECL_MODE (field);
6113 	    if (DECL_BIT_FIELD (field))
6114 	      mode = VOIDmode;
6115 
6116 	    offset = DECL_FIELD_OFFSET (field);
6117 	    if (tree_fits_shwi_p (offset)
6118 		&& tree_fits_shwi_p (bit_position (field)))
6119 	      {
6120 		bitpos = int_bit_position (field);
6121 		offset = NULL_TREE;
6122 	      }
6123 	    else
6124 	      gcc_unreachable ();
6125 
6126 	    /* If this initializes a field that is smaller than a
6127 	       word, at the start of a word, try to widen it to a full
6128 	       word.  This special case allows us to output C++ member
6129 	       function initializations in a form that the optimizers
6130 	       can understand.  */
6131 	    if (WORD_REGISTER_OPERATIONS
6132 		&& REG_P (target)
6133 		&& bitsize < BITS_PER_WORD
6134 		&& bitpos % BITS_PER_WORD == 0
6135 		&& GET_MODE_CLASS (mode) == MODE_INT
6136 		&& TREE_CODE (value) == INTEGER_CST
6137 		&& exp_size >= 0
6138 		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6139 	      {
6140 		tree type = TREE_TYPE (value);
6141 
6142 		if (TYPE_PRECISION (type) < BITS_PER_WORD)
6143 		  {
6144 		    type = lang_hooks.types.type_for_mode
6145 		      (word_mode, TYPE_UNSIGNED (type));
6146 		    value = fold_convert (type, value);
6147 		    /* Make sure the bits beyond the original bitsize are zero
6148 		       so that we can correctly avoid extra zeroing stores in
6149 		       later constructor elements.  */
6150 		    tree bitsize_mask
6151 		      = wide_int_to_tree (type, wi::mask (bitsize, false,
6152 							   BITS_PER_WORD));
6153 		    value = fold_build2 (BIT_AND_EXPR, type, value, bitsize_mask);
6154 		  }
6155 
6156 		if (BYTES_BIG_ENDIAN)
6157 		  value
6158 		   = fold_build2 (LSHIFT_EXPR, type, value,
6159 				   build_int_cst (type,
6160 						  BITS_PER_WORD - bitsize));
6161 		bitsize = BITS_PER_WORD;
6162 		mode = word_mode;
6163 	      }
6164 
6165 	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6166 		&& DECL_NONADDRESSABLE_P (field))
6167 	      {
6168 		to_rtx = copy_rtx (to_rtx);
6169 		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6170 	      }
6171 
6172 	    store_constructor_field (to_rtx, bitsize, bitpos,
6173 				     0, bitregion_end, mode,
6174 				     value, cleared,
6175 				     get_alias_set (TREE_TYPE (field)),
6176 				     reverse);
6177 	  }
6178 	break;
6179       }
6180     case ARRAY_TYPE:
6181       {
6182 	tree value, index;
6183 	unsigned HOST_WIDE_INT i;
6184 	int need_to_clear;
6185 	tree domain;
6186 	tree elttype = TREE_TYPE (type);
6187 	int const_bounds_p;
6188 	HOST_WIDE_INT minelt = 0;
6189 	HOST_WIDE_INT maxelt = 0;
6190 
6191 	/* The storage order is specified for every aggregate type.  */
6192 	reverse = TYPE_REVERSE_STORAGE_ORDER (type);
6193 
6194 	domain = TYPE_DOMAIN (type);
6195 	const_bounds_p = (TYPE_MIN_VALUE (domain)
6196 			  && TYPE_MAX_VALUE (domain)
6197 			  && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6198 			  && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6199 
6200 	/* If we have constant bounds for the range of the type, get them.  */
6201 	if (const_bounds_p)
6202 	  {
6203 	    minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6204 	    maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6205 	  }
6206 
6207 	/* If the constructor has fewer elements than the array, clear
6208            the whole array first.  Similarly if this is static
6209            constructor of a non-BLKmode object.  */
6210 	if (cleared)
6211 	  need_to_clear = 0;
6212 	else if (REG_P (target) && TREE_STATIC (exp))
6213 	  need_to_clear = 1;
6214 	else
6215 	  {
6216 	    unsigned HOST_WIDE_INT idx;
6217 	    tree index, value;
6218 	    HOST_WIDE_INT count = 0, zero_count = 0;
6219 	    need_to_clear = ! const_bounds_p;
6220 
6221 	    /* This loop is a more accurate version of the loop in
6222 	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
6223 	       is also needed to check for missing elements.  */
6224 	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6225 	      {
6226 		HOST_WIDE_INT this_node_count;
6227 
6228 		if (need_to_clear)
6229 		  break;
6230 
6231 		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6232 		  {
6233 		    tree lo_index = TREE_OPERAND (index, 0);
6234 		    tree hi_index = TREE_OPERAND (index, 1);
6235 
6236 		    if (! tree_fits_uhwi_p (lo_index)
6237 			|| ! tree_fits_uhwi_p (hi_index))
6238 		      {
6239 			need_to_clear = 1;
6240 			break;
6241 		      }
6242 
6243 		    this_node_count = (tree_to_uhwi (hi_index)
6244 				       - tree_to_uhwi (lo_index) + 1);
6245 		  }
6246 		else
6247 		  this_node_count = 1;
6248 
6249 		count += this_node_count;
6250 		if (mostly_zeros_p (value))
6251 		  zero_count += this_node_count;
6252 	      }
6253 
6254 	    /* Clear the entire array first if there are any missing
6255 	       elements, or if the incidence of zero elements is >=
6256 	       75%.  */
6257 	    if (! need_to_clear
6258 		&& (count < maxelt - minelt + 1
6259 		    || 4 * zero_count >= 3 * count))
6260 	      need_to_clear = 1;
6261 	  }
6262 
6263 	if (need_to_clear && size > 0)
6264 	  {
6265 	    if (REG_P (target))
6266 	      emit_move_insn (target,  CONST0_RTX (GET_MODE (target)));
6267 	    else
6268 	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6269 	    cleared = 1;
6270 	  }
6271 
6272 	if (!cleared && REG_P (target))
6273 	  /* Inform later passes that the old value is dead.  */
6274 	  emit_clobber (target);
6275 
6276 	/* Store each element of the constructor into the
6277 	   corresponding element of TARGET, determined by counting the
6278 	   elements.  */
6279 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6280 	  {
6281 	    machine_mode mode;
6282 	    HOST_WIDE_INT bitsize;
6283 	    HOST_WIDE_INT bitpos;
6284 	    rtx xtarget = target;
6285 
6286 	    if (cleared && initializer_zerop (value))
6287 	      continue;
6288 
6289 	    mode = TYPE_MODE (elttype);
6290 	    if (mode == BLKmode)
6291 	      bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6292 			 ? tree_to_uhwi (TYPE_SIZE (elttype))
6293 			 : -1);
6294 	    else
6295 	      bitsize = GET_MODE_BITSIZE (mode);
6296 
6297 	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6298 	      {
6299 		tree lo_index = TREE_OPERAND (index, 0);
6300 		tree hi_index = TREE_OPERAND (index, 1);
6301 		rtx index_r, pos_rtx;
6302 		HOST_WIDE_INT lo, hi, count;
6303 		tree position;
6304 
6305 		/* If the range is constant and "small", unroll the loop.  */
6306 		if (const_bounds_p
6307 		    && tree_fits_shwi_p (lo_index)
6308 		    && tree_fits_shwi_p (hi_index)
6309 		    && (lo = tree_to_shwi (lo_index),
6310 			hi = tree_to_shwi (hi_index),
6311 			count = hi - lo + 1,
6312 			(!MEM_P (target)
6313 			 || count <= 2
6314 			 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6315 			     && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6316 				 <= 40 * 8)))))
6317 		  {
6318 		    lo -= minelt;  hi -= minelt;
6319 		    for (; lo <= hi; lo++)
6320 		      {
6321 			bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6322 
6323 			if (MEM_P (target)
6324 			    && !MEM_KEEP_ALIAS_SET_P (target)
6325 			    && TREE_CODE (type) == ARRAY_TYPE
6326 			    && TYPE_NONALIASED_COMPONENT (type))
6327 			  {
6328 			    target = copy_rtx (target);
6329 			    MEM_KEEP_ALIAS_SET_P (target) = 1;
6330 			  }
6331 
6332 			store_constructor_field
6333 			  (target, bitsize, bitpos, 0, bitregion_end,
6334 			   mode, value, cleared,
6335 			   get_alias_set (elttype), reverse);
6336 		      }
6337 		  }
6338 		else
6339 		  {
6340 		    rtx_code_label *loop_start = gen_label_rtx ();
6341 		    rtx_code_label *loop_end = gen_label_rtx ();
6342 		    tree exit_cond;
6343 
6344 		    expand_normal (hi_index);
6345 
6346 		    index = build_decl (EXPR_LOCATION (exp),
6347 					VAR_DECL, NULL_TREE, domain);
6348 		    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6349 		    SET_DECL_RTL (index, index_r);
6350 		    store_expr (lo_index, index_r, 0, false, reverse);
6351 
6352 		    /* Build the head of the loop.  */
6353 		    do_pending_stack_adjust ();
6354 		    emit_label (loop_start);
6355 
6356 		    /* Assign value to element index.  */
6357 		    position =
6358 		      fold_convert (ssizetype,
6359 				    fold_build2 (MINUS_EXPR,
6360 						 TREE_TYPE (index),
6361 						 index,
6362 						 TYPE_MIN_VALUE (domain)));
6363 
6364 		    position =
6365 			size_binop (MULT_EXPR, position,
6366 				    fold_convert (ssizetype,
6367 						  TYPE_SIZE_UNIT (elttype)));
6368 
6369 		    pos_rtx = expand_normal (position);
6370 		    xtarget = offset_address (target, pos_rtx,
6371 					      highest_pow2_factor (position));
6372 		    xtarget = adjust_address (xtarget, mode, 0);
6373 		    if (TREE_CODE (value) == CONSTRUCTOR)
6374 		      store_constructor (value, xtarget, cleared,
6375 					 bitsize / BITS_PER_UNIT, reverse);
6376 		    else
6377 		      store_expr (value, xtarget, 0, false, reverse);
6378 
6379 		    /* Generate a conditional jump to exit the loop.  */
6380 		    exit_cond = build2 (LT_EXPR, integer_type_node,
6381 					index, hi_index);
6382 		    jumpif (exit_cond, loop_end, -1);
6383 
6384 		    /* Update the loop counter, and jump to the head of
6385 		       the loop.  */
6386 		    expand_assignment (index,
6387 				       build2 (PLUS_EXPR, TREE_TYPE (index),
6388 					       index, integer_one_node),
6389 				       false);
6390 
6391 		    emit_jump (loop_start);
6392 
6393 		    /* Build the end of the loop.  */
6394 		    emit_label (loop_end);
6395 		  }
6396 	      }
6397 	    else if ((index != 0 && ! tree_fits_shwi_p (index))
6398 		     || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6399 	      {
6400 		tree position;
6401 
6402 		if (index == 0)
6403 		  index = ssize_int (1);
6404 
6405 		if (minelt)
6406 		  index = fold_convert (ssizetype,
6407 					fold_build2 (MINUS_EXPR,
6408 						     TREE_TYPE (index),
6409 						     index,
6410 						     TYPE_MIN_VALUE (domain)));
6411 
6412 		position =
6413 		  size_binop (MULT_EXPR, index,
6414 			      fold_convert (ssizetype,
6415 					    TYPE_SIZE_UNIT (elttype)));
6416 		xtarget = offset_address (target,
6417 					  expand_normal (position),
6418 					  highest_pow2_factor (position));
6419 		xtarget = adjust_address (xtarget, mode, 0);
6420 		store_expr (value, xtarget, 0, false, reverse);
6421 	      }
6422 	    else
6423 	      {
6424 		if (index != 0)
6425 		  bitpos = ((tree_to_shwi (index) - minelt)
6426 			    * tree_to_uhwi (TYPE_SIZE (elttype)));
6427 		else
6428 		  bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6429 
6430 		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6431 		    && TREE_CODE (type) == ARRAY_TYPE
6432 		    && TYPE_NONALIASED_COMPONENT (type))
6433 		  {
6434 		    target = copy_rtx (target);
6435 		    MEM_KEEP_ALIAS_SET_P (target) = 1;
6436 		  }
6437 		store_constructor_field (target, bitsize, bitpos, 0,
6438 					 bitregion_end, mode, value,
6439 					 cleared, get_alias_set (elttype),
6440 					 reverse);
6441 	      }
6442 	  }
6443 	break;
6444       }
6445 
6446     case VECTOR_TYPE:
6447       {
6448 	unsigned HOST_WIDE_INT idx;
6449 	constructor_elt *ce;
6450 	int i;
6451 	int need_to_clear;
6452 	int icode = CODE_FOR_nothing;
6453 	tree elttype = TREE_TYPE (type);
6454 	int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6455 	machine_mode eltmode = TYPE_MODE (elttype);
6456 	HOST_WIDE_INT bitsize;
6457 	HOST_WIDE_INT bitpos;
6458 	rtvec vector = NULL;
6459 	unsigned n_elts;
6460 	alias_set_type alias;
6461 
6462 	gcc_assert (eltmode != BLKmode);
6463 
6464 	n_elts = TYPE_VECTOR_SUBPARTS (type);
6465 	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6466 	  {
6467 	    machine_mode mode = GET_MODE (target);
6468 
6469 	    icode = (int) optab_handler (vec_init_optab, mode);
6470 	    /* Don't use vec_init<mode> if some elements have VECTOR_TYPE.  */
6471 	    if (icode != CODE_FOR_nothing)
6472 	      {
6473 		tree value;
6474 
6475 		FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6476 		  if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6477 		    {
6478 		      icode = CODE_FOR_nothing;
6479 		      break;
6480 		    }
6481 	      }
6482 	    if (icode != CODE_FOR_nothing)
6483 	      {
6484 		unsigned int i;
6485 
6486 		vector = rtvec_alloc (n_elts);
6487 		for (i = 0; i < n_elts; i++)
6488 		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6489 	      }
6490 	  }
6491 
6492 	/* If the constructor has fewer elements than the vector,
6493 	   clear the whole array first.  Similarly if this is static
6494 	   constructor of a non-BLKmode object.  */
6495 	if (cleared)
6496 	  need_to_clear = 0;
6497 	else if (REG_P (target) && TREE_STATIC (exp))
6498 	  need_to_clear = 1;
6499 	else
6500 	  {
6501 	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6502 	    tree value;
6503 
6504 	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6505 	      {
6506 		int n_elts_here = tree_to_uhwi
6507 		  (int_const_binop (TRUNC_DIV_EXPR,
6508 				    TYPE_SIZE (TREE_TYPE (value)),
6509 				    TYPE_SIZE (elttype)));
6510 
6511 		count += n_elts_here;
6512 		if (mostly_zeros_p (value))
6513 		  zero_count += n_elts_here;
6514 	      }
6515 
6516 	    /* Clear the entire vector first if there are any missing elements,
6517 	       or if the incidence of zero elements is >= 75%.  */
6518 	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6519 	  }
6520 
6521 	if (need_to_clear && size > 0 && !vector)
6522 	  {
6523 	    if (REG_P (target))
6524 	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6525 	    else
6526 	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6527 	    cleared = 1;
6528 	  }
6529 
6530 	/* Inform later passes that the old value is dead.  */
6531 	if (!cleared && !vector && REG_P (target))
6532 	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6533 
6534         if (MEM_P (target))
6535 	  alias = MEM_ALIAS_SET (target);
6536 	else
6537 	  alias = get_alias_set (elttype);
6538 
6539         /* Store each element of the constructor into the corresponding
6540 	   element of TARGET, determined by counting the elements.  */
6541 	for (idx = 0, i = 0;
6542 	     vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6543 	     idx++, i += bitsize / elt_size)
6544 	  {
6545 	    HOST_WIDE_INT eltpos;
6546 	    tree value = ce->value;
6547 
6548 	    bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6549 	    if (cleared && initializer_zerop (value))
6550 	      continue;
6551 
6552 	    if (ce->index)
6553 	      eltpos = tree_to_uhwi (ce->index);
6554 	    else
6555 	      eltpos = i;
6556 
6557 	    if (vector)
6558 	      {
6559 		/* vec_init<mode> should not be used if there are VECTOR_TYPE
6560 		   elements.  */
6561 		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6562 		RTVEC_ELT (vector, eltpos)
6563 		  = expand_normal (value);
6564 	      }
6565 	    else
6566 	      {
6567 		machine_mode value_mode =
6568 		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6569 		  ? TYPE_MODE (TREE_TYPE (value))
6570 		  : eltmode;
6571 		bitpos = eltpos * elt_size;
6572 		store_constructor_field (target, bitsize, bitpos, 0,
6573 					 bitregion_end, value_mode,
6574 					 value, cleared, alias, reverse);
6575 	      }
6576 	  }
6577 
6578 	if (vector)
6579 	  emit_insn (GEN_FCN (icode)
6580 		     (target,
6581 		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
6582 	break;
6583       }
6584 
6585     default:
6586       gcc_unreachable ();
6587     }
6588 }
6589 
6590 /* Store the value of EXP (an expression tree)
6591    into a subfield of TARGET which has mode MODE and occupies
6592    BITSIZE bits, starting BITPOS bits from the start of TARGET.
6593    If MODE is VOIDmode, it means that we are storing into a bit-field.
6594 
6595    BITREGION_START is bitpos of the first bitfield in this region.
6596    BITREGION_END is the bitpos of the ending bitfield in this region.
6597    These two fields are 0, if the C++ memory model does not apply,
6598    or we are not interested in keeping track of bitfield regions.
6599 
6600    Always return const0_rtx unless we have something particular to
6601    return.
6602 
6603    ALIAS_SET is the alias set for the destination.  This value will
6604    (in general) be different from that for TARGET, since TARGET is a
6605    reference to the containing structure.
6606 
6607    If NONTEMPORAL is true, try generating a nontemporal store.
6608 
6609    If REVERSE is true, the store is to be done in reverse order.  */
6610 
6611 static rtx
6612 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6613 	     unsigned HOST_WIDE_INT bitregion_start,
6614 	     unsigned HOST_WIDE_INT bitregion_end,
6615 	     machine_mode mode, tree exp,
6616 	     alias_set_type alias_set, bool nontemporal,  bool reverse)
6617 {
6618   if (TREE_CODE (exp) == ERROR_MARK)
6619     return const0_rtx;
6620 
6621   /* If we have nothing to store, do nothing unless the expression has
6622      side-effects.  Don't do that for zero sized addressable lhs of
6623      calls.  */
6624   if (bitsize == 0
6625       && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
6626 	  || TREE_CODE (exp) != CALL_EXPR))
6627     return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6628 
6629   if (GET_CODE (target) == CONCAT)
6630     {
6631       /* We're storing into a struct containing a single __complex.  */
6632 
6633       gcc_assert (!bitpos);
6634       return store_expr (exp, target, 0, nontemporal, reverse);
6635     }
6636 
6637   /* If the structure is in a register or if the component
6638      is a bit field, we cannot use addressing to access it.
6639      Use bit-field techniques or SUBREG to store in it.  */
6640 
6641   if (mode == VOIDmode
6642       || (mode != BLKmode && ! direct_store[(int) mode]
6643 	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6644 	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6645       || REG_P (target)
6646       || GET_CODE (target) == SUBREG
6647       /* If the field isn't aligned enough to store as an ordinary memref,
6648 	 store it as a bit field.  */
6649       || (mode != BLKmode
6650 	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6651 		|| bitpos % GET_MODE_ALIGNMENT (mode))
6652 	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6653 	      || (bitpos % BITS_PER_UNIT != 0)))
6654       || (bitsize >= 0 && mode != BLKmode
6655 	  && GET_MODE_BITSIZE (mode) > bitsize)
6656       /* If the RHS and field are a constant size and the size of the
6657 	 RHS isn't the same size as the bitfield, we must use bitfield
6658 	 operations.  */
6659       || (bitsize >= 0
6660 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6661 	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0
6662 	  /* Except for initialization of full bytes from a CONSTRUCTOR, which
6663 	     we will handle specially below.  */
6664 	  && !(TREE_CODE (exp) == CONSTRUCTOR
6665 	       && bitsize % BITS_PER_UNIT == 0)
6666 	  /* And except for bitwise copying of TREE_ADDRESSABLE types,
6667 	     where the FIELD_DECL has the right bitsize, but TREE_TYPE (exp)
6668 	     includes some extra padding.  store_expr / expand_expr will in
6669 	     that case call get_inner_reference that will have the bitsize
6670 	     we check here and thus the block move will not clobber the
6671 	     padding that shouldn't be clobbered.  In the future we could
6672 	     replace the TREE_ADDRESSABLE check with a check that
6673 	     get_base_address needs to live in memory.  */
6674 	  && (!TREE_ADDRESSABLE (TREE_TYPE (exp))
6675 	      || TREE_CODE (exp) != COMPONENT_REF
6676 	      || TREE_CODE (DECL_SIZE (TREE_OPERAND (exp, 1))) != INTEGER_CST
6677 	      || (bitsize % BITS_PER_UNIT != 0)
6678 	      || (bitpos % BITS_PER_UNIT != 0)
6679 	      || (compare_tree_int (DECL_SIZE (TREE_OPERAND (exp, 1)), bitsize)
6680 		  != 0)))
6681       /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6682          decl we must use bitfield operations.  */
6683       || (bitsize >= 0
6684 	  && TREE_CODE (exp) == MEM_REF
6685 	  && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6686 	  && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6687 	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6688 	  && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6689     {
6690       rtx temp;
6691       gimple *nop_def;
6692 
6693       /* If EXP is a NOP_EXPR of precision less than its mode, then that
6694 	 implies a mask operation.  If the precision is the same size as
6695 	 the field we're storing into, that mask is redundant.  This is
6696 	 particularly common with bit field assignments generated by the
6697 	 C front end.  */
6698       nop_def = get_def_for_expr (exp, NOP_EXPR);
6699       if (nop_def)
6700 	{
6701 	  tree type = TREE_TYPE (exp);
6702 	  if (INTEGRAL_TYPE_P (type)
6703 	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6704 	      && bitsize == TYPE_PRECISION (type))
6705 	    {
6706 	      tree op = gimple_assign_rhs1 (nop_def);
6707 	      type = TREE_TYPE (op);
6708 	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6709 		exp = op;
6710 	    }
6711 	}
6712 
6713       temp = expand_normal (exp);
6714 
6715       /* Handle calls that return values in multiple non-contiguous locations.
6716 	 The Irix 6 ABI has examples of this.  */
6717       if (GET_CODE (temp) == PARALLEL)
6718 	{
6719 	  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6720 	  machine_mode temp_mode = GET_MODE (temp);
6721 	  if (temp_mode == BLKmode || temp_mode == VOIDmode)
6722 	    temp_mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6723 	  rtx temp_target = gen_reg_rtx (temp_mode);
6724 	  emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6725 	  temp = temp_target;
6726 	}
6727 
6728       /* Handle calls that return BLKmode values in registers.  */
6729       else if (mode == BLKmode && REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6730 	{
6731 	  rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6732 	  copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6733 	  temp = temp_target;
6734 	}
6735 
6736       /* If the value has aggregate type and an integral mode then, if BITSIZE
6737 	 is narrower than this mode and this is for big-endian data, we first
6738 	 need to put the value into the low-order bits for store_bit_field,
6739 	 except when MODE is BLKmode and BITSIZE larger than the word size
6740 	 (see the handling of fields larger than a word in store_bit_field).
6741 	 Moreover, the field may be not aligned on a byte boundary; in this
6742 	 case, if it has reverse storage order, it needs to be accessed as a
6743 	 scalar field with reverse storage order and we must first put the
6744 	 value into target order.  */
6745       if (AGGREGATE_TYPE_P (TREE_TYPE (exp))
6746 	  && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT)
6747 	{
6748 	  HOST_WIDE_INT size = GET_MODE_BITSIZE (GET_MODE (temp));
6749 
6750 	  reverse = TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (exp));
6751 
6752 	  if (reverse)
6753 	    temp = flip_storage_order (GET_MODE (temp), temp);
6754 
6755 	  if (bitsize < size
6756 	      && reverse ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN
6757 	      && !(mode == BLKmode && bitsize > BITS_PER_WORD))
6758 	    temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6759 				 size - bitsize, NULL_RTX, 1);
6760 	}
6761 
6762       /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
6763       if (mode != VOIDmode && mode != BLKmode
6764 	  && mode != TYPE_MODE (TREE_TYPE (exp)))
6765 	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6766 
6767       /* If the mode of TEMP and TARGET is BLKmode, both must be in memory
6768 	 and BITPOS must be aligned on a byte boundary.  If so, we simply do
6769 	 a block copy.  Likewise for a BLKmode-like TARGET.  */
6770       if (GET_MODE (temp) == BLKmode
6771 	  && (GET_MODE (target) == BLKmode
6772 	      || (MEM_P (target)
6773 		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6774 		  && (bitpos % BITS_PER_UNIT) == 0
6775 		  && (bitsize % BITS_PER_UNIT) == 0)))
6776 	{
6777 	  gcc_assert (MEM_P (target) && MEM_P (temp)
6778 		      && (bitpos % BITS_PER_UNIT) == 0);
6779 
6780 	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6781 	  emit_block_move (target, temp,
6782 			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6783 				    / BITS_PER_UNIT),
6784 			   BLOCK_OP_NORMAL);
6785 
6786 	  return const0_rtx;
6787 	}
6788 
6789       /* If the mode of TEMP is still BLKmode and BITSIZE not larger than the
6790 	 word size, we need to load the value (see again store_bit_field).  */
6791       if (GET_MODE (temp) == BLKmode && bitsize <= BITS_PER_WORD)
6792 	{
6793 	  machine_mode temp_mode = smallest_mode_for_size (bitsize, MODE_INT);
6794 	  temp = extract_bit_field (temp, bitsize, 0, 1, NULL_RTX, temp_mode,
6795 				    temp_mode, false);
6796 	}
6797 
6798       /* Store the value in the bitfield.  */
6799       store_bit_field (target, bitsize, bitpos,
6800 		       bitregion_start, bitregion_end,
6801 		       mode, temp, reverse);
6802 
6803       return const0_rtx;
6804     }
6805   else
6806     {
6807       /* Now build a reference to just the desired component.  */
6808       rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6809 
6810       if (to_rtx == target)
6811 	to_rtx = copy_rtx (to_rtx);
6812 
6813       if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6814 	set_mem_alias_set (to_rtx, alias_set);
6815 
6816       /* Above we avoided using bitfield operations for storing a CONSTRUCTOR
6817 	 into a target smaller than its type; handle that case now.  */
6818       if (TREE_CODE (exp) == CONSTRUCTOR && bitsize >= 0)
6819 	{
6820 	  gcc_assert (bitsize % BITS_PER_UNIT == 0);
6821 	  store_constructor (exp, to_rtx, 0, bitsize / BITS_PER_UNIT, reverse);
6822 	  return to_rtx;
6823 	}
6824 
6825       return store_expr (exp, to_rtx, 0, nontemporal, reverse);
6826     }
6827 }
6828 
6829 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6830    an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6831    codes and find the ultimate containing object, which we return.
6832 
6833    We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6834    bit position, *PUNSIGNEDP to the signedness and *PREVERSEP to the
6835    storage order of the field.
6836    If the position of the field is variable, we store a tree
6837    giving the variable offset (in units) in *POFFSET.
6838    This offset is in addition to the bit position.
6839    If the position is not variable, we store 0 in *POFFSET.
6840 
6841    If any of the extraction expressions is volatile,
6842    we store 1 in *PVOLATILEP.  Otherwise we don't change that.
6843 
6844    If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6845    Otherwise, it is a mode that can be used to access the field.
6846 
6847    If the field describes a variable-sized object, *PMODE is set to
6848    BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
6849    this case, but the address of the object can be found.
6850 
6851    If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6852    look through nodes that serve as markers of a greater alignment than
6853    the one that can be deduced from the expression.  These nodes make it
6854    possible for front-ends to prevent temporaries from being created by
6855    the middle-end on alignment considerations.  For that purpose, the
6856    normal operating mode at high-level is to always pass FALSE so that
6857    the ultimate containing object is really returned; moreover, the
6858    associated predicate handled_component_p will always return TRUE
6859    on these nodes, thus indicating that they are essentially handled
6860    by get_inner_reference.  TRUE should only be passed when the caller
6861    is scanning the expression in order to build another representation
6862    and specifically knows how to handle these nodes; as such, this is
6863    the normal operating mode in the RTL expanders.  */
6864 
6865 tree
6866 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6867 		     HOST_WIDE_INT *pbitpos, tree *poffset,
6868 		     machine_mode *pmode, int *punsignedp,
6869 		     int *preversep, int *pvolatilep, bool keep_aligning)
6870 {
6871   tree size_tree = 0;
6872   machine_mode mode = VOIDmode;
6873   bool blkmode_bitfield = false;
6874   tree offset = size_zero_node;
6875   offset_int bit_offset = 0;
6876 
6877   /* First get the mode, signedness, storage order and size.  We do this from
6878      just the outermost expression.  */
6879   *pbitsize = -1;
6880   if (TREE_CODE (exp) == COMPONENT_REF)
6881     {
6882       tree field = TREE_OPERAND (exp, 1);
6883       size_tree = DECL_SIZE (field);
6884       if (flag_strict_volatile_bitfields > 0
6885 	  && TREE_THIS_VOLATILE (exp)
6886 	  && DECL_BIT_FIELD_TYPE (field)
6887 	  && DECL_MODE (field) != BLKmode)
6888 	/* Volatile bitfields should be accessed in the mode of the
6889 	     field's type, not the mode computed based on the bit
6890 	     size.  */
6891 	mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6892       else if (!DECL_BIT_FIELD (field))
6893 	{
6894 	  mode = DECL_MODE (field);
6895 	  /* For vector fields re-check the target flags, as DECL_MODE
6896 	     could have been set with different target flags than
6897 	     the current function has.  */
6898 	  if (mode == BLKmode
6899 	      && VECTOR_TYPE_P (TREE_TYPE (field))
6900 	      && VECTOR_MODE_P (TYPE_MODE_RAW (TREE_TYPE (field))))
6901 	    mode = TYPE_MODE (TREE_TYPE (field));
6902 	}
6903       else if (DECL_MODE (field) == BLKmode)
6904 	blkmode_bitfield = true;
6905 
6906       *punsignedp = DECL_UNSIGNED (field);
6907     }
6908   else if (TREE_CODE (exp) == BIT_FIELD_REF)
6909     {
6910       size_tree = TREE_OPERAND (exp, 1);
6911       *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6912 		     || TYPE_UNSIGNED (TREE_TYPE (exp)));
6913 
6914       /* For vector types, with the correct size of access, use the mode of
6915 	 inner type.  */
6916       if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6917 	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6918 	  && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6919         mode = TYPE_MODE (TREE_TYPE (exp));
6920     }
6921   else
6922     {
6923       mode = TYPE_MODE (TREE_TYPE (exp));
6924       *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6925 
6926       if (mode == BLKmode)
6927 	size_tree = TYPE_SIZE (TREE_TYPE (exp));
6928       else
6929 	*pbitsize = GET_MODE_BITSIZE (mode);
6930     }
6931 
6932   if (size_tree != 0)
6933     {
6934       if (! tree_fits_uhwi_p (size_tree))
6935 	mode = BLKmode, *pbitsize = -1;
6936       else
6937 	*pbitsize = tree_to_uhwi (size_tree);
6938     }
6939 
6940   *preversep = reverse_storage_order_for_component_p (exp);
6941 
6942   /* Compute cumulative bit-offset for nested component-refs and array-refs,
6943      and find the ultimate containing object.  */
6944   while (1)
6945     {
6946       switch (TREE_CODE (exp))
6947 	{
6948 	case BIT_FIELD_REF:
6949 	  bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6950 	  break;
6951 
6952 	case COMPONENT_REF:
6953 	  {
6954 	    tree field = TREE_OPERAND (exp, 1);
6955 	    tree this_offset = component_ref_field_offset (exp);
6956 
6957 	    /* If this field hasn't been filled in yet, don't go past it.
6958 	       This should only happen when folding expressions made during
6959 	       type construction.  */
6960 	    if (this_offset == 0)
6961 	      break;
6962 
6963 	    offset = size_binop (PLUS_EXPR, offset, this_offset);
6964 	    bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6965 
6966 	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
6967 	  }
6968 	  break;
6969 
6970 	case ARRAY_REF:
6971 	case ARRAY_RANGE_REF:
6972 	  {
6973 	    tree index = TREE_OPERAND (exp, 1);
6974 	    tree low_bound = array_ref_low_bound (exp);
6975 	    tree unit_size = array_ref_element_size (exp);
6976 
6977 	    /* We assume all arrays have sizes that are a multiple of a byte.
6978 	       First subtract the lower bound, if any, in the type of the
6979 	       index, then convert to sizetype and multiply by the size of
6980 	       the array element.  */
6981 	    if (! integer_zerop (low_bound))
6982 	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6983 				   index, low_bound);
6984 
6985 	    offset = size_binop (PLUS_EXPR, offset,
6986 			         size_binop (MULT_EXPR,
6987 					     fold_convert (sizetype, index),
6988 					     unit_size));
6989 	  }
6990 	  break;
6991 
6992 	case REALPART_EXPR:
6993 	  break;
6994 
6995 	case IMAGPART_EXPR:
6996 	  bit_offset += *pbitsize;
6997 	  break;
6998 
6999 	case VIEW_CONVERT_EXPR:
7000 	  if (keep_aligning && STRICT_ALIGNMENT
7001 	      && (TYPE_ALIGN (TREE_TYPE (exp))
7002 	       > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
7003 	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
7004 		  < BIGGEST_ALIGNMENT)
7005 	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
7006 		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7007 	    goto done;
7008 	  break;
7009 
7010 	case MEM_REF:
7011 	  /* Hand back the decl for MEM[&decl, off].  */
7012 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
7013 	    {
7014 	      tree off = TREE_OPERAND (exp, 1);
7015 	      if (!integer_zerop (off))
7016 		{
7017 		  offset_int boff, coff = mem_ref_offset (exp);
7018 		  boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
7019 		  bit_offset += boff;
7020 		}
7021 	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7022 	    }
7023 	  goto done;
7024 
7025 	default:
7026 	  goto done;
7027 	}
7028 
7029       /* If any reference in the chain is volatile, the effect is volatile.  */
7030       if (TREE_THIS_VOLATILE (exp))
7031 	*pvolatilep = 1;
7032 
7033       exp = TREE_OPERAND (exp, 0);
7034     }
7035  done:
7036 
7037   /* If OFFSET is constant, see if we can return the whole thing as a
7038      constant bit position.  Make sure to handle overflow during
7039      this conversion.  */
7040   if (TREE_CODE (offset) == INTEGER_CST)
7041     {
7042       offset_int tem = wi::sext (wi::to_offset (offset),
7043 				 TYPE_PRECISION (sizetype));
7044       tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
7045       tem += bit_offset;
7046       if (wi::fits_shwi_p (tem))
7047 	{
7048 	  *pbitpos = tem.to_shwi ();
7049 	  *poffset = offset = NULL_TREE;
7050 	}
7051     }
7052 
7053   /* Otherwise, split it up.  */
7054   if (offset)
7055     {
7056       /* Avoid returning a negative bitpos as this may wreak havoc later.  */
7057       if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
7058         {
7059 	  offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
7060 	  offset_int tem = bit_offset.and_not (mask);
7061 	  /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
7062 	     Subtract it to BIT_OFFSET and add it (scaled) to OFFSET.  */
7063 	  bit_offset -= tem;
7064 	  tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
7065 	  offset = size_binop (PLUS_EXPR, offset,
7066 			       wide_int_to_tree (sizetype, tem));
7067 	}
7068 
7069       *pbitpos = bit_offset.to_shwi ();
7070       *poffset = offset;
7071     }
7072 
7073   /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
7074   if (mode == VOIDmode
7075       && blkmode_bitfield
7076       && (*pbitpos % BITS_PER_UNIT) == 0
7077       && (*pbitsize % BITS_PER_UNIT) == 0)
7078     *pmode = BLKmode;
7079   else
7080     *pmode = mode;
7081 
7082   return exp;
7083 }
7084 
7085 /* Alignment in bits the TARGET of an assignment may be assumed to have.  */
7086 
7087 static unsigned HOST_WIDE_INT
7088 target_align (const_tree target)
7089 {
7090   /* We might have a chain of nested references with intermediate misaligning
7091      bitfields components, so need to recurse to find out.  */
7092 
7093   unsigned HOST_WIDE_INT this_align, outer_align;
7094 
7095   switch (TREE_CODE (target))
7096     {
7097     case BIT_FIELD_REF:
7098       return 1;
7099 
7100     case COMPONENT_REF:
7101       this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7102       outer_align = target_align (TREE_OPERAND (target, 0));
7103       return MIN (this_align, outer_align);
7104 
7105     case ARRAY_REF:
7106     case ARRAY_RANGE_REF:
7107       this_align = TYPE_ALIGN (TREE_TYPE (target));
7108       outer_align = target_align (TREE_OPERAND (target, 0));
7109       return MIN (this_align, outer_align);
7110 
7111     CASE_CONVERT:
7112     case NON_LVALUE_EXPR:
7113     case VIEW_CONVERT_EXPR:
7114       this_align = TYPE_ALIGN (TREE_TYPE (target));
7115       outer_align = target_align (TREE_OPERAND (target, 0));
7116       return MAX (this_align, outer_align);
7117 
7118     default:
7119       return TYPE_ALIGN (TREE_TYPE (target));
7120     }
7121 }
7122 
7123 
7124 /* Given an rtx VALUE that may contain additions and multiplications, return
7125    an equivalent value that just refers to a register, memory, or constant.
7126    This is done by generating instructions to perform the arithmetic and
7127    returning a pseudo-register containing the value.
7128 
7129    The returned value may be a REG, SUBREG, MEM or constant.  */
7130 
7131 rtx
7132 force_operand (rtx value, rtx target)
7133 {
7134   rtx op1, op2;
7135   /* Use subtarget as the target for operand 0 of a binary operation.  */
7136   rtx subtarget = get_subtarget (target);
7137   enum rtx_code code = GET_CODE (value);
7138 
7139   /* Check for subreg applied to an expression produced by loop optimizer.  */
7140   if (code == SUBREG
7141       && !REG_P (SUBREG_REG (value))
7142       && !MEM_P (SUBREG_REG (value)))
7143     {
7144       value
7145 	= simplify_gen_subreg (GET_MODE (value),
7146 			       force_reg (GET_MODE (SUBREG_REG (value)),
7147 					  force_operand (SUBREG_REG (value),
7148 							 NULL_RTX)),
7149 			       GET_MODE (SUBREG_REG (value)),
7150 			       SUBREG_BYTE (value));
7151       code = GET_CODE (value);
7152     }
7153 
7154   /* Check for a PIC address load.  */
7155   if ((code == PLUS || code == MINUS)
7156       && XEXP (value, 0) == pic_offset_table_rtx
7157       && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7158 	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
7159 	  || GET_CODE (XEXP (value, 1)) == CONST))
7160     {
7161       if (!subtarget)
7162 	subtarget = gen_reg_rtx (GET_MODE (value));
7163       emit_move_insn (subtarget, value);
7164       return subtarget;
7165     }
7166 
7167   if (ARITHMETIC_P (value))
7168     {
7169       op2 = XEXP (value, 1);
7170       if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7171 	subtarget = 0;
7172       if (code == MINUS && CONST_INT_P (op2))
7173 	{
7174 	  code = PLUS;
7175 	  op2 = negate_rtx (GET_MODE (value), op2);
7176 	}
7177 
7178       /* Check for an addition with OP2 a constant integer and our first
7179          operand a PLUS of a virtual register and something else.  In that
7180          case, we want to emit the sum of the virtual register and the
7181          constant first and then add the other value.  This allows virtual
7182          register instantiation to simply modify the constant rather than
7183          creating another one around this addition.  */
7184       if (code == PLUS && CONST_INT_P (op2)
7185 	  && GET_CODE (XEXP (value, 0)) == PLUS
7186 	  && REG_P (XEXP (XEXP (value, 0), 0))
7187 	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7188 	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7189 	{
7190 	  rtx temp = expand_simple_binop (GET_MODE (value), code,
7191 					  XEXP (XEXP (value, 0), 0), op2,
7192 					  subtarget, 0, OPTAB_LIB_WIDEN);
7193 	  return expand_simple_binop (GET_MODE (value), code, temp,
7194 				      force_operand (XEXP (XEXP (value,
7195 								 0), 1), 0),
7196 				      target, 0, OPTAB_LIB_WIDEN);
7197 	}
7198 
7199       op1 = force_operand (XEXP (value, 0), subtarget);
7200       op2 = force_operand (op2, NULL_RTX);
7201       switch (code)
7202 	{
7203 	case MULT:
7204 	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
7205 	case DIV:
7206 	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
7207 	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
7208 					target, 1, OPTAB_LIB_WIDEN);
7209 	  else
7210 	    return expand_divmod (0,
7211 				  FLOAT_MODE_P (GET_MODE (value))
7212 				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
7213 				  GET_MODE (value), op1, op2, target, 0);
7214 	case MOD:
7215 	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7216 				target, 0);
7217 	case UDIV:
7218 	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7219 				target, 1);
7220 	case UMOD:
7221 	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7222 				target, 1);
7223 	case ASHIFTRT:
7224 	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
7225 				      target, 0, OPTAB_LIB_WIDEN);
7226 	default:
7227 	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
7228 				      target, 1, OPTAB_LIB_WIDEN);
7229 	}
7230     }
7231   if (UNARY_P (value))
7232     {
7233       if (!target)
7234 	target = gen_reg_rtx (GET_MODE (value));
7235       op1 = force_operand (XEXP (value, 0), NULL_RTX);
7236       switch (code)
7237 	{
7238 	case ZERO_EXTEND:
7239 	case SIGN_EXTEND:
7240 	case TRUNCATE:
7241 	case FLOAT_EXTEND:
7242 	case FLOAT_TRUNCATE:
7243 	  convert_move (target, op1, code == ZERO_EXTEND);
7244 	  return target;
7245 
7246 	case FIX:
7247 	case UNSIGNED_FIX:
7248 	  expand_fix (target, op1, code == UNSIGNED_FIX);
7249 	  return target;
7250 
7251 	case FLOAT:
7252 	case UNSIGNED_FLOAT:
7253 	  expand_float (target, op1, code == UNSIGNED_FLOAT);
7254 	  return target;
7255 
7256 	default:
7257 	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7258 	}
7259     }
7260 
7261 #ifdef INSN_SCHEDULING
7262   /* On machines that have insn scheduling, we want all memory reference to be
7263      explicit, so we need to deal with such paradoxical SUBREGs.  */
7264   if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7265     value
7266       = simplify_gen_subreg (GET_MODE (value),
7267 			     force_reg (GET_MODE (SUBREG_REG (value)),
7268 					force_operand (SUBREG_REG (value),
7269 						       NULL_RTX)),
7270 			     GET_MODE (SUBREG_REG (value)),
7271 			     SUBREG_BYTE (value));
7272 #endif
7273 
7274   return value;
7275 }
7276 
7277 /* Subroutine of expand_expr: return nonzero iff there is no way that
7278    EXP can reference X, which is being modified.  TOP_P is nonzero if this
7279    call is going to be used to determine whether we need a temporary
7280    for EXP, as opposed to a recursive call to this function.
7281 
7282    It is always safe for this routine to return zero since it merely
7283    searches for optimization opportunities.  */
7284 
7285 int
7286 safe_from_p (const_rtx x, tree exp, int top_p)
7287 {
7288   rtx exp_rtl = 0;
7289   int i, nops;
7290 
7291   if (x == 0
7292       /* If EXP has varying size, we MUST use a target since we currently
7293 	 have no way of allocating temporaries of variable size
7294 	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7295 	 So we assume here that something at a higher level has prevented a
7296 	 clash.  This is somewhat bogus, but the best we can do.  Only
7297 	 do this when X is BLKmode and when we are at the top level.  */
7298       || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7299 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7300 	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7301 	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7302 	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7303 	      != INTEGER_CST)
7304 	  && GET_MODE (x) == BLKmode)
7305       /* If X is in the outgoing argument area, it is always safe.  */
7306       || (MEM_P (x)
7307 	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
7308 	      || (GET_CODE (XEXP (x, 0)) == PLUS
7309 		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7310     return 1;
7311 
7312   /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7313      find the underlying pseudo.  */
7314   if (GET_CODE (x) == SUBREG)
7315     {
7316       x = SUBREG_REG (x);
7317       if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7318 	return 0;
7319     }
7320 
7321   /* Now look at our tree code and possibly recurse.  */
7322   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7323     {
7324     case tcc_declaration:
7325       exp_rtl = DECL_RTL_IF_SET (exp);
7326       break;
7327 
7328     case tcc_constant:
7329       return 1;
7330 
7331     case tcc_exceptional:
7332       if (TREE_CODE (exp) == TREE_LIST)
7333 	{
7334 	  while (1)
7335 	    {
7336 	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7337 		return 0;
7338 	      exp = TREE_CHAIN (exp);
7339 	      if (!exp)
7340 		return 1;
7341 	      if (TREE_CODE (exp) != TREE_LIST)
7342 		return safe_from_p (x, exp, 0);
7343 	    }
7344 	}
7345       else if (TREE_CODE (exp) == CONSTRUCTOR)
7346 	{
7347 	  constructor_elt *ce;
7348 	  unsigned HOST_WIDE_INT idx;
7349 
7350 	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7351 	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7352 		|| !safe_from_p (x, ce->value, 0))
7353 	      return 0;
7354 	  return 1;
7355 	}
7356       else if (TREE_CODE (exp) == ERROR_MARK)
7357 	return 1;	/* An already-visited SAVE_EXPR? */
7358       else
7359 	return 0;
7360 
7361     case tcc_statement:
7362       /* The only case we look at here is the DECL_INITIAL inside a
7363 	 DECL_EXPR.  */
7364       return (TREE_CODE (exp) != DECL_EXPR
7365 	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7366 	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7367 	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7368 
7369     case tcc_binary:
7370     case tcc_comparison:
7371       if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7372 	return 0;
7373       /* Fall through.  */
7374 
7375     case tcc_unary:
7376       return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7377 
7378     case tcc_expression:
7379     case tcc_reference:
7380     case tcc_vl_exp:
7381       /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
7382 	 the expression.  If it is set, we conflict iff we are that rtx or
7383 	 both are in memory.  Otherwise, we check all operands of the
7384 	 expression recursively.  */
7385 
7386       switch (TREE_CODE (exp))
7387 	{
7388 	case ADDR_EXPR:
7389 	  /* If the operand is static or we are static, we can't conflict.
7390 	     Likewise if we don't conflict with the operand at all.  */
7391 	  if (staticp (TREE_OPERAND (exp, 0))
7392 	      || TREE_STATIC (exp)
7393 	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7394 	    return 1;
7395 
7396 	  /* Otherwise, the only way this can conflict is if we are taking
7397 	     the address of a DECL a that address if part of X, which is
7398 	     very rare.  */
7399 	  exp = TREE_OPERAND (exp, 0);
7400 	  if (DECL_P (exp))
7401 	    {
7402 	      if (!DECL_RTL_SET_P (exp)
7403 		  || !MEM_P (DECL_RTL (exp)))
7404 		return 0;
7405 	      else
7406 		exp_rtl = XEXP (DECL_RTL (exp), 0);
7407 	    }
7408 	  break;
7409 
7410 	case MEM_REF:
7411 	  if (MEM_P (x)
7412 	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7413 					get_alias_set (exp)))
7414 	    return 0;
7415 	  break;
7416 
7417 	case CALL_EXPR:
7418 	  /* Assume that the call will clobber all hard registers and
7419 	     all of memory.  */
7420 	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7421 	      || MEM_P (x))
7422 	    return 0;
7423 	  break;
7424 
7425 	case WITH_CLEANUP_EXPR:
7426 	case CLEANUP_POINT_EXPR:
7427 	  /* Lowered by gimplify.c.  */
7428 	  gcc_unreachable ();
7429 
7430 	case SAVE_EXPR:
7431 	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7432 
7433 	default:
7434 	  break;
7435 	}
7436 
7437       /* If we have an rtx, we do not need to scan our operands.  */
7438       if (exp_rtl)
7439 	break;
7440 
7441       nops = TREE_OPERAND_LENGTH (exp);
7442       for (i = 0; i < nops; i++)
7443 	if (TREE_OPERAND (exp, i) != 0
7444 	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7445 	  return 0;
7446 
7447       break;
7448 
7449     case tcc_type:
7450       /* Should never get a type here.  */
7451       gcc_unreachable ();
7452     }
7453 
7454   /* If we have an rtl, find any enclosed object.  Then see if we conflict
7455      with it.  */
7456   if (exp_rtl)
7457     {
7458       if (GET_CODE (exp_rtl) == SUBREG)
7459 	{
7460 	  exp_rtl = SUBREG_REG (exp_rtl);
7461 	  if (REG_P (exp_rtl)
7462 	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7463 	    return 0;
7464 	}
7465 
7466       /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
7467 	 are memory and they conflict.  */
7468       return ! (rtx_equal_p (x, exp_rtl)
7469 		|| (MEM_P (x) && MEM_P (exp_rtl)
7470 		    && true_dependence (exp_rtl, VOIDmode, x)));
7471     }
7472 
7473   /* If we reach here, it is safe.  */
7474   return 1;
7475 }
7476 
7477 
7478 /* Return the highest power of two that EXP is known to be a multiple of.
7479    This is used in updating alignment of MEMs in array references.  */
7480 
7481 unsigned HOST_WIDE_INT
7482 highest_pow2_factor (const_tree exp)
7483 {
7484   unsigned HOST_WIDE_INT ret;
7485   int trailing_zeros = tree_ctz (exp);
7486   if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7487     return BIGGEST_ALIGNMENT;
7488   ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7489   if (ret > BIGGEST_ALIGNMENT)
7490     return BIGGEST_ALIGNMENT;
7491   return ret;
7492 }
7493 
7494 /* Similar, except that the alignment requirements of TARGET are
7495    taken into account.  Assume it is at least as aligned as its
7496    type, unless it is a COMPONENT_REF in which case the layout of
7497    the structure gives the alignment.  */
7498 
7499 static unsigned HOST_WIDE_INT
7500 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7501 {
7502   unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7503   unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7504 
7505   return MAX (factor, talign);
7506 }
7507 
7508 /* Convert the tree comparison code TCODE to the rtl one where the
7509    signedness is UNSIGNEDP.  */
7510 
7511 static enum rtx_code
7512 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7513 {
7514   enum rtx_code code;
7515   switch (tcode)
7516     {
7517     case EQ_EXPR:
7518       code = EQ;
7519       break;
7520     case NE_EXPR:
7521       code = NE;
7522       break;
7523     case LT_EXPR:
7524       code = unsignedp ? LTU : LT;
7525       break;
7526     case LE_EXPR:
7527       code = unsignedp ? LEU : LE;
7528       break;
7529     case GT_EXPR:
7530       code = unsignedp ? GTU : GT;
7531       break;
7532     case GE_EXPR:
7533       code = unsignedp ? GEU : GE;
7534       break;
7535     case UNORDERED_EXPR:
7536       code = UNORDERED;
7537       break;
7538     case ORDERED_EXPR:
7539       code = ORDERED;
7540       break;
7541     case UNLT_EXPR:
7542       code = UNLT;
7543       break;
7544     case UNLE_EXPR:
7545       code = UNLE;
7546       break;
7547     case UNGT_EXPR:
7548       code = UNGT;
7549       break;
7550     case UNGE_EXPR:
7551       code = UNGE;
7552       break;
7553     case UNEQ_EXPR:
7554       code = UNEQ;
7555       break;
7556     case LTGT_EXPR:
7557       code = LTGT;
7558       break;
7559 
7560     default:
7561       gcc_unreachable ();
7562     }
7563   return code;
7564 }
7565 
7566 /* Subroutine of expand_expr.  Expand the two operands of a binary
7567    expression EXP0 and EXP1 placing the results in OP0 and OP1.
7568    The value may be stored in TARGET if TARGET is nonzero.  The
7569    MODIFIER argument is as documented by expand_expr.  */
7570 
7571 void
7572 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7573 		 enum expand_modifier modifier)
7574 {
7575   if (! safe_from_p (target, exp1, 1))
7576     target = 0;
7577   if (operand_equal_p (exp0, exp1, 0))
7578     {
7579       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7580       *op1 = copy_rtx (*op0);
7581     }
7582   else
7583     {
7584       /* If we need to preserve evaluation order, copy exp0 into its own
7585 	 temporary variable so that it can't be clobbered by exp1.  */
7586       if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7587 	exp0 = save_expr (exp0);
7588       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7589       *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7590     }
7591 }
7592 
7593 
7594 /* Return a MEM that contains constant EXP.  DEFER is as for
7595    output_constant_def and MODIFIER is as for expand_expr.  */
7596 
7597 static rtx
7598 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7599 {
7600   rtx mem;
7601 
7602   mem = output_constant_def (exp, defer);
7603   if (modifier != EXPAND_INITIALIZER)
7604     mem = use_anchored_address (mem);
7605   return mem;
7606 }
7607 
7608 /* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
7609    The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
7610 
7611 static rtx
7612 expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7613 		         enum expand_modifier modifier, addr_space_t as)
7614 {
7615   rtx result, subtarget;
7616   tree inner, offset;
7617   HOST_WIDE_INT bitsize, bitpos;
7618   int unsignedp, reversep, volatilep = 0;
7619   machine_mode mode1;
7620 
7621   /* If we are taking the address of a constant and are at the top level,
7622      we have to use output_constant_def since we can't call force_const_mem
7623      at top level.  */
7624   /* ??? This should be considered a front-end bug.  We should not be
7625      generating ADDR_EXPR of something that isn't an LVALUE.  The only
7626      exception here is STRING_CST.  */
7627   if (CONSTANT_CLASS_P (exp))
7628     {
7629       result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7630       if (modifier < EXPAND_SUM)
7631 	result = force_operand (result, target);
7632       return result;
7633     }
7634 
7635   /* Everything must be something allowed by is_gimple_addressable.  */
7636   switch (TREE_CODE (exp))
7637     {
7638     case INDIRECT_REF:
7639       /* This case will happen via recursion for &a->b.  */
7640       return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7641 
7642     case MEM_REF:
7643       {
7644 	tree tem = TREE_OPERAND (exp, 0);
7645 	if (!integer_zerop (TREE_OPERAND (exp, 1)))
7646 	  tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7647 	return expand_expr (tem, target, tmode, modifier);
7648       }
7649 
7650     case CONST_DECL:
7651       /* Expand the initializer like constants above.  */
7652       result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7653 					   0, modifier), 0);
7654       if (modifier < EXPAND_SUM)
7655 	result = force_operand (result, target);
7656       return result;
7657 
7658     case REALPART_EXPR:
7659       /* The real part of the complex number is always first, therefore
7660 	 the address is the same as the address of the parent object.  */
7661       offset = 0;
7662       bitpos = 0;
7663       inner = TREE_OPERAND (exp, 0);
7664       break;
7665 
7666     case IMAGPART_EXPR:
7667       /* The imaginary part of the complex number is always second.
7668 	 The expression is therefore always offset by the size of the
7669 	 scalar type.  */
7670       offset = 0;
7671       bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7672       inner = TREE_OPERAND (exp, 0);
7673       break;
7674 
7675     case COMPOUND_LITERAL_EXPR:
7676       /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7677 	 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7678 	 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7679 	 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7680 	 the initializers aren't gimplified.  */
7681       if (COMPOUND_LITERAL_EXPR_DECL (exp)
7682 	  && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7683 	return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7684 					target, tmode, modifier, as);
7685       /* FALLTHRU */
7686     default:
7687       /* If the object is a DECL, then expand it for its rtl.  Don't bypass
7688 	 expand_expr, as that can have various side effects; LABEL_DECLs for
7689 	 example, may not have their DECL_RTL set yet.  Expand the rtl of
7690 	 CONSTRUCTORs too, which should yield a memory reference for the
7691 	 constructor's contents.  Assume language specific tree nodes can
7692 	 be expanded in some interesting way.  */
7693       gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7694       if (DECL_P (exp)
7695 	  || TREE_CODE (exp) == CONSTRUCTOR
7696 	  || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7697 	{
7698 	  result = expand_expr (exp, target, tmode,
7699 				modifier == EXPAND_INITIALIZER
7700 				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7701 
7702 	  /* If the DECL isn't in memory, then the DECL wasn't properly
7703 	     marked TREE_ADDRESSABLE, which will be either a front-end
7704 	     or a tree optimizer bug.  */
7705 
7706 	  gcc_assert (MEM_P (result));
7707 	  result = XEXP (result, 0);
7708 
7709 	  /* ??? Is this needed anymore?  */
7710 	  if (DECL_P (exp))
7711 	    TREE_USED (exp) = 1;
7712 
7713 	  if (modifier != EXPAND_INITIALIZER
7714 	      && modifier != EXPAND_CONST_ADDRESS
7715 	      && modifier != EXPAND_SUM)
7716 	    result = force_operand (result, target);
7717 	  return result;
7718 	}
7719 
7720       /* Pass FALSE as the last argument to get_inner_reference although
7721 	 we are expanding to RTL.  The rationale is that we know how to
7722 	 handle "aligning nodes" here: we can just bypass them because
7723 	 they won't change the final object whose address will be returned
7724 	 (they actually exist only for that purpose).  */
7725       inner = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
7726 				   &unsignedp, &reversep, &volatilep, false);
7727       break;
7728     }
7729 
7730   /* We must have made progress.  */
7731   gcc_assert (inner != exp);
7732 
7733   subtarget = offset || bitpos ? NULL_RTX : target;
7734   /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7735      inner alignment, force the inner to be sufficiently aligned.  */
7736   if (CONSTANT_CLASS_P (inner)
7737       && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7738     {
7739       inner = copy_node (inner);
7740       TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7741       TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7742       TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7743     }
7744   result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7745 
7746   if (offset)
7747     {
7748       rtx tmp;
7749 
7750       if (modifier != EXPAND_NORMAL)
7751 	result = force_operand (result, NULL);
7752       tmp = expand_expr (offset, NULL_RTX, tmode,
7753 			 modifier == EXPAND_INITIALIZER
7754 			  ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7755 
7756       /* expand_expr is allowed to return an object in a mode other
7757 	 than TMODE.  If it did, we need to convert.  */
7758       if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7759 	tmp = convert_modes (tmode, GET_MODE (tmp),
7760 			     tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7761       result = convert_memory_address_addr_space (tmode, result, as);
7762       tmp = convert_memory_address_addr_space (tmode, tmp, as);
7763 
7764       if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7765 	result = simplify_gen_binary (PLUS, tmode, result, tmp);
7766       else
7767 	{
7768 	  subtarget = bitpos ? NULL_RTX : target;
7769 	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7770 					1, OPTAB_LIB_WIDEN);
7771 	}
7772     }
7773 
7774   if (bitpos)
7775     {
7776       /* Someone beforehand should have rejected taking the address
7777 	 of such an object.  */
7778       gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7779 
7780       result = convert_memory_address_addr_space (tmode, result, as);
7781       result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7782       if (modifier < EXPAND_SUM)
7783 	result = force_operand (result, target);
7784     }
7785 
7786   return result;
7787 }
7788 
7789 /* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
7790    The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
7791 
7792 static rtx
7793 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7794 		       enum expand_modifier modifier)
7795 {
7796   addr_space_t as = ADDR_SPACE_GENERIC;
7797   machine_mode address_mode = Pmode;
7798   machine_mode pointer_mode = ptr_mode;
7799   machine_mode rmode;
7800   rtx result;
7801 
7802   /* Target mode of VOIDmode says "whatever's natural".  */
7803   if (tmode == VOIDmode)
7804     tmode = TYPE_MODE (TREE_TYPE (exp));
7805 
7806   if (POINTER_TYPE_P (TREE_TYPE (exp)))
7807     {
7808       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7809       address_mode = targetm.addr_space.address_mode (as);
7810       pointer_mode = targetm.addr_space.pointer_mode (as);
7811     }
7812 
7813   /* We can get called with some Weird Things if the user does silliness
7814      like "(short) &a".  In that case, convert_memory_address won't do
7815      the right thing, so ignore the given target mode.  */
7816   if (tmode != address_mode && tmode != pointer_mode)
7817     tmode = address_mode;
7818 
7819   result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7820 				    tmode, modifier, as);
7821 
7822   /* Despite expand_expr claims concerning ignoring TMODE when not
7823      strictly convenient, stuff breaks if we don't honor it.  Note
7824      that combined with the above, we only do this for pointer modes.  */
7825   rmode = GET_MODE (result);
7826   if (rmode == VOIDmode)
7827     rmode = tmode;
7828   if (rmode != tmode)
7829     result = convert_memory_address_addr_space (tmode, result, as);
7830 
7831   return result;
7832 }
7833 
7834 /* Generate code for computing CONSTRUCTOR EXP.
7835    An rtx for the computed value is returned.  If AVOID_TEMP_MEM
7836    is TRUE, instead of creating a temporary variable in memory
7837    NULL is returned and the caller needs to handle it differently.  */
7838 
7839 static rtx
7840 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7841 		    bool avoid_temp_mem)
7842 {
7843   tree type = TREE_TYPE (exp);
7844   machine_mode mode = TYPE_MODE (type);
7845 
7846   /* Try to avoid creating a temporary at all.  This is possible
7847      if all of the initializer is zero.
7848      FIXME: try to handle all [0..255] initializers we can handle
7849      with memset.  */
7850   if (TREE_STATIC (exp)
7851       && !TREE_ADDRESSABLE (exp)
7852       && target != 0 && mode == BLKmode
7853       && all_zeros_p (exp))
7854     {
7855       clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7856       return target;
7857     }
7858 
7859   /* All elts simple constants => refer to a constant in memory.  But
7860      if this is a non-BLKmode mode, let it store a field at a time
7861      since that should make a CONST_INT, CONST_WIDE_INT or
7862      CONST_DOUBLE when we fold.  Likewise, if we have a target we can
7863      use, it is best to store directly into the target unless the type
7864      is large enough that memcpy will be used.  If we are making an
7865      initializer and all operands are constant, put it in memory as
7866      well.
7867 
7868      FIXME: Avoid trying to fill vector constructors piece-meal.
7869      Output them with output_constant_def below unless we're sure
7870      they're zeros.  This should go away when vector initializers
7871      are treated like VECTOR_CST instead of arrays.  */
7872   if ((TREE_STATIC (exp)
7873        && ((mode == BLKmode
7874 	    && ! (target != 0 && safe_from_p (target, exp, 1)))
7875 		  || TREE_ADDRESSABLE (exp)
7876 		  || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7877 		      && (! can_move_by_pieces
7878 				     (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7879 				      TYPE_ALIGN (type)))
7880 		      && ! mostly_zeros_p (exp))))
7881       || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7882 	  && TREE_CONSTANT (exp)))
7883     {
7884       rtx constructor;
7885 
7886       if (avoid_temp_mem)
7887 	return NULL_RTX;
7888 
7889       constructor = expand_expr_constant (exp, 1, modifier);
7890 
7891       if (modifier != EXPAND_CONST_ADDRESS
7892 	  && modifier != EXPAND_INITIALIZER
7893 	  && modifier != EXPAND_SUM)
7894 	constructor = validize_mem (constructor);
7895 
7896       return constructor;
7897     }
7898 
7899   /* Handle calls that pass values in multiple non-contiguous
7900      locations.  The Irix 6 ABI has examples of this.  */
7901   if (target == 0 || ! safe_from_p (target, exp, 1)
7902       || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7903     {
7904       if (avoid_temp_mem)
7905 	return NULL_RTX;
7906 
7907       target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7908     }
7909 
7910   store_constructor (exp, target, 0, int_expr_size (exp), false);
7911   return target;
7912 }
7913 
7914 
7915 /* expand_expr: generate code for computing expression EXP.
7916    An rtx for the computed value is returned.  The value is never null.
7917    In the case of a void EXP, const0_rtx is returned.
7918 
7919    The value may be stored in TARGET if TARGET is nonzero.
7920    TARGET is just a suggestion; callers must assume that
7921    the rtx returned may not be the same as TARGET.
7922 
7923    If TARGET is CONST0_RTX, it means that the value will be ignored.
7924 
7925    If TMODE is not VOIDmode, it suggests generating the
7926    result in mode TMODE.  But this is done only when convenient.
7927    Otherwise, TMODE is ignored and the value generated in its natural mode.
7928    TMODE is just a suggestion; callers must assume that
7929    the rtx returned may not have mode TMODE.
7930 
7931    Note that TARGET may have neither TMODE nor MODE.  In that case, it
7932    probably will not be used.
7933 
7934    If MODIFIER is EXPAND_SUM then when EXP is an addition
7935    we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7936    or a nest of (PLUS ...) and (MINUS ...) where the terms are
7937    products as above, or REG or MEM, or constant.
7938    Ordinarily in such cases we would output mul or add instructions
7939    and then return a pseudo reg containing the sum.
7940 
7941    EXPAND_INITIALIZER is much like EXPAND_SUM except that
7942    it also marks a label as absolutely required (it can't be dead).
7943    It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7944    This is used for outputting expressions used in initializers.
7945 
7946    EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7947    with a constant address even if that address is not normally legitimate.
7948    EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7949 
7950    EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7951    a call parameter.  Such targets require special care as we haven't yet
7952    marked TARGET so that it's safe from being trashed by libcalls.  We
7953    don't want to use TARGET for anything but the final result;
7954    Intermediate values must go elsewhere.   Additionally, calls to
7955    emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7956 
7957    If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7958    address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7959    DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
7960    COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7961    recursively.
7962 
7963    If INNER_REFERENCE_P is true, we are expanding an inner reference.
7964    In this case, we don't adjust a returned MEM rtx that wouldn't be
7965    sufficiently aligned for its mode; instead, it's up to the caller
7966    to deal with it afterwards.  This is used to make sure that unaligned
7967    base objects for which out-of-bounds accesses are supported, for
7968    example record types with trailing arrays, aren't realigned behind
7969    the back of the caller.
7970    The normal operating mode is to pass FALSE for this parameter.  */
7971 
7972 rtx
7973 expand_expr_real (tree exp, rtx target, machine_mode tmode,
7974 		  enum expand_modifier modifier, rtx *alt_rtl,
7975 		  bool inner_reference_p)
7976 {
7977   rtx ret;
7978 
7979   /* Handle ERROR_MARK before anybody tries to access its type.  */
7980   if (TREE_CODE (exp) == ERROR_MARK
7981       || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7982     {
7983       ret = CONST0_RTX (tmode);
7984       return ret ? ret : const0_rtx;
7985     }
7986 
7987   ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7988 			    inner_reference_p);
7989   return ret;
7990 }
7991 
7992 /* Try to expand the conditional expression which is represented by
7993    TREEOP0 ? TREEOP1 : TREEOP2 using conditonal moves.  If it succeeds
7994    return the rtl reg which represents the result.  Otherwise return
7995    NULL_RTX.  */
7996 
7997 static rtx
7998 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7999 			      tree treeop1 ATTRIBUTE_UNUSED,
8000 			      tree treeop2 ATTRIBUTE_UNUSED)
8001 {
8002   rtx insn;
8003   rtx op00, op01, op1, op2;
8004   enum rtx_code comparison_code;
8005   machine_mode comparison_mode;
8006   gimple *srcstmt;
8007   rtx temp;
8008   tree type = TREE_TYPE (treeop1);
8009   int unsignedp = TYPE_UNSIGNED (type);
8010   machine_mode mode = TYPE_MODE (type);
8011   machine_mode orig_mode = mode;
8012 
8013   /* If we cannot do a conditional move on the mode, try doing it
8014      with the promoted mode. */
8015   if (!can_conditionally_move_p (mode))
8016     {
8017       mode = promote_mode (type, mode, &unsignedp);
8018       if (!can_conditionally_move_p (mode))
8019 	return NULL_RTX;
8020       temp = assign_temp (type, 0, 0); /* Use promoted mode for temp.  */
8021     }
8022   else
8023     temp = assign_temp (type, 0, 1);
8024 
8025   start_sequence ();
8026   expand_operands (treeop1, treeop2,
8027 		   temp, &op1, &op2, EXPAND_NORMAL);
8028 
8029   if (TREE_CODE (treeop0) == SSA_NAME
8030       && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
8031     {
8032       tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
8033       enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
8034       op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
8035       op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
8036       comparison_mode = TYPE_MODE (type);
8037       unsignedp = TYPE_UNSIGNED (type);
8038       comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8039     }
8040   else if (COMPARISON_CLASS_P (treeop0))
8041     {
8042       tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8043       enum tree_code cmpcode = TREE_CODE (treeop0);
8044       op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8045       op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8046       unsignedp = TYPE_UNSIGNED (type);
8047       comparison_mode = TYPE_MODE (type);
8048       comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8049     }
8050   else
8051     {
8052       op00 = expand_normal (treeop0);
8053       op01 = const0_rtx;
8054       comparison_code = NE;
8055       comparison_mode = GET_MODE (op00);
8056       if (comparison_mode == VOIDmode)
8057 	comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8058     }
8059 
8060   if (GET_MODE (op1) != mode)
8061     op1 = gen_lowpart (mode, op1);
8062 
8063   if (GET_MODE (op2) != mode)
8064     op2 = gen_lowpart (mode, op2);
8065 
8066   /* Try to emit the conditional move.  */
8067   insn = emit_conditional_move (temp, comparison_code,
8068 				op00, op01, comparison_mode,
8069 				op1, op2, mode,
8070 				unsignedp);
8071 
8072   /* If we could do the conditional move, emit the sequence,
8073      and return.  */
8074   if (insn)
8075     {
8076       rtx_insn *seq = get_insns ();
8077       end_sequence ();
8078       emit_insn (seq);
8079       return convert_modes (orig_mode, mode, temp, 0);
8080     }
8081 
8082   /* Otherwise discard the sequence and fall back to code with
8083      branches.  */
8084   end_sequence ();
8085   return NULL_RTX;
8086 }
8087 
8088 rtx
8089 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8090 		    enum expand_modifier modifier)
8091 {
8092   rtx op0, op1, op2, temp;
8093   rtx_code_label *lab;
8094   tree type;
8095   int unsignedp;
8096   machine_mode mode;
8097   enum tree_code code = ops->code;
8098   optab this_optab;
8099   rtx subtarget, original_target;
8100   int ignore;
8101   bool reduce_bit_field;
8102   location_t loc = ops->location;
8103   tree treeop0, treeop1, treeop2;
8104 #define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			  \
8105 				 ? reduce_to_bit_field_precision ((expr), \
8106 								  target, \
8107 								  type)	  \
8108 				 : (expr))
8109 
8110   type = ops->type;
8111   mode = TYPE_MODE (type);
8112   unsignedp = TYPE_UNSIGNED (type);
8113 
8114   treeop0 = ops->op0;
8115   treeop1 = ops->op1;
8116   treeop2 = ops->op2;
8117 
8118   /* We should be called only on simple (binary or unary) expressions,
8119      exactly those that are valid in gimple expressions that aren't
8120      GIMPLE_SINGLE_RHS (or invalid).  */
8121   gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8122 	      || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8123 	      || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8124 
8125   ignore = (target == const0_rtx
8126 	    || ((CONVERT_EXPR_CODE_P (code)
8127 		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8128 		&& TREE_CODE (type) == VOID_TYPE));
8129 
8130   /* We should be called only if we need the result.  */
8131   gcc_assert (!ignore);
8132 
8133   /* An operation in what may be a bit-field type needs the
8134      result to be reduced to the precision of the bit-field type,
8135      which is narrower than that of the type's mode.  */
8136   reduce_bit_field = (INTEGRAL_TYPE_P (type)
8137 		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8138 
8139   if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8140     target = 0;
8141 
8142   /* Use subtarget as the target for operand 0 of a binary operation.  */
8143   subtarget = get_subtarget (target);
8144   original_target = target;
8145 
8146   switch (code)
8147     {
8148     case NON_LVALUE_EXPR:
8149     case PAREN_EXPR:
8150     CASE_CONVERT:
8151       if (treeop0 == error_mark_node)
8152 	return const0_rtx;
8153 
8154       if (TREE_CODE (type) == UNION_TYPE)
8155 	{
8156 	  tree valtype = TREE_TYPE (treeop0);
8157 
8158 	  /* If both input and output are BLKmode, this conversion isn't doing
8159 	     anything except possibly changing memory attribute.  */
8160 	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8161 	    {
8162 	      rtx result = expand_expr (treeop0, target, tmode,
8163 					modifier);
8164 
8165 	      result = copy_rtx (result);
8166 	      set_mem_attributes (result, type, 0);
8167 	      return result;
8168 	    }
8169 
8170 	  if (target == 0)
8171 	    {
8172 	      if (TYPE_MODE (type) != BLKmode)
8173 		target = gen_reg_rtx (TYPE_MODE (type));
8174 	      else
8175 		target = assign_temp (type, 1, 1);
8176 	    }
8177 
8178 	  if (MEM_P (target))
8179 	    /* Store data into beginning of memory target.  */
8180 	    store_expr (treeop0,
8181 			adjust_address (target, TYPE_MODE (valtype), 0),
8182 			modifier == EXPAND_STACK_PARM,
8183 			false, TYPE_REVERSE_STORAGE_ORDER (type));
8184 
8185 	  else
8186 	    {
8187 	      gcc_assert (REG_P (target)
8188 			  && !TYPE_REVERSE_STORAGE_ORDER (type));
8189 
8190 	      /* Store this field into a union of the proper type.  */
8191 	      store_field (target,
8192 			   MIN ((int_size_in_bytes (TREE_TYPE
8193 						    (treeop0))
8194 				 * BITS_PER_UNIT),
8195 				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8196 			   0, 0, 0, TYPE_MODE (valtype), treeop0, 0,
8197 			   false, false);
8198 	    }
8199 
8200 	  /* Return the entire union.  */
8201 	  return target;
8202 	}
8203 
8204       if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8205 	{
8206 	  op0 = expand_expr (treeop0, target, VOIDmode,
8207 			     modifier);
8208 
8209 	  /* If the signedness of the conversion differs and OP0 is
8210 	     a promoted SUBREG, clear that indication since we now
8211 	     have to do the proper extension.  */
8212 	  if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8213 	      && GET_CODE (op0) == SUBREG)
8214 	    SUBREG_PROMOTED_VAR_P (op0) = 0;
8215 
8216 	  return REDUCE_BIT_FIELD (op0);
8217 	}
8218 
8219       op0 = expand_expr (treeop0, NULL_RTX, mode,
8220 			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8221       if (GET_MODE (op0) == mode)
8222 	;
8223 
8224       /* If OP0 is a constant, just convert it into the proper mode.  */
8225       else if (CONSTANT_P (op0))
8226 	{
8227 	  tree inner_type = TREE_TYPE (treeop0);
8228 	  machine_mode inner_mode = GET_MODE (op0);
8229 
8230 	  if (inner_mode == VOIDmode)
8231 	    inner_mode = TYPE_MODE (inner_type);
8232 
8233 	  if (modifier == EXPAND_INITIALIZER)
8234 	    op0 = lowpart_subreg (mode, op0, inner_mode);
8235 	  else
8236 	    op0=  convert_modes (mode, inner_mode, op0,
8237 				 TYPE_UNSIGNED (inner_type));
8238 	}
8239 
8240       else if (modifier == EXPAND_INITIALIZER)
8241 	op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8242 
8243       else if (target == 0)
8244 	op0 = convert_to_mode (mode, op0,
8245 			       TYPE_UNSIGNED (TREE_TYPE
8246 					      (treeop0)));
8247       else
8248 	{
8249 	  convert_move (target, op0,
8250 			TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8251 	  op0 = target;
8252 	}
8253 
8254       return REDUCE_BIT_FIELD (op0);
8255 
8256     case ADDR_SPACE_CONVERT_EXPR:
8257       {
8258 	tree treeop0_type = TREE_TYPE (treeop0);
8259 
8260 	gcc_assert (POINTER_TYPE_P (type));
8261 	gcc_assert (POINTER_TYPE_P (treeop0_type));
8262 
8263 	addr_space_t as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8264 	addr_space_t as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8265 
8266         /* Conversions between pointers to the same address space should
8267 	   have been implemented via CONVERT_EXPR / NOP_EXPR.  */
8268 	gcc_assert (as_to != as_from);
8269 
8270 	op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8271 
8272         /* Ask target code to handle conversion between pointers
8273 	   to overlapping address spaces.  */
8274 	if (targetm.addr_space.subset_p (as_to, as_from)
8275 	    || targetm.addr_space.subset_p (as_from, as_to))
8276 	  {
8277 	    op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8278 	  }
8279         else
8280           {
8281 	    /* For disjoint address spaces, converting anything but a null
8282 	       pointer invokes undefined behavior.  We truncate or extend the
8283 	       value as if we'd converted via integers, which handles 0 as
8284 	       required, and all others as the programmer likely expects.  */
8285 #ifndef POINTERS_EXTEND_UNSIGNED
8286 	    const int POINTERS_EXTEND_UNSIGNED = 1;
8287 #endif
8288 	    op0 = convert_modes (mode, TYPE_MODE (treeop0_type),
8289 				 op0, POINTERS_EXTEND_UNSIGNED);
8290 	  }
8291 	gcc_assert (op0);
8292 	return op0;
8293       }
8294 
8295     case POINTER_PLUS_EXPR:
8296       /* Even though the sizetype mode and the pointer's mode can be different
8297          expand is able to handle this correctly and get the correct result out
8298          of the PLUS_EXPR code.  */
8299       /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8300          if sizetype precision is smaller than pointer precision.  */
8301       if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8302 	treeop1 = fold_convert_loc (loc, type,
8303 				    fold_convert_loc (loc, ssizetype,
8304 						      treeop1));
8305       /* If sizetype precision is larger than pointer precision, truncate the
8306 	 offset to have matching modes.  */
8307       else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8308 	treeop1 = fold_convert_loc (loc, type, treeop1);
8309 
8310     case PLUS_EXPR:
8311       /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8312 	 something else, make sure we add the register to the constant and
8313 	 then to the other thing.  This case can occur during strength
8314 	 reduction and doing it this way will produce better code if the
8315 	 frame pointer or argument pointer is eliminated.
8316 
8317 	 fold-const.c will ensure that the constant is always in the inner
8318 	 PLUS_EXPR, so the only case we need to do anything about is if
8319 	 sp, ap, or fp is our second argument, in which case we must swap
8320 	 the innermost first argument and our second argument.  */
8321 
8322       if (TREE_CODE (treeop0) == PLUS_EXPR
8323 	  && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8324 	  && TREE_CODE (treeop1) == VAR_DECL
8325 	  && (DECL_RTL (treeop1) == frame_pointer_rtx
8326 	      || DECL_RTL (treeop1) == stack_pointer_rtx
8327 	      || DECL_RTL (treeop1) == arg_pointer_rtx))
8328 	{
8329 	  gcc_unreachable ();
8330 	}
8331 
8332       /* If the result is to be ptr_mode and we are adding an integer to
8333 	 something, we might be forming a constant.  So try to use
8334 	 plus_constant.  If it produces a sum and we can't accept it,
8335 	 use force_operand.  This allows P = &ARR[const] to generate
8336 	 efficient code on machines where a SYMBOL_REF is not a valid
8337 	 address.
8338 
8339 	 If this is an EXPAND_SUM call, always return the sum.  */
8340       if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8341 	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8342 	{
8343 	  if (modifier == EXPAND_STACK_PARM)
8344 	    target = 0;
8345 	  if (TREE_CODE (treeop0) == INTEGER_CST
8346 	      && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8347 	      && TREE_CONSTANT (treeop1))
8348 	    {
8349 	      rtx constant_part;
8350 	      HOST_WIDE_INT wc;
8351 	      machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8352 
8353 	      op1 = expand_expr (treeop1, subtarget, VOIDmode,
8354 				 EXPAND_SUM);
8355 	      /* Use wi::shwi to ensure that the constant is
8356 		 truncated according to the mode of OP1, then sign extended
8357 		 to a HOST_WIDE_INT.  Using the constant directly can result
8358 		 in non-canonical RTL in a 64x32 cross compile.  */
8359 	      wc = TREE_INT_CST_LOW (treeop0);
8360 	      constant_part =
8361 		immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8362 	      op1 = plus_constant (mode, op1, INTVAL (constant_part));
8363 	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8364 		op1 = force_operand (op1, target);
8365 	      return REDUCE_BIT_FIELD (op1);
8366 	    }
8367 
8368 	  else if (TREE_CODE (treeop1) == INTEGER_CST
8369 		   && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8370 		   && TREE_CONSTANT (treeop0))
8371 	    {
8372 	      rtx constant_part;
8373 	      HOST_WIDE_INT wc;
8374 	      machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8375 
8376 	      op0 = expand_expr (treeop0, subtarget, VOIDmode,
8377 				 (modifier == EXPAND_INITIALIZER
8378 				 ? EXPAND_INITIALIZER : EXPAND_SUM));
8379 	      if (! CONSTANT_P (op0))
8380 		{
8381 		  op1 = expand_expr (treeop1, NULL_RTX,
8382 				     VOIDmode, modifier);
8383 		  /* Return a PLUS if modifier says it's OK.  */
8384 		  if (modifier == EXPAND_SUM
8385 		      || modifier == EXPAND_INITIALIZER)
8386 		    return simplify_gen_binary (PLUS, mode, op0, op1);
8387 		  goto binop2;
8388 		}
8389 	      /* Use wi::shwi to ensure that the constant is
8390 		 truncated according to the mode of OP1, then sign extended
8391 		 to a HOST_WIDE_INT.  Using the constant directly can result
8392 		 in non-canonical RTL in a 64x32 cross compile.  */
8393 	      wc = TREE_INT_CST_LOW (treeop1);
8394 	      constant_part
8395 		= immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8396 	      op0 = plus_constant (mode, op0, INTVAL (constant_part));
8397 	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8398 		op0 = force_operand (op0, target);
8399 	      return REDUCE_BIT_FIELD (op0);
8400 	    }
8401 	}
8402 
8403       /* Use TER to expand pointer addition of a negated value
8404 	 as pointer subtraction.  */
8405       if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8406 	   || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8407 	       && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8408 	  && TREE_CODE (treeop1) == SSA_NAME
8409 	  && TYPE_MODE (TREE_TYPE (treeop0))
8410 	     == TYPE_MODE (TREE_TYPE (treeop1)))
8411 	{
8412 	  gimple *def = get_def_for_expr (treeop1, NEGATE_EXPR);
8413 	  if (def)
8414 	    {
8415 	      treeop1 = gimple_assign_rhs1 (def);
8416 	      code = MINUS_EXPR;
8417 	      goto do_minus;
8418 	    }
8419 	}
8420 
8421       /* No sense saving up arithmetic to be done
8422 	 if it's all in the wrong mode to form part of an address.
8423 	 And force_operand won't know whether to sign-extend or
8424 	 zero-extend.  */
8425       if (modifier != EXPAND_INITIALIZER
8426 	  && (modifier != EXPAND_SUM || mode != ptr_mode))
8427 	{
8428 	  expand_operands (treeop0, treeop1,
8429 			   subtarget, &op0, &op1, modifier);
8430 	  if (op0 == const0_rtx)
8431 	    return op1;
8432 	  if (op1 == const0_rtx)
8433 	    return op0;
8434 	  goto binop2;
8435 	}
8436 
8437       expand_operands (treeop0, treeop1,
8438 		       subtarget, &op0, &op1, modifier);
8439       return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8440 
8441     case MINUS_EXPR:
8442     do_minus:
8443       /* For initializers, we are allowed to return a MINUS of two
8444 	 symbolic constants.  Here we handle all cases when both operands
8445 	 are constant.  */
8446       /* Handle difference of two symbolic constants,
8447 	 for the sake of an initializer.  */
8448       if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8449 	  && really_constant_p (treeop0)
8450 	  && really_constant_p (treeop1))
8451 	{
8452 	  expand_operands (treeop0, treeop1,
8453 			   NULL_RTX, &op0, &op1, modifier);
8454 
8455 	  /* If the last operand is a CONST_INT, use plus_constant of
8456 	     the negated constant.  Else make the MINUS.  */
8457 	  if (CONST_INT_P (op1))
8458 	    return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8459 						    -INTVAL (op1)));
8460 	  else
8461 	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8462 	}
8463 
8464       /* No sense saving up arithmetic to be done
8465 	 if it's all in the wrong mode to form part of an address.
8466 	 And force_operand won't know whether to sign-extend or
8467 	 zero-extend.  */
8468       if (modifier != EXPAND_INITIALIZER
8469 	  && (modifier != EXPAND_SUM || mode != ptr_mode))
8470 	goto binop;
8471 
8472       expand_operands (treeop0, treeop1,
8473 		       subtarget, &op0, &op1, modifier);
8474 
8475       /* Convert A - const to A + (-const).  */
8476       if (CONST_INT_P (op1))
8477 	{
8478 	  op1 = negate_rtx (mode, op1);
8479 	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8480 	}
8481 
8482       goto binop2;
8483 
8484     case WIDEN_MULT_PLUS_EXPR:
8485     case WIDEN_MULT_MINUS_EXPR:
8486       expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8487       op2 = expand_normal (treeop2);
8488       target = expand_widen_pattern_expr (ops, op0, op1, op2,
8489 					  target, unsignedp);
8490       return target;
8491 
8492     case WIDEN_MULT_EXPR:
8493       /* If first operand is constant, swap them.
8494 	 Thus the following special case checks need only
8495 	 check the second operand.  */
8496       if (TREE_CODE (treeop0) == INTEGER_CST)
8497 	std::swap (treeop0, treeop1);
8498 
8499       /* First, check if we have a multiplication of one signed and one
8500 	 unsigned operand.  */
8501       if (TREE_CODE (treeop1) != INTEGER_CST
8502 	  && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8503 	      != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8504 	{
8505 	  machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8506 	  this_optab = usmul_widen_optab;
8507 	  if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8508 		!= CODE_FOR_nothing)
8509 	    {
8510 	      if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8511 		expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8512 				 EXPAND_NORMAL);
8513 	      else
8514 		expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8515 				 EXPAND_NORMAL);
8516 	      /* op0 and op1 might still be constant, despite the above
8517 		 != INTEGER_CST check.  Handle it.  */
8518 	      if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8519 		{
8520 		  op0 = convert_modes (innermode, mode, op0, true);
8521 		  op1 = convert_modes (innermode, mode, op1, false);
8522 		  return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8523 							target, unsignedp));
8524 		}
8525 	      goto binop3;
8526 	    }
8527 	}
8528       /* Check for a multiplication with matching signedness.  */
8529       else if ((TREE_CODE (treeop1) == INTEGER_CST
8530 		&& int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8531 	       || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8532 		   == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8533 	{
8534 	  tree op0type = TREE_TYPE (treeop0);
8535 	  machine_mode innermode = TYPE_MODE (op0type);
8536 	  bool zextend_p = TYPE_UNSIGNED (op0type);
8537 	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8538 	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8539 
8540 	  if (TREE_CODE (treeop0) != INTEGER_CST)
8541 	    {
8542 	      if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8543 		    != CODE_FOR_nothing)
8544 		{
8545 		  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8546 				   EXPAND_NORMAL);
8547 		  /* op0 and op1 might still be constant, despite the above
8548 		     != INTEGER_CST check.  Handle it.  */
8549 		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8550 		    {
8551 		     widen_mult_const:
8552 		      op0 = convert_modes (innermode, mode, op0, zextend_p);
8553 		      op1
8554 			= convert_modes (innermode, mode, op1,
8555 					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8556 		      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8557 							    target,
8558 							    unsignedp));
8559 		    }
8560 		  temp = expand_widening_mult (mode, op0, op1, target,
8561 					       unsignedp, this_optab);
8562 		  return REDUCE_BIT_FIELD (temp);
8563 		}
8564 	      if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8565 		    != CODE_FOR_nothing
8566 		  && innermode == word_mode)
8567 		{
8568 		  rtx htem, hipart;
8569 		  op0 = expand_normal (treeop0);
8570 		  if (TREE_CODE (treeop1) == INTEGER_CST)
8571 		    op1 = convert_modes (innermode, mode,
8572 					 expand_normal (treeop1),
8573 					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8574 		  else
8575 		    op1 = expand_normal (treeop1);
8576 		  /* op0 and op1 might still be constant, despite the above
8577 		     != INTEGER_CST check.  Handle it.  */
8578 		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8579 		    goto widen_mult_const;
8580 		  temp = expand_binop (mode, other_optab, op0, op1, target,
8581 				       unsignedp, OPTAB_LIB_WIDEN);
8582 		  hipart = gen_highpart (innermode, temp);
8583 		  htem = expand_mult_highpart_adjust (innermode, hipart,
8584 						      op0, op1, hipart,
8585 						      zextend_p);
8586 		  if (htem != hipart)
8587 		    emit_move_insn (hipart, htem);
8588 		  return REDUCE_BIT_FIELD (temp);
8589 		}
8590 	    }
8591 	}
8592       treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8593       treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8594       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8595       return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8596 
8597     case FMA_EXPR:
8598       {
8599 	optab opt = fma_optab;
8600 	gimple *def0, *def2;
8601 
8602 	/* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8603 	   call.  */
8604 	if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8605 	  {
8606 	    tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8607 	    tree call_expr;
8608 
8609 	    gcc_assert (fn != NULL_TREE);
8610 	    call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8611 	    return expand_builtin (call_expr, target, subtarget, mode, false);
8612 	  }
8613 
8614 	def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8615 	/* The multiplication is commutative - look at its 2nd operand
8616 	   if the first isn't fed by a negate.  */
8617 	if (!def0)
8618 	  {
8619 	    def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8620 	    /* Swap operands if the 2nd operand is fed by a negate.  */
8621 	    if (def0)
8622 	      std::swap (treeop0, treeop1);
8623 	  }
8624 	def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8625 
8626 	op0 = op2 = NULL;
8627 
8628 	if (def0 && def2
8629 	    && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8630 	  {
8631 	    opt = fnms_optab;
8632 	    op0 = expand_normal (gimple_assign_rhs1 (def0));
8633 	    op2 = expand_normal (gimple_assign_rhs1 (def2));
8634 	  }
8635 	else if (def0
8636 		 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8637 	  {
8638 	    opt = fnma_optab;
8639 	    op0 = expand_normal (gimple_assign_rhs1 (def0));
8640 	  }
8641 	else if (def2
8642 		 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8643 	  {
8644 	    opt = fms_optab;
8645 	    op2 = expand_normal (gimple_assign_rhs1 (def2));
8646 	  }
8647 
8648 	if (op0 == NULL)
8649 	  op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8650 	if (op2 == NULL)
8651 	  op2 = expand_normal (treeop2);
8652 	op1 = expand_normal (treeop1);
8653 
8654 	return expand_ternary_op (TYPE_MODE (type), opt,
8655 				  op0, op1, op2, target, 0);
8656       }
8657 
8658     case MULT_EXPR:
8659       /* If this is a fixed-point operation, then we cannot use the code
8660 	 below because "expand_mult" doesn't support sat/no-sat fixed-point
8661          multiplications.   */
8662       if (ALL_FIXED_POINT_MODE_P (mode))
8663 	goto binop;
8664 
8665       /* If first operand is constant, swap them.
8666 	 Thus the following special case checks need only
8667 	 check the second operand.  */
8668       if (TREE_CODE (treeop0) == INTEGER_CST)
8669 	std::swap (treeop0, treeop1);
8670 
8671       /* Attempt to return something suitable for generating an
8672 	 indexed address, for machines that support that.  */
8673 
8674       if (modifier == EXPAND_SUM && mode == ptr_mode
8675 	  && tree_fits_shwi_p (treeop1))
8676 	{
8677 	  tree exp1 = treeop1;
8678 
8679 	  op0 = expand_expr (treeop0, subtarget, VOIDmode,
8680 			     EXPAND_SUM);
8681 
8682 	  if (!REG_P (op0))
8683 	    op0 = force_operand (op0, NULL_RTX);
8684 	  if (!REG_P (op0))
8685 	    op0 = copy_to_mode_reg (mode, op0);
8686 
8687 	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8688 			       gen_int_mode (tree_to_shwi (exp1),
8689 					     TYPE_MODE (TREE_TYPE (exp1)))));
8690 	}
8691 
8692       if (modifier == EXPAND_STACK_PARM)
8693 	target = 0;
8694 
8695       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8696       return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8697 
8698     case TRUNC_DIV_EXPR:
8699     case FLOOR_DIV_EXPR:
8700     case CEIL_DIV_EXPR:
8701     case ROUND_DIV_EXPR:
8702     case EXACT_DIV_EXPR:
8703       /* If this is a fixed-point operation, then we cannot use the code
8704 	 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8705          divisions.   */
8706       if (ALL_FIXED_POINT_MODE_P (mode))
8707 	goto binop;
8708 
8709       if (modifier == EXPAND_STACK_PARM)
8710 	target = 0;
8711       /* Possible optimization: compute the dividend with EXPAND_SUM
8712 	 then if the divisor is constant can optimize the case
8713 	 where some terms of the dividend have coeffs divisible by it.  */
8714       expand_operands (treeop0, treeop1,
8715 		       subtarget, &op0, &op1, EXPAND_NORMAL);
8716       return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8717 
8718     case RDIV_EXPR:
8719       goto binop;
8720 
8721     case MULT_HIGHPART_EXPR:
8722       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8723       temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8724       gcc_assert (temp);
8725       return temp;
8726 
8727     case TRUNC_MOD_EXPR:
8728     case FLOOR_MOD_EXPR:
8729     case CEIL_MOD_EXPR:
8730     case ROUND_MOD_EXPR:
8731       if (modifier == EXPAND_STACK_PARM)
8732 	target = 0;
8733       expand_operands (treeop0, treeop1,
8734 		       subtarget, &op0, &op1, EXPAND_NORMAL);
8735       return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8736 
8737     case FIXED_CONVERT_EXPR:
8738       op0 = expand_normal (treeop0);
8739       if (target == 0 || modifier == EXPAND_STACK_PARM)
8740 	target = gen_reg_rtx (mode);
8741 
8742       if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8743 	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8744           || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8745 	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8746       else
8747 	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8748       return target;
8749 
8750     case FIX_TRUNC_EXPR:
8751       op0 = expand_normal (treeop0);
8752       if (target == 0 || modifier == EXPAND_STACK_PARM)
8753 	target = gen_reg_rtx (mode);
8754       expand_fix (target, op0, unsignedp);
8755       return target;
8756 
8757     case FLOAT_EXPR:
8758       op0 = expand_normal (treeop0);
8759       if (target == 0 || modifier == EXPAND_STACK_PARM)
8760 	target = gen_reg_rtx (mode);
8761       /* expand_float can't figure out what to do if FROM has VOIDmode.
8762 	 So give it the correct mode.  With -O, cse will optimize this.  */
8763       if (GET_MODE (op0) == VOIDmode)
8764 	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8765 				op0);
8766       expand_float (target, op0,
8767 		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8768       return target;
8769 
8770     case NEGATE_EXPR:
8771       op0 = expand_expr (treeop0, subtarget,
8772 			 VOIDmode, EXPAND_NORMAL);
8773       if (modifier == EXPAND_STACK_PARM)
8774 	target = 0;
8775       temp = expand_unop (mode,
8776       			  optab_for_tree_code (NEGATE_EXPR, type,
8777 					       optab_default),
8778 			  op0, target, 0);
8779       gcc_assert (temp);
8780       return REDUCE_BIT_FIELD (temp);
8781 
8782     case ABS_EXPR:
8783       op0 = expand_expr (treeop0, subtarget,
8784 			 VOIDmode, EXPAND_NORMAL);
8785       if (modifier == EXPAND_STACK_PARM)
8786 	target = 0;
8787 
8788       /* ABS_EXPR is not valid for complex arguments.  */
8789       gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8790 		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8791 
8792       /* Unsigned abs is simply the operand.  Testing here means we don't
8793 	 risk generating incorrect code below.  */
8794       if (TYPE_UNSIGNED (type))
8795 	return op0;
8796 
8797       return expand_abs (mode, op0, target, unsignedp,
8798 			 safe_from_p (target, treeop0, 1));
8799 
8800     case MAX_EXPR:
8801     case MIN_EXPR:
8802       target = original_target;
8803       if (target == 0
8804 	  || modifier == EXPAND_STACK_PARM
8805 	  || (MEM_P (target) && MEM_VOLATILE_P (target))
8806 	  || GET_MODE (target) != mode
8807 	  || (REG_P (target)
8808 	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
8809 	target = gen_reg_rtx (mode);
8810       expand_operands (treeop0, treeop1,
8811 		       target, &op0, &op1, EXPAND_NORMAL);
8812 
8813       /* First try to do it with a special MIN or MAX instruction.
8814 	 If that does not win, use a conditional jump to select the proper
8815 	 value.  */
8816       this_optab = optab_for_tree_code (code, type, optab_default);
8817       temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8818 			   OPTAB_WIDEN);
8819       if (temp != 0)
8820 	return temp;
8821 
8822       /* For vector MIN <x, y>, expand it a VEC_COND_EXPR <x <= y, x, y>
8823 	 and similarly for MAX <x, y>.  */
8824       if (VECTOR_TYPE_P (type))
8825 	{
8826 	  tree t0 = make_tree (type, op0);
8827 	  tree t1 = make_tree (type, op1);
8828 	  tree comparison = build2 (code == MIN_EXPR ? LE_EXPR : GE_EXPR,
8829 				    type, t0, t1);
8830 	  return expand_vec_cond_expr (type, comparison, t0, t1,
8831 				       original_target);
8832 	}
8833 
8834       /* At this point, a MEM target is no longer useful; we will get better
8835 	 code without it.  */
8836 
8837       if (! REG_P (target))
8838 	target = gen_reg_rtx (mode);
8839 
8840       /* If op1 was placed in target, swap op0 and op1.  */
8841       if (target != op0 && target == op1)
8842 	std::swap (op0, op1);
8843 
8844       /* We generate better code and avoid problems with op1 mentioning
8845 	 target by forcing op1 into a pseudo if it isn't a constant.  */
8846       if (! CONSTANT_P (op1))
8847 	op1 = force_reg (mode, op1);
8848 
8849       {
8850 	enum rtx_code comparison_code;
8851 	rtx cmpop1 = op1;
8852 
8853 	if (code == MAX_EXPR)
8854 	  comparison_code = unsignedp ? GEU : GE;
8855 	else
8856 	  comparison_code = unsignedp ? LEU : LE;
8857 
8858 	/* Canonicalize to comparisons against 0.  */
8859 	if (op1 == const1_rtx)
8860 	  {
8861 	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8862 	       or (a != 0 ? a : 1) for unsigned.
8863 	       For MIN we are safe converting (a <= 1 ? a : 1)
8864 	       into (a <= 0 ? a : 1)  */
8865 	    cmpop1 = const0_rtx;
8866 	    if (code == MAX_EXPR)
8867 	      comparison_code = unsignedp ? NE : GT;
8868 	  }
8869 	if (op1 == constm1_rtx && !unsignedp)
8870 	  {
8871 	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8872 	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8873 	    cmpop1 = const0_rtx;
8874 	    if (code == MIN_EXPR)
8875 	      comparison_code = LT;
8876 	  }
8877 
8878 	/* Use a conditional move if possible.  */
8879 	if (can_conditionally_move_p (mode))
8880 	  {
8881 	    rtx insn;
8882 
8883 	    start_sequence ();
8884 
8885 	    /* Try to emit the conditional move.  */
8886 	    insn = emit_conditional_move (target, comparison_code,
8887 					  op0, cmpop1, mode,
8888 					  op0, op1, mode,
8889 					  unsignedp);
8890 
8891 	    /* If we could do the conditional move, emit the sequence,
8892 	       and return.  */
8893 	    if (insn)
8894 	      {
8895 		rtx_insn *seq = get_insns ();
8896 		end_sequence ();
8897 		emit_insn (seq);
8898 		return target;
8899 	      }
8900 
8901 	    /* Otherwise discard the sequence and fall back to code with
8902 	       branches.  */
8903 	    end_sequence ();
8904 	  }
8905 
8906 	if (target != op0)
8907 	  emit_move_insn (target, op0);
8908 
8909 	lab = gen_label_rtx ();
8910 	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8911 				 unsignedp, mode, NULL_RTX, NULL, lab,
8912 				 -1);
8913       }
8914       emit_move_insn (target, op1);
8915       emit_label (lab);
8916       return target;
8917 
8918     case BIT_NOT_EXPR:
8919       op0 = expand_expr (treeop0, subtarget,
8920 			 VOIDmode, EXPAND_NORMAL);
8921       if (modifier == EXPAND_STACK_PARM)
8922 	target = 0;
8923       /* In case we have to reduce the result to bitfield precision
8924 	 for unsigned bitfield expand this as XOR with a proper constant
8925 	 instead.  */
8926       if (reduce_bit_field && TYPE_UNSIGNED (type))
8927 	{
8928 	  wide_int mask = wi::mask (TYPE_PRECISION (type),
8929 				    false, GET_MODE_PRECISION (mode));
8930 
8931 	  temp = expand_binop (mode, xor_optab, op0,
8932 			       immed_wide_int_const (mask, mode),
8933 			       target, 1, OPTAB_LIB_WIDEN);
8934 	}
8935       else
8936 	temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8937       gcc_assert (temp);
8938       return temp;
8939 
8940       /* ??? Can optimize bitwise operations with one arg constant.
8941 	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8942 	 and (a bitwise1 b) bitwise2 b (etc)
8943 	 but that is probably not worth while.  */
8944 
8945     case BIT_AND_EXPR:
8946     case BIT_IOR_EXPR:
8947     case BIT_XOR_EXPR:
8948       goto binop;
8949 
8950     case LROTATE_EXPR:
8951     case RROTATE_EXPR:
8952       gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8953 		  || (GET_MODE_PRECISION (TYPE_MODE (type))
8954 		      == TYPE_PRECISION (type)));
8955       /* fall through */
8956 
8957     case LSHIFT_EXPR:
8958     case RSHIFT_EXPR:
8959       {
8960 	/* If this is a fixed-point operation, then we cannot use the code
8961 	   below because "expand_shift" doesn't support sat/no-sat fixed-point
8962 	   shifts.  */
8963 	if (ALL_FIXED_POINT_MODE_P (mode))
8964 	  goto binop;
8965 
8966 	if (! safe_from_p (subtarget, treeop1, 1))
8967 	  subtarget = 0;
8968 	if (modifier == EXPAND_STACK_PARM)
8969 	  target = 0;
8970 	op0 = expand_expr (treeop0, subtarget,
8971 			   VOIDmode, EXPAND_NORMAL);
8972 
8973 	/* Left shift optimization when shifting across word_size boundary.
8974 
8975 	   If mode == GET_MODE_WIDER_MODE (word_mode), then normally
8976 	   there isn't native instruction to support this wide mode
8977 	   left shift.  Given below scenario:
8978 
8979 	    Type A = (Type) B  << C
8980 
8981 	    |<		 T	    >|
8982 	    | dest_high  |  dest_low |
8983 
8984 			 | word_size |
8985 
8986 	   If the shift amount C caused we shift B to across the word
8987 	   size boundary, i.e part of B shifted into high half of
8988 	   destination register, and part of B remains in the low
8989 	   half, then GCC will use the following left shift expand
8990 	   logic:
8991 
8992 	   1. Initialize dest_low to B.
8993 	   2. Initialize every bit of dest_high to the sign bit of B.
8994 	   3. Logic left shift dest_low by C bit to finalize dest_low.
8995 	      The value of dest_low before this shift is kept in a temp D.
8996 	   4. Logic left shift dest_high by C.
8997 	   5. Logic right shift D by (word_size - C).
8998 	   6. Or the result of 4 and 5 to finalize dest_high.
8999 
9000 	   While, by checking gimple statements, if operand B is
9001 	   coming from signed extension, then we can simplify above
9002 	   expand logic into:
9003 
9004 	      1. dest_high = src_low >> (word_size - C).
9005 	      2. dest_low = src_low << C.
9006 
9007 	   We can use one arithmetic right shift to finish all the
9008 	   purpose of steps 2, 4, 5, 6, thus we reduce the steps
9009 	   needed from 6 into 2.
9010 
9011 	   The case is similar for zero extension, except that we
9012 	   initialize dest_high to zero rather than copies of the sign
9013 	   bit from B.  Furthermore, we need to use a logical right shift
9014 	   in this case.
9015 
9016 	   The choice of sign-extension versus zero-extension is
9017 	   determined entirely by whether or not B is signed and is
9018 	   independent of the current setting of unsignedp.  */
9019 
9020 	temp = NULL_RTX;
9021 	if (code == LSHIFT_EXPR
9022 	    && target
9023 	    && REG_P (target)
9024 	    && mode == GET_MODE_WIDER_MODE (word_mode)
9025 	    && GET_MODE_SIZE (mode) == 2 * GET_MODE_SIZE (word_mode)
9026 	    && TREE_CONSTANT (treeop1)
9027 	    && TREE_CODE (treeop0) == SSA_NAME)
9028 	  {
9029 	    gimple *def = SSA_NAME_DEF_STMT (treeop0);
9030 	    if (is_gimple_assign (def)
9031 		&& gimple_assign_rhs_code (def) == NOP_EXPR)
9032 	      {
9033 		machine_mode rmode = TYPE_MODE
9034 		  (TREE_TYPE (gimple_assign_rhs1 (def)));
9035 
9036 		if (GET_MODE_SIZE (rmode) < GET_MODE_SIZE (mode)
9037 		    && TREE_INT_CST_LOW (treeop1) < GET_MODE_BITSIZE (word_mode)
9038 		    && ((TREE_INT_CST_LOW (treeop1) + GET_MODE_BITSIZE (rmode))
9039 			>= GET_MODE_BITSIZE (word_mode)))
9040 		  {
9041 		    rtx_insn *seq, *seq_old;
9042 		    unsigned int high_off = subreg_highpart_offset (word_mode,
9043 								    mode);
9044 		    bool extend_unsigned
9045 		      = TYPE_UNSIGNED (TREE_TYPE (gimple_assign_rhs1 (def)));
9046 		    rtx low = lowpart_subreg (word_mode, op0, mode);
9047 		    rtx dest_low = lowpart_subreg (word_mode, target, mode);
9048 		    rtx dest_high = simplify_gen_subreg (word_mode, target,
9049 							 mode, high_off);
9050 		    HOST_WIDE_INT ramount = (BITS_PER_WORD
9051 					     - TREE_INT_CST_LOW (treeop1));
9052 		    tree rshift = build_int_cst (TREE_TYPE (treeop1), ramount);
9053 
9054 		    start_sequence ();
9055 		    /* dest_high = src_low >> (word_size - C).  */
9056 		    temp = expand_variable_shift (RSHIFT_EXPR, word_mode, low,
9057 						  rshift, dest_high,
9058 						  extend_unsigned);
9059 		    if (temp != dest_high)
9060 		      emit_move_insn (dest_high, temp);
9061 
9062 		    /* dest_low = src_low << C.  */
9063 		    temp = expand_variable_shift (LSHIFT_EXPR, word_mode, low,
9064 						  treeop1, dest_low, unsignedp);
9065 		    if (temp != dest_low)
9066 		      emit_move_insn (dest_low, temp);
9067 
9068 		    seq = get_insns ();
9069 		    end_sequence ();
9070 		    temp = target ;
9071 
9072 		    if (have_insn_for (ASHIFT, mode))
9073 		      {
9074 			bool speed_p = optimize_insn_for_speed_p ();
9075 			start_sequence ();
9076 			rtx ret_old = expand_variable_shift (code, mode, op0,
9077 							     treeop1, target,
9078 							     unsignedp);
9079 
9080 			seq_old = get_insns ();
9081 			end_sequence ();
9082 			if (seq_cost (seq, speed_p)
9083 			    >= seq_cost (seq_old, speed_p))
9084 			  {
9085 			    seq = seq_old;
9086 			    temp = ret_old;
9087 			  }
9088 		      }
9089 		      emit_insn (seq);
9090 		  }
9091 	      }
9092 	  }
9093 
9094 	if (temp == NULL_RTX)
9095 	  temp = expand_variable_shift (code, mode, op0, treeop1, target,
9096 					unsignedp);
9097 	if (code == LSHIFT_EXPR)
9098 	  temp = REDUCE_BIT_FIELD (temp);
9099 	return temp;
9100       }
9101 
9102       /* Could determine the answer when only additive constants differ.  Also,
9103 	 the addition of one can be handled by changing the condition.  */
9104     case LT_EXPR:
9105     case LE_EXPR:
9106     case GT_EXPR:
9107     case GE_EXPR:
9108     case EQ_EXPR:
9109     case NE_EXPR:
9110     case UNORDERED_EXPR:
9111     case ORDERED_EXPR:
9112     case UNLT_EXPR:
9113     case UNLE_EXPR:
9114     case UNGT_EXPR:
9115     case UNGE_EXPR:
9116     case UNEQ_EXPR:
9117     case LTGT_EXPR:
9118       {
9119 	temp = do_store_flag (ops,
9120 			      modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9121 			      tmode != VOIDmode ? tmode : mode);
9122 	if (temp)
9123 	  return temp;
9124 
9125 	/* Use a compare and a jump for BLKmode comparisons, or for function
9126 	   type comparisons is have_canonicalize_funcptr_for_compare.  */
9127 
9128 	if ((target == 0
9129 	     || modifier == EXPAND_STACK_PARM
9130 	     || ! safe_from_p (target, treeop0, 1)
9131 	     || ! safe_from_p (target, treeop1, 1)
9132 	     /* Make sure we don't have a hard reg (such as function's return
9133 		value) live across basic blocks, if not optimizing.  */
9134 	     || (!optimize && REG_P (target)
9135 		 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9136 	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9137 
9138 	emit_move_insn (target, const0_rtx);
9139 
9140 	rtx_code_label *lab1 = gen_label_rtx ();
9141 	jumpifnot_1 (code, treeop0, treeop1, lab1, -1);
9142 
9143 	if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9144 	  emit_move_insn (target, constm1_rtx);
9145 	else
9146 	  emit_move_insn (target, const1_rtx);
9147 
9148 	emit_label (lab1);
9149 	return target;
9150       }
9151     case COMPLEX_EXPR:
9152       /* Get the rtx code of the operands.  */
9153       op0 = expand_normal (treeop0);
9154       op1 = expand_normal (treeop1);
9155 
9156       if (!target)
9157 	target = gen_reg_rtx (TYPE_MODE (type));
9158       else
9159 	/* If target overlaps with op1, then either we need to force
9160 	   op1 into a pseudo (if target also overlaps with op0),
9161 	   or write the complex parts in reverse order.  */
9162 	switch (GET_CODE (target))
9163 	  {
9164 	  case CONCAT:
9165 	    if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9166 	      {
9167 		if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9168 		  {
9169 		  complex_expr_force_op1:
9170 		    temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9171 		    emit_move_insn (temp, op1);
9172 		    op1 = temp;
9173 		    break;
9174 		  }
9175 	      complex_expr_swap_order:
9176 		/* Move the imaginary (op1) and real (op0) parts to their
9177 		   location.  */
9178 		write_complex_part (target, op1, true);
9179 		write_complex_part (target, op0, false);
9180 
9181 		return target;
9182 	      }
9183 	    break;
9184 	  case MEM:
9185 	    temp = adjust_address_nv (target,
9186 				      GET_MODE_INNER (GET_MODE (target)), 0);
9187 	    if (reg_overlap_mentioned_p (temp, op1))
9188 	      {
9189 		machine_mode imode = GET_MODE_INNER (GET_MODE (target));
9190 		temp = adjust_address_nv (target, imode,
9191 					  GET_MODE_SIZE (imode));
9192 		if (reg_overlap_mentioned_p (temp, op0))
9193 		  goto complex_expr_force_op1;
9194 		goto complex_expr_swap_order;
9195 	      }
9196 	    break;
9197 	  default:
9198 	    if (reg_overlap_mentioned_p (target, op1))
9199 	      {
9200 		if (reg_overlap_mentioned_p (target, op0))
9201 		  goto complex_expr_force_op1;
9202 		goto complex_expr_swap_order;
9203 	      }
9204 	    break;
9205 	  }
9206 
9207       /* Move the real (op0) and imaginary (op1) parts to their location.  */
9208       write_complex_part (target, op0, false);
9209       write_complex_part (target, op1, true);
9210 
9211       return target;
9212 
9213     case WIDEN_SUM_EXPR:
9214       {
9215         tree oprnd0 = treeop0;
9216         tree oprnd1 = treeop1;
9217 
9218         expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9219         target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9220                                             target, unsignedp);
9221         return target;
9222       }
9223 
9224     case REDUC_MAX_EXPR:
9225     case REDUC_MIN_EXPR:
9226     case REDUC_PLUS_EXPR:
9227       {
9228         op0 = expand_normal (treeop0);
9229         this_optab = optab_for_tree_code (code, type, optab_default);
9230         machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
9231 
9232 	struct expand_operand ops[2];
9233 	enum insn_code icode = optab_handler (this_optab, vec_mode);
9234 
9235 	create_output_operand (&ops[0], target, mode);
9236 	create_input_operand (&ops[1], op0, vec_mode);
9237 	expand_insn (icode, 2, ops);
9238 	target = ops[0].value;
9239 	if (GET_MODE (target) != mode)
9240 	  return gen_lowpart (tmode, target);
9241 	return target;
9242       }
9243 
9244     case VEC_UNPACK_HI_EXPR:
9245     case VEC_UNPACK_LO_EXPR:
9246       {
9247 	op0 = expand_normal (treeop0);
9248 	temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9249 					  target, unsignedp);
9250 	gcc_assert (temp);
9251 	return temp;
9252       }
9253 
9254     case VEC_UNPACK_FLOAT_HI_EXPR:
9255     case VEC_UNPACK_FLOAT_LO_EXPR:
9256       {
9257 	op0 = expand_normal (treeop0);
9258 	/* The signedness is determined from input operand.  */
9259 	temp = expand_widen_pattern_expr
9260 	  (ops, op0, NULL_RTX, NULL_RTX,
9261 	   target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9262 
9263 	gcc_assert (temp);
9264 	return temp;
9265       }
9266 
9267     case VEC_WIDEN_MULT_HI_EXPR:
9268     case VEC_WIDEN_MULT_LO_EXPR:
9269     case VEC_WIDEN_MULT_EVEN_EXPR:
9270     case VEC_WIDEN_MULT_ODD_EXPR:
9271     case VEC_WIDEN_LSHIFT_HI_EXPR:
9272     case VEC_WIDEN_LSHIFT_LO_EXPR:
9273       expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9274       target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9275 					  target, unsignedp);
9276       gcc_assert (target);
9277       return target;
9278 
9279     case VEC_PACK_TRUNC_EXPR:
9280     case VEC_PACK_SAT_EXPR:
9281     case VEC_PACK_FIX_TRUNC_EXPR:
9282       mode = TYPE_MODE (TREE_TYPE (treeop0));
9283       goto binop;
9284 
9285     case VEC_PERM_EXPR:
9286       expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9287       op2 = expand_normal (treeop2);
9288 
9289       /* Careful here: if the target doesn't support integral vector modes,
9290 	 a constant selection vector could wind up smooshed into a normal
9291 	 integral constant.  */
9292       if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9293 	{
9294 	  tree sel_type = TREE_TYPE (treeop2);
9295 	  machine_mode vmode
9296 	    = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9297 			       TYPE_VECTOR_SUBPARTS (sel_type));
9298 	  gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9299 	  op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9300 	  gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9301 	}
9302       else
9303         gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9304 
9305       temp = expand_vec_perm (mode, op0, op1, op2, target);
9306       gcc_assert (temp);
9307       return temp;
9308 
9309     case DOT_PROD_EXPR:
9310       {
9311 	tree oprnd0 = treeop0;
9312 	tree oprnd1 = treeop1;
9313 	tree oprnd2 = treeop2;
9314 	rtx op2;
9315 
9316 	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9317 	op2 = expand_normal (oprnd2);
9318 	target = expand_widen_pattern_expr (ops, op0, op1, op2,
9319 					    target, unsignedp);
9320 	return target;
9321       }
9322 
9323       case SAD_EXPR:
9324       {
9325 	tree oprnd0 = treeop0;
9326 	tree oprnd1 = treeop1;
9327 	tree oprnd2 = treeop2;
9328 	rtx op2;
9329 
9330 	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9331 	op2 = expand_normal (oprnd2);
9332 	target = expand_widen_pattern_expr (ops, op0, op1, op2,
9333 					    target, unsignedp);
9334 	return target;
9335       }
9336 
9337     case REALIGN_LOAD_EXPR:
9338       {
9339         tree oprnd0 = treeop0;
9340         tree oprnd1 = treeop1;
9341         tree oprnd2 = treeop2;
9342         rtx op2;
9343 
9344         this_optab = optab_for_tree_code (code, type, optab_default);
9345         expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9346         op2 = expand_normal (oprnd2);
9347         temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9348 				  target, unsignedp);
9349         gcc_assert (temp);
9350         return temp;
9351       }
9352 
9353     case COND_EXPR:
9354       {
9355 	/* A COND_EXPR with its type being VOID_TYPE represents a
9356 	   conditional jump and is handled in
9357 	   expand_gimple_cond_expr.  */
9358 	gcc_assert (!VOID_TYPE_P (type));
9359 
9360 	/* Note that COND_EXPRs whose type is a structure or union
9361 	   are required to be constructed to contain assignments of
9362 	   a temporary variable, so that we can evaluate them here
9363 	   for side effect only.  If type is void, we must do likewise.  */
9364 
9365 	gcc_assert (!TREE_ADDRESSABLE (type)
9366 		    && !ignore
9367 		    && TREE_TYPE (treeop1) != void_type_node
9368 		    && TREE_TYPE (treeop2) != void_type_node);
9369 
9370 	temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9371 	if (temp)
9372 	  return temp;
9373 
9374 	/* If we are not to produce a result, we have no target.  Otherwise,
9375 	   if a target was specified use it; it will not be used as an
9376 	   intermediate target unless it is safe.  If no target, use a
9377 	   temporary.  */
9378 
9379 	if (modifier != EXPAND_STACK_PARM
9380 	    && original_target
9381 	    && safe_from_p (original_target, treeop0, 1)
9382 	    && GET_MODE (original_target) == mode
9383 	    && !MEM_P (original_target))
9384 	  temp = original_target;
9385 	else
9386 	  temp = assign_temp (type, 0, 1);
9387 
9388 	do_pending_stack_adjust ();
9389 	NO_DEFER_POP;
9390 	rtx_code_label *lab0 = gen_label_rtx ();
9391 	rtx_code_label *lab1 = gen_label_rtx ();
9392 	jumpifnot (treeop0, lab0, -1);
9393 	store_expr (treeop1, temp,
9394 		    modifier == EXPAND_STACK_PARM,
9395 		    false, false);
9396 
9397 	emit_jump_insn (targetm.gen_jump (lab1));
9398 	emit_barrier ();
9399 	emit_label (lab0);
9400 	store_expr (treeop2, temp,
9401 		    modifier == EXPAND_STACK_PARM,
9402 		    false, false);
9403 
9404 	emit_label (lab1);
9405 	OK_DEFER_POP;
9406 	return temp;
9407       }
9408 
9409     case VEC_COND_EXPR:
9410       target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9411       return target;
9412 
9413     default:
9414       gcc_unreachable ();
9415     }
9416 
9417   /* Here to do an ordinary binary operator.  */
9418  binop:
9419   expand_operands (treeop0, treeop1,
9420 		   subtarget, &op0, &op1, EXPAND_NORMAL);
9421  binop2:
9422   this_optab = optab_for_tree_code (code, type, optab_default);
9423  binop3:
9424   if (modifier == EXPAND_STACK_PARM)
9425     target = 0;
9426   temp = expand_binop (mode, this_optab, op0, op1, target,
9427 		       unsignedp, OPTAB_LIB_WIDEN);
9428   gcc_assert (temp);
9429   /* Bitwise operations do not need bitfield reduction as we expect their
9430      operands being properly truncated.  */
9431   if (code == BIT_XOR_EXPR
9432       || code == BIT_AND_EXPR
9433       || code == BIT_IOR_EXPR)
9434     return temp;
9435   return REDUCE_BIT_FIELD (temp);
9436 }
9437 #undef REDUCE_BIT_FIELD
9438 
9439 
9440 /* Return TRUE if expression STMT is suitable for replacement.
9441    Never consider memory loads as replaceable, because those don't ever lead
9442    into constant expressions.  */
9443 
9444 static bool
9445 stmt_is_replaceable_p (gimple *stmt)
9446 {
9447   if (ssa_is_replaceable_p (stmt))
9448     {
9449       /* Don't move around loads.  */
9450       if (!gimple_assign_single_p (stmt)
9451 	  || is_gimple_val (gimple_assign_rhs1 (stmt)))
9452 	return true;
9453     }
9454   return false;
9455 }
9456 
9457 rtx
9458 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9459 		    enum expand_modifier modifier, rtx *alt_rtl,
9460 		    bool inner_reference_p)
9461 {
9462   rtx op0, op1, temp, decl_rtl;
9463   tree type;
9464   int unsignedp;
9465   machine_mode mode, dmode;
9466   enum tree_code code = TREE_CODE (exp);
9467   rtx subtarget, original_target;
9468   int ignore;
9469   tree context;
9470   bool reduce_bit_field;
9471   location_t loc = EXPR_LOCATION (exp);
9472   struct separate_ops ops;
9473   tree treeop0, treeop1, treeop2;
9474   tree ssa_name = NULL_TREE;
9475   gimple *g;
9476 
9477   type = TREE_TYPE (exp);
9478   mode = TYPE_MODE (type);
9479   unsignedp = TYPE_UNSIGNED (type);
9480 
9481   treeop0 = treeop1 = treeop2 = NULL_TREE;
9482   if (!VL_EXP_CLASS_P (exp))
9483     switch (TREE_CODE_LENGTH (code))
9484       {
9485 	default:
9486 	case 3: treeop2 = TREE_OPERAND (exp, 2);
9487 	case 2: treeop1 = TREE_OPERAND (exp, 1);
9488 	case 1: treeop0 = TREE_OPERAND (exp, 0);
9489 	case 0: break;
9490       }
9491   ops.code = code;
9492   ops.type = type;
9493   ops.op0 = treeop0;
9494   ops.op1 = treeop1;
9495   ops.op2 = treeop2;
9496   ops.location = loc;
9497 
9498   ignore = (target == const0_rtx
9499 	    || ((CONVERT_EXPR_CODE_P (code)
9500 		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9501 		&& TREE_CODE (type) == VOID_TYPE));
9502 
9503   /* An operation in what may be a bit-field type needs the
9504      result to be reduced to the precision of the bit-field type,
9505      which is narrower than that of the type's mode.  */
9506   reduce_bit_field = (!ignore
9507 		      && INTEGRAL_TYPE_P (type)
9508 		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9509 
9510   /* If we are going to ignore this result, we need only do something
9511      if there is a side-effect somewhere in the expression.  If there
9512      is, short-circuit the most common cases here.  Note that we must
9513      not call expand_expr with anything but const0_rtx in case this
9514      is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
9515 
9516   if (ignore)
9517     {
9518       if (! TREE_SIDE_EFFECTS (exp))
9519 	return const0_rtx;
9520 
9521       /* Ensure we reference a volatile object even if value is ignored, but
9522 	 don't do this if all we are doing is taking its address.  */
9523       if (TREE_THIS_VOLATILE (exp)
9524 	  && TREE_CODE (exp) != FUNCTION_DECL
9525 	  && mode != VOIDmode && mode != BLKmode
9526 	  && modifier != EXPAND_CONST_ADDRESS)
9527 	{
9528 	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9529 	  if (MEM_P (temp))
9530 	    copy_to_reg (temp);
9531 	  return const0_rtx;
9532 	}
9533 
9534       if (TREE_CODE_CLASS (code) == tcc_unary
9535 	  || code == BIT_FIELD_REF
9536 	  || code == COMPONENT_REF
9537 	  || code == INDIRECT_REF)
9538 	return expand_expr (treeop0, const0_rtx, VOIDmode,
9539 			    modifier);
9540 
9541       else if (TREE_CODE_CLASS (code) == tcc_binary
9542 	       || TREE_CODE_CLASS (code) == tcc_comparison
9543 	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9544 	{
9545 	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9546 	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9547 	  return const0_rtx;
9548 	}
9549 
9550       target = 0;
9551     }
9552 
9553   if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9554     target = 0;
9555 
9556   /* Use subtarget as the target for operand 0 of a binary operation.  */
9557   subtarget = get_subtarget (target);
9558   original_target = target;
9559 
9560   switch (code)
9561     {
9562     case LABEL_DECL:
9563       {
9564 	tree function = decl_function_context (exp);
9565 
9566 	temp = label_rtx (exp);
9567 	temp = gen_rtx_LABEL_REF (Pmode, temp);
9568 
9569 	if (function != current_function_decl
9570 	    && function != 0)
9571 	  LABEL_REF_NONLOCAL_P (temp) = 1;
9572 
9573 	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9574 	return temp;
9575       }
9576 
9577     case SSA_NAME:
9578       /* ??? ivopts calls expander, without any preparation from
9579          out-of-ssa.  So fake instructions as if this was an access to the
9580 	 base variable.  This unnecessarily allocates a pseudo, see how we can
9581 	 reuse it, if partition base vars have it set already.  */
9582       if (!currently_expanding_to_rtl)
9583 	{
9584 	  tree var = SSA_NAME_VAR (exp);
9585 	  if (var && DECL_RTL_SET_P (var))
9586 	    return DECL_RTL (var);
9587 	  return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9588 			      LAST_VIRTUAL_REGISTER + 1);
9589 	}
9590 
9591       g = get_gimple_for_ssa_name (exp);
9592       /* For EXPAND_INITIALIZER try harder to get something simpler.  */
9593       if (g == NULL
9594 	  && modifier == EXPAND_INITIALIZER
9595 	  && !SSA_NAME_IS_DEFAULT_DEF (exp)
9596 	  && (optimize || !SSA_NAME_VAR (exp)
9597 	      || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9598 	  && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9599 	g = SSA_NAME_DEF_STMT (exp);
9600       if (g)
9601 	{
9602 	  rtx r;
9603 	  location_t saved_loc = curr_insn_location ();
9604 	  location_t loc = gimple_location (g);
9605 	  if (loc != UNKNOWN_LOCATION)
9606 	    set_curr_insn_location (loc);
9607 	  ops.code = gimple_assign_rhs_code (g);
9608           switch (get_gimple_rhs_class (ops.code))
9609 	    {
9610 	    case GIMPLE_TERNARY_RHS:
9611 	      ops.op2 = gimple_assign_rhs3 (g);
9612 	      /* Fallthru */
9613 	    case GIMPLE_BINARY_RHS:
9614 	      ops.op1 = gimple_assign_rhs2 (g);
9615 
9616 	      /* Try to expand conditonal compare.  */
9617 	      if (targetm.gen_ccmp_first)
9618 		{
9619 		  gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9620 		  r = expand_ccmp_expr (g);
9621 		  if (r)
9622 		    break;
9623 		}
9624 	      /* Fallthru */
9625 	    case GIMPLE_UNARY_RHS:
9626 	      ops.op0 = gimple_assign_rhs1 (g);
9627 	      ops.type = TREE_TYPE (gimple_assign_lhs (g));
9628 	      ops.location = loc;
9629 	      r = expand_expr_real_2 (&ops, target, tmode, modifier);
9630 	      break;
9631 	    case GIMPLE_SINGLE_RHS:
9632 	      {
9633 		r = expand_expr_real (gimple_assign_rhs1 (g), target,
9634 				      tmode, modifier, NULL, inner_reference_p);
9635 		break;
9636 	      }
9637 	    default:
9638 	      gcc_unreachable ();
9639 	    }
9640 	  set_curr_insn_location (saved_loc);
9641 	  if (REG_P (r) && !REG_EXPR (r))
9642 	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9643 	  return r;
9644 	}
9645 
9646       ssa_name = exp;
9647       decl_rtl = get_rtx_for_ssa_name (ssa_name);
9648       exp = SSA_NAME_VAR (ssa_name);
9649       goto expand_decl_rtl;
9650 
9651     case PARM_DECL:
9652     case VAR_DECL:
9653       /* If a static var's type was incomplete when the decl was written,
9654 	 but the type is complete now, lay out the decl now.  */
9655       if (DECL_SIZE (exp) == 0
9656 	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9657 	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9658 	layout_decl (exp, 0);
9659 
9660       /* ... fall through ...  */
9661 
9662     case FUNCTION_DECL:
9663     case RESULT_DECL:
9664       decl_rtl = DECL_RTL (exp);
9665     expand_decl_rtl:
9666       gcc_assert (decl_rtl);
9667 
9668       /* DECL_MODE might change when TYPE_MODE depends on attribute target
9669 	 settings for VECTOR_TYPE_P that might switch for the function.  */
9670       if (currently_expanding_to_rtl
9671 	  && code == VAR_DECL && MEM_P (decl_rtl)
9672 	  && VECTOR_TYPE_P (type) && exp && DECL_MODE (exp) != mode)
9673 	decl_rtl = change_address (decl_rtl, TYPE_MODE (type), 0);
9674       else
9675 	decl_rtl = copy_rtx (decl_rtl);
9676 
9677       /* Record writes to register variables.  */
9678       if (modifier == EXPAND_WRITE
9679 	  && REG_P (decl_rtl)
9680 	  && HARD_REGISTER_P (decl_rtl))
9681         add_to_hard_reg_set (&crtl->asm_clobbers,
9682 			     GET_MODE (decl_rtl), REGNO (decl_rtl));
9683 
9684       /* Ensure variable marked as used even if it doesn't go through
9685 	 a parser.  If it hasn't be used yet, write out an external
9686 	 definition.  */
9687       if (exp)
9688 	TREE_USED (exp) = 1;
9689 
9690       /* Show we haven't gotten RTL for this yet.  */
9691       temp = 0;
9692 
9693       /* Variables inherited from containing functions should have
9694 	 been lowered by this point.  */
9695       if (exp)
9696 	context = decl_function_context (exp);
9697       gcc_assert (!exp
9698 		  || SCOPE_FILE_SCOPE_P (context)
9699 		  || context == current_function_decl
9700 		  || TREE_STATIC (exp)
9701 		  || DECL_EXTERNAL (exp)
9702 		  /* ??? C++ creates functions that are not TREE_STATIC.  */
9703 		  || TREE_CODE (exp) == FUNCTION_DECL);
9704 
9705       /* This is the case of an array whose size is to be determined
9706 	 from its initializer, while the initializer is still being parsed.
9707 	 ??? We aren't parsing while expanding anymore.  */
9708 
9709       if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9710 	temp = validize_mem (decl_rtl);
9711 
9712       /* If DECL_RTL is memory, we are in the normal case and the
9713 	 address is not valid, get the address into a register.  */
9714 
9715       else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9716 	{
9717 	  if (alt_rtl)
9718 	    *alt_rtl = decl_rtl;
9719 	  decl_rtl = use_anchored_address (decl_rtl);
9720 	  if (modifier != EXPAND_CONST_ADDRESS
9721 	      && modifier != EXPAND_SUM
9722 	      && !memory_address_addr_space_p (exp ? DECL_MODE (exp)
9723 					       : GET_MODE (decl_rtl),
9724 					       XEXP (decl_rtl, 0),
9725 					       MEM_ADDR_SPACE (decl_rtl)))
9726 	    temp = replace_equiv_address (decl_rtl,
9727 					  copy_rtx (XEXP (decl_rtl, 0)));
9728 	}
9729 
9730       /* If we got something, return it.  But first, set the alignment
9731 	 if the address is a register.  */
9732       if (temp != 0)
9733 	{
9734 	  if (exp && MEM_P (temp) && REG_P (XEXP (temp, 0)))
9735 	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9736 
9737 	  return temp;
9738 	}
9739 
9740       if (exp)
9741 	dmode = DECL_MODE (exp);
9742       else
9743 	dmode = TYPE_MODE (TREE_TYPE (ssa_name));
9744 
9745       /* If the mode of DECL_RTL does not match that of the decl,
9746 	 there are two cases: we are dealing with a BLKmode value
9747 	 that is returned in a register, or we are dealing with
9748 	 a promoted value.  In the latter case, return a SUBREG
9749 	 of the wanted mode, but mark it so that we know that it
9750 	 was already extended.  */
9751       if (REG_P (decl_rtl)
9752 	  && dmode != BLKmode
9753 	  && GET_MODE (decl_rtl) != dmode)
9754 	{
9755 	  machine_mode pmode;
9756 
9757 	  /* Get the signedness to be used for this variable.  Ensure we get
9758 	     the same mode we got when the variable was declared.  */
9759 	  if (code != SSA_NAME)
9760 	    pmode = promote_decl_mode (exp, &unsignedp);
9761 	  else if ((g = SSA_NAME_DEF_STMT (ssa_name))
9762 		   && gimple_code (g) == GIMPLE_CALL
9763 		   && !gimple_call_internal_p (g))
9764 	    pmode = promote_function_mode (type, mode, &unsignedp,
9765 					   gimple_call_fntype (g),
9766 					   2);
9767 	  else
9768 	    pmode = promote_ssa_mode (ssa_name, &unsignedp);
9769 	  gcc_assert (GET_MODE (decl_rtl) == pmode);
9770 
9771 	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
9772 	  SUBREG_PROMOTED_VAR_P (temp) = 1;
9773 	  SUBREG_PROMOTED_SET (temp, unsignedp);
9774 	  return temp;
9775 	}
9776 
9777       return decl_rtl;
9778 
9779     case INTEGER_CST:
9780       /* Given that TYPE_PRECISION (type) is not always equal to
9781          GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9782          the former to the latter according to the signedness of the
9783          type. */
9784       temp = immed_wide_int_const (wide_int::from
9785 				   (exp,
9786 				    GET_MODE_PRECISION (TYPE_MODE (type)),
9787 				    TYPE_SIGN (type)),
9788 				   TYPE_MODE (type));
9789       return temp;
9790 
9791     case VECTOR_CST:
9792       {
9793 	tree tmp = NULL_TREE;
9794 	if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9795 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9796 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9797 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9798 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9799 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9800 	  return const_vector_from_tree (exp);
9801 	if (GET_MODE_CLASS (mode) == MODE_INT)
9802 	  {
9803 	    if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
9804 	      return const_scalar_mask_from_tree (exp);
9805 	    else
9806 	      {
9807 		tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9808 		if (type_for_mode)
9809 		  tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR,
9810 					type_for_mode, exp);
9811 	      }
9812 	  }
9813 	if (!tmp)
9814 	  {
9815 	    vec<constructor_elt, va_gc> *v;
9816 	    unsigned i;
9817 	    vec_alloc (v, VECTOR_CST_NELTS (exp));
9818 	    for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9819 	      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9820 	    tmp = build_constructor (type, v);
9821 	  }
9822 	return expand_expr (tmp, ignore ? const0_rtx : target,
9823 			    tmode, modifier);
9824       }
9825 
9826     case CONST_DECL:
9827       return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9828 
9829     case REAL_CST:
9830       /* If optimized, generate immediate CONST_DOUBLE
9831 	 which will be turned into memory by reload if necessary.
9832 
9833 	 We used to force a register so that loop.c could see it.  But
9834 	 this does not allow gen_* patterns to perform optimizations with
9835 	 the constants.  It also produces two insns in cases like "x = 1.0;".
9836 	 On most machines, floating-point constants are not permitted in
9837 	 many insns, so we'd end up copying it to a register in any case.
9838 
9839 	 Now, we do the copying in expand_binop, if appropriate.  */
9840       return const_double_from_real_value (TREE_REAL_CST (exp),
9841 					   TYPE_MODE (TREE_TYPE (exp)));
9842 
9843     case FIXED_CST:
9844       return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9845 					   TYPE_MODE (TREE_TYPE (exp)));
9846 
9847     case COMPLEX_CST:
9848       /* Handle evaluating a complex constant in a CONCAT target.  */
9849       if (original_target && GET_CODE (original_target) == CONCAT)
9850 	{
9851 	  machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9852 	  rtx rtarg, itarg;
9853 
9854 	  rtarg = XEXP (original_target, 0);
9855 	  itarg = XEXP (original_target, 1);
9856 
9857 	  /* Move the real and imaginary parts separately.  */
9858 	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9859 	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9860 
9861 	  if (op0 != rtarg)
9862 	    emit_move_insn (rtarg, op0);
9863 	  if (op1 != itarg)
9864 	    emit_move_insn (itarg, op1);
9865 
9866 	  return original_target;
9867 	}
9868 
9869       /* ... fall through ...  */
9870 
9871     case STRING_CST:
9872       temp = expand_expr_constant (exp, 1, modifier);
9873 
9874       /* temp contains a constant address.
9875 	 On RISC machines where a constant address isn't valid,
9876 	 make some insns to get that address into a register.  */
9877       if (modifier != EXPAND_CONST_ADDRESS
9878 	  && modifier != EXPAND_INITIALIZER
9879 	  && modifier != EXPAND_SUM
9880 	  && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9881 					    MEM_ADDR_SPACE (temp)))
9882 	return replace_equiv_address (temp,
9883 				      copy_rtx (XEXP (temp, 0)));
9884       return temp;
9885 
9886     case SAVE_EXPR:
9887       {
9888 	tree val = treeop0;
9889 	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9890 				      inner_reference_p);
9891 
9892 	if (!SAVE_EXPR_RESOLVED_P (exp))
9893 	  {
9894 	    /* We can indeed still hit this case, typically via builtin
9895 	       expanders calling save_expr immediately before expanding
9896 	       something.  Assume this means that we only have to deal
9897 	       with non-BLKmode values.  */
9898 	    gcc_assert (GET_MODE (ret) != BLKmode);
9899 
9900 	    val = build_decl (curr_insn_location (),
9901 			      VAR_DECL, NULL, TREE_TYPE (exp));
9902 	    DECL_ARTIFICIAL (val) = 1;
9903 	    DECL_IGNORED_P (val) = 1;
9904 	    treeop0 = val;
9905 	    TREE_OPERAND (exp, 0) = treeop0;
9906 	    SAVE_EXPR_RESOLVED_P (exp) = 1;
9907 
9908 	    if (!CONSTANT_P (ret))
9909 	      ret = copy_to_reg (ret);
9910 	    SET_DECL_RTL (val, ret);
9911 	  }
9912 
9913         return ret;
9914       }
9915 
9916 
9917     case CONSTRUCTOR:
9918       /* If we don't need the result, just ensure we evaluate any
9919 	 subexpressions.  */
9920       if (ignore)
9921 	{
9922 	  unsigned HOST_WIDE_INT idx;
9923 	  tree value;
9924 
9925 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9926 	    expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9927 
9928 	  return const0_rtx;
9929 	}
9930 
9931       return expand_constructor (exp, target, modifier, false);
9932 
9933     case TARGET_MEM_REF:
9934       {
9935 	addr_space_t as
9936 	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9937 	enum insn_code icode;
9938 	unsigned int align;
9939 
9940 	op0 = addr_for_mem_ref (exp, as, true);
9941 	op0 = memory_address_addr_space (mode, op0, as);
9942 	temp = gen_rtx_MEM (mode, op0);
9943 	set_mem_attributes (temp, exp, 0);
9944 	set_mem_addr_space (temp, as);
9945 	align = get_object_alignment (exp);
9946 	if (modifier != EXPAND_WRITE
9947 	    && modifier != EXPAND_MEMORY
9948 	    && mode != BLKmode
9949 	    && align < GET_MODE_ALIGNMENT (mode)
9950 	    /* If the target does not have special handling for unaligned
9951 	       loads of mode then it can use regular moves for them.  */
9952 	    && ((icode = optab_handler (movmisalign_optab, mode))
9953 		!= CODE_FOR_nothing))
9954 	  {
9955 	    struct expand_operand ops[2];
9956 
9957 	    /* We've already validated the memory, and we're creating a
9958 	       new pseudo destination.  The predicates really can't fail,
9959 	       nor can the generator.  */
9960 	    create_output_operand (&ops[0], NULL_RTX, mode);
9961 	    create_fixed_operand (&ops[1], temp);
9962 	    expand_insn (icode, 2, ops);
9963 	    temp = ops[0].value;
9964 	  }
9965 	return temp;
9966       }
9967 
9968     case MEM_REF:
9969       {
9970 	const bool reverse = REF_REVERSE_STORAGE_ORDER (exp);
9971 	addr_space_t as
9972 	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9973 	machine_mode address_mode;
9974 	tree base = TREE_OPERAND (exp, 0);
9975 	gimple *def_stmt;
9976 	enum insn_code icode;
9977 	unsigned align;
9978 	/* Handle expansion of non-aliased memory with non-BLKmode.  That
9979 	   might end up in a register.  */
9980 	if (mem_ref_refers_to_non_mem_p (exp))
9981 	  {
9982 	    HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9983 	    base = TREE_OPERAND (base, 0);
9984 	    if (offset == 0
9985 	        && !reverse
9986 		&& tree_fits_uhwi_p (TYPE_SIZE (type))
9987 		&& (GET_MODE_BITSIZE (DECL_MODE (base))
9988 		    == tree_to_uhwi (TYPE_SIZE (type))))
9989 	      return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9990 				  target, tmode, modifier);
9991 	    if (TYPE_MODE (type) == BLKmode)
9992 	      {
9993 		temp = assign_stack_temp (DECL_MODE (base),
9994 					  GET_MODE_SIZE (DECL_MODE (base)));
9995 		store_expr (base, temp, 0, false, false);
9996 		temp = adjust_address (temp, BLKmode, offset);
9997 		set_mem_size (temp, int_size_in_bytes (type));
9998 		return temp;
9999 	      }
10000 	    exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
10001 			  bitsize_int (offset * BITS_PER_UNIT));
10002 	    REF_REVERSE_STORAGE_ORDER (exp) = reverse;
10003 	    return expand_expr (exp, target, tmode, modifier);
10004 	  }
10005 	address_mode = targetm.addr_space.address_mode (as);
10006 	base = TREE_OPERAND (exp, 0);
10007 	if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
10008 	  {
10009 	    tree mask = gimple_assign_rhs2 (def_stmt);
10010 	    base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
10011 			   gimple_assign_rhs1 (def_stmt), mask);
10012 	    TREE_OPERAND (exp, 0) = base;
10013 	  }
10014 	align = get_object_alignment (exp);
10015 	op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
10016 	op0 = memory_address_addr_space (mode, op0, as);
10017 	if (!integer_zerop (TREE_OPERAND (exp, 1)))
10018 	  {
10019 	    rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
10020 	    op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
10021 	    op0 = memory_address_addr_space (mode, op0, as);
10022 	  }
10023 	temp = gen_rtx_MEM (mode, op0);
10024 	set_mem_attributes (temp, exp, 0);
10025 	set_mem_addr_space (temp, as);
10026 	if (TREE_THIS_VOLATILE (exp))
10027 	  MEM_VOLATILE_P (temp) = 1;
10028 	if (modifier != EXPAND_WRITE
10029 	    && modifier != EXPAND_MEMORY
10030 	    && !inner_reference_p
10031 	    && mode != BLKmode
10032 	    && align < GET_MODE_ALIGNMENT (mode))
10033 	  {
10034 	    if ((icode = optab_handler (movmisalign_optab, mode))
10035 		!= CODE_FOR_nothing)
10036 	      {
10037 		struct expand_operand ops[2];
10038 
10039 		/* We've already validated the memory, and we're creating a
10040 		   new pseudo destination.  The predicates really can't fail,
10041 		   nor can the generator.  */
10042 		create_output_operand (&ops[0], NULL_RTX, mode);
10043 		create_fixed_operand (&ops[1], temp);
10044 		expand_insn (icode, 2, ops);
10045 		temp = ops[0].value;
10046 	      }
10047 	    else if (SLOW_UNALIGNED_ACCESS (mode, align))
10048 	      temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
10049 					0, TYPE_UNSIGNED (TREE_TYPE (exp)),
10050 					(modifier == EXPAND_STACK_PARM
10051 					 ? NULL_RTX : target),
10052 					mode, mode, false);
10053 	  }
10054 	if (reverse
10055 	    && modifier != EXPAND_MEMORY
10056 	    && modifier != EXPAND_WRITE)
10057 	  temp = flip_storage_order (mode, temp);
10058 	return temp;
10059       }
10060 
10061     case ARRAY_REF:
10062 
10063       {
10064 	tree array = treeop0;
10065 	tree index = treeop1;
10066 	tree init;
10067 
10068 	/* Fold an expression like: "foo"[2].
10069 	   This is not done in fold so it won't happen inside &.
10070 	   Don't fold if this is for wide characters since it's too
10071 	   difficult to do correctly and this is a very rare case.  */
10072 
10073 	if (modifier != EXPAND_CONST_ADDRESS
10074 	    && modifier != EXPAND_INITIALIZER
10075 	    && modifier != EXPAND_MEMORY)
10076 	  {
10077 	    tree t = fold_read_from_constant_string (exp);
10078 
10079 	    if (t)
10080 	      return expand_expr (t, target, tmode, modifier);
10081 	  }
10082 
10083 	/* If this is a constant index into a constant array,
10084 	   just get the value from the array.  Handle both the cases when
10085 	   we have an explicit constructor and when our operand is a variable
10086 	   that was declared const.  */
10087 
10088 	if (modifier != EXPAND_CONST_ADDRESS
10089 	    && modifier != EXPAND_INITIALIZER
10090 	    && modifier != EXPAND_MEMORY
10091 	    && TREE_CODE (array) == CONSTRUCTOR
10092 	    && ! TREE_SIDE_EFFECTS (array)
10093 	    && TREE_CODE (index) == INTEGER_CST)
10094 	  {
10095 	    unsigned HOST_WIDE_INT ix;
10096 	    tree field, value;
10097 
10098 	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
10099 				      field, value)
10100 	      if (tree_int_cst_equal (field, index))
10101 		{
10102 		  if (!TREE_SIDE_EFFECTS (value))
10103 		    return expand_expr (fold (value), target, tmode, modifier);
10104 		  break;
10105 		}
10106 	  }
10107 
10108 	else if (optimize >= 1
10109 		 && modifier != EXPAND_CONST_ADDRESS
10110 		 && modifier != EXPAND_INITIALIZER
10111 		 && modifier != EXPAND_MEMORY
10112 		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
10113 		 && TREE_CODE (index) == INTEGER_CST
10114 		 && (TREE_CODE (array) == VAR_DECL
10115 		     || TREE_CODE (array) == CONST_DECL)
10116 		 && (init = ctor_for_folding (array)) != error_mark_node)
10117 	  {
10118 	    if (init == NULL_TREE)
10119 	      {
10120 		tree value = build_zero_cst (type);
10121 		if (TREE_CODE (value) == CONSTRUCTOR)
10122 		  {
10123 		    /* If VALUE is a CONSTRUCTOR, this optimization is only
10124 		       useful if this doesn't store the CONSTRUCTOR into
10125 		       memory.  If it does, it is more efficient to just
10126 		       load the data from the array directly.  */
10127 		    rtx ret = expand_constructor (value, target,
10128 						  modifier, true);
10129 		    if (ret == NULL_RTX)
10130 		      value = NULL_TREE;
10131 		  }
10132 
10133 		if (value)
10134 		  return expand_expr (value, target, tmode, modifier);
10135 	      }
10136 	    else if (TREE_CODE (init) == CONSTRUCTOR)
10137 	      {
10138 		unsigned HOST_WIDE_INT ix;
10139 		tree field, value;
10140 
10141 		FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
10142 					  field, value)
10143 		  if (tree_int_cst_equal (field, index))
10144 		    {
10145 		      if (TREE_SIDE_EFFECTS (value))
10146 			break;
10147 
10148 		      if (TREE_CODE (value) == CONSTRUCTOR)
10149 			{
10150 			  /* If VALUE is a CONSTRUCTOR, this
10151 			     optimization is only useful if
10152 			     this doesn't store the CONSTRUCTOR
10153 			     into memory.  If it does, it is more
10154 			     efficient to just load the data from
10155 			     the array directly.  */
10156 			  rtx ret = expand_constructor (value, target,
10157 							modifier, true);
10158 			  if (ret == NULL_RTX)
10159 			    break;
10160 			}
10161 
10162 		      return
10163 		        expand_expr (fold (value), target, tmode, modifier);
10164 		    }
10165 	      }
10166 	    else if (TREE_CODE (init) == STRING_CST)
10167 	      {
10168 		tree low_bound = array_ref_low_bound (exp);
10169 		tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10170 
10171 		/* Optimize the special case of a zero lower bound.
10172 
10173 		   We convert the lower bound to sizetype to avoid problems
10174 		   with constant folding.  E.g. suppose the lower bound is
10175 		   1 and its mode is QI.  Without the conversion
10176 		      (ARRAY + (INDEX - (unsigned char)1))
10177 		   becomes
10178 		      (ARRAY + (-(unsigned char)1) + INDEX)
10179 		   which becomes
10180 		      (ARRAY + 255 + INDEX).  Oops!  */
10181 		if (!integer_zerop (low_bound))
10182 		  index1 = size_diffop_loc (loc, index1,
10183 					    fold_convert_loc (loc, sizetype,
10184 							      low_bound));
10185 
10186 		if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10187 		  {
10188 		    tree type = TREE_TYPE (TREE_TYPE (init));
10189 		    machine_mode mode = TYPE_MODE (type);
10190 
10191 		    if (GET_MODE_CLASS (mode) == MODE_INT
10192 			&& GET_MODE_SIZE (mode) == 1)
10193 		      return gen_int_mode (TREE_STRING_POINTER (init)
10194 					   [TREE_INT_CST_LOW (index1)],
10195 					   mode);
10196 		  }
10197 	      }
10198 	  }
10199       }
10200       goto normal_inner_ref;
10201 
10202     case COMPONENT_REF:
10203       /* If the operand is a CONSTRUCTOR, we can just extract the
10204 	 appropriate field if it is present.  */
10205       if (TREE_CODE (treeop0) == CONSTRUCTOR)
10206 	{
10207 	  unsigned HOST_WIDE_INT idx;
10208 	  tree field, value;
10209 
10210 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10211 				    idx, field, value)
10212 	    if (field == treeop1
10213 		/* We can normally use the value of the field in the
10214 		   CONSTRUCTOR.  However, if this is a bitfield in
10215 		   an integral mode that we can fit in a HOST_WIDE_INT,
10216 		   we must mask only the number of bits in the bitfield,
10217 		   since this is done implicitly by the constructor.  If
10218 		   the bitfield does not meet either of those conditions,
10219 		   we can't do this optimization.  */
10220 		&& (! DECL_BIT_FIELD (field)
10221 		    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
10222 			&& (GET_MODE_PRECISION (DECL_MODE (field))
10223 			    <= HOST_BITS_PER_WIDE_INT))))
10224 	      {
10225 		if (DECL_BIT_FIELD (field)
10226 		    && modifier == EXPAND_STACK_PARM)
10227 		  target = 0;
10228 		op0 = expand_expr (value, target, tmode, modifier);
10229 		if (DECL_BIT_FIELD (field))
10230 		  {
10231 		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10232 		    machine_mode imode = TYPE_MODE (TREE_TYPE (field));
10233 
10234 		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
10235 		      {
10236 			op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
10237 					    imode);
10238 			op0 = expand_and (imode, op0, op1, target);
10239 		      }
10240 		    else
10241 		      {
10242 			int count = GET_MODE_PRECISION (imode) - bitsize;
10243 
10244 			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10245 					    target, 0);
10246 			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10247 					    target, 0);
10248 		      }
10249 		  }
10250 
10251 		return op0;
10252 	      }
10253 	}
10254       goto normal_inner_ref;
10255 
10256     case BIT_FIELD_REF:
10257     case ARRAY_RANGE_REF:
10258     normal_inner_ref:
10259       {
10260 	machine_mode mode1, mode2;
10261 	HOST_WIDE_INT bitsize, bitpos;
10262 	tree offset;
10263 	int reversep, volatilep = 0, must_force_mem;
10264 	tree tem
10265 	  = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode1,
10266 				 &unsignedp, &reversep, &volatilep, true);
10267 	rtx orig_op0, memloc;
10268 	bool clear_mem_expr = false;
10269 
10270 	/* If we got back the original object, something is wrong.  Perhaps
10271 	   we are evaluating an expression too early.  In any event, don't
10272 	   infinitely recurse.  */
10273 	gcc_assert (tem != exp);
10274 
10275 	/* If TEM's type is a union of variable size, pass TARGET to the inner
10276 	   computation, since it will need a temporary and TARGET is known
10277 	   to have to do.  This occurs in unchecked conversion in Ada.  */
10278 	orig_op0 = op0
10279 	  = expand_expr_real (tem,
10280 			      (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10281 			       && COMPLETE_TYPE_P (TREE_TYPE (tem))
10282 			       && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10283 				   != INTEGER_CST)
10284 			       && modifier != EXPAND_STACK_PARM
10285 			       ? target : NULL_RTX),
10286 			      VOIDmode,
10287 			      modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10288 			      NULL, true);
10289 
10290 	/* If the field has a mode, we want to access it in the
10291 	   field's mode, not the computed mode.
10292 	   If a MEM has VOIDmode (external with incomplete type),
10293 	   use BLKmode for it instead.  */
10294 	if (MEM_P (op0))
10295 	  {
10296 	    if (mode1 != VOIDmode)
10297 	      op0 = adjust_address (op0, mode1, 0);
10298 	    else if (GET_MODE (op0) == VOIDmode)
10299 	      op0 = adjust_address (op0, BLKmode, 0);
10300 	  }
10301 
10302 	mode2
10303 	  = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10304 
10305 	/* If we have either an offset, a BLKmode result, or a reference
10306 	   outside the underlying object, we must force it to memory.
10307 	   Such a case can occur in Ada if we have unchecked conversion
10308 	   of an expression from a scalar type to an aggregate type or
10309 	   for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10310 	   passed a partially uninitialized object or a view-conversion
10311 	   to a larger size.  */
10312 	must_force_mem = (offset
10313 			  || mode1 == BLKmode
10314 			  || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10315 
10316 	/* Handle CONCAT first.  */
10317 	if (GET_CODE (op0) == CONCAT && !must_force_mem)
10318 	  {
10319 	    if (bitpos == 0
10320 		&& bitsize == GET_MODE_BITSIZE (GET_MODE (op0))
10321 		&& COMPLEX_MODE_P (mode1)
10322 		&& COMPLEX_MODE_P (GET_MODE (op0))
10323 		&& (GET_MODE_PRECISION (GET_MODE_INNER (mode1))
10324 		    == GET_MODE_PRECISION (GET_MODE_INNER (GET_MODE (op0)))))
10325 	      {
10326 		if (reversep)
10327 		  op0 = flip_storage_order (GET_MODE (op0), op0);
10328 		if (mode1 != GET_MODE (op0))
10329 		  {
10330 		    rtx parts[2];
10331 		    for (int i = 0; i < 2; i++)
10332 		      {
10333 			rtx op = read_complex_part (op0, i != 0);
10334 			if (GET_CODE (op) == SUBREG)
10335 			  op = force_reg (GET_MODE (op), op);
10336 			rtx temp = gen_lowpart_common (GET_MODE_INNER (mode1),
10337 						       op);
10338 			if (temp)
10339 			  op = temp;
10340 			else
10341 			  {
10342 			    if (!REG_P (op) && !MEM_P (op))
10343 			      op = force_reg (GET_MODE (op), op);
10344 			    op = gen_lowpart (GET_MODE_INNER (mode1), op);
10345 			  }
10346 			parts[i] = op;
10347 		      }
10348 		    op0 = gen_rtx_CONCAT (mode1, parts[0], parts[1]);
10349 		  }
10350 		return op0;
10351 	      }
10352 	    if (bitpos == 0
10353 		&& bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10354 		&& bitsize)
10355 	      {
10356 		op0 = XEXP (op0, 0);
10357 		mode2 = GET_MODE (op0);
10358 	      }
10359 	    else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10360 		     && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10361 		     && bitpos
10362 		     && bitsize)
10363 	      {
10364 		op0 = XEXP (op0, 1);
10365 		bitpos = 0;
10366 		mode2 = GET_MODE (op0);
10367 	      }
10368 	    else
10369 	      /* Otherwise force into memory.  */
10370 	      must_force_mem = 1;
10371 	  }
10372 
10373 	/* If this is a constant, put it in a register if it is a legitimate
10374 	   constant and we don't need a memory reference.  */
10375 	if (CONSTANT_P (op0)
10376 	    && mode2 != BLKmode
10377 	    && targetm.legitimate_constant_p (mode2, op0)
10378 	    && !must_force_mem)
10379 	  op0 = force_reg (mode2, op0);
10380 
10381 	/* Otherwise, if this is a constant, try to force it to the constant
10382 	   pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
10383 	   is a legitimate constant.  */
10384 	else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10385 	  op0 = validize_mem (memloc);
10386 
10387 	/* Otherwise, if this is a constant or the object is not in memory
10388 	   and need be, put it there.  */
10389 	else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10390 	  {
10391 	    memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10392 	    emit_move_insn (memloc, op0);
10393 	    op0 = memloc;
10394 	    clear_mem_expr = true;
10395 	  }
10396 
10397 	if (offset)
10398 	  {
10399 	    machine_mode address_mode;
10400 	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10401 					  EXPAND_SUM);
10402 
10403 	    gcc_assert (MEM_P (op0));
10404 
10405 	    address_mode = get_address_mode (op0);
10406 	    if (GET_MODE (offset_rtx) != address_mode)
10407 	      {
10408 		/* We cannot be sure that the RTL in offset_rtx is valid outside
10409 		   of a memory address context, so force it into a register
10410 		   before attempting to convert it to the desired mode.  */
10411 		offset_rtx = force_operand (offset_rtx, NULL_RTX);
10412 		offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10413 	      }
10414 
10415 	    /* See the comment in expand_assignment for the rationale.  */
10416 	    if (mode1 != VOIDmode
10417 		&& bitpos != 0
10418 		&& bitsize > 0
10419 		&& (bitpos % bitsize) == 0
10420 		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10421 		&& MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10422 	      {
10423 		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10424 		bitpos = 0;
10425 	      }
10426 
10427 	    op0 = offset_address (op0, offset_rtx,
10428 				  highest_pow2_factor (offset));
10429 	  }
10430 
10431 	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10432 	   record its alignment as BIGGEST_ALIGNMENT.  */
10433 	if (MEM_P (op0) && bitpos == 0 && offset != 0
10434 	    && is_aligning_offset (offset, tem))
10435 	  set_mem_align (op0, BIGGEST_ALIGNMENT);
10436 
10437 	/* Don't forget about volatility even if this is a bitfield.  */
10438 	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10439 	  {
10440 	    if (op0 == orig_op0)
10441 	      op0 = copy_rtx (op0);
10442 
10443 	    MEM_VOLATILE_P (op0) = 1;
10444 	  }
10445 
10446 	/* In cases where an aligned union has an unaligned object
10447 	   as a field, we might be extracting a BLKmode value from
10448 	   an integer-mode (e.g., SImode) object.  Handle this case
10449 	   by doing the extract into an object as wide as the field
10450 	   (which we know to be the width of a basic mode), then
10451 	   storing into memory, and changing the mode to BLKmode.  */
10452 	if (mode1 == VOIDmode
10453 	    || REG_P (op0) || GET_CODE (op0) == SUBREG
10454 	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
10455 		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10456 		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10457 		&& modifier != EXPAND_CONST_ADDRESS
10458 		&& modifier != EXPAND_INITIALIZER
10459 		&& modifier != EXPAND_MEMORY)
10460 	    /* If the bitfield is volatile and the bitsize
10461 	       is narrower than the access size of the bitfield,
10462 	       we need to extract bitfields from the access.  */
10463 	    || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10464 		&& DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10465 		&& mode1 != BLKmode
10466 		&& bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10467 	    /* If the field isn't aligned enough to fetch as a memref,
10468 	       fetch it as a bit field.  */
10469 	    || (mode1 != BLKmode
10470 		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10471 		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10472 		      || (MEM_P (op0)
10473 			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10474 			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10475 		     && modifier != EXPAND_MEMORY
10476 		     && ((modifier == EXPAND_CONST_ADDRESS
10477 			  || modifier == EXPAND_INITIALIZER)
10478 			 ? STRICT_ALIGNMENT
10479 			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10480 		    || (bitpos % BITS_PER_UNIT != 0)))
10481 	    /* If the type and the field are a constant size and the
10482 	       size of the type isn't the same size as the bitfield,
10483 	       we must use bitfield operations.  */
10484 	    || (bitsize >= 0
10485 		&& TYPE_SIZE (TREE_TYPE (exp))
10486 		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10487 		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10488 					  bitsize)))
10489 	  {
10490 	    machine_mode ext_mode = mode;
10491 
10492 	    if (ext_mode == BLKmode
10493 		&& ! (target != 0 && MEM_P (op0)
10494 		      && MEM_P (target)
10495 		      && bitpos % BITS_PER_UNIT == 0))
10496 	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10497 
10498 	    if (ext_mode == BLKmode)
10499 	      {
10500 		if (target == 0)
10501 		  target = assign_temp (type, 1, 1);
10502 
10503 		/* ??? Unlike the similar test a few lines below, this one is
10504 		   very likely obsolete.  */
10505 		if (bitsize == 0)
10506 		  return target;
10507 
10508 		/* In this case, BITPOS must start at a byte boundary and
10509 		   TARGET, if specified, must be a MEM.  */
10510 		gcc_assert (MEM_P (op0)
10511 			    && (!target || MEM_P (target))
10512 			    && !(bitpos % BITS_PER_UNIT));
10513 
10514 		emit_block_move (target,
10515 				 adjust_address (op0, VOIDmode,
10516 						 bitpos / BITS_PER_UNIT),
10517 				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10518 					  / BITS_PER_UNIT),
10519 				 (modifier == EXPAND_STACK_PARM
10520 				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10521 
10522 		return target;
10523 	      }
10524 
10525 	    /* If we have nothing to extract, the result will be 0 for targets
10526 	       with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise.  Always
10527 	       return 0 for the sake of consistency, as reading a zero-sized
10528 	       bitfield is valid in Ada and the value is fully specified.  */
10529 	    if (bitsize == 0)
10530 	      return const0_rtx;
10531 
10532 	    op0 = validize_mem (op0);
10533 
10534 	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10535 	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10536 
10537 	    /* If the result has a record type and the extraction is done in
10538 	       an integral mode, then the field may be not aligned on a byte
10539 	       boundary; in this case, if it has reverse storage order, it
10540 	       needs to be extracted as a scalar field with reverse storage
10541 	       order and put back into memory order afterwards.  */
10542 	    if (TREE_CODE (type) == RECORD_TYPE
10543 		&& GET_MODE_CLASS (ext_mode) == MODE_INT)
10544 	      reversep = TYPE_REVERSE_STORAGE_ORDER (type);
10545 
10546 	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10547 				     (modifier == EXPAND_STACK_PARM
10548 				      ? NULL_RTX : target),
10549 				     ext_mode, ext_mode, reversep);
10550 
10551 	    /* If the result has a record type and the mode of OP0 is an
10552 	       integral mode then, if BITSIZE is narrower than this mode
10553 	       and this is for big-endian data, we must put the field
10554 	       into the high-order bits.  And we must also put it back
10555 	       into memory order if it has been previously reversed.  */
10556 	    if (TREE_CODE (type) == RECORD_TYPE
10557 		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT)
10558 	      {
10559 		HOST_WIDE_INT size = GET_MODE_BITSIZE (GET_MODE (op0));
10560 
10561 		if (bitsize < size
10562 		    && reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
10563 		  op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10564 				      size - bitsize, op0, 1);
10565 
10566 		if (reversep)
10567 		  op0 = flip_storage_order (GET_MODE (op0), op0);
10568 	      }
10569 
10570 	    /* If the result type is BLKmode, store the data into a temporary
10571 	       of the appropriate type, but with the mode corresponding to the
10572 	       mode for the data we have (op0's mode).  */
10573 	    if (mode == BLKmode)
10574 	      {
10575 		rtx new_rtx
10576 		  = assign_stack_temp_for_type (ext_mode,
10577 						GET_MODE_BITSIZE (ext_mode),
10578 						type);
10579 		emit_move_insn (new_rtx, op0);
10580 		op0 = copy_rtx (new_rtx);
10581 		PUT_MODE (op0, BLKmode);
10582 	      }
10583 
10584 	    return op0;
10585 	  }
10586 
10587 	/* If the result is BLKmode, use that to access the object
10588 	   now as well.  */
10589 	if (mode == BLKmode)
10590 	  mode1 = BLKmode;
10591 
10592 	/* Get a reference to just this component.  */
10593 	if (modifier == EXPAND_CONST_ADDRESS
10594 	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10595 	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10596 	else
10597 	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10598 
10599 	if (op0 == orig_op0)
10600 	  op0 = copy_rtx (op0);
10601 
10602 	/* Don't set memory attributes if the base expression is
10603 	   SSA_NAME that got expanded as a MEM.  In that case, we should
10604 	   just honor its original memory attributes.  */
10605 	if (TREE_CODE (tem) != SSA_NAME || !MEM_P (orig_op0))
10606 	  set_mem_attributes (op0, exp, 0);
10607 
10608 	if (REG_P (XEXP (op0, 0)))
10609 	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10610 
10611 	/* If op0 is a temporary because the original expressions was forced
10612 	   to memory, clear MEM_EXPR so that the original expression cannot
10613 	   be marked as addressable through MEM_EXPR of the temporary.  */
10614 	if (clear_mem_expr)
10615 	  set_mem_expr (op0, NULL_TREE);
10616 
10617 	MEM_VOLATILE_P (op0) |= volatilep;
10618 
10619         if (reversep
10620 	    && modifier != EXPAND_MEMORY
10621 	    && modifier != EXPAND_WRITE)
10622 	  op0 = flip_storage_order (mode1, op0);
10623 
10624 	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10625 	    || modifier == EXPAND_CONST_ADDRESS
10626 	    || modifier == EXPAND_INITIALIZER)
10627 	  return op0;
10628 
10629 	if (target == 0)
10630 	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10631 
10632 	convert_move (target, op0, unsignedp);
10633 	return target;
10634       }
10635 
10636     case OBJ_TYPE_REF:
10637       return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10638 
10639     case CALL_EXPR:
10640       /* All valid uses of __builtin_va_arg_pack () are removed during
10641 	 inlining.  */
10642       if (CALL_EXPR_VA_ARG_PACK (exp))
10643 	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10644       {
10645 	tree fndecl = get_callee_fndecl (exp), attr;
10646 
10647 	if (fndecl
10648 	    && (attr = lookup_attribute ("error",
10649 					 DECL_ATTRIBUTES (fndecl))) != NULL)
10650 	  error ("%Kcall to %qs declared with attribute error: %s",
10651 		 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10652 		 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10653 	if (fndecl
10654 	    && (attr = lookup_attribute ("warning",
10655 					 DECL_ATTRIBUTES (fndecl))) != NULL)
10656 	  warning_at (tree_nonartificial_location (exp),
10657 		      0, "%Kcall to %qs declared with attribute warning: %s",
10658 		      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10659 		      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10660 
10661 	/* Check for a built-in function.  */
10662 	if (fndecl && DECL_BUILT_IN (fndecl))
10663 	  {
10664 	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10665 	    if (CALL_WITH_BOUNDS_P (exp))
10666 	      return expand_builtin_with_bounds (exp, target, subtarget,
10667 						 tmode, ignore);
10668 	    else
10669 	      return expand_builtin (exp, target, subtarget, tmode, ignore);
10670 	  }
10671       }
10672       return expand_call (exp, target, ignore);
10673 
10674     case VIEW_CONVERT_EXPR:
10675       op0 = NULL_RTX;
10676 
10677       /* If we are converting to BLKmode, try to avoid an intermediate
10678 	 temporary by fetching an inner memory reference.  */
10679       if (mode == BLKmode
10680 	  && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10681 	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10682 	  && handled_component_p (treeop0))
10683       {
10684 	machine_mode mode1;
10685 	HOST_WIDE_INT bitsize, bitpos;
10686 	tree offset;
10687 	int unsignedp, reversep, volatilep = 0;
10688 	tree tem
10689 	  = get_inner_reference (treeop0, &bitsize, &bitpos, &offset, &mode1,
10690 				 &unsignedp, &reversep, &volatilep, true);
10691 	rtx orig_op0;
10692 
10693 	/* ??? We should work harder and deal with non-zero offsets.  */
10694 	if (!offset
10695 	    && (bitpos % BITS_PER_UNIT) == 0
10696 	    && !reversep
10697 	    && bitsize >= 0
10698 	    && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10699 	  {
10700 	    /* See the normal_inner_ref case for the rationale.  */
10701 	    orig_op0
10702 	      = expand_expr_real (tem,
10703 				  (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10704 				   && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10705 				       != INTEGER_CST)
10706 				   && modifier != EXPAND_STACK_PARM
10707 				   ? target : NULL_RTX),
10708 				  VOIDmode,
10709 				  modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10710 				  NULL, true);
10711 
10712 	    if (MEM_P (orig_op0))
10713 	      {
10714 		op0 = orig_op0;
10715 
10716 		/* Get a reference to just this component.  */
10717 		if (modifier == EXPAND_CONST_ADDRESS
10718 		    || modifier == EXPAND_SUM
10719 		    || modifier == EXPAND_INITIALIZER)
10720 		  op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10721 		else
10722 		  op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10723 
10724 		if (op0 == orig_op0)
10725 		  op0 = copy_rtx (op0);
10726 
10727 		set_mem_attributes (op0, treeop0, 0);
10728 		if (REG_P (XEXP (op0, 0)))
10729 		  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10730 
10731 		MEM_VOLATILE_P (op0) |= volatilep;
10732 	      }
10733 	  }
10734       }
10735 
10736       if (!op0)
10737 	op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10738 				NULL, inner_reference_p);
10739 
10740       /* If the input and output modes are both the same, we are done.  */
10741       if (mode == GET_MODE (op0))
10742 	;
10743       /* If neither mode is BLKmode, and both modes are the same size
10744 	 then we can use gen_lowpart.  */
10745       else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10746 	       && (GET_MODE_PRECISION (mode)
10747 		   == GET_MODE_PRECISION (GET_MODE (op0)))
10748 	       && !COMPLEX_MODE_P (GET_MODE (op0)))
10749 	{
10750 	  if (GET_CODE (op0) == SUBREG)
10751 	    op0 = force_reg (GET_MODE (op0), op0);
10752 	  temp = gen_lowpart_common (mode, op0);
10753 	  if (temp)
10754 	    op0 = temp;
10755 	  else
10756 	    {
10757 	      if (!REG_P (op0) && !MEM_P (op0))
10758 		op0 = force_reg (GET_MODE (op0), op0);
10759 	      op0 = gen_lowpart (mode, op0);
10760 	    }
10761 	}
10762       /* If both types are integral, convert from one mode to the other.  */
10763       else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10764 	op0 = convert_modes (mode, GET_MODE (op0), op0,
10765 			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10766       /* If the output type is a bit-field type, do an extraction.  */
10767       else if (reduce_bit_field)
10768 	return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10769 				  TYPE_UNSIGNED (type), NULL_RTX,
10770 				  mode, mode, false);
10771       /* As a last resort, spill op0 to memory, and reload it in a
10772 	 different mode.  */
10773       else if (!MEM_P (op0))
10774 	{
10775 	  /* If the operand is not a MEM, force it into memory.  Since we
10776 	     are going to be changing the mode of the MEM, don't call
10777 	     force_const_mem for constants because we don't allow pool
10778 	     constants to change mode.  */
10779 	  tree inner_type = TREE_TYPE (treeop0);
10780 
10781 	  gcc_assert (!TREE_ADDRESSABLE (exp));
10782 
10783 	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10784 	    target
10785 	      = assign_stack_temp_for_type
10786 		(TYPE_MODE (inner_type),
10787 		 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10788 
10789 	  emit_move_insn (target, op0);
10790 	  op0 = target;
10791 	}
10792 
10793       /* If OP0 is (now) a MEM, we need to deal with alignment issues.  If the
10794 	 output type is such that the operand is known to be aligned, indicate
10795 	 that it is.  Otherwise, we need only be concerned about alignment for
10796 	 non-BLKmode results.  */
10797       if (MEM_P (op0))
10798 	{
10799 	  enum insn_code icode;
10800 
10801 	  if (TYPE_ALIGN_OK (type))
10802 	    {
10803 	      /* ??? Copying the MEM without substantially changing it might
10804 		 run afoul of the code handling volatile memory references in
10805 		 store_expr, which assumes that TARGET is returned unmodified
10806 		 if it has been used.  */
10807 	      op0 = copy_rtx (op0);
10808 	      set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10809 	    }
10810 	  else if (modifier != EXPAND_WRITE
10811 		   && modifier != EXPAND_MEMORY
10812 		   && !inner_reference_p
10813 		   && mode != BLKmode
10814 		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10815 	    {
10816 	      /* If the target does have special handling for unaligned
10817 		 loads of mode then use them.  */
10818 	      if ((icode = optab_handler (movmisalign_optab, mode))
10819 		  != CODE_FOR_nothing)
10820 		{
10821 		  rtx reg;
10822 
10823 		  op0 = adjust_address (op0, mode, 0);
10824 		  /* We've already validated the memory, and we're creating a
10825 		     new pseudo destination.  The predicates really can't
10826 		     fail.  */
10827 		  reg = gen_reg_rtx (mode);
10828 
10829 		  /* Nor can the insn generator.  */
10830 		  rtx_insn *insn = GEN_FCN (icode) (reg, op0);
10831 		  emit_insn (insn);
10832 		  return reg;
10833 		}
10834 	      else if (STRICT_ALIGNMENT)
10835 		{
10836 		  tree inner_type = TREE_TYPE (treeop0);
10837 		  HOST_WIDE_INT temp_size
10838 		    = MAX (int_size_in_bytes (inner_type),
10839 			   (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10840 		  rtx new_rtx
10841 		    = assign_stack_temp_for_type (mode, temp_size, type);
10842 		  rtx new_with_op0_mode
10843 		    = adjust_address (new_rtx, GET_MODE (op0), 0);
10844 
10845 		  gcc_assert (!TREE_ADDRESSABLE (exp));
10846 
10847 		  if (GET_MODE (op0) == BLKmode)
10848 		    emit_block_move (new_with_op0_mode, op0,
10849 				     GEN_INT (GET_MODE_SIZE (mode)),
10850 				     (modifier == EXPAND_STACK_PARM
10851 				      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10852 		  else
10853 		    emit_move_insn (new_with_op0_mode, op0);
10854 
10855 		  op0 = new_rtx;
10856 		}
10857 	    }
10858 
10859 	  op0 = adjust_address (op0, mode, 0);
10860 	}
10861 
10862       return op0;
10863 
10864     case MODIFY_EXPR:
10865       {
10866 	tree lhs = treeop0;
10867 	tree rhs = treeop1;
10868 	gcc_assert (ignore);
10869 
10870 	/* Check for |= or &= of a bitfield of size one into another bitfield
10871 	   of size 1.  In this case, (unless we need the result of the
10872 	   assignment) we can do this more efficiently with a
10873 	   test followed by an assignment, if necessary.
10874 
10875 	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
10876 	   things change so we do, this code should be enhanced to
10877 	   support it.  */
10878 	if (TREE_CODE (lhs) == COMPONENT_REF
10879 	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
10880 		|| TREE_CODE (rhs) == BIT_AND_EXPR)
10881 	    && TREE_OPERAND (rhs, 0) == lhs
10882 	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10883 	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10884 	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10885 	  {
10886 	    rtx_code_label *label = gen_label_rtx ();
10887 	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10888 	    do_jump (TREE_OPERAND (rhs, 1),
10889 		     value ? label : 0,
10890 		     value ? 0 : label, -1);
10891 	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10892 			       false);
10893 	    do_pending_stack_adjust ();
10894 	    emit_label (label);
10895 	    return const0_rtx;
10896 	  }
10897 
10898 	expand_assignment (lhs, rhs, false);
10899 	return const0_rtx;
10900       }
10901 
10902     case ADDR_EXPR:
10903       return expand_expr_addr_expr (exp, target, tmode, modifier);
10904 
10905     case REALPART_EXPR:
10906       op0 = expand_normal (treeop0);
10907       return read_complex_part (op0, false);
10908 
10909     case IMAGPART_EXPR:
10910       op0 = expand_normal (treeop0);
10911       return read_complex_part (op0, true);
10912 
10913     case RETURN_EXPR:
10914     case LABEL_EXPR:
10915     case GOTO_EXPR:
10916     case SWITCH_EXPR:
10917     case ASM_EXPR:
10918       /* Expanded in cfgexpand.c.  */
10919       gcc_unreachable ();
10920 
10921     case TRY_CATCH_EXPR:
10922     case CATCH_EXPR:
10923     case EH_FILTER_EXPR:
10924     case TRY_FINALLY_EXPR:
10925       /* Lowered by tree-eh.c.  */
10926       gcc_unreachable ();
10927 
10928     case WITH_CLEANUP_EXPR:
10929     case CLEANUP_POINT_EXPR:
10930     case TARGET_EXPR:
10931     case CASE_LABEL_EXPR:
10932     case VA_ARG_EXPR:
10933     case BIND_EXPR:
10934     case INIT_EXPR:
10935     case CONJ_EXPR:
10936     case COMPOUND_EXPR:
10937     case PREINCREMENT_EXPR:
10938     case PREDECREMENT_EXPR:
10939     case POSTINCREMENT_EXPR:
10940     case POSTDECREMENT_EXPR:
10941     case LOOP_EXPR:
10942     case EXIT_EXPR:
10943     case COMPOUND_LITERAL_EXPR:
10944       /* Lowered by gimplify.c.  */
10945       gcc_unreachable ();
10946 
10947     case FDESC_EXPR:
10948       /* Function descriptors are not valid except for as
10949 	 initialization constants, and should not be expanded.  */
10950       gcc_unreachable ();
10951 
10952     case WITH_SIZE_EXPR:
10953       /* WITH_SIZE_EXPR expands to its first argument.  The caller should
10954 	 have pulled out the size to use in whatever context it needed.  */
10955       return expand_expr_real (treeop0, original_target, tmode,
10956 			       modifier, alt_rtl, inner_reference_p);
10957 
10958     default:
10959       return expand_expr_real_2 (&ops, target, tmode, modifier);
10960     }
10961 }
10962 
10963 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10964    signedness of TYPE), possibly returning the result in TARGET.  */
10965 static rtx
10966 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10967 {
10968   HOST_WIDE_INT prec = TYPE_PRECISION (type);
10969   if (target && GET_MODE (target) != GET_MODE (exp))
10970     target = 0;
10971   /* For constant values, reduce using build_int_cst_type. */
10972   if (CONST_INT_P (exp))
10973     {
10974       HOST_WIDE_INT value = INTVAL (exp);
10975       tree t = build_int_cst_type (type, value);
10976       return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10977     }
10978   else if (TYPE_UNSIGNED (type))
10979     {
10980       machine_mode mode = GET_MODE (exp);
10981       rtx mask = immed_wide_int_const
10982 	(wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10983       return expand_and (mode, exp, mask, target);
10984     }
10985   else
10986     {
10987       int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10988       exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10989 			  exp, count, target, 0);
10990       return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10991 			   exp, count, target, 0);
10992     }
10993 }
10994 
10995 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10996    when applied to the address of EXP produces an address known to be
10997    aligned more than BIGGEST_ALIGNMENT.  */
10998 
10999 static int
11000 is_aligning_offset (const_tree offset, const_tree exp)
11001 {
11002   /* Strip off any conversions.  */
11003   while (CONVERT_EXPR_P (offset))
11004     offset = TREE_OPERAND (offset, 0);
11005 
11006   /* We must now have a BIT_AND_EXPR with a constant that is one less than
11007      power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
11008   if (TREE_CODE (offset) != BIT_AND_EXPR
11009       || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
11010       || compare_tree_int (TREE_OPERAND (offset, 1),
11011 			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
11012       || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
11013     return 0;
11014 
11015   /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
11016      It must be NEGATE_EXPR.  Then strip any more conversions.  */
11017   offset = TREE_OPERAND (offset, 0);
11018   while (CONVERT_EXPR_P (offset))
11019     offset = TREE_OPERAND (offset, 0);
11020 
11021   if (TREE_CODE (offset) != NEGATE_EXPR)
11022     return 0;
11023 
11024   offset = TREE_OPERAND (offset, 0);
11025   while (CONVERT_EXPR_P (offset))
11026     offset = TREE_OPERAND (offset, 0);
11027 
11028   /* This must now be the address of EXP.  */
11029   return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
11030 }
11031 
11032 /* Return the tree node if an ARG corresponds to a string constant or zero
11033    if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
11034    in bytes within the string that ARG is accessing.  The type of the
11035    offset will be `sizetype'.  */
11036 
11037 tree
11038 string_constant (tree arg, tree *ptr_offset)
11039 {
11040   tree array, offset, lower_bound;
11041   STRIP_NOPS (arg);
11042 
11043   if (TREE_CODE (arg) == ADDR_EXPR)
11044     {
11045       if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
11046 	{
11047 	  *ptr_offset = size_zero_node;
11048 	  return TREE_OPERAND (arg, 0);
11049 	}
11050       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
11051 	{
11052 	  array = TREE_OPERAND (arg, 0);
11053 	  offset = size_zero_node;
11054 	}
11055       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
11056 	{
11057 	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
11058 	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
11059 	  if (TREE_CODE (array) != STRING_CST
11060 	      && TREE_CODE (array) != VAR_DECL)
11061 	    return 0;
11062 
11063 	  /* Check if the array has a nonzero lower bound.  */
11064 	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
11065 	  if (!integer_zerop (lower_bound))
11066 	    {
11067 	      /* If the offset and base aren't both constants, return 0.  */
11068 	      if (TREE_CODE (lower_bound) != INTEGER_CST)
11069 	        return 0;
11070 	      if (TREE_CODE (offset) != INTEGER_CST)
11071 		return 0;
11072 	      /* Adjust offset by the lower bound.  */
11073 	      offset = size_diffop (fold_convert (sizetype, offset),
11074 				    fold_convert (sizetype, lower_bound));
11075 	    }
11076 	}
11077       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
11078 	{
11079 	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
11080 	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
11081 	  if (TREE_CODE (array) != ADDR_EXPR)
11082 	    return 0;
11083 	  array = TREE_OPERAND (array, 0);
11084 	  if (TREE_CODE (array) != STRING_CST
11085 	      && TREE_CODE (array) != VAR_DECL)
11086 	    return 0;
11087 	}
11088       else
11089 	return 0;
11090     }
11091   else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
11092     {
11093       tree arg0 = TREE_OPERAND (arg, 0);
11094       tree arg1 = TREE_OPERAND (arg, 1);
11095 
11096       STRIP_NOPS (arg0);
11097       STRIP_NOPS (arg1);
11098 
11099       if (TREE_CODE (arg0) == ADDR_EXPR
11100 	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
11101 	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
11102 	{
11103 	  array = TREE_OPERAND (arg0, 0);
11104 	  offset = arg1;
11105 	}
11106       else if (TREE_CODE (arg1) == ADDR_EXPR
11107 	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
11108 		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
11109 	{
11110 	  array = TREE_OPERAND (arg1, 0);
11111 	  offset = arg0;
11112 	}
11113       else
11114 	return 0;
11115     }
11116   else
11117     return 0;
11118 
11119   if (TREE_CODE (array) == STRING_CST)
11120     {
11121       *ptr_offset = fold_convert (sizetype, offset);
11122       return array;
11123     }
11124   else if (TREE_CODE (array) == VAR_DECL
11125 	   || TREE_CODE (array) == CONST_DECL)
11126     {
11127       int length;
11128       tree init = ctor_for_folding (array);
11129 
11130       /* Variables initialized to string literals can be handled too.  */
11131       if (init == error_mark_node
11132 	  || !init
11133 	  || TREE_CODE (init) != STRING_CST)
11134 	return 0;
11135 
11136       /* Avoid const char foo[4] = "abcde";  */
11137       if (DECL_SIZE_UNIT (array) == NULL_TREE
11138 	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
11139 	  || (length = TREE_STRING_LENGTH (init)) <= 0
11140 	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
11141 	return 0;
11142 
11143       /* If variable is bigger than the string literal, OFFSET must be constant
11144 	 and inside of the bounds of the string literal.  */
11145       offset = fold_convert (sizetype, offset);
11146       if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
11147 	  && (! tree_fits_uhwi_p (offset)
11148 	      || compare_tree_int (offset, length) >= 0))
11149 	return 0;
11150 
11151       *ptr_offset = offset;
11152       return init;
11153     }
11154 
11155   return 0;
11156 }
11157 
11158 /* Generate code to calculate OPS, and exploded expression
11159    using a store-flag instruction and return an rtx for the result.
11160    OPS reflects a comparison.
11161 
11162    If TARGET is nonzero, store the result there if convenient.
11163 
11164    Return zero if there is no suitable set-flag instruction
11165    available on this machine.
11166 
11167    Once expand_expr has been called on the arguments of the comparison,
11168    we are committed to doing the store flag, since it is not safe to
11169    re-evaluate the expression.  We emit the store-flag insn by calling
11170    emit_store_flag, but only expand the arguments if we have a reason
11171    to believe that emit_store_flag will be successful.  If we think that
11172    it will, but it isn't, we have to simulate the store-flag with a
11173    set/jump/set sequence.  */
11174 
11175 static rtx
11176 do_store_flag (sepops ops, rtx target, machine_mode mode)
11177 {
11178   enum rtx_code code;
11179   tree arg0, arg1, type;
11180   machine_mode operand_mode;
11181   int unsignedp;
11182   rtx op0, op1;
11183   rtx subtarget = target;
11184   location_t loc = ops->location;
11185 
11186   arg0 = ops->op0;
11187   arg1 = ops->op1;
11188 
11189   /* Don't crash if the comparison was erroneous.  */
11190   if (arg0 == error_mark_node || arg1 == error_mark_node)
11191     return const0_rtx;
11192 
11193   type = TREE_TYPE (arg0);
11194   operand_mode = TYPE_MODE (type);
11195   unsignedp = TYPE_UNSIGNED (type);
11196 
11197   /* We won't bother with BLKmode store-flag operations because it would mean
11198      passing a lot of information to emit_store_flag.  */
11199   if (operand_mode == BLKmode)
11200     return 0;
11201 
11202   /* We won't bother with store-flag operations involving function pointers
11203      when function pointers must be canonicalized before comparisons.  */
11204   if (targetm.have_canonicalize_funcptr_for_compare ()
11205       && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
11206 	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
11207 	       == FUNCTION_TYPE))
11208 	  || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
11209 	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
11210 		  == FUNCTION_TYPE))))
11211     return 0;
11212 
11213   STRIP_NOPS (arg0);
11214   STRIP_NOPS (arg1);
11215 
11216   /* For vector typed comparisons emit code to generate the desired
11217      all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
11218      expander for this.  */
11219   if (TREE_CODE (ops->type) == VECTOR_TYPE)
11220     {
11221       tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
11222       if (VECTOR_BOOLEAN_TYPE_P (ops->type)
11223 	  && expand_vec_cmp_expr_p (TREE_TYPE (arg0), ops->type))
11224 	return expand_vec_cmp_expr (ops->type, ifexp, target);
11225       else
11226 	{
11227 	  tree if_true = constant_boolean_node (true, ops->type);
11228 	  tree if_false = constant_boolean_node (false, ops->type);
11229 	  return expand_vec_cond_expr (ops->type, ifexp, if_true,
11230 				       if_false, target);
11231 	}
11232     }
11233 
11234   /* Get the rtx comparison code to use.  We know that EXP is a comparison
11235      operation of some type.  Some comparisons against 1 and -1 can be
11236      converted to comparisons with zero.  Do so here so that the tests
11237      below will be aware that we have a comparison with zero.   These
11238      tests will not catch constants in the first operand, but constants
11239      are rarely passed as the first operand.  */
11240 
11241   switch (ops->code)
11242     {
11243     case EQ_EXPR:
11244       code = EQ;
11245       break;
11246     case NE_EXPR:
11247       code = NE;
11248       break;
11249     case LT_EXPR:
11250       if (integer_onep (arg1))
11251 	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11252       else
11253 	code = unsignedp ? LTU : LT;
11254       break;
11255     case LE_EXPR:
11256       if (! unsignedp && integer_all_onesp (arg1))
11257 	arg1 = integer_zero_node, code = LT;
11258       else
11259 	code = unsignedp ? LEU : LE;
11260       break;
11261     case GT_EXPR:
11262       if (! unsignedp && integer_all_onesp (arg1))
11263 	arg1 = integer_zero_node, code = GE;
11264       else
11265 	code = unsignedp ? GTU : GT;
11266       break;
11267     case GE_EXPR:
11268       if (integer_onep (arg1))
11269 	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11270       else
11271 	code = unsignedp ? GEU : GE;
11272       break;
11273 
11274     case UNORDERED_EXPR:
11275       code = UNORDERED;
11276       break;
11277     case ORDERED_EXPR:
11278       code = ORDERED;
11279       break;
11280     case UNLT_EXPR:
11281       code = UNLT;
11282       break;
11283     case UNLE_EXPR:
11284       code = UNLE;
11285       break;
11286     case UNGT_EXPR:
11287       code = UNGT;
11288       break;
11289     case UNGE_EXPR:
11290       code = UNGE;
11291       break;
11292     case UNEQ_EXPR:
11293       code = UNEQ;
11294       break;
11295     case LTGT_EXPR:
11296       code = LTGT;
11297       break;
11298 
11299     default:
11300       gcc_unreachable ();
11301     }
11302 
11303   /* Put a constant second.  */
11304   if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11305       || TREE_CODE (arg0) == FIXED_CST)
11306     {
11307       std::swap (arg0, arg1);
11308       code = swap_condition (code);
11309     }
11310 
11311   /* If this is an equality or inequality test of a single bit, we can
11312      do this by shifting the bit being tested to the low-order bit and
11313      masking the result with the constant 1.  If the condition was EQ,
11314      we xor it with 1.  This does not require an scc insn and is faster
11315      than an scc insn even if we have it.
11316 
11317      The code to make this transformation was moved into fold_single_bit_test,
11318      so we just call into the folder and expand its result.  */
11319 
11320   if ((code == NE || code == EQ)
11321       && integer_zerop (arg1)
11322       && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11323     {
11324       gimple *srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11325       if (srcstmt
11326 	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11327 	{
11328 	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11329 	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11330 	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11331 				       gimple_assign_rhs1 (srcstmt),
11332 				       gimple_assign_rhs2 (srcstmt));
11333 	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11334 	  if (temp)
11335 	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11336 	}
11337     }
11338 
11339   if (! get_subtarget (target)
11340       || GET_MODE (subtarget) != operand_mode)
11341     subtarget = 0;
11342 
11343   expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11344 
11345   if (target == 0)
11346     target = gen_reg_rtx (mode);
11347 
11348   /* Try a cstore if possible.  */
11349   return emit_store_flag_force (target, code, op0, op1,
11350 				operand_mode, unsignedp,
11351 				(TYPE_PRECISION (ops->type) == 1
11352 				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11353 }
11354 
11355 /* Attempt to generate a casesi instruction.  Returns 1 if successful,
11356    0 otherwise (i.e. if there is no casesi instruction).
11357 
11358    DEFAULT_PROBABILITY is the probability of jumping to the default
11359    label.  */
11360 int
11361 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11362 	    rtx table_label, rtx default_label, rtx fallback_label,
11363             int default_probability)
11364 {
11365   struct expand_operand ops[5];
11366   machine_mode index_mode = SImode;
11367   rtx op1, op2, index;
11368 
11369   if (! targetm.have_casesi ())
11370     return 0;
11371 
11372   /* Convert the index to SImode.  */
11373   if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11374     {
11375       machine_mode omode = TYPE_MODE (index_type);
11376       rtx rangertx = expand_normal (range);
11377 
11378       /* We must handle the endpoints in the original mode.  */
11379       index_expr = build2 (MINUS_EXPR, index_type,
11380 			   index_expr, minval);
11381       minval = integer_zero_node;
11382       index = expand_normal (index_expr);
11383       if (default_label)
11384         emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11385 				 omode, 1, default_label,
11386                                  default_probability);
11387       /* Now we can safely truncate.  */
11388       index = convert_to_mode (index_mode, index, 0);
11389     }
11390   else
11391     {
11392       if (TYPE_MODE (index_type) != index_mode)
11393 	{
11394 	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11395 	  index_expr = fold_convert (index_type, index_expr);
11396 	}
11397 
11398       index = expand_normal (index_expr);
11399     }
11400 
11401   do_pending_stack_adjust ();
11402 
11403   op1 = expand_normal (minval);
11404   op2 = expand_normal (range);
11405 
11406   create_input_operand (&ops[0], index, index_mode);
11407   create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11408   create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11409   create_fixed_operand (&ops[3], table_label);
11410   create_fixed_operand (&ops[4], (default_label
11411 				  ? default_label
11412 				  : fallback_label));
11413   expand_jump_insn (targetm.code_for_casesi, 5, ops);
11414   return 1;
11415 }
11416 
11417 /* Attempt to generate a tablejump instruction; same concept.  */
11418 /* Subroutine of the next function.
11419 
11420    INDEX is the value being switched on, with the lowest value
11421    in the table already subtracted.
11422    MODE is its expected mode (needed if INDEX is constant).
11423    RANGE is the length of the jump table.
11424    TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11425 
11426    DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11427    index value is out of range.
11428    DEFAULT_PROBABILITY is the probability of jumping to
11429    the default label.  */
11430 
11431 static void
11432 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11433 	      rtx default_label, int default_probability)
11434 {
11435   rtx temp, vector;
11436 
11437   if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11438     cfun->cfg->max_jumptable_ents = INTVAL (range);
11439 
11440   /* Do an unsigned comparison (in the proper mode) between the index
11441      expression and the value which represents the length of the range.
11442      Since we just finished subtracting the lower bound of the range
11443      from the index expression, this comparison allows us to simultaneously
11444      check that the original index expression value is both greater than
11445      or equal to the minimum value of the range and less than or equal to
11446      the maximum value of the range.  */
11447 
11448   if (default_label)
11449     emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11450 			     default_label, default_probability);
11451 
11452 
11453   /* If index is in range, it must fit in Pmode.
11454      Convert to Pmode so we can index with it.  */
11455   if (mode != Pmode)
11456     index = convert_to_mode (Pmode, index, 1);
11457 
11458   /* Don't let a MEM slip through, because then INDEX that comes
11459      out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11460      and break_out_memory_refs will go to work on it and mess it up.  */
11461 #ifdef PIC_CASE_VECTOR_ADDRESS
11462   if (flag_pic && !REG_P (index))
11463     index = copy_to_mode_reg (Pmode, index);
11464 #endif
11465 
11466   /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11467      GET_MODE_SIZE, because this indicates how large insns are.  The other
11468      uses should all be Pmode, because they are addresses.  This code
11469      could fail if addresses and insns are not the same size.  */
11470   index = simplify_gen_binary (MULT, Pmode, index,
11471 			       gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11472 					     Pmode));
11473   index = simplify_gen_binary (PLUS, Pmode, index,
11474 			       gen_rtx_LABEL_REF (Pmode, table_label));
11475 
11476 #ifdef PIC_CASE_VECTOR_ADDRESS
11477   if (flag_pic)
11478     index = PIC_CASE_VECTOR_ADDRESS (index);
11479   else
11480 #endif
11481     index = memory_address (CASE_VECTOR_MODE, index);
11482   temp = gen_reg_rtx (CASE_VECTOR_MODE);
11483   vector = gen_const_mem (CASE_VECTOR_MODE, index);
11484   convert_move (temp, vector, 0);
11485 
11486   emit_jump_insn (targetm.gen_tablejump (temp, table_label));
11487 
11488   /* If we are generating PIC code or if the table is PC-relative, the
11489      table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
11490   if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11491     emit_barrier ();
11492 }
11493 
11494 int
11495 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11496 	       rtx table_label, rtx default_label, int default_probability)
11497 {
11498   rtx index;
11499 
11500   if (! targetm.have_tablejump ())
11501     return 0;
11502 
11503   index_expr = fold_build2 (MINUS_EXPR, index_type,
11504 			    fold_convert (index_type, index_expr),
11505 			    fold_convert (index_type, minval));
11506   index = expand_normal (index_expr);
11507   do_pending_stack_adjust ();
11508 
11509   do_tablejump (index, TYPE_MODE (index_type),
11510 		convert_modes (TYPE_MODE (index_type),
11511 			       TYPE_MODE (TREE_TYPE (range)),
11512 			       expand_normal (range),
11513 			       TYPE_UNSIGNED (TREE_TYPE (range))),
11514 		table_label, default_label, default_probability);
11515   return 1;
11516 }
11517 
11518 /* Return a CONST_VECTOR rtx representing vector mask for
11519    a VECTOR_CST of booleans.  */
11520 static rtx
11521 const_vector_mask_from_tree (tree exp)
11522 {
11523   rtvec v;
11524   unsigned i;
11525   int units;
11526   tree elt;
11527   machine_mode inner, mode;
11528 
11529   mode = TYPE_MODE (TREE_TYPE (exp));
11530   units = GET_MODE_NUNITS (mode);
11531   inner = GET_MODE_INNER (mode);
11532 
11533   v = rtvec_alloc (units);
11534 
11535   for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11536     {
11537       elt = VECTOR_CST_ELT (exp, i);
11538 
11539       gcc_assert (TREE_CODE (elt) == INTEGER_CST);
11540       if (integer_zerop (elt))
11541 	RTVEC_ELT (v, i) = CONST0_RTX (inner);
11542       else if (integer_onep (elt)
11543 	       || integer_minus_onep (elt))
11544 	RTVEC_ELT (v, i) = CONSTM1_RTX (inner);
11545       else
11546 	gcc_unreachable ();
11547     }
11548 
11549   return gen_rtx_CONST_VECTOR (mode, v);
11550 }
11551 
11552 /* Return a CONST_INT rtx representing vector mask for
11553    a VECTOR_CST of booleans.  */
11554 static rtx
11555 const_scalar_mask_from_tree (tree exp)
11556 {
11557   machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
11558   wide_int res = wi::zero (GET_MODE_PRECISION (mode));
11559   tree elt;
11560   unsigned i;
11561 
11562   for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11563     {
11564       elt = VECTOR_CST_ELT (exp, i);
11565       gcc_assert (TREE_CODE (elt) == INTEGER_CST);
11566       if (integer_all_onesp (elt))
11567 	res = wi::set_bit (res, i);
11568       else
11569 	gcc_assert (integer_zerop (elt));
11570     }
11571 
11572   return immed_wide_int_const (res, mode);
11573 }
11574 
11575 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
11576 static rtx
11577 const_vector_from_tree (tree exp)
11578 {
11579   rtvec v;
11580   unsigned i;
11581   int units;
11582   tree elt;
11583   machine_mode inner, mode;
11584 
11585   mode = TYPE_MODE (TREE_TYPE (exp));
11586 
11587   if (initializer_zerop (exp))
11588     return CONST0_RTX (mode);
11589 
11590   if (VECTOR_BOOLEAN_TYPE_P (TREE_TYPE (exp)))
11591     return const_vector_mask_from_tree (exp);
11592 
11593   units = GET_MODE_NUNITS (mode);
11594   inner = GET_MODE_INNER (mode);
11595 
11596   v = rtvec_alloc (units);
11597 
11598   for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11599     {
11600       elt = VECTOR_CST_ELT (exp, i);
11601 
11602       if (TREE_CODE (elt) == REAL_CST)
11603 	RTVEC_ELT (v, i) = const_double_from_real_value (TREE_REAL_CST (elt),
11604 							 inner);
11605       else if (TREE_CODE (elt) == FIXED_CST)
11606 	RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11607 							 inner);
11608       else
11609 	RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11610     }
11611 
11612   return gen_rtx_CONST_VECTOR (mode, v);
11613 }
11614 
11615 /* Build a decl for a personality function given a language prefix.  */
11616 
11617 tree
11618 build_personality_function (const char *lang)
11619 {
11620   const char *unwind_and_version;
11621   tree decl, type;
11622   char *name;
11623 
11624   switch (targetm_common.except_unwind_info (&global_options))
11625     {
11626     case UI_NONE:
11627       return NULL;
11628     case UI_SJLJ:
11629       unwind_and_version = "_sj0";
11630       break;
11631     case UI_DWARF2:
11632     case UI_TARGET:
11633       unwind_and_version = "_v0";
11634       break;
11635     case UI_SEH:
11636       unwind_and_version = "_seh0";
11637       break;
11638     default:
11639       gcc_unreachable ();
11640     }
11641 
11642   name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11643 
11644   type = build_function_type_list (integer_type_node, integer_type_node,
11645 				   long_long_unsigned_type_node,
11646 				   ptr_type_node, ptr_type_node, NULL_TREE);
11647   decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11648 		     get_identifier (name), type);
11649   DECL_ARTIFICIAL (decl) = 1;
11650   DECL_EXTERNAL (decl) = 1;
11651   TREE_PUBLIC (decl) = 1;
11652 
11653   /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
11654      are the flags assigned by targetm.encode_section_info.  */
11655   SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11656 
11657   return decl;
11658 }
11659 
11660 /* Extracts the personality function of DECL and returns the corresponding
11661    libfunc.  */
11662 
11663 rtx
11664 get_personality_function (tree decl)
11665 {
11666   tree personality = DECL_FUNCTION_PERSONALITY (decl);
11667   enum eh_personality_kind pk;
11668 
11669   pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11670   if (pk == eh_personality_none)
11671     return NULL;
11672 
11673   if (!personality
11674       && pk == eh_personality_any)
11675     personality = lang_hooks.eh_personality ();
11676 
11677   if (pk == eh_personality_lang)
11678     gcc_assert (personality != NULL_TREE);
11679 
11680   return XEXP (DECL_RTL (personality), 0);
11681 }
11682 
11683 /* Returns a tree for the size of EXP in bytes.  */
11684 
11685 static tree
11686 tree_expr_size (const_tree exp)
11687 {
11688   if (DECL_P (exp)
11689       && DECL_SIZE_UNIT (exp) != 0)
11690     return DECL_SIZE_UNIT (exp);
11691   else
11692     return size_in_bytes (TREE_TYPE (exp));
11693 }
11694 
11695 /* Return an rtx for the size in bytes of the value of EXP.  */
11696 
11697 rtx
11698 expr_size (tree exp)
11699 {
11700   tree size;
11701 
11702   if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11703     size = TREE_OPERAND (exp, 1);
11704   else
11705     {
11706       size = tree_expr_size (exp);
11707       gcc_assert (size);
11708       gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
11709     }
11710 
11711   return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
11712 }
11713 
11714 /* Return a wide integer for the size in bytes of the value of EXP, or -1
11715    if the size can vary or is larger than an integer.  */
11716 
11717 static HOST_WIDE_INT
11718 int_expr_size (tree exp)
11719 {
11720   tree size;
11721 
11722   if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11723     size = TREE_OPERAND (exp, 1);
11724   else
11725     {
11726       size = tree_expr_size (exp);
11727       gcc_assert (size);
11728     }
11729 
11730   if (size == 0 || !tree_fits_shwi_p (size))
11731     return -1;
11732 
11733   return tree_to_shwi (size);
11734 }
11735 
11736 #include "gt-expr.h"
11737