1 /* Convert tree expression to rtl instructions, for GNU compiler.
2    Copyright (C) 1988-2013 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "flags.h"
28 #include "regs.h"
29 #include "hard-reg-set.h"
30 #include "except.h"
31 #include "function.h"
32 #include "insn-config.h"
33 #include "insn-attr.h"
34 /* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
35 #include "expr.h"
36 #include "optabs.h"
37 #include "libfuncs.h"
38 #include "recog.h"
39 #include "reload.h"
40 #include "typeclass.h"
41 #include "toplev.h"
42 #include "langhooks.h"
43 #include "intl.h"
44 #include "tm_p.h"
45 #include "tree-iterator.h"
46 #include "tree-flow.h"
47 #include "target.h"
48 #include "common/common-target.h"
49 #include "timevar.h"
50 #include "df.h"
51 #include "diagnostic.h"
52 #include "ssaexpand.h"
53 #include "target-globals.h"
54 #include "params.h"
55 
56 /* Decide whether a function's arguments should be processed
57    from first to last or from last to first.
58 
59    They should if the stack and args grow in opposite directions, but
60    only if we have push insns.  */
61 
62 #ifdef PUSH_ROUNDING
63 
64 #ifndef PUSH_ARGS_REVERSED
65 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
66 #define PUSH_ARGS_REVERSED	/* If it's last to first.  */
67 #endif
68 #endif
69 
70 #endif
71 
72 #ifndef STACK_PUSH_CODE
73 #ifdef STACK_GROWS_DOWNWARD
74 #define STACK_PUSH_CODE PRE_DEC
75 #else
76 #define STACK_PUSH_CODE PRE_INC
77 #endif
78 #endif
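
/* Illustrative note: with STACK_PUSH_CODE defined as PRE_DEC, a single push
   of MODE is emitted as a store through a pre-decremented stack pointer,
   roughly

     (set (mem:MODE (pre_dec:P (reg SP))) (reg:MODE X))

   so the pointer adjustment and the store happen in one insn; PRE_INC is
   the mirror image for stacks that grow upward.  */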
79 
80 
81 /* If this is nonzero, we do not bother generating VOLATILE
82    around volatile memory references, and we are willing to
83    output indirect addresses.  If cse is to follow, we reject
84    indirect addresses so a useful potential cse is generated;
85    if it is used only once, instruction combination will produce
86    the same indirect address eventually.  */
87 int cse_not_expected;
88 
89 /* This structure is used by move_by_pieces to describe the move to
90    be performed.  */
91 struct move_by_pieces_d
92 {
93   rtx to;
94   rtx to_addr;
95   int autinc_to;
96   int explicit_inc_to;
97   rtx from;
98   rtx from_addr;
99   int autinc_from;
100   int explicit_inc_from;
101   unsigned HOST_WIDE_INT len;
102   HOST_WIDE_INT offset;
103   int reverse;
104 };
105 
106 /* This structure is used by store_by_pieces to describe the clear to
107    be performed.  */
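
/* CONSTFUN (CONSTFUNDATA, OFFSET, MODE) supplies the constant to store at
   byte OFFSET in mode MODE; clear_by_pieces, for example, plugs in a
   callback that simply returns const0_rtx, while callers that copy a
   constant string return the matching piece of that string.  */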
108 
109 struct store_by_pieces_d
110 {
111   rtx to;
112   rtx to_addr;
113   int autinc_to;
114   int explicit_inc_to;
115   unsigned HOST_WIDE_INT len;
116   HOST_WIDE_INT offset;
117   rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
118   void *constfundata;
119   int reverse;
120 };
121 
122 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
123 			      struct move_by_pieces_d *);
124 static bool block_move_libcall_safe_for_call_parm (void);
125 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
126 static tree emit_block_move_libcall_fn (int);
127 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
128 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
129 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
130 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
131 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
132 			       struct store_by_pieces_d *);
133 static tree clear_storage_libcall_fn (int);
134 static rtx compress_float_constant (rtx, rtx);
135 static rtx get_subtarget (rtx);
136 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
137 				     HOST_WIDE_INT, enum machine_mode,
138 				     tree, int, alias_set_type);
139 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
140 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
141 			unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
142 			enum machine_mode, tree, alias_set_type, bool);
143 
144 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
145 
146 static int is_aligning_offset (const_tree, const_tree);
147 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
148 			     enum expand_modifier);
149 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
150 static rtx do_store_flag (sepops, rtx, enum machine_mode);
151 #ifdef PUSH_ROUNDING
152 static void emit_single_push_insn (enum machine_mode, rtx, tree);
153 #endif
154 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx, int);
155 static rtx const_vector_from_tree (tree);
156 static void write_complex_part (rtx, rtx, bool);
157 
158 /* This macro is used to determine whether move_by_pieces should be called
159    to perform a structure copy.  */
160 #ifndef MOVE_BY_PIECES_P
161 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
162   (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
163    < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
164 #endif
165 
166 /* This macro is used to determine whether clear_by_pieces should be
167    called to clear storage.  */
168 #ifndef CLEAR_BY_PIECES_P
169 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
170   (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
171    < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
172 #endif
173 
174 /* This macro is used to determine whether store_by_pieces should be
175    called to "memset" storage with byte values other than zero.  */
176 #ifndef SET_BY_PIECES_P
177 #define SET_BY_PIECES_P(SIZE, ALIGN) \
178   (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
179    < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
180 #endif
181 
182 /* This macro is used to determine whether store_by_pieces should be
183    called to "memcpy" storage when the source is a constant string.  */
184 #ifndef STORE_BY_PIECES_P
185 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
186   (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187    < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
188 #endif
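
/* Worked example (figures purely illustrative): with well-aligned operands
   on a hypothetical 64-bit target, a 16-byte copy needs two DImode piece
   moves, so move_by_pieces_ninsns returns 2 and MOVE_BY_PIECES_P is true
   whenever the target's MOVE_RATIO for the current speed/size setting is
   greater than 2.  */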
189 
190 /* This is run to set up which modes can be used
191    directly in memory and to initialize the block move optab.  It is run
192    at the beginning of compilation and when the target is reinitialized.  */
193 
194 void
195 init_expr_target (void)
196 {
197   rtx insn, pat;
198   enum machine_mode mode;
199   int num_clobbers;
200   rtx mem, mem1;
201   rtx reg;
202 
203   /* Try indexing by frame ptr and try by stack ptr.
204      It is known that on the Convex the stack ptr isn't a valid index.
205      With luck, one or the other is valid on any machine.  */
206   mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
207   mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
208 
209   /* A scratch register we can modify in-place below to avoid
210      useless RTL allocations.  */
211   reg = gen_rtx_REG (VOIDmode, -1);
212 
213   insn = rtx_alloc (INSN);
214   pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
215   PATTERN (insn) = pat;
216 
217   for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
218        mode = (enum machine_mode) ((int) mode + 1))
219     {
220       int regno;
221 
222       direct_load[(int) mode] = direct_store[(int) mode] = 0;
223       PUT_MODE (mem, mode);
224       PUT_MODE (mem1, mode);
225       PUT_MODE (reg, mode);
226 
227       /* See if there is some register that can be used in this mode and
228 	 directly loaded or stored from memory.  */
229 
230       if (mode != VOIDmode && mode != BLKmode)
231 	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
232 	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
233 	     regno++)
234 	  {
235 	    if (! HARD_REGNO_MODE_OK (regno, mode))
236 	      continue;
237 
238 	    SET_REGNO (reg, regno);
239 
240 	    SET_SRC (pat) = mem;
241 	    SET_DEST (pat) = reg;
242 	    if (recog (pat, insn, &num_clobbers) >= 0)
243 	      direct_load[(int) mode] = 1;
244 
245 	    SET_SRC (pat) = mem1;
246 	    SET_DEST (pat) = reg;
247 	    if (recog (pat, insn, &num_clobbers) >= 0)
248 	      direct_load[(int) mode] = 1;
249 
250 	    SET_SRC (pat) = reg;
251 	    SET_DEST (pat) = mem;
252 	    if (recog (pat, insn, &num_clobbers) >= 0)
253 	      direct_store[(int) mode] = 1;
254 
255 	    SET_SRC (pat) = reg;
256 	    SET_DEST (pat) = mem1;
257 	    if (recog (pat, insn, &num_clobbers) >= 0)
258 	      direct_store[(int) mode] = 1;
259 	  }
260     }
261 
262   mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
263 
264   for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
265        mode = GET_MODE_WIDER_MODE (mode))
266     {
267       enum machine_mode srcmode;
268       for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
269 	   srcmode = GET_MODE_WIDER_MODE (srcmode))
270 	{
271 	  enum insn_code ic;
272 
273 	  ic = can_extend_p (mode, srcmode, 0);
274 	  if (ic == CODE_FOR_nothing)
275 	    continue;
276 
277 	  PUT_MODE (mem, srcmode);
278 
279 	  if (insn_operand_matches (ic, 1, mem))
280 	    float_extend_from_mem[mode][srcmode] = true;
281 	}
282     }
283 }
284 
285 /* This is run at the start of compiling a function.  */
286 
287 void
288 init_expr (void)
289 {
290   memset (&crtl->expr, 0, sizeof (crtl->expr));
291 }
292 
293 /* Copy data from FROM to TO, where the machine modes are not the same.
294    Both modes may be integer, or both may be floating, or both may be
295    fixed-point.
296    UNSIGNEDP should be nonzero if FROM is an unsigned type.
297    This causes zero-extension instead of sign-extension.  */
298 
299 void
300 convert_move (rtx to, rtx from, int unsignedp)
301 {
302   enum machine_mode to_mode = GET_MODE (to);
303   enum machine_mode from_mode = GET_MODE (from);
304   int to_real = SCALAR_FLOAT_MODE_P (to_mode);
305   int from_real = SCALAR_FLOAT_MODE_P (from_mode);
306   enum insn_code code;
307   rtx libcall;
308 
309   /* rtx code for making an equivalent value.  */
310   enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
311 			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
312 
313 
314   gcc_assert (to_real == from_real);
315   gcc_assert (to_mode != BLKmode);
316   gcc_assert (from_mode != BLKmode);
317 
318   /* If the source and destination are already the same, then there's
319      nothing to do.  */
320   if (to == from)
321     return;
322 
323   /* If FROM is a SUBREG that indicates that we have already done at least
324      the required extension, strip it.  We don't handle such SUBREGs as
325      TO here.  */
326 
327   if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
328       && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
329 	  >= GET_MODE_PRECISION (to_mode))
330       && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
331     from = gen_lowpart (to_mode, from), from_mode = to_mode;
332 
333   gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
334 
335   if (to_mode == from_mode
336       || (from_mode == VOIDmode && CONSTANT_P (from)))
337     {
338       emit_move_insn (to, from);
339       return;
340     }
341 
342   if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
343     {
344       gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
345 
346       if (VECTOR_MODE_P (to_mode))
347 	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
348       else
349 	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
350 
351       emit_move_insn (to, from);
352       return;
353     }
354 
355   if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
356     {
357       convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
358       convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
359       return;
360     }
361 
362   if (to_real)
363     {
364       rtx value, insns;
365       convert_optab tab;
366 
367       gcc_assert ((GET_MODE_PRECISION (from_mode)
368 		   != GET_MODE_PRECISION (to_mode))
369 		  || (DECIMAL_FLOAT_MODE_P (from_mode)
370 		      != DECIMAL_FLOAT_MODE_P (to_mode)));
371 
372       if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
373 	/* Conversion between decimal float and binary float, same size.  */
374 	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
375       else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
376 	tab = sext_optab;
377       else
378 	tab = trunc_optab;
379 
380       /* Try converting directly if the insn is supported.  */
381 
382       code = convert_optab_handler (tab, to_mode, from_mode);
383       if (code != CODE_FOR_nothing)
384 	{
385 	  emit_unop_insn (code, to, from,
386 			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
387 	  return;
388 	}
389 
390       /* Otherwise use a libcall.  */
391       libcall = convert_optab_libfunc (tab, to_mode, from_mode);
392 
393       /* Is this conversion implemented yet?  */
394       gcc_assert (libcall);
395 
396       start_sequence ();
397       value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
398 				       1, from, from_mode);
399       insns = get_insns ();
400       end_sequence ();
401       emit_libcall_block (insns, to, value,
402 			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
403 								       from)
404 			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
405       return;
406     }
407 
408   /* Handle pointer conversion.  */			/* SPEE 900220.  */
409   /* Targets are expected to provide conversion insns between PxImode and
410      xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
411   if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
412     {
413       enum machine_mode full_mode
414 	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
415 
416       gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
417 		  != CODE_FOR_nothing);
418 
419       if (full_mode != from_mode)
420 	from = convert_to_mode (full_mode, from, unsignedp);
421       emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
422 		      to, from, UNKNOWN);
423       return;
424     }
425   if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
426     {
427       rtx new_from;
428       enum machine_mode full_mode
429 	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
430       convert_optab ctab = unsignedp ? zext_optab : sext_optab;
431       enum insn_code icode;
432 
433       icode = convert_optab_handler (ctab, full_mode, from_mode);
434       gcc_assert (icode != CODE_FOR_nothing);
435 
436       if (to_mode == full_mode)
437 	{
438 	  emit_unop_insn (icode, to, from, UNKNOWN);
439 	  return;
440 	}
441 
442       new_from = gen_reg_rtx (full_mode);
443       emit_unop_insn (icode, new_from, from, UNKNOWN);
444 
445       /* else proceed to integer conversions below.  */
446       from_mode = full_mode;
447       from = new_from;
448     }
449 
450    /* Make sure both are fixed-point modes or both are not.  */
451    gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
452 	       ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
453    if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
454     {
455       /* If we widen from_mode to to_mode and they are in the same class,
456 	 we won't saturate the result.
457 	 Otherwise, always saturate the result to play safe.  */
458       if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
459 	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
460 	expand_fixed_convert (to, from, 0, 0);
461       else
462 	expand_fixed_convert (to, from, 0, 1);
463       return;
464     }
465 
466   /* Now both modes are integers.  */
467 
468   /* Handle expanding beyond a word.  */
469   if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
470       && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
471     {
472       rtx insns;
473       rtx lowpart;
474       rtx fill_value;
475       rtx lowfrom;
476       int i;
477       enum machine_mode lowpart_mode;
478       int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
479 
480       /* Try converting directly if the insn is supported.  */
481       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
482 	  != CODE_FOR_nothing)
483 	{
484 	  /* If FROM is a SUBREG, put it into a register.  Do this
485 	     so that we always generate the same set of insns for
486 	     better cse'ing; if an intermediate assignment occurred,
487 	     we won't be doing the operation directly on the SUBREG.  */
488 	  if (optimize > 0 && GET_CODE (from) == SUBREG)
489 	    from = force_reg (from_mode, from);
490 	  emit_unop_insn (code, to, from, equiv_code);
491 	  return;
492 	}
493       /* Next, try converting via full word.  */
494       else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
495 	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
496 		   != CODE_FOR_nothing))
497 	{
498 	  rtx word_to = gen_reg_rtx (word_mode);
499 	  if (REG_P (to))
500 	    {
501 	      if (reg_overlap_mentioned_p (to, from))
502 		from = force_reg (from_mode, from);
503 	      emit_clobber (to);
504 	    }
505 	  convert_move (word_to, from, unsignedp);
506 	  emit_unop_insn (code, to, word_to, equiv_code);
507 	  return;
508 	}
509 
510       /* No special multiword conversion insn; do it by hand.  */
511       start_sequence ();
512 
513       /* Since we will turn this into a no conflict block, we must ensure
514          that the source does not overlap the target, so force it into an
515          isolated register when it might.  Likewise for any MEM input, since the
516          conversion sequence might require several references to it and we
517          must ensure we're getting the same value every time.  */
518 
519       if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
520 	from = force_reg (from_mode, from);
521 
522       /* Get a copy of FROM widened to a word, if necessary.  */
523       if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
524 	lowpart_mode = word_mode;
525       else
526 	lowpart_mode = from_mode;
527 
528       lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
529 
530       lowpart = gen_lowpart (lowpart_mode, to);
531       emit_move_insn (lowpart, lowfrom);
532 
533       /* Compute the value to put in each remaining word.  */
534       if (unsignedp)
535 	fill_value = const0_rtx;
536       else
537 	fill_value = emit_store_flag (gen_reg_rtx (word_mode),
538 				      LT, lowfrom, const0_rtx,
539 				      VOIDmode, 0, -1);
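
      /* With normalizep of -1, emit_store_flag yields all ones when LOWFROM
	 is negative and zero otherwise, i.e. a word full of sign-bit copies
	 to use for the high-order words.  */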
540 
541       /* Fill the remaining words.  */
542       for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
543 	{
544 	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
545 	  rtx subword = operand_subword (to, index, 1, to_mode);
546 
547 	  gcc_assert (subword);
548 
549 	  if (fill_value != subword)
550 	    emit_move_insn (subword, fill_value);
551 	}
552 
553       insns = get_insns ();
554       end_sequence ();
555 
556       emit_insn (insns);
557       return;
558     }
559 
560   /* Truncating multi-word to a word or less.  */
561   if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
562       && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
563     {
564       if (!((MEM_P (from)
565 	     && ! MEM_VOLATILE_P (from)
566 	     && direct_load[(int) to_mode]
567 	     && ! mode_dependent_address_p (XEXP (from, 0),
568 					    MEM_ADDR_SPACE (from)))
569 	    || REG_P (from)
570 	    || GET_CODE (from) == SUBREG))
571 	from = force_reg (from_mode, from);
572       convert_move (to, gen_lowpart (word_mode, from), 0);
573       return;
574     }
575 
576   /* Now follow all the conversions between integers
577      no more than a word long.  */
578 
579   /* For truncation, usually we can just refer to FROM in a narrower mode.  */
580   if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
581       && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
582     {
583       if (!((MEM_P (from)
584 	     && ! MEM_VOLATILE_P (from)
585 	     && direct_load[(int) to_mode]
586 	     && ! mode_dependent_address_p (XEXP (from, 0),
587 					    MEM_ADDR_SPACE (from)))
588 	    || REG_P (from)
589 	    || GET_CODE (from) == SUBREG))
590 	from = force_reg (from_mode, from);
591       if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
592 	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
593 	from = copy_to_reg (from);
594       emit_move_insn (to, gen_lowpart (to_mode, from));
595       return;
596     }
597 
598   /* Handle extension.  */
599   if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
600     {
601       /* Convert directly if that works.  */
602       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
603 	  != CODE_FOR_nothing)
604 	{
605 	  emit_unop_insn (code, to, from, equiv_code);
606 	  return;
607 	}
608       else
609 	{
610 	  enum machine_mode intermediate;
611 	  rtx tmp;
612 	  int shift_amount;
613 
614 	  /* Search for a mode to convert via.  */
615 	  for (intermediate = from_mode; intermediate != VOIDmode;
616 	       intermediate = GET_MODE_WIDER_MODE (intermediate))
617 	    if (((can_extend_p (to_mode, intermediate, unsignedp)
618 		  != CODE_FOR_nothing)
619 		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
620 		     && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
621 		&& (can_extend_p (intermediate, from_mode, unsignedp)
622 		    != CODE_FOR_nothing))
623 	      {
624 		convert_move (to, convert_to_mode (intermediate, from,
625 						   unsignedp), unsignedp);
626 		return;
627 	      }
628 
629 	  /* No suitable intermediate mode.
630 	     Generate what we need with shifts.  */
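	  /* Shifting the value left by the precision difference and then
	     back right reproduces the extension; expand_shift emits a
	     logical right shift when UNSIGNEDP is set and an arithmetic
	     one otherwise.  */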
631 	  shift_amount = (GET_MODE_PRECISION (to_mode)
632 			  - GET_MODE_PRECISION (from_mode));
633 	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
634 	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
635 			      to, unsignedp);
636 	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
637 			      to, unsignedp);
638 	  if (tmp != to)
639 	    emit_move_insn (to, tmp);
640 	  return;
641 	}
642     }
643 
644   /* Support special truncate insns for certain modes.  */
645   if (convert_optab_handler (trunc_optab, to_mode,
646 			     from_mode) != CODE_FOR_nothing)
647     {
648       emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
649 		      to, from, UNKNOWN);
650       return;
651     }
652 
653   /* Handle truncation of volatile memrefs, and so on;
654      the things that couldn't be truncated directly,
655      and for which there was no special instruction.
656 
657      ??? Code above formerly short-circuited this, for most integer
658      mode pairs, with a force_reg in from_mode followed by a recursive
659      call to this routine.  Appears always to have been wrong.  */
660   if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
661     {
662       rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
663       emit_move_insn (to, temp);
664       return;
665     }
666 
667   /* Mode combination is not recognized.  */
668   gcc_unreachable ();
669 }
670 
671 /* Return an rtx for a value that would result
672    from converting X to mode MODE.
673    X's mode and MODE may both be floating or both may be integer.
674    UNSIGNEDP is nonzero if X is an unsigned value.
675    This can be done by referring to a part of X in place
676    or by copying to a new temporary with conversion.  */
677 
678 rtx
679 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
680 {
681   return convert_modes (mode, VOIDmode, x, unsignedp);
682 }
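
/* Illustrative use of convert_to_mode (names hypothetical): widen a QImode
   value VAL to SImode, sign-extending because UNSIGNEDP is zero:

     rtx wide = convert_to_mode (SImode, val, 0);

   The result may be VAL itself, a lowpart of it, or a fresh pseudo, so
   callers must not assume they receive a new register.  */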
683 
684 /* Return an rtx for a value that would result
685    from converting X from mode OLDMODE to mode MODE.
686    Both modes may be floating, or both integer.
687    UNSIGNEDP is nonzero if X is an unsigned value.
688 
689    This can be done by referring to a part of X in place
690    or by copying to a new temporary with conversion.
691 
692    You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
693 
694 rtx
695 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
696 {
697   rtx temp;
698 
699   /* If FROM is a SUBREG that indicates that we have already done at least
700      the required extension, strip it.  */
701 
702   if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
703       && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
704       && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
705     x = gen_lowpart (mode, x);
706 
707   if (GET_MODE (x) != VOIDmode)
708     oldmode = GET_MODE (x);
709 
710   if (mode == oldmode)
711     return x;
712 
713   /* There is one case that we must handle specially: If we are converting
714      a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
715      we are to interpret the constant as unsigned, gen_lowpart will do the
716      wrong thing if the constant appears negative.  What we want to do is
717      make the high-order word of the constant zero, not all ones.  */
718 
719   if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
720       && GET_MODE_BITSIZE (mode) == HOST_BITS_PER_DOUBLE_INT
721       && CONST_INT_P (x) && INTVAL (x) < 0)
722     {
723       double_int val = double_int::from_uhwi (INTVAL (x));
724 
725       /* We need to zero extend VAL.  */
726       if (oldmode != VOIDmode)
727 	val = val.zext (GET_MODE_BITSIZE (oldmode));
728 
729       return immed_double_int_const (val, mode);
730     }
731 
732   /* We can do this with a gen_lowpart if both desired and current modes
733      are integer, and this is either a constant integer, a register, or a
734      non-volatile MEM.  Except for the constant case where MODE is no
735      wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */
736 
737   if ((CONST_INT_P (x)
738        && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT)
739       || (GET_MODE_CLASS (mode) == MODE_INT
740 	  && GET_MODE_CLASS (oldmode) == MODE_INT
741 	  && (CONST_DOUBLE_AS_INT_P (x)
742 	      || (GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
743 		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
744 		       && direct_load[(int) mode])
745 		      || (REG_P (x)
746 			  && (! HARD_REGISTER_P (x)
747 			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
748 			  && TRULY_NOOP_TRUNCATION_MODES_P (mode,
749 							    GET_MODE (x))))))))
750     {
751       /* ??? If we don't know OLDMODE, we have to assume here that
752 	 X does not need sign- or zero-extension.  This may not be
753 	 the case, but it's the best we can do.  */
754       if (CONST_INT_P (x) && oldmode != VOIDmode
755 	  && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (oldmode))
756 	{
757 	  HOST_WIDE_INT val = INTVAL (x);
758 
759 	  /* We must sign or zero-extend in this case.  Start by
760 	     zero-extending, then sign extend if we need to.  */
761 	  val &= GET_MODE_MASK (oldmode);
762 	  if (! unsignedp
763 	      && val_signbit_known_set_p (oldmode, val))
764 	    val |= ~GET_MODE_MASK (oldmode);
765 
766 	  return gen_int_mode (val, mode);
767 	}
768 
769       return gen_lowpart (mode, x);
770     }
771 
772   /* Converting an integer constant into a vector MODE is always equivalent
773      to a subreg operation.  */
774   if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
775     {
776       gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
777       return simplify_gen_subreg (mode, x, oldmode, 0);
778     }
779 
780   temp = gen_reg_rtx (mode);
781   convert_move (temp, x, unsignedp);
782   return temp;
783 }
784 
785 /* Return the largest alignment we can use for doing a move (or store)
786    of MAX_PIECES.  ALIGN is the largest alignment we could use.  */
787 
788 static unsigned int
789 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
790 {
791   enum machine_mode tmode;
792 
793   tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
794   if (align >= GET_MODE_ALIGNMENT (tmode))
795     align = GET_MODE_ALIGNMENT (tmode);
796   else
797     {
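      /* ALIGN is below the natural alignment of the MAX_PIECES-wide mode;
	 find the widest integer mode that still fits in MAX_PIECES bytes
	 and is not slow to access unaligned at ALIGN, and raise ALIGN to
	 at least that mode's alignment.  */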
798       enum machine_mode tmode, xmode;
799 
800       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
801 	   tmode != VOIDmode;
802 	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
803 	if (GET_MODE_SIZE (tmode) > max_pieces
804 	    || SLOW_UNALIGNED_ACCESS (tmode, align))
805 	  break;
806 
807       align = MAX (align, GET_MODE_ALIGNMENT (xmode));
808     }
809 
810   return align;
811 }
812 
813 /* Return the widest integer mode that is strictly narrower than SIZE
814    bytes.  If no such mode can be found, return VOIDmode.  */
815 
816 static enum machine_mode
817 widest_int_mode_for_size (unsigned int size)
818 {
819   enum machine_mode tmode, mode = VOIDmode;
820 
821   for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
822        tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
823     if (GET_MODE_SIZE (tmode) < size)
824       mode = tmode;
825 
826   return mode;
827 }
828 
829 /* STORE_MAX_PIECES is the number of bytes at a time that we can
830    store efficiently.  Due to internal GCC limitations, this is
831    MOVE_MAX_PIECES limited by the number of bytes GCC can represent
832    for an immediate constant.  */
833 
834 #define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
835 
836 /* Determine whether the LEN bytes can be moved by using several move
837    instructions.  Return nonzero if a call to move_by_pieces should
838    succeed.  */
839 
840 int
841 can_move_by_pieces (unsigned HOST_WIDE_INT len ATTRIBUTE_UNUSED,
842 		    unsigned int align ATTRIBUTE_UNUSED)
843 {
844   return MOVE_BY_PIECES_P (len, align);
845 }
846 
847 /* Generate several move instructions to copy LEN bytes from block FROM to
848    block TO.  (These are MEM rtx's with BLKmode).
849 
850    If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
851    used to push FROM to the stack.
852 
853    ALIGN is maximum stack alignment we can assume.
854 
855    If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
856    mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
857    stpcpy.  */
858 
859 rtx
860 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
861 		unsigned int align, int endp)
862 {
863   struct move_by_pieces_d data;
864   enum machine_mode to_addr_mode;
865   enum machine_mode from_addr_mode = get_address_mode (from);
866   rtx to_addr, from_addr = XEXP (from, 0);
867   unsigned int max_size = MOVE_MAX_PIECES + 1;
868   enum insn_code icode;
869 
870   align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
871 
872   data.offset = 0;
873   data.from_addr = from_addr;
874   if (to)
875     {
876       to_addr_mode = get_address_mode (to);
877       to_addr = XEXP (to, 0);
878       data.to = to;
879       data.autinc_to
880 	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
881 	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
882       data.reverse
883 	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
884     }
885   else
886     {
887       to_addr_mode = VOIDmode;
888       to_addr = NULL_RTX;
889       data.to = NULL_RTX;
890       data.autinc_to = 1;
891 #ifdef STACK_GROWS_DOWNWARD
892       data.reverse = 1;
893 #else
894       data.reverse = 0;
895 #endif
896     }
897   data.to_addr = to_addr;
898   data.from = from;
899   data.autinc_from
900     = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
901        || GET_CODE (from_addr) == POST_INC
902        || GET_CODE (from_addr) == POST_DEC);
903 
904   data.explicit_inc_from = 0;
905   data.explicit_inc_to = 0;
906   if (data.reverse) data.offset = len;
907   data.len = len;
908 
909   /* If copying requires more than two move insns,
910      copy addresses to registers (to make displacements shorter)
911      and use post-increment if available.  */
912   if (!(data.autinc_from && data.autinc_to)
913       && move_by_pieces_ninsns (len, align, max_size) > 2)
914     {
915       /* Find the mode of the largest move...
916 	 MODE might not be used depending on the definitions of the
917 	 USE_* macros below.  */
918       enum machine_mode mode ATTRIBUTE_UNUSED
919 	= widest_int_mode_for_size (max_size);
920 
921       if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
922 	{
923 	  data.from_addr = copy_to_mode_reg (from_addr_mode,
924 					     plus_constant (from_addr_mode,
925 							    from_addr, len));
926 	  data.autinc_from = 1;
927 	  data.explicit_inc_from = -1;
928 	}
929       if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
930 	{
931 	  data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
932 	  data.autinc_from = 1;
933 	  data.explicit_inc_from = 1;
934 	}
935       if (!data.autinc_from && CONSTANT_P (from_addr))
936 	data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
937       if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
938 	{
939 	  data.to_addr = copy_to_mode_reg (to_addr_mode,
940 					   plus_constant (to_addr_mode,
941 							  to_addr, len));
942 	  data.autinc_to = 1;
943 	  data.explicit_inc_to = -1;
944 	}
945       if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
946 	{
947 	  data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
948 	  data.autinc_to = 1;
949 	  data.explicit_inc_to = 1;
950 	}
951       if (!data.autinc_to && CONSTANT_P (to_addr))
952 	data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
953     }
954 
955   align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
956 
957   /* First move what we can in the largest integer mode, then go to
958      successively smaller modes.  */
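  /* For instance, on a hypothetical 32-bit target an 11-byte copy is done
     as two SImode moves, one HImode move and one QImode move: each pass of
     the loop below consumes every piece of one mode, then MAX_SIZE shrinks
     so the next iteration picks the next narrower mode.  */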
959 
960   while (max_size > 1 && data.len > 0)
961     {
962       enum machine_mode mode = widest_int_mode_for_size (max_size);
963 
964       if (mode == VOIDmode)
965 	break;
966 
967       icode = optab_handler (mov_optab, mode);
968       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
969 	move_by_pieces_1 (GEN_FCN (icode), mode, &data);
970 
971       max_size = GET_MODE_SIZE (mode);
972     }
973 
974   /* The code above should have handled everything.  */
975   gcc_assert (!data.len);
976 
977   if (endp)
978     {
979       rtx to1;
980 
981       gcc_assert (!data.reverse);
982       if (data.autinc_to)
983 	{
984 	  if (endp == 2)
985 	    {
986 	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
987 		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
988 	      else
989 		data.to_addr = copy_to_mode_reg (to_addr_mode,
990 						 plus_constant (to_addr_mode,
991 								data.to_addr,
992 								-1));
993 	    }
994 	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
995 					   data.offset);
996 	}
997       else
998 	{
999 	  if (endp == 2)
1000 	    --data.offset;
1001 	  to1 = adjust_address (data.to, QImode, data.offset);
1002 	}
1003       return to1;
1004     }
1005   else
1006     return data.to;
1007 }
1008 
1009 /* Return number of insns required to move L bytes by pieces.
1010    ALIGN (in bits) is maximum alignment we can assume.  */
1011 
1012 unsigned HOST_WIDE_INT
1013 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1014 		       unsigned int max_size)
1015 {
1016   unsigned HOST_WIDE_INT n_insns = 0;
1017 
1018   align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1019 
1020   while (max_size > 1 && l > 0)
1021     {
1022       enum machine_mode mode;
1023       enum insn_code icode;
1024 
1025       mode = widest_int_mode_for_size (max_size);
1026 
1027       if (mode == VOIDmode)
1028 	break;
1029 
1030       icode = optab_handler (mov_optab, mode);
1031       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1032 	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1033 
1034       max_size = GET_MODE_SIZE (mode);
1035     }
1036 
1037   gcc_assert (!l);
1038   return n_insns;
1039 }
1040 
1041 /* Subroutine of move_by_pieces.  Move as many bytes as appropriate
1042    with move instructions for mode MODE.  GENFUN is the gen_... function
1043    to make a move insn for that mode.  DATA has all the other info.  */
1044 
1045 static void
1046 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1047 		  struct move_by_pieces_d *data)
1048 {
1049   unsigned int size = GET_MODE_SIZE (mode);
1050   rtx to1 = NULL_RTX, from1;
1051 
1052   while (data->len >= size)
1053     {
1054       if (data->reverse)
1055 	data->offset -= size;
1056 
1057       if (data->to)
1058 	{
1059 	  if (data->autinc_to)
1060 	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1061 					     data->offset);
1062 	  else
1063 	    to1 = adjust_address (data->to, mode, data->offset);
1064 	}
1065 
1066       if (data->autinc_from)
1067 	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1068 					   data->offset);
1069       else
1070 	from1 = adjust_address (data->from, mode, data->offset);
1071 
1072       if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1073 	emit_insn (gen_add2_insn (data->to_addr,
1074 				  GEN_INT (-(HOST_WIDE_INT)size)));
1075       if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1076 	emit_insn (gen_add2_insn (data->from_addr,
1077 				  GEN_INT (-(HOST_WIDE_INT)size)));
1078 
1079       if (data->to)
1080 	emit_insn ((*genfun) (to1, from1));
1081       else
1082 	{
1083 #ifdef PUSH_ROUNDING
1084 	  emit_single_push_insn (mode, from1, NULL);
1085 #else
1086 	  gcc_unreachable ();
1087 #endif
1088 	}
1089 
1090       if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1091 	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1092       if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1093 	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1094 
1095       if (! data->reverse)
1096 	data->offset += size;
1097 
1098       data->len -= size;
1099     }
1100 }
1101 
1102 /* Emit code to move a block Y to a block X.  This may be done with
1103    string-move instructions, with multiple scalar move instructions,
1104    or with a library call.
1105 
1106    Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1107    SIZE is an rtx that says how long they are.
1108    ALIGN is the maximum alignment we can assume they have.
1109    METHOD describes what kind of copy this is, and what mechanisms may be used.
1110 
1111    Return the address of the new block, if memcpy is called and returns it,
1112    0 otherwise.  */
1113 
1114 rtx
1115 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1116 		       unsigned int expected_align, HOST_WIDE_INT expected_size)
1117 {
1118   bool may_use_call;
1119   rtx retval = 0;
1120   unsigned int align;
1121 
1122   gcc_assert (size);
1123   if (CONST_INT_P (size)
1124       && INTVAL (size) == 0)
1125     return 0;
1126 
1127   switch (method)
1128     {
1129     case BLOCK_OP_NORMAL:
1130     case BLOCK_OP_TAILCALL:
1131       may_use_call = true;
1132       break;
1133 
1134     case BLOCK_OP_CALL_PARM:
1135       may_use_call = block_move_libcall_safe_for_call_parm ();
1136 
1137       /* Make inhibit_defer_pop nonzero around the library call
1138 	 to force it to pop the arguments right away.  */
1139       NO_DEFER_POP;
1140       break;
1141 
1142     case BLOCK_OP_NO_LIBCALL:
1143       may_use_call = false;
1144       break;
1145 
1146     default:
1147       gcc_unreachable ();
1148     }
1149 
1150   gcc_assert (MEM_P (x) && MEM_P (y));
1151   align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1152   gcc_assert (align >= BITS_PER_UNIT);
1153 
1154   /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1155      block copy is more efficient for other large modes, e.g. DCmode.  */
1156   x = adjust_address (x, BLKmode, 0);
1157   y = adjust_address (y, BLKmode, 0);
1158 
1159   /* Set MEM_SIZE as appropriate for this block copy.  The main place this
1160      can be incorrect is coming from __builtin_memcpy.  */
1161   if (CONST_INT_P (size))
1162     {
1163       x = shallow_copy_rtx (x);
1164       y = shallow_copy_rtx (y);
1165       set_mem_size (x, INTVAL (size));
1166       set_mem_size (y, INTVAL (size));
1167     }
1168 
1169   if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1170     move_by_pieces (x, y, INTVAL (size), align, 0);
1171   else if (emit_block_move_via_movmem (x, y, size, align,
1172 				       expected_align, expected_size))
1173     ;
1174   else if (may_use_call
1175 	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1176 	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1177     {
1178       /* Since x and y are passed to a libcall, mark the corresponding
1179 	 tree EXPR as addressable.  */
1180       tree y_expr = MEM_EXPR (y);
1181       tree x_expr = MEM_EXPR (x);
1182       if (y_expr)
1183 	mark_addressable (y_expr);
1184       if (x_expr)
1185 	mark_addressable (x_expr);
1186       retval = emit_block_move_via_libcall (x, y, size,
1187 					    method == BLOCK_OP_TAILCALL);
1188     }
1189 
1190   else
1191     emit_block_move_via_loop (x, y, size, align);
1192 
1193   if (method == BLOCK_OP_CALL_PARM)
1194     OK_DEFER_POP;
1195 
1196   return retval;
1197 }
1198 
1199 rtx
1200 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1201 {
1202   return emit_block_move_hints (x, y, size, method, 0, -1);
1203 }
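
/* Illustrative use (names hypothetical): copying SIZE bytes of an aggregate,

     emit_block_move (dst_mem, src_mem, GEN_INT (size), BLOCK_OP_NORMAL);

   tries move_by_pieces first, then a movmem expander, and finally falls
   back to a memcpy libcall or, if libcalls are not allowed, a byte loop.  */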
1204 
1205 /* A subroutine of emit_block_move.  Returns true if calling the
1206    block move libcall will not clobber any parameters which may have
1207    already been placed on the stack.  */
1208 
1209 static bool
1210 block_move_libcall_safe_for_call_parm (void)
1211 {
1212 #if defined (REG_PARM_STACK_SPACE)
1213   tree fn;
1214 #endif
1215 
1216   /* If arguments are pushed on the stack, then they're safe.  */
1217   if (PUSH_ARGS)
1218     return true;
1219 
1220   /* If registers go on the stack anyway, any argument is sure to clobber
1221      an outgoing argument.  */
1222 #if defined (REG_PARM_STACK_SPACE)
1223   fn = emit_block_move_libcall_fn (false);
1224   /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1225      depend on its argument.  */
1226   (void) fn;
1227   if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1228       && REG_PARM_STACK_SPACE (fn) != 0)
1229     return false;
1230 #endif
1231 
1232   /* If any argument goes in memory, then it might clobber an outgoing
1233      argument.  */
1234   {
1235     CUMULATIVE_ARGS args_so_far_v;
1236     cumulative_args_t args_so_far;
1237     tree fn, arg;
1238 
1239     fn = emit_block_move_libcall_fn (false);
1240     INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1241     args_so_far = pack_cumulative_args (&args_so_far_v);
1242 
1243     arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1244     for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1245       {
1246 	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1247 	rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1248 					      NULL_TREE, true);
1249 	if (!tmp || !REG_P (tmp))
1250 	  return false;
1251 	if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1252 	  return false;
1253 	targetm.calls.function_arg_advance (args_so_far, mode,
1254 					    NULL_TREE, true);
1255       }
1256   }
1257   return true;
1258 }
1259 
1260 /* A subroutine of emit_block_move.  Expand a movmem pattern;
1261    return true if successful.  */
1262 
1263 static bool
1264 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1265 			    unsigned int expected_align, HOST_WIDE_INT expected_size)
1266 {
1267   int save_volatile_ok = volatile_ok;
1268   enum machine_mode mode;
1269 
1270   if (expected_align < align)
1271     expected_align = align;
1272 
1273   /* Since this is a move insn, we don't care about volatility.  */
1274   volatile_ok = 1;
1275 
1276   /* Try the most limited insn first, because there's no point
1277      including more than one in the machine description unless
1278      the more limited one has some advantage.  */
1279 
1280   for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1281        mode = GET_MODE_WIDER_MODE (mode))
1282     {
1283       enum insn_code code = direct_optab_handler (movmem_optab, mode);
1284 
1285       if (code != CODE_FOR_nothing
1286 	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1287 	     here because if SIZE is less than the mode mask, as it is
1288 	     returned by the macro, it will definitely be less than the
1289 	     actual mode mask.  */
1290 	  && ((CONST_INT_P (size)
1291 	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
1292 		   <= (GET_MODE_MASK (mode) >> 1)))
1293 	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
1294 	{
1295 	  struct expand_operand ops[6];
1296 	  unsigned int nops;
1297 
1298 	  /* ??? When called via emit_block_move_for_call, it'd be
1299 	     nice if there were some way to inform the backend, so
1300 	     that it doesn't fail the expansion because it thinks
1301 	     emitting the libcall would be more efficient.  */
1302 	  nops = insn_data[(int) code].n_generator_args;
1303 	  gcc_assert (nops == 4 || nops == 6);
1304 
1305 	  create_fixed_operand (&ops[0], x);
1306 	  create_fixed_operand (&ops[1], y);
1307 	  /* The check above guarantees that this size conversion is valid.  */
1308 	  create_convert_operand_to (&ops[2], size, mode, true);
1309 	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1310 	  if (nops == 6)
1311 	    {
1312 	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1313 	      create_integer_operand (&ops[5], expected_size);
1314 	    }
1315 	  if (maybe_expand_insn (code, nops, ops))
1316 	    {
1317 	      volatile_ok = save_volatile_ok;
1318 	      return true;
1319 	    }
1320 	}
1321     }
1322 
1323   volatile_ok = save_volatile_ok;
1324   return false;
1325 }
1326 
1327 /* A subroutine of emit_block_move.  Expand a call to memcpy.
1328    Return the return value from memcpy, 0 otherwise.  */
1329 
1330 rtx
1331 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1332 {
1333   rtx dst_addr, src_addr;
1334   tree call_expr, fn, src_tree, dst_tree, size_tree;
1335   enum machine_mode size_mode;
1336   rtx retval;
1337 
1338   /* Emit code to copy the addresses of DST and SRC and SIZE into new
1339      pseudos.  We can then place those new pseudos into a VAR_DECL and
1340      use them later.  */
1341 
1342   dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1343   src_addr = copy_addr_to_reg (XEXP (src, 0));
1344 
1345   dst_addr = convert_memory_address (ptr_mode, dst_addr);
1346   src_addr = convert_memory_address (ptr_mode, src_addr);
1347 
1348   dst_tree = make_tree (ptr_type_node, dst_addr);
1349   src_tree = make_tree (ptr_type_node, src_addr);
1350 
1351   size_mode = TYPE_MODE (sizetype);
1352 
1353   size = convert_to_mode (size_mode, size, 1);
1354   size = copy_to_mode_reg (size_mode, size);
1355 
1356   /* It is incorrect to use the libcall calling conventions to call
1357      memcpy in this context.  This could be a user call to memcpy and
1358      the user may wish to examine the return value from memcpy.  For
1359      targets where libcalls and normal calls have different conventions
1360      for returning pointers, we could end up generating incorrect code.  */
1361 
1362   size_tree = make_tree (sizetype, size);
1363 
1364   fn = emit_block_move_libcall_fn (true);
1365   call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1366   CALL_EXPR_TAILCALL (call_expr) = tailcall;
1367 
1368   retval = expand_normal (call_expr);
1369 
1370   return retval;
1371 }
1372 
1373 /* A subroutine of emit_block_move_via_libcall.  Create the tree node
1374    for the function we use for block copies.  */
1375 
1376 static GTY(()) tree block_move_fn;
1377 
1378 void
1379 init_block_move_fn (const char *asmspec)
1380 {
1381   if (!block_move_fn)
1382     {
1383       tree args, fn, attrs, attr_args;
1384 
1385       fn = get_identifier ("memcpy");
1386       args = build_function_type_list (ptr_type_node, ptr_type_node,
1387 				       const_ptr_type_node, sizetype,
1388 				       NULL_TREE);
1389 
1390       fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1391       DECL_EXTERNAL (fn) = 1;
1392       TREE_PUBLIC (fn) = 1;
1393       DECL_ARTIFICIAL (fn) = 1;
1394       TREE_NOTHROW (fn) = 1;
1395       DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1396       DECL_VISIBILITY_SPECIFIED (fn) = 1;
1397 
1398       attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1399       attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1400 
1401       decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1402 
1403       block_move_fn = fn;
1404     }
1405 
1406   if (asmspec)
1407     set_user_assembler_name (block_move_fn, asmspec);
1408 }
1409 
1410 static tree
1411 emit_block_move_libcall_fn (int for_call)
1412 {
1413   static bool emitted_extern;
1414 
1415   if (!block_move_fn)
1416     init_block_move_fn (NULL);
1417 
1418   if (for_call && !emitted_extern)
1419     {
1420       emitted_extern = true;
1421       make_decl_rtl (block_move_fn);
1422     }
1423 
1424   return block_move_fn;
1425 }
1426 
1427 /* A subroutine of emit_block_move.  Copy the data via an explicit
1428    loop.  This is used only when libcalls are forbidden.  */
1429 /* ??? It'd be nice to copy in hunks larger than QImode.  */
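/* The generated loop has roughly this shape (pseudo-C, for illustration):

       iter = 0;
       goto cmp;
     top:
       x[iter] = y[iter];
       iter = iter + 1;
     cmp:
       if (iter < size) goto top;
*/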
1430 
1431 static void
1432 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1433 			  unsigned int align ATTRIBUTE_UNUSED)
1434 {
1435   rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1436   enum machine_mode x_addr_mode = get_address_mode (x);
1437   enum machine_mode y_addr_mode = get_address_mode (y);
1438   enum machine_mode iter_mode;
1439 
1440   iter_mode = GET_MODE (size);
1441   if (iter_mode == VOIDmode)
1442     iter_mode = word_mode;
1443 
1444   top_label = gen_label_rtx ();
1445   cmp_label = gen_label_rtx ();
1446   iter = gen_reg_rtx (iter_mode);
1447 
1448   emit_move_insn (iter, const0_rtx);
1449 
1450   x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1451   y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1452   do_pending_stack_adjust ();
1453 
1454   emit_jump (cmp_label);
1455   emit_label (top_label);
1456 
1457   tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1458   x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1459 
1460   if (x_addr_mode != y_addr_mode)
1461     tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1462   y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1463 
1464   x = change_address (x, QImode, x_addr);
1465   y = change_address (y, QImode, y_addr);
1466 
1467   emit_move_insn (x, y);
1468 
1469   tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1470 			     true, OPTAB_LIB_WIDEN);
1471   if (tmp != iter)
1472     emit_move_insn (iter, tmp);
1473 
1474   emit_label (cmp_label);
1475 
1476   emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1477 			   true, top_label, REG_BR_PROB_BASE * 90 / 100);
1478 }
1479 
1480 /* Copy all or part of a value X into registers starting at REGNO.
1481    The number of registers to be filled is NREGS.  */
1482 
1483 void
1484 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1485 {
1486   int i;
1487 #ifdef HAVE_load_multiple
1488   rtx pat;
1489   rtx last;
1490 #endif
1491 
1492   if (nregs == 0)
1493     return;
1494 
1495   if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1496     x = validize_mem (force_const_mem (mode, x));
1497 
1498   /* See if the machine can do this with a load multiple insn.  */
1499 #ifdef HAVE_load_multiple
1500   if (HAVE_load_multiple)
1501     {
1502       last = get_last_insn ();
1503       pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1504 			       GEN_INT (nregs));
1505       if (pat)
1506 	{
1507 	  emit_insn (pat);
1508 	  return;
1509 	}
1510       else
1511 	delete_insns_since (last);
1512     }
1513 #endif
1514 
1515   for (i = 0; i < nregs; i++)
1516     emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1517 		    operand_subword_force (x, i, mode));
1518 }
1519 
1520 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1521    The number of registers to be filled is NREGS.  */
1522 
1523 void
1524 move_block_from_reg (int regno, rtx x, int nregs)
1525 {
1526   int i;
1527 
1528   if (nregs == 0)
1529     return;
1530 
1531   /* See if the machine can do this with a store multiple insn.  */
1532 #ifdef HAVE_store_multiple
1533   if (HAVE_store_multiple)
1534     {
1535       rtx last = get_last_insn ();
1536       rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1537 				    GEN_INT (nregs));
1538       if (pat)
1539 	{
1540 	  emit_insn (pat);
1541 	  return;
1542 	}
1543       else
1544 	delete_insns_since (last);
1545     }
1546 #endif
1547 
1548   for (i = 0; i < nregs; i++)
1549     {
1550       rtx tem = operand_subword (x, i, 1, BLKmode);
1551 
1552       gcc_assert (tem);
1553 
1554       emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1555     }
1556 }
1557 
1558 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1559    ORIG, where ORIG is a non-consecutive group of registers represented by
1560    a PARALLEL.  The clone is identical to the original except in that the
1561    original set of registers is replaced by a new set of pseudo registers.
1562    The new set has the same modes as the original set.  */
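
/* For reference (illustrative), such a register group is a PARALLEL like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
		(expr_list (reg:DI 4) (const_int 8))])

   where each element pairs a register with its byte offset into the whole
   value; a null register in the first element means part of the value also
   lives on the stack.  */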
1563 
1564 rtx
1565 gen_group_rtx (rtx orig)
1566 {
1567   int i, length;
1568   rtx *tmps;
1569 
1570   gcc_assert (GET_CODE (orig) == PARALLEL);
1571 
1572   length = XVECLEN (orig, 0);
1573   tmps = XALLOCAVEC (rtx, length);
1574 
1575   /* Skip a NULL entry in first slot.  */
1576   i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1577 
1578   if (i)
1579     tmps[0] = 0;
1580 
1581   for (; i < length; i++)
1582     {
1583       enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1584       rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1585 
1586       tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1587     }
1588 
1589   return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1590 }
1591 
1592 /* A subroutine of emit_group_load.  Arguments as for emit_group_load,
1593    except that values are placed in TMPS[i], and must later be moved
1594    into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */
1595 
1596 static void
1597 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1598 {
1599   rtx src;
1600   int start, i;
1601   enum machine_mode m = GET_MODE (orig_src);
1602 
1603   gcc_assert (GET_CODE (dst) == PARALLEL);
1604 
1605   if (m != VOIDmode
1606       && !SCALAR_INT_MODE_P (m)
1607       && !MEM_P (orig_src)
1608       && GET_CODE (orig_src) != CONCAT)
1609     {
1610       enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1611       if (imode == BLKmode)
1612 	src = assign_stack_temp (GET_MODE (orig_src), ssize);
1613       else
1614 	src = gen_reg_rtx (imode);
1615       if (imode != BLKmode)
1616 	src = gen_lowpart (GET_MODE (orig_src), src);
1617       emit_move_insn (src, orig_src);
1618       /* ...and back again.  */
1619       if (imode != BLKmode)
1620 	src = gen_lowpart (imode, src);
1621       emit_group_load_1 (tmps, dst, src, type, ssize);
1622       return;
1623     }
1624 
1625   /* Check for a NULL entry, used to indicate that the parameter goes
1626      both on the stack and in registers.  */
1627   if (XEXP (XVECEXP (dst, 0, 0), 0))
1628     start = 0;
1629   else
1630     start = 1;
1631 
1632   /* Process the pieces.  */
1633   for (i = start; i < XVECLEN (dst, 0); i++)
1634     {
1635       enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1636       HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1637       unsigned int bytelen = GET_MODE_SIZE (mode);
1638       int shift = 0;
1639 
1640       /* Handle trailing fragments that run over the size of the struct.  */
1641       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1642 	{
1643 	  /* Arrange to shift the fragment to where it belongs.
1644 	     extract_bit_field loads to the lsb of the reg.  */
1645 	  if (
1646 #ifdef BLOCK_REG_PADDING
1647 	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1648 	      == (BYTES_BIG_ENDIAN ? upward : downward)
1649 #else
1650 	      BYTES_BIG_ENDIAN
1651 #endif
1652 	      )
1653 	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1654 	  bytelen = ssize - bytepos;
1655 	  gcc_assert (bytelen > 0);
1656 	}
1657 
1658       /* If we won't be loading directly from memory, protect the real source
1659 	 from strange tricks we might play; but make sure that the source can
1660 	 be loaded directly into the destination.  */
1661       src = orig_src;
1662       if (!MEM_P (orig_src)
1663 	  && (!CONSTANT_P (orig_src)
1664 	      || (GET_MODE (orig_src) != mode
1665 		  && GET_MODE (orig_src) != VOIDmode)))
1666 	{
1667 	  if (GET_MODE (orig_src) == VOIDmode)
1668 	    src = gen_reg_rtx (mode);
1669 	  else
1670 	    src = gen_reg_rtx (GET_MODE (orig_src));
1671 
1672 	  emit_move_insn (src, orig_src);
1673 	}
1674 
1675       /* Optimize the access just a bit.  */
1676       if (MEM_P (src)
1677 	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1678 	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1679 	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1680 	  && bytelen == GET_MODE_SIZE (mode))
1681 	{
1682 	  tmps[i] = gen_reg_rtx (mode);
1683 	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1684 	}
1685       else if (COMPLEX_MODE_P (mode)
1686 	       && GET_MODE (src) == mode
1687 	       && bytelen == GET_MODE_SIZE (mode))
1688 	/* Let emit_move_complex do the bulk of the work.  */
1689 	tmps[i] = src;
1690       else if (GET_CODE (src) == CONCAT)
1691 	{
1692 	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1693 	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1694 
1695 	  if ((bytepos == 0 && bytelen == slen0)
1696 	      || (bytepos != 0 && bytepos + bytelen <= slen))
1697 	    {
1698 	      /* The following assumes that the concatenated objects all
1699 		 have the same size.  In this case, a simple calculation
1700 		 can be used to determine the object and the bit field
1701 		 to be extracted.  */
1702 	      tmps[i] = XEXP (src, bytepos / slen0);
1703 	      if (! CONSTANT_P (tmps[i])
1704 		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1705 		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1706 					     (bytepos % slen0) * BITS_PER_UNIT,
1707 					     1, false, NULL_RTX, mode, mode);
1708 	    }
1709 	  else
1710 	    {
1711 	      rtx mem;
1712 
1713 	      gcc_assert (!bytepos);
1714 	      mem = assign_stack_temp (GET_MODE (src), slen);
1715 	      emit_move_insn (mem, src);
1716 	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1717 					   0, 1, false, NULL_RTX, mode, mode);
1718 	    }
1719 	}
1720       /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1721 	 SIMD register, which is currently broken.  Until we get GCC
1722 	 to emit proper RTL for these cases, let's dump to memory.  */
1723       else if (VECTOR_MODE_P (GET_MODE (dst))
1724 	       && REG_P (src))
1725 	{
1726 	  int slen = GET_MODE_SIZE (GET_MODE (src));
1727 	  rtx mem;
1728 
1729 	  mem = assign_stack_temp (GET_MODE (src), slen);
1730 	  emit_move_insn (mem, src);
1731 	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
1732 	}
1733       else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1734                && XVECLEN (dst, 0) > 1)
1735         tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1736       else if (CONSTANT_P (src))
1737 	{
1738 	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1739 
1740 	  if (len == ssize)
1741 	    tmps[i] = src;
1742 	  else
1743 	    {
1744 	      rtx first, second;
1745 
1746 	      gcc_assert (2 * len == ssize);
1747 	      split_double (src, &first, &second);
1748 	      if (i)
1749 		tmps[i] = second;
1750 	      else
1751 		tmps[i] = first;
1752 	    }
1753 	}
1754       else if (REG_P (src) && GET_MODE (src) == mode)
1755 	tmps[i] = src;
1756       else
1757 	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1758 				     bytepos * BITS_PER_UNIT, 1, false, NULL_RTX,
1759 				     mode, mode);
1760 
1761       if (shift)
1762 	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1763 				shift, tmps[i], 0);
1764     }
1765 }
1766 
1767 /* Emit code to move a block SRC of type TYPE to a block DST,
1768    where DST is non-consecutive registers represented by a PARALLEL.
1769    SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1770    if not known.  */
1771 
1772 void
1773 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1774 {
1775   rtx *tmps;
1776   int i;
1777 
1778   tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1779   emit_group_load_1 (tmps, dst, src, type, ssize);
1780 
1781   /* Copy the extracted pieces into the proper (probable) hard regs.  */
1782   for (i = 0; i < XVECLEN (dst, 0); i++)
1783     {
1784       rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1785       if (d == NULL)
1786 	continue;
1787       emit_move_insn (d, tmps[i]);
1788     }
1789 }
1790 
1791 /* Similar, but load SRC into new pseudos in a format that looks like
1792    PARALLEL.  This can later be fed to emit_group_move to get things
1793    in the right place.  */
1794 
1795 rtx
1796 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1797 {
1798   rtvec vec;
1799   int i;
1800 
1801   vec = rtvec_alloc (XVECLEN (parallel, 0));
1802   emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1803 
1804   /* Convert the vector to look just like the original PARALLEL, except
1805      with the computed values.  */
1806   for (i = 0; i < XVECLEN (parallel, 0); i++)
1807     {
1808       rtx e = XVECEXP (parallel, 0, i);
1809       rtx d = XEXP (e, 0);
1810 
1811       if (d)
1812 	{
1813 	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1814 	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1815 	}
1816       RTVEC_ELT (vec, i) = e;
1817     }
1818 
1819   return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1820 }
1821 
1822 /* Emit code to move a block SRC to block DST, where SRC and DST are
1823    non-consecutive groups of registers, each represented by a PARALLEL.  */
1824 
1825 void
1826 emit_group_move (rtx dst, rtx src)
1827 {
1828   int i;
1829 
1830   gcc_assert (GET_CODE (src) == PARALLEL
1831 	      && GET_CODE (dst) == PARALLEL
1832 	      && XVECLEN (src, 0) == XVECLEN (dst, 0));
1833 
1834   /* Skip first entry if NULL.  */
1835   for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1836     emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1837 		    XEXP (XVECEXP (src, 0, i), 0));
1838 }
1839 
1840 /* Move a group of registers represented by a PARALLEL into pseudos.  */
1841 
1842 rtx
1843 emit_group_move_into_temps (rtx src)
1844 {
1845   rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1846   int i;
1847 
1848   for (i = 0; i < XVECLEN (src, 0); i++)
1849     {
1850       rtx e = XVECEXP (src, 0, i);
1851       rtx d = XEXP (e, 0);
1852 
1853       if (d)
1854 	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1855       RTVEC_ELT (vec, i) = e;
1856     }
1857 
1858   return gen_rtx_PARALLEL (GET_MODE (src), vec);
1859 }
1860 
1861 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1862    where SRC is non-consecutive registers represented by a PARALLEL.
1863    SSIZE represents the total size of block ORIG_DST, or -1 if not
1864    known.  */
1865 
1866 void
1867 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1868 {
1869   rtx *tmps, dst;
1870   int start, finish, i;
1871   enum machine_mode m = GET_MODE (orig_dst);
1872 
1873   gcc_assert (GET_CODE (src) == PARALLEL);
1874 
1875   if (!SCALAR_INT_MODE_P (m)
1876       && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1877     {
1878       enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1879       if (imode == BLKmode)
1880         dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1881       else
1882         dst = gen_reg_rtx (imode);
1883       emit_group_store (dst, src, type, ssize);
1884       if (imode != BLKmode)
1885         dst = gen_lowpart (GET_MODE (orig_dst), dst);
1886       emit_move_insn (orig_dst, dst);
1887       return;
1888     }
1889 
1890   /* Check for a NULL entry, used to indicate that the parameter goes
1891      both on the stack and in registers.  */
1892   if (XEXP (XVECEXP (src, 0, 0), 0))
1893     start = 0;
1894   else
1895     start = 1;
1896   finish = XVECLEN (src, 0);
1897 
1898   tmps = XALLOCAVEC (rtx, finish);
1899 
1900   /* Copy the (probable) hard regs into pseudos.  */
1901   for (i = start; i < finish; i++)
1902     {
1903       rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1904       if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1905 	{
1906 	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
1907 	  emit_move_insn (tmps[i], reg);
1908 	}
1909       else
1910 	tmps[i] = reg;
1911     }
1912 
1913   /* If we won't be storing directly into memory, protect the real destination
1914      from strange tricks we might play.  */
1915   dst = orig_dst;
1916   if (GET_CODE (dst) == PARALLEL)
1917     {
1918       rtx temp;
1919 
1920       /* We can get a PARALLEL dst if there is a conditional expression in
1921 	 a return statement.  In that case, the dst and src are the same,
1922 	 so no action is necessary.  */
1923       if (rtx_equal_p (dst, src))
1924 	return;
1925 
1926       /* It is unclear if we can ever reach here, but we may as well handle
1927 	 it.  Allocate a temporary, and split this into a store/load to/from
1928 	 the temporary.  */
1929 
1930       temp = assign_stack_temp (GET_MODE (dst), ssize);
1931       emit_group_store (temp, src, type, ssize);
1932       emit_group_load (dst, temp, type, ssize);
1933       return;
1934     }
1935   else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1936     {
1937       enum machine_mode outer = GET_MODE (dst);
1938       enum machine_mode inner;
1939       HOST_WIDE_INT bytepos;
1940       bool done = false;
1941       rtx temp;
1942 
1943       if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1944 	dst = gen_reg_rtx (outer);
1945 
1946       /* Make life a bit easier for combine.  */
1947       /* If the first element of the vector is the low part
1948 	 of the destination mode, use a paradoxical subreg to
1949 	 initialize the destination.  */
1950       if (start < finish)
1951 	{
1952 	  inner = GET_MODE (tmps[start]);
1953 	  bytepos = subreg_lowpart_offset (inner, outer);
1954 	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1955 	    {
1956 	      temp = simplify_gen_subreg (outer, tmps[start],
1957 					  inner, 0);
1958 	      if (temp)
1959 		{
1960 		  emit_move_insn (dst, temp);
1961 		  done = true;
1962 		  start++;
1963 		}
1964 	    }
1965 	}
1966 
1967       /* If the first element wasn't the low part, try the last.  */
1968       if (!done
1969 	  && start < finish - 1)
1970 	{
1971 	  inner = GET_MODE (tmps[finish - 1]);
1972 	  bytepos = subreg_lowpart_offset (inner, outer);
1973 	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
1974 	    {
1975 	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
1976 					  inner, 0);
1977 	      if (temp)
1978 		{
1979 		  emit_move_insn (dst, temp);
1980 		  done = true;
1981 		  finish--;
1982 		}
1983 	    }
1984 	}
1985 
1986       /* Otherwise, simply initialize the result to zero.  */
1987       if (!done)
1988         emit_move_insn (dst, CONST0_RTX (outer));
1989     }
1990 
1991   /* Process the pieces.  */
1992   for (i = start; i < finish; i++)
1993     {
1994       HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
1995       enum machine_mode mode = GET_MODE (tmps[i]);
1996       unsigned int bytelen = GET_MODE_SIZE (mode);
1997       unsigned int adj_bytelen;
1998       rtx dest = dst;
1999 
2000       /* Handle trailing fragments that run over the size of the struct.  */
2001       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2002 	adj_bytelen = ssize - bytepos;
2003       else
2004 	adj_bytelen = bytelen;
2005 
2006       if (GET_CODE (dst) == CONCAT)
2007 	{
2008 	  if (bytepos + adj_bytelen
2009 	      <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2010 	    dest = XEXP (dst, 0);
2011 	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2012 	    {
2013 	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2014 	      dest = XEXP (dst, 1);
2015 	    }
2016 	  else
2017 	    {
2018 	      enum machine_mode dest_mode = GET_MODE (dest);
2019 	      enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2020 
2021 	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2022 
2023 	      if (GET_MODE_ALIGNMENT (dest_mode)
2024 		  >= GET_MODE_ALIGNMENT (tmp_mode))
2025 		{
2026 		  dest = assign_stack_temp (dest_mode,
2027 					    GET_MODE_SIZE (dest_mode));
2028 		  emit_move_insn (adjust_address (dest,
2029 						  tmp_mode,
2030 						  bytepos),
2031 				  tmps[i]);
2032 		  dst = dest;
2033 		}
2034 	      else
2035 		{
2036 		  dest = assign_stack_temp (tmp_mode,
2037 					    GET_MODE_SIZE (tmp_mode));
2038 		  emit_move_insn (dest, tmps[i]);
2039 		  dst = adjust_address (dest, dest_mode, bytepos);
2040 		}
2041 	      break;
2042 	    }
2043 	}
2044 
2045       /* Handle trailing fragments that run over the size of the struct.  */
2046       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2047 	{
2048 	  /* store_bit_field always takes its value from the lsb.
2049 	     Move the fragment to the lsb if it's not already there.  */
2050 	  if (
2051 #ifdef BLOCK_REG_PADDING
2052 	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2053 	      == (BYTES_BIG_ENDIAN ? upward : downward)
2054 #else
2055 	      BYTES_BIG_ENDIAN
2056 #endif
2057 	      )
2058 	    {
2059 	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2060 	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2061 				      shift, tmps[i], 0);
2062 	    }
2063 
2064 	  /* Make sure not to write past the end of the struct.  */
2065 	  store_bit_field (dest,
2066 			   adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2067 			   bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2068 			   VOIDmode, tmps[i]);
2069 	}
2070 
2071       /* Optimize the access just a bit.  */
2072       else if (MEM_P (dest)
2073 	       && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2074 		   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2075 	       && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2076 	       && bytelen == GET_MODE_SIZE (mode))
2077 	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2078 
2079       else
2080 	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2081 			 0, 0, mode, tmps[i]);
2082     }
2083 
2084   /* Copy from the pseudo into the (probable) hard reg.  */
2085   if (orig_dst != dst)
2086     emit_move_insn (orig_dst, dst);
2087 }
2088 
2089 /* Return a form of X that does not use a PARALLEL.  TYPE is the type
2090    of the value stored in X.  */
2091 
2092 rtx
2093 maybe_emit_group_store (rtx x, tree type)
2094 {
2095   enum machine_mode mode = TYPE_MODE (type);
2096   gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2097   if (GET_CODE (x) == PARALLEL)
2098     {
2099       rtx result = gen_reg_rtx (mode);
2100       emit_group_store (result, x, type, int_size_in_bytes (type));
2101       return result;
2102     }
2103   return x;
2104 }
2105 
2106 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2107 
2108    This is used on targets that return BLKmode values in registers.  */
2109 
2110 void
2111 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2112 {
2113   unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2114   rtx src = NULL, dst = NULL;
2115   unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2116   unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2117   enum machine_mode mode = GET_MODE (srcreg);
2118   enum machine_mode tmode = GET_MODE (target);
2119   enum machine_mode copy_mode;
2120 
2121   /* BLKmode registers created in the back-end shouldn't have survived.  */
2122   gcc_assert (mode != BLKmode);
2123 
2124   /* If the structure doesn't take up a whole number of words, see whether
2125      SRCREG is padded on the left or on the right.  If it's on the left,
2126      set PADDING_CORRECTION to the number of bits to skip.
2127 
2128      In most ABIs, the structure will be returned at the least significant
2129      end of the register, which translates to right padding on little-endian
2130      targets and left padding on big-endian targets.  The opposite
2131      holds if the structure is returned at the most significant
2132      end of the register.  */
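  /* For example, a 6-byte structure on a hypothetical 32-bit big-endian
     target that pads on the left gives
     PADDING_CORRECTION = 32 - (6 % 4) * 8 = 16 bits to skip.  */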
2133   if (bytes % UNITS_PER_WORD != 0
2134       && (targetm.calls.return_in_msb (type)
2135 	  ? !BYTES_BIG_ENDIAN
2136 	  : BYTES_BIG_ENDIAN))
2137     padding_correction
2138       = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2139 
2140   /* We can use a single move if we have an exact mode for the size.  */
2141   else if (MEM_P (target)
2142 	   && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2143 	       || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2144 	   && bytes == GET_MODE_SIZE (mode))
2145     {
2146       emit_move_insn (adjust_address (target, mode, 0), srcreg);
2147       return;
2148     }
2149 
2150   /* And if we additionally have the same mode for a register.  */
2151   else if (REG_P (target)
2152 	   && GET_MODE (target) == mode
2153 	   && bytes == GET_MODE_SIZE (mode))
2154     {
2155       emit_move_insn (target, srcreg);
2156       return;
2157     }
2158 
2159   /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2160      into a new pseudo which is a full word.  */
2161   if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2162     {
2163       srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2164       mode = word_mode;
2165     }
2166 
2167   /* Copy the structure BITSIZE bits at a time.  If the target lives in
2168      memory, take care of not reading/writing past its end by selecting
2169      a copy mode suited to BITSIZE.  This should always be possible given
2170      how it is computed.
2171 
2172      If the target lives in a register, make sure not to select a copy mode
2173      larger than the mode of the register.
2174 
2175      We could probably emit more efficient code for machines which do not use
2176      strict alignment, but it doesn't seem worth the effort at the current
2177      time.  */
2178 
2179   copy_mode = word_mode;
2180   if (MEM_P (target))
2181     {
2182       enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2183       if (mem_mode != BLKmode)
2184 	copy_mode = mem_mode;
2185     }
2186   else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2187     copy_mode = tmode;
2188 
2189   for (bitpos = 0, xbitpos = padding_correction;
2190        bitpos < bytes * BITS_PER_UNIT;
2191        bitpos += bitsize, xbitpos += bitsize)
2192     {
2193       /* We need a new source operand each time xbitpos is on a
2194 	 word boundary and when xbitpos == padding_correction
2195 	 (the first time through).  */
2196       if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2197 	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2198 
2199       /* We need a new destination operand each time bitpos is on
2200 	 a word boundary.  */
2201       if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2202 	dst = target;
2203       else if (bitpos % BITS_PER_WORD == 0)
2204 	dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2205 
2206       /* Use xbitpos for the source extraction (right justified) and
2207 	 bitpos for the destination store (left justified).  */
2208       store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2209 		       extract_bit_field (src, bitsize,
2210 					  xbitpos % BITS_PER_WORD, 1, false,
2211 					  NULL_RTX, copy_mode, copy_mode));
2212     }
2213 }
2214 
2215 /* Copy BLKmode value SRC into a register of mode MODE.  Return the
2216    register if it contains any data, otherwise return null.
2217 
2218    This is used on targets that return BLKmode values in registers.  */
2219 
2220 rtx
2221 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2222 {
2223   int i, n_regs;
2224   unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2225   unsigned int bitsize;
2226   rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2227   enum machine_mode dst_mode;
2228 
2229   gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2230 
2231   x = expand_normal (src);
2232 
2233   bytes = int_size_in_bytes (TREE_TYPE (src));
2234   if (bytes == 0)
2235     return NULL_RTX;
2236 
2237   /* If the structure doesn't take up a whole number of words, see
2238      whether the register value should be padded on the left or on
2239      the right.  Set PADDING_CORRECTION to the number of padding
2240      bits needed on the left side.
2241 
2242      In most ABIs, the structure will be returned at the least significant
2243      end of the register, which translates to right padding on little-endian
2244      targets and left padding on big-endian targets.  The opposite
2245      holds if the structure is returned at the most significant
2246      end of the register.  */
2247   if (bytes % UNITS_PER_WORD != 0
2248       && (targetm.calls.return_in_msb (TREE_TYPE (src))
2249 	  ? !BYTES_BIG_ENDIAN
2250 	  : BYTES_BIG_ENDIAN))
2251     padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2252 					   * BITS_PER_UNIT));
2253 
2254   n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2255   dst_words = XALLOCAVEC (rtx, n_regs);
2256   bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2257 
2258   /* Copy the structure BITSIZE bits at a time.  */
2259   for (bitpos = 0, xbitpos = padding_correction;
2260        bitpos < bytes * BITS_PER_UNIT;
2261        bitpos += bitsize, xbitpos += bitsize)
2262     {
2263       /* We need a new destination pseudo each time xbitpos is
2264 	 on a word boundary and when xbitpos == padding_correction
2265 	 (the first time through).  */
2266       if (xbitpos % BITS_PER_WORD == 0
2267 	  || xbitpos == padding_correction)
2268 	{
2269 	  /* Generate an appropriate register.  */
2270 	  dst_word = gen_reg_rtx (word_mode);
2271 	  dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2272 
2273 	  /* Clear the destination before we move anything into it.  */
2274 	  emit_move_insn (dst_word, CONST0_RTX (word_mode));
2275 	}
2276 
2277       /* We need a new source operand each time bitpos is on a word
2278 	 boundary.  */
2279       if (bitpos % BITS_PER_WORD == 0)
2280 	src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2281 
2282       /* Use bitpos for the source extraction (left justified) and
2283 	 xbitpos for the destination store (right justified).  */
2284       store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2285 		       0, 0, word_mode,
2286 		       extract_bit_field (src_word, bitsize,
2287 					  bitpos % BITS_PER_WORD, 1, false,
2288 					  NULL_RTX, word_mode, word_mode));
2289     }
2290 
2291   if (mode == BLKmode)
2292     {
2293       /* Find the smallest integer mode large enough to hold the
2294 	 entire structure.  */
2295       for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2296 	   mode != VOIDmode;
2297 	   mode = GET_MODE_WIDER_MODE (mode))
2298 	/* Have we found a large enough mode?  */
2299 	if (GET_MODE_SIZE (mode) >= bytes)
2300 	  break;
2301 
2302       /* A suitable mode should have been found.  */
2303       gcc_assert (mode != VOIDmode);
2304     }
2305 
2306   if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2307     dst_mode = word_mode;
2308   else
2309     dst_mode = mode;
2310   dst = gen_reg_rtx (dst_mode);
2311 
2312   for (i = 0; i < n_regs; i++)
2313     emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2314 
2315   if (mode != dst_mode)
2316     dst = gen_lowpart (mode, dst);
2317 
2318   return dst;
2319 }
2320 
2321 /* Add a USE expression for REG to the (possibly empty) list pointed
2322    to by CALL_FUSAGE.  REG must denote a hard register.  */
2323 
2324 void
2325 use_reg_mode (rtx *call_fusage, rtx reg, enum machine_mode mode)
2326 {
2327   gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2328 
2329   *call_fusage
2330     = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2331 }
2332 
2333 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2334    starting at REGNO.  All of these registers must be hard registers.  */
2335 
2336 void
2337 use_regs (rtx *call_fusage, int regno, int nregs)
2338 {
2339   int i;
2340 
2341   gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2342 
2343   for (i = 0; i < nregs; i++)
2344     use_reg (call_fusage, regno_reg_rtx[regno + i]);
2345 }
2346 
2347 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2348    PARALLEL REGS.  This is for calls that pass values in multiple
2349    non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2350 
2351 void
2352 use_group_regs (rtx *call_fusage, rtx regs)
2353 {
2354   int i;
2355 
2356   for (i = 0; i < XVECLEN (regs, 0); i++)
2357     {
2358       rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2359 
2360       /* A NULL entry means the parameter goes both on the stack and in
2361 	 registers.  This can also be a MEM for targets that pass values
2362 	 partially on the stack and partially in registers.  */
2363       if (reg != 0 && REG_P (reg))
2364 	use_reg (call_fusage, reg);
2365     }
2366 }
2367 
2368 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2369    assignment and the code of the expression on the RHS is CODE.  Return
2370    NULL otherwise.  */
2371 
2372 static gimple
2373 get_def_for_expr (tree name, enum tree_code code)
2374 {
2375   gimple def_stmt;
2376 
2377   if (TREE_CODE (name) != SSA_NAME)
2378     return NULL;
2379 
2380   def_stmt = get_gimple_for_ssa_name (name);
2381   if (!def_stmt
2382       || gimple_assign_rhs_code (def_stmt) != code)
2383     return NULL;
2384 
2385   return def_stmt;
2386 }
2387 
2388 #ifdef HAVE_conditional_move
2389 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2390    assignment and the class of the expression on the RHS is CLASS.  Return
2391    NULL otherwise.  */
2392 
2393 static gimple
2394 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2395 {
2396   gimple def_stmt;
2397 
2398   if (TREE_CODE (name) != SSA_NAME)
2399     return NULL;
2400 
2401   def_stmt = get_gimple_for_ssa_name (name);
2402   if (!def_stmt
2403       || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2404     return NULL;
2405 
2406   return def_stmt;
2407 }
2408 #endif
2409 
2410 
2411 /* Determine whether the LEN bytes generated by CONSTFUN can be
2412    stored to memory using several move instructions.  CONSTFUNDATA is
2413    a pointer which will be passed as argument in every CONSTFUN call.
2414    ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
2415    a memset operation and false if it's a copy of a constant string.
2416    Return nonzero if a call to store_by_pieces should succeed.  */
2417 
2418 int
2419 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2420 		     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2421 		     void *constfundata, unsigned int align, bool memsetp)
2422 {
2423   unsigned HOST_WIDE_INT l;
2424   unsigned int max_size;
2425   HOST_WIDE_INT offset = 0;
2426   enum machine_mode mode;
2427   enum insn_code icode;
2428   int reverse;
2429   /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
2430   rtx cst ATTRIBUTE_UNUSED;
2431 
2432   if (len == 0)
2433     return 1;
2434 
2435   if (! (memsetp
2436 	 ? SET_BY_PIECES_P (len, align)
2437 	 : STORE_BY_PIECES_P (len, align)))
2438     return 0;
2439 
2440   align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2441 
2442   /* We would first store what we can in the largest integer mode, then go to
2443      successively smaller modes.  */
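  /* For example, assuming STORE_MAX_PIECES is 8 and the alignment checks
     below succeed, a length of 11 would be covered by one 8-byte piece,
     one 2-byte piece and one 1-byte piece.  */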
2444 
2445   for (reverse = 0;
2446        reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2447        reverse++)
2448     {
2449       l = len;
2450       max_size = STORE_MAX_PIECES + 1;
2451       while (max_size > 1 && l > 0)
2452 	{
2453 	  mode = widest_int_mode_for_size (max_size);
2454 
2455 	  if (mode == VOIDmode)
2456 	    break;
2457 
2458 	  icode = optab_handler (mov_optab, mode);
2459 	  if (icode != CODE_FOR_nothing
2460 	      && align >= GET_MODE_ALIGNMENT (mode))
2461 	    {
2462 	      unsigned int size = GET_MODE_SIZE (mode);
2463 
2464 	      while (l >= size)
2465 		{
2466 		  if (reverse)
2467 		    offset -= size;
2468 
2469 		  cst = (*constfun) (constfundata, offset, mode);
2470 		  if (!targetm.legitimate_constant_p (mode, cst))
2471 		    return 0;
2472 
2473 		  if (!reverse)
2474 		    offset += size;
2475 
2476 		  l -= size;
2477 		}
2478 	    }
2479 
2480 	  max_size = GET_MODE_SIZE (mode);
2481 	}
2482 
2483       /* The code above should have handled everything.  */
2484       gcc_assert (!l);
2485     }
2486 
2487   return 1;
2488 }
2489 
2490 /* Generate several move instructions to store LEN bytes generated by
2491    CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
2492    pointer which will be passed as argument in every CONSTFUN call.
2493    ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
2494    a memset operation and false if it's a copy of a constant string.
2495    If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2496    mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2497    stpcpy.  */
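/* For example, after storing 3 bytes, ENDP == 1 yields the memory at
   TO + 3 (as mempcpy would return it) and ENDP == 2 yields the memory at
   TO + 2, one byte before the end (as stpcpy would return it).  */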
2498 
2499 rtx
2500 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2501 		 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2502 		 void *constfundata, unsigned int align, bool memsetp, int endp)
2503 {
2504   enum machine_mode to_addr_mode = get_address_mode (to);
2505   struct store_by_pieces_d data;
2506 
2507   if (len == 0)
2508     {
2509       gcc_assert (endp != 2);
2510       return to;
2511     }
2512 
2513   gcc_assert (memsetp
2514 	      ? SET_BY_PIECES_P (len, align)
2515 	      : STORE_BY_PIECES_P (len, align));
2516   data.constfun = constfun;
2517   data.constfundata = constfundata;
2518   data.len = len;
2519   data.to = to;
2520   store_by_pieces_1 (&data, align);
2521   if (endp)
2522     {
2523       rtx to1;
2524 
2525       gcc_assert (!data.reverse);
2526       if (data.autinc_to)
2527 	{
2528 	  if (endp == 2)
2529 	    {
2530 	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2531 		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2532 	      else
2533 		data.to_addr = copy_to_mode_reg (to_addr_mode,
2534 						 plus_constant (to_addr_mode,
2535 								data.to_addr,
2536 								-1));
2537 	    }
2538 	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2539 					   data.offset);
2540 	}
2541       else
2542 	{
2543 	  if (endp == 2)
2544 	    --data.offset;
2545 	  to1 = adjust_address (data.to, QImode, data.offset);
2546 	}
2547       return to1;
2548     }
2549   else
2550     return data.to;
2551 }
2552 
2553 /* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
2554    rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2555 
2556 static void
2557 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2558 {
2559   struct store_by_pieces_d data;
2560 
2561   if (len == 0)
2562     return;
2563 
2564   data.constfun = clear_by_pieces_1;
2565   data.constfundata = NULL;
2566   data.len = len;
2567   data.to = to;
2568   store_by_pieces_1 (&data, align);
2569 }
2570 
2571 /* Callback routine for clear_by_pieces.
2572    Return const0_rtx unconditionally.  */
2573 
2574 static rtx
2575 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2576 		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2577 		   enum machine_mode mode ATTRIBUTE_UNUSED)
2578 {
2579   return const0_rtx;
2580 }
2581 
2582 /* Subroutine of clear_by_pieces and store_by_pieces.
2583    Generate several move instructions to store LEN bytes of block TO.  (A MEM
2584    rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2585 
2586 static void
2587 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2588 		   unsigned int align ATTRIBUTE_UNUSED)
2589 {
2590   enum machine_mode to_addr_mode = get_address_mode (data->to);
2591   rtx to_addr = XEXP (data->to, 0);
2592   unsigned int max_size = STORE_MAX_PIECES + 1;
2593   enum insn_code icode;
2594 
2595   data->offset = 0;
2596   data->to_addr = to_addr;
2597   data->autinc_to
2598     = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2599        || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2600 
2601   data->explicit_inc_to = 0;
2602   data->reverse
2603     = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2604   if (data->reverse)
2605     data->offset = data->len;
2606 
2607   /* If storing requires more than two move insns,
2608      copy addresses to registers (to make displacements shorter)
2609      and use post-increment if available.  */
2610   if (!data->autinc_to
2611       && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2612     {
2613       /* Determine the main mode we'll be using.
2614 	 MODE might not be used depending on the definitions of the
2615 	 USE_* macros below.  */
2616       enum machine_mode mode ATTRIBUTE_UNUSED
2617 	= widest_int_mode_for_size (max_size);
2618 
2619       if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2620 	{
2621 	  data->to_addr = copy_to_mode_reg (to_addr_mode,
2622 					    plus_constant (to_addr_mode,
2623 							   to_addr,
2624 							   data->len));
2625 	  data->autinc_to = 1;
2626 	  data->explicit_inc_to = -1;
2627 	}
2628 
2629       if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2630 	  && ! data->autinc_to)
2631 	{
2632 	  data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2633 	  data->autinc_to = 1;
2634 	  data->explicit_inc_to = 1;
2635 	}
2636 
2637       if (!data->autinc_to && CONSTANT_P (to_addr))
2638 	data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2639     }
2640 
2641   align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2642 
2643   /* First store what we can in the largest integer mode, then go to
2644      successively smaller modes.  */
2645 
2646   while (max_size > 1 && data->len > 0)
2647     {
2648       enum machine_mode mode = widest_int_mode_for_size (max_size);
2649 
2650       if (mode == VOIDmode)
2651 	break;
2652 
2653       icode = optab_handler (mov_optab, mode);
2654       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2655 	store_by_pieces_2 (GEN_FCN (icode), mode, data);
2656 
2657       max_size = GET_MODE_SIZE (mode);
2658     }
2659 
2660   /* The code above should have handled everything.  */
2661   gcc_assert (!data->len);
2662 }
2663 
2664 /* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
2665    with move instructions for mode MODE.  GENFUN is the gen_... function
2666    to make a move insn for that mode.  DATA has all the other info.  */
2667 
2668 static void
2669 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2670 		   struct store_by_pieces_d *data)
2671 {
2672   unsigned int size = GET_MODE_SIZE (mode);
2673   rtx to1, cst;
2674 
2675   while (data->len >= size)
2676     {
2677       if (data->reverse)
2678 	data->offset -= size;
2679 
2680       if (data->autinc_to)
2681 	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2682 					 data->offset);
2683       else
2684 	to1 = adjust_address (data->to, mode, data->offset);
2685 
2686       if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2687 	emit_insn (gen_add2_insn (data->to_addr,
2688 				  GEN_INT (-(HOST_WIDE_INT) size)));
2689 
2690       cst = (*data->constfun) (data->constfundata, data->offset, mode);
2691       emit_insn ((*genfun) (to1, cst));
2692 
2693       if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2694 	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2695 
2696       if (! data->reverse)
2697 	data->offset += size;
2698 
2699       data->len -= size;
2700     }
2701 }
2702 
2703 /* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2704    its length in bytes.  */
2705 
2706 rtx
2707 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2708 		     unsigned int expected_align, HOST_WIDE_INT expected_size)
2709 {
2710   enum machine_mode mode = GET_MODE (object);
2711   unsigned int align;
2712 
2713   gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2714 
2715   /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2716      just move a zero.  Otherwise, do this a piece at a time.  */
2717   if (mode != BLKmode
2718       && CONST_INT_P (size)
2719       && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2720     {
2721       rtx zero = CONST0_RTX (mode);
2722       if (zero != NULL)
2723 	{
2724 	  emit_move_insn (object, zero);
2725 	  return NULL;
2726 	}
2727 
2728       if (COMPLEX_MODE_P (mode))
2729 	{
2730 	  zero = CONST0_RTX (GET_MODE_INNER (mode));
2731 	  if (zero != NULL)
2732 	    {
2733 	      write_complex_part (object, zero, 0);
2734 	      write_complex_part (object, zero, 1);
2735 	      return NULL;
2736 	    }
2737 	}
2738     }
2739 
2740   if (size == const0_rtx)
2741     return NULL;
2742 
2743   align = MEM_ALIGN (object);
2744 
2745   if (CONST_INT_P (size)
2746       && CLEAR_BY_PIECES_P (INTVAL (size), align))
2747     clear_by_pieces (object, INTVAL (size), align);
2748   else if (set_storage_via_setmem (object, size, const0_rtx, align,
2749 				   expected_align, expected_size))
2750     ;
2751   else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2752     return set_storage_via_libcall (object, size, const0_rtx,
2753 				    method == BLOCK_OP_TAILCALL);
2754   else
2755     gcc_unreachable ();
2756 
2757   return NULL;
2758 }
2759 
2760 rtx
2761 clear_storage (rtx object, rtx size, enum block_op_methods method)
2762 {
2763   return clear_storage_hints (object, size, method, 0, -1);
2764 }
2765 
2766 
2767 /* A subroutine of clear_storage.  Expand a call to memset.
2768    Return the return value of memset, 0 otherwise.  */
2769 
2770 rtx
2771 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2772 {
2773   tree call_expr, fn, object_tree, size_tree, val_tree;
2774   enum machine_mode size_mode;
2775   rtx retval;
2776 
2777   /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
2778      place those pseudos into a VAR_DECL and use them later.  */
2779 
2780   object = copy_addr_to_reg (XEXP (object, 0));
2781 
2782   size_mode = TYPE_MODE (sizetype);
2783   size = convert_to_mode (size_mode, size, 1);
2784   size = copy_to_mode_reg (size_mode, size);
2785 
2786   /* It is incorrect to use the libcall calling conventions to call
2787      memset in this context.  This could be a user call to memset and
2788      the user may wish to examine the return value from memset.  For
2789      targets where libcalls and normal calls have different conventions
2790      for returning pointers, we could end up generating incorrect code.  */
2791 
2792   object_tree = make_tree (ptr_type_node, object);
2793   if (!CONST_INT_P (val))
2794     val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2795   size_tree = make_tree (sizetype, size);
2796   val_tree = make_tree (integer_type_node, val);
2797 
2798   fn = clear_storage_libcall_fn (true);
2799   call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2800   CALL_EXPR_TAILCALL (call_expr) = tailcall;
2801 
2802   retval = expand_normal (call_expr);
2803 
2804   return retval;
2805 }
2806 
2807 /* A subroutine of set_storage_via_libcall.  Create the tree node
2808    for the function we use for block clears.  */
2809 
2810 tree block_clear_fn;
2811 
2812 void
2813 init_block_clear_fn (const char *asmspec)
2814 {
2815   if (!block_clear_fn)
2816     {
2817       tree fn, args;
2818 
2819       fn = get_identifier ("memset");
2820       args = build_function_type_list (ptr_type_node, ptr_type_node,
2821 				       integer_type_node, sizetype,
2822 				       NULL_TREE);
2823 
2824       fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2825       DECL_EXTERNAL (fn) = 1;
2826       TREE_PUBLIC (fn) = 1;
2827       DECL_ARTIFICIAL (fn) = 1;
2828       TREE_NOTHROW (fn) = 1;
2829       DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2830       DECL_VISIBILITY_SPECIFIED (fn) = 1;
2831 
2832       block_clear_fn = fn;
2833     }
2834 
2835   if (asmspec)
2836     set_user_assembler_name (block_clear_fn, asmspec);
2837 }
2838 
2839 static tree
2840 clear_storage_libcall_fn (int for_call)
2841 {
2842   static bool emitted_extern;
2843 
2844   if (!block_clear_fn)
2845     init_block_clear_fn (NULL);
2846 
2847   if (for_call && !emitted_extern)
2848     {
2849       emitted_extern = true;
2850       make_decl_rtl (block_clear_fn);
2851     }
2852 
2853   return block_clear_fn;
2854 }
2855 
2856 /* Expand a setmem pattern; return true if successful.  */
2857 
2858 bool
2859 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2860 			unsigned int expected_align, HOST_WIDE_INT expected_size)
2861 {
2862   /* Try the most limited insn first, because there's no point
2863      including more than one in the machine description unless
2864      the more limited one has some advantage.  */
2865 
2866   enum machine_mode mode;
2867 
2868   if (expected_align < align)
2869     expected_align = align;
2870 
2871   for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2872        mode = GET_MODE_WIDER_MODE (mode))
2873     {
2874       enum insn_code code = direct_optab_handler (setmem_optab, mode);
2875 
2876       if (code != CODE_FOR_nothing
2877 	  /* We don't need MODE to be narrower than
2878 	     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2879 	     the mode mask, as it is returned by the macro, it will
2880 	     definitely be less than the actual mode mask.  */
2881 	  && ((CONST_INT_P (size)
2882 	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
2883 		   <= (GET_MODE_MASK (mode) >> 1)))
2884 	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD))
2885 	{
2886 	  struct expand_operand ops[6];
2887 	  unsigned int nops;
2888 
2889 	  nops = insn_data[(int) code].n_generator_args;
2890 	  gcc_assert (nops == 4 || nops == 6);
2891 
2892 	  create_fixed_operand (&ops[0], object);
2893 	  /* The check above guarantees that this size conversion is valid.  */
2894 	  create_convert_operand_to (&ops[1], size, mode, true);
2895 	  create_convert_operand_from (&ops[2], val, byte_mode, true);
2896 	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2897 	  if (nops == 6)
2898 	    {
2899 	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2900 	      create_integer_operand (&ops[5], expected_size);
2901 	    }
2902 	  if (maybe_expand_insn (code, nops, ops))
2903 	    return true;
2904 	}
2905     }
2906 
2907   return false;
2908 }
2909 
2910 
2911 /* Write to one of the components of the complex value CPLX.  Write VAL to
2912    the real part if IMAG_P is false, and the imaginary part if it's true.  */
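/* For instance, for an SCmode value whose SFmode halves are 4 bytes wide,
   the real part sits at byte offset 0 and the imaginary part at byte
   offset GET_MODE_SIZE (SFmode) == 4; that offset is what the MEM and
   subreg cases below compute from IMAG_P.  */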
2913 
2914 static void
2915 write_complex_part (rtx cplx, rtx val, bool imag_p)
2916 {
2917   enum machine_mode cmode;
2918   enum machine_mode imode;
2919   unsigned ibitsize;
2920 
2921   if (GET_CODE (cplx) == CONCAT)
2922     {
2923       emit_move_insn (XEXP (cplx, imag_p), val);
2924       return;
2925     }
2926 
2927   cmode = GET_MODE (cplx);
2928   imode = GET_MODE_INNER (cmode);
2929   ibitsize = GET_MODE_BITSIZE (imode);
2930 
2931   /* For MEMs simplify_gen_subreg may generate an invalid new address
2932      because, e.g., the original address is considered mode-dependent
2933      by the target, which restricts simplify_subreg from invoking
2934      adjust_address_nv.  Instead of preparing fallback support for an
2935      invalid address, we call adjust_address_nv directly.  */
2936   if (MEM_P (cplx))
2937     {
2938       emit_move_insn (adjust_address_nv (cplx, imode,
2939 					 imag_p ? GET_MODE_SIZE (imode) : 0),
2940 		      val);
2941       return;
2942     }
2943 
2944   /* If the sub-object is at least word sized, then we know that subregging
2945      will work.  This special case is important, since store_bit_field
2946      wants to operate on integer modes, and there's rarely an OImode to
2947      correspond to TCmode.  */
2948   if (ibitsize >= BITS_PER_WORD
2949       /* For hard regs we have exact predicates.  Assume we can split
2950 	 the original object if it spans an even number of hard regs.
2951 	 This special case is important for SCmode on 64-bit platforms
2952 	 where the natural size of floating-point regs is 32-bit.  */
2953       || (REG_P (cplx)
2954 	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
2955 	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
2956     {
2957       rtx part = simplify_gen_subreg (imode, cplx, cmode,
2958 				      imag_p ? GET_MODE_SIZE (imode) : 0);
2959       if (part)
2960         {
2961 	  emit_move_insn (part, val);
2962 	  return;
2963 	}
2964       else
2965 	/* simplify_gen_subreg may fail for sub-word MEMs.  */
2966 	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
2967     }
2968 
2969   store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
2970 }
2971 
2972 /* Extract one of the components of the complex value CPLX.  Extract the
2973    real part if IMAG_P is false, and the imaginary part if it's true.  */
2974 
2975 static rtx
2976 read_complex_part (rtx cplx, bool imag_p)
2977 {
2978   enum machine_mode cmode, imode;
2979   unsigned ibitsize;
2980 
2981   if (GET_CODE (cplx) == CONCAT)
2982     return XEXP (cplx, imag_p);
2983 
2984   cmode = GET_MODE (cplx);
2985   imode = GET_MODE_INNER (cmode);
2986   ibitsize = GET_MODE_BITSIZE (imode);
2987 
2988   /* Special case reads from complex constants that got spilled to memory.  */
2989   if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
2990     {
2991       tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
2992       if (decl && TREE_CODE (decl) == COMPLEX_CST)
2993 	{
2994 	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
2995 	  if (CONSTANT_CLASS_P (part))
2996 	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
2997 	}
2998     }
2999 
3000   /* For MEMs simplify_gen_subreg may generate an invalid new address
3001      because, e.g., the original address is considered mode-dependent
3002      by the target, which restricts simplify_subreg from invoking
3003      adjust_address_nv.  Instead of preparing fallback support for an
3004      invalid address, we call adjust_address_nv directly.  */
3005   if (MEM_P (cplx))
3006     return adjust_address_nv (cplx, imode,
3007 			      imag_p ? GET_MODE_SIZE (imode) : 0);
3008 
3009   /* If the sub-object is at least word sized, then we know that subregging
3010      will work.  This special case is important, since extract_bit_field
3011      wants to operate on integer modes, and there's rarely an OImode to
3012      correspond to TCmode.  */
3013   if (ibitsize >= BITS_PER_WORD
3014       /* For hard regs we have exact predicates.  Assume we can split
3015 	 the original object if it spans an even number of hard regs.
3016 	 This special case is important for SCmode on 64-bit platforms
3017 	 where the natural size of floating-point regs is 32-bit.  */
3018       || (REG_P (cplx)
3019 	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3020 	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3021     {
3022       rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3023 				     imag_p ? GET_MODE_SIZE (imode) : 0);
3024       if (ret)
3025         return ret;
3026       else
3027 	/* simplify_gen_subreg may fail for sub-word MEMs.  */
3028 	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3029     }
3030 
3031   return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3032 			    true, false, NULL_RTX, imode, imode);
3033 }
3034 
3035 /* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
3036    NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
3037    represented in NEW_MODE.  If FORCE is true, this will never happen, as
3038    we'll force-create a SUBREG if needed.  */
3039 
3040 static rtx
3041 emit_move_change_mode (enum machine_mode new_mode,
3042 		       enum machine_mode old_mode, rtx x, bool force)
3043 {
3044   rtx ret;
3045 
3046   if (push_operand (x, GET_MODE (x)))
3047     {
3048       ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3049       MEM_COPY_ATTRIBUTES (ret, x);
3050     }
3051   else if (MEM_P (x))
3052     {
3053       /* We don't have to worry about changing the address since the
3054 	 size in bytes is supposed to be the same.  */
3055       if (reload_in_progress)
3056 	{
3057 	  /* Copy the MEM to change the mode and move any
3058 	     substitutions from the old MEM to the new one.  */
3059 	  ret = adjust_address_nv (x, new_mode, 0);
3060 	  copy_replacements (x, ret);
3061 	}
3062       else
3063 	ret = adjust_address (x, new_mode, 0);
3064     }
3065   else
3066     {
3067       /* Note that we do want simplify_subreg's behavior of validating
3068 	 that the new mode is ok for a hard register.  If we were to use
3069 	 simplify_gen_subreg, we would create the subreg, but would
3070 	 probably run into the target not being able to implement it.  */
3071       /* Except, of course, when FORCE is true, when this is exactly what
3072 	 we want.  Which is needed for CCmodes on some targets.  */
3073       if (force)
3074 	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3075       else
3076 	ret = simplify_subreg (new_mode, x, old_mode, 0);
3077     }
3078 
3079   return ret;
3080 }
3081 
3082 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
3083    an integer mode of the same size as MODE.  Returns the instruction
3084    emitted, or NULL if such a move could not be generated.  */
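/* This is, for instance, how emit_move_ccmode below handles a MODE_CC
   value for which no direct move pattern exists: the value is funneled
   through the integer mode of the same width.  */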
3085 
3086 static rtx
3087 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3088 {
3089   enum machine_mode imode;
3090   enum insn_code code;
3091 
3092   /* There must exist a mode of the exact size we require.  */
3093   imode = int_mode_for_mode (mode);
3094   if (imode == BLKmode)
3095     return NULL_RTX;
3096 
3097   /* The target must support moves in this mode.  */
3098   code = optab_handler (mov_optab, imode);
3099   if (code == CODE_FOR_nothing)
3100     return NULL_RTX;
3101 
3102   x = emit_move_change_mode (imode, mode, x, force);
3103   if (x == NULL_RTX)
3104     return NULL_RTX;
3105   y = emit_move_change_mode (imode, mode, y, force);
3106   if (y == NULL_RTX)
3107     return NULL_RTX;
3108   return emit_insn (GEN_FCN (code) (x, y));
3109 }
3110 
3111 /* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
3112    Return an equivalent MEM that does not use an auto-increment.  */
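/* For example, given a 4-byte push through (mem (pre_dec (reg sp))), and
   assuming PUSH_ROUNDING leaves 4 unchanged, this emits an explicit
   sp = sp + (-4) adjustment and returns (mem (reg sp)).  */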
3113 
3114 static rtx
3115 emit_move_resolve_push (enum machine_mode mode, rtx x)
3116 {
3117   enum rtx_code code = GET_CODE (XEXP (x, 0));
3118   HOST_WIDE_INT adjust;
3119   rtx temp;
3120 
3121   adjust = GET_MODE_SIZE (mode);
3122 #ifdef PUSH_ROUNDING
3123   adjust = PUSH_ROUNDING (adjust);
3124 #endif
3125   if (code == PRE_DEC || code == POST_DEC)
3126     adjust = -adjust;
3127   else if (code == PRE_MODIFY || code == POST_MODIFY)
3128     {
3129       rtx expr = XEXP (XEXP (x, 0), 1);
3130       HOST_WIDE_INT val;
3131 
3132       gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3133       gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3134       val = INTVAL (XEXP (expr, 1));
3135       if (GET_CODE (expr) == MINUS)
3136 	val = -val;
3137       gcc_assert (adjust == val || adjust == -val);
3138       adjust = val;
3139     }
3140 
3141   /* Do not use anti_adjust_stack, since we don't want to update
3142      stack_pointer_delta.  */
3143   temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3144 			      GEN_INT (adjust), stack_pointer_rtx,
3145 			      0, OPTAB_LIB_WIDEN);
3146   if (temp != stack_pointer_rtx)
3147     emit_move_insn (stack_pointer_rtx, temp);
3148 
3149   switch (code)
3150     {
3151     case PRE_INC:
3152     case PRE_DEC:
3153     case PRE_MODIFY:
3154       temp = stack_pointer_rtx;
3155       break;
3156     case POST_INC:
3157     case POST_DEC:
3158     case POST_MODIFY:
3159       temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3160       break;
3161     default:
3162       gcc_unreachable ();
3163     }
3164 
3165   return replace_equiv_address (x, temp);
3166 }
3167 
3168 /* A subroutine of emit_move_complex.  Generate a move from Y into X.
3169    X is known to satisfy push_operand, and MODE is known to be complex.
3170    Returns the last instruction emitted.  */
3171 
3172 rtx
3173 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3174 {
3175   enum machine_mode submode = GET_MODE_INNER (mode);
3176   bool imag_first;
3177 
3178 #ifdef PUSH_ROUNDING
3179   unsigned int submodesize = GET_MODE_SIZE (submode);
3180 
3181   /* If we are pushing to the stack but the part size is not something
3182      the machine can push exactly, we need to use move instructions.  */
3183   if (PUSH_ROUNDING (submodesize) != submodesize)
3184     {
3185       x = emit_move_resolve_push (mode, x);
3186       return emit_move_insn (x, y);
3187     }
3188 #endif
3189 
3190   /* Note that the real part always precedes the imag part in memory
3191      regardless of machine's endianness.  */
3192   switch (GET_CODE (XEXP (x, 0)))
3193     {
3194     case PRE_DEC:
3195     case POST_DEC:
3196       imag_first = true;
3197       break;
3198     case PRE_INC:
3199     case POST_INC:
3200       imag_first = false;
3201       break;
3202     default:
3203       gcc_unreachable ();
3204     }
3205 
3206   emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3207 		  read_complex_part (y, imag_first));
3208   return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3209 			 read_complex_part (y, !imag_first));
3210 }
3211 
3212 /* A subroutine of emit_move_complex.  Perform the move from Y to X
3213    via two moves of the parts.  Returns the last instruction emitted.  */
3214 
3215 rtx
3216 emit_move_complex_parts (rtx x, rtx y)
3217 {
3218   /* Show the output dies here.  This is necessary for SUBREGs
3219      of pseudos since we cannot track their lifetimes correctly;
3220      hard regs shouldn't appear here except as return values.  */
3221   if (!reload_completed && !reload_in_progress
3222       && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3223     emit_clobber (x);
3224 
3225   write_complex_part (x, read_complex_part (y, false), false);
3226   write_complex_part (x, read_complex_part (y, true), true);
3227 
3228   return get_last_insn ();
3229 }
3230 
3231 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3232    MODE is known to be complex.  Returns the last instruction emitted.  */
3233 
3234 static rtx
3235 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3236 {
3237   bool try_int;
3238 
3239   /* Need to take special care for pushes, to maintain proper ordering
3240      of the data, and possibly extra padding.  */
3241   if (push_operand (x, mode))
3242     return emit_move_complex_push (mode, x, y);
3243 
3244   /* See if we can coerce the target into moving both values at once.  */
3245 
3246   /* Move floating point as parts.  */
3247   if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3248       && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing)
3249     try_int = false;
3250   /* Not possible if the values are inherently not adjacent.  */
3251   else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3252     try_int = false;
3253   /* It is possible if both are registers (or subregs of registers).  */
3254   else if (register_operand (x, mode) && register_operand (y, mode))
3255     try_int = true;
3256   /* If one of the operands is a memory, and alignment constraints
3257      are friendly enough, we may be able to do combined memory operations.
3258      We do not attempt this if Y is a constant because that combination is
3259      usually better with the by-parts copy below.  */
3260   else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3261 	   && (!STRICT_ALIGNMENT
3262 	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3263     try_int = true;
3264   else
3265     try_int = false;
3266 
3267   if (try_int)
3268     {
3269       rtx ret;
3270 
3271       /* For memory to memory moves, optimal behavior can be had with the
3272 	 existing block move logic.  */
3273       if (MEM_P (x) && MEM_P (y))
3274 	{
3275 	  emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3276 			   BLOCK_OP_NO_LIBCALL);
3277 	  return get_last_insn ();
3278 	}
3279 
3280       ret = emit_move_via_integer (mode, x, y, true);
3281       if (ret)
3282 	return ret;
3283     }
3284 
3285   return emit_move_complex_parts (x, y);
3286 }
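
/* For illustration only (hypothetical 32-bit target): a CSImode
   (complex SImode) move between two pseudos takes the try_int path and
   may be emitted as a single DImode move by emit_move_via_integer,
   whereas an SCmode move whose SFmode inner mode has its own mov pattern
   is always split by emit_move_complex_parts into two SFmode part
   moves.  */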
3287 
3288 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3289    MODE is known to be MODE_CC.  Returns the last instruction emitted.  */
3290 
3291 static rtx
3292 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3293 {
3294   rtx ret;
3295 
3296   /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
3297   if (mode != CCmode)
3298     {
3299       enum insn_code code = optab_handler (mov_optab, CCmode);
3300       if (code != CODE_FOR_nothing)
3301 	{
3302 	  x = emit_move_change_mode (CCmode, mode, x, true);
3303 	  y = emit_move_change_mode (CCmode, mode, y, true);
3304 	  return emit_insn (GEN_FCN (code) (x, y));
3305 	}
3306     }
3307 
3308   /* Otherwise, find the MODE_INT mode of the same width.  */
3309   ret = emit_move_via_integer (mode, x, y, false);
3310   gcc_assert (ret != NULL);
3311   return ret;
3312 }
3313 
3314 /* Return true if word I of OP lies entirely in the
3315    undefined bits of a paradoxical subreg.  */
3316 
3317 static bool
3318 undefined_operand_subword_p (const_rtx op, int i)
3319 {
3320   enum machine_mode innermode, innermostmode;
3321   int offset;
3322   if (GET_CODE (op) != SUBREG)
3323     return false;
3324   innermode = GET_MODE (op);
3325   innermostmode = GET_MODE (SUBREG_REG (op));
3326   offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3327   /* The SUBREG_BYTE represents offset, as if the value were stored in
3328      memory, except for a paradoxical subreg where we define
3329      SUBREG_BYTE to be 0; undo this exception as in
3330      simplify_subreg.  */
3331   if (SUBREG_BYTE (op) == 0
3332       && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3333     {
3334       int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3335       if (WORDS_BIG_ENDIAN)
3336 	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3337       if (BYTES_BIG_ENDIAN)
3338 	offset += difference % UNITS_PER_WORD;
3339     }
3340   if (offset >= GET_MODE_SIZE (innermostmode)
3341       || offset <= -GET_MODE_SIZE (word_mode))
3342     return true;
3343   return false;
3344 }
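
/* Example (assuming a little-endian target with 32-bit words): for the
   paradoxical subreg (subreg:DI (reg:SI R) 0), word 1 lies entirely in
   the undefined upper half, so this function returns true for I == 1 and
   emit_move_multi_word below skips emitting a move for that word when
   such a SUBREG is the source.  */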
3345 
3346 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3347    MODE is any multi-word or full-word mode that lacks a move_insn
3348    pattern.  Note that you will get better code if you define such
3349    patterns, even if they must turn into multiple assembler instructions.  */
3350 
3351 static rtx
3352 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3353 {
3354   rtx last_insn = 0;
3355   rtx seq, inner;
3356   bool need_clobber;
3357   int i;
3358 
3359   gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3360 
3361   /* If X is a push on the stack, do the push now and replace
3362      X with a reference to the stack pointer.  */
3363   if (push_operand (x, mode))
3364     x = emit_move_resolve_push (mode, x);
3365 
3366   /* If we are in reload, see if either operand is a MEM whose address
3367      is scheduled for replacement.  */
3368   if (reload_in_progress && MEM_P (x)
3369       && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3370     x = replace_equiv_address_nv (x, inner);
3371   if (reload_in_progress && MEM_P (y)
3372       && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3373     y = replace_equiv_address_nv (y, inner);
3374 
3375   start_sequence ();
3376 
3377   need_clobber = false;
3378   for (i = 0;
3379        i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3380        i++)
3381     {
3382       rtx xpart = operand_subword (x, i, 1, mode);
3383       rtx ypart;
3384 
3385       /* Do not generate code for a move if it would come entirely
3386 	 from the undefined bits of a paradoxical subreg.  */
3387       if (undefined_operand_subword_p (y, i))
3388 	continue;
3389 
3390       ypart = operand_subword (y, i, 1, mode);
3391 
3392       /* If we can't get a part of Y, put Y into memory if it is a
3393 	 constant.  Otherwise, force it into a register.  Then we must
3394 	 be able to get a part of Y.  */
3395       if (ypart == 0 && CONSTANT_P (y))
3396 	{
3397 	  y = use_anchored_address (force_const_mem (mode, y));
3398 	  ypart = operand_subword (y, i, 1, mode);
3399 	}
3400       else if (ypart == 0)
3401 	ypart = operand_subword_force (y, i, mode);
3402 
3403       gcc_assert (xpart && ypart);
3404 
3405       need_clobber |= (GET_CODE (xpart) == SUBREG);
3406 
3407       last_insn = emit_move_insn (xpart, ypart);
3408     }
3409 
3410   seq = get_insns ();
3411   end_sequence ();
3412 
3413   /* Show the output dies here.  This is necessary for SUBREGs
3414      of pseudos since we cannot track their lifetimes correctly;
3415      hard regs shouldn't appear here except as return values.
3416      We never want to emit such a clobber after reload.  */
3417   if (x != y
3418       && ! (reload_in_progress || reload_completed)
3419       && need_clobber != 0)
3420     emit_clobber (x);
3421 
3422   emit_insn (seq);
3423 
3424   return last_insn;
3425 }
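
/* For illustration (assuming a 32-bit target with no DImode move
   pattern): a DImode move is expanded word by word as two SImode moves
   of the low and high words; if any word of the destination has to be
   accessed as a SUBREG, the word moves are preceded by a clobber of the
   whole destination so that its lifetime is tracked correctly.  */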
3426 
3427 /* Low level part of emit_move_insn.
3428    Called just like emit_move_insn, but assumes X and Y
3429    are basically valid.  */
3430 
3431 rtx
3432 emit_move_insn_1 (rtx x, rtx y)
3433 {
3434   enum machine_mode mode = GET_MODE (x);
3435   enum insn_code code;
3436 
3437   gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3438 
3439   code = optab_handler (mov_optab, mode);
3440   if (code != CODE_FOR_nothing)
3441     return emit_insn (GEN_FCN (code) (x, y));
3442 
3443   /* Expand complex moves by moving real part and imag part.  */
3444   if (COMPLEX_MODE_P (mode))
3445     return emit_move_complex (mode, x, y);
3446 
3447   if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3448       || ALL_FIXED_POINT_MODE_P (mode))
3449     {
3450       rtx result = emit_move_via_integer (mode, x, y, true);
3451 
3452       /* If we can't find an integer mode, use multi words.  */
3453       if (result)
3454 	return result;
3455       else
3456 	return emit_move_multi_word (mode, x, y);
3457     }
3458 
3459   if (GET_MODE_CLASS (mode) == MODE_CC)
3460     return emit_move_ccmode (mode, x, y);
3461 
3462   /* Try using a move pattern for the corresponding integer mode.  This is
3463      only safe when simplify_subreg can convert MODE constants into integer
3464      constants.  At present, it can only do this reliably if the value
3465      fits within a HOST_WIDE_INT.  */
3466   if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3467     {
3468       rtx ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3469 
3470       if (ret)
3471 	{
3472 	  if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3473 	    return ret;
3474 	}
3475     }
3476 
3477   return emit_move_multi_word (mode, x, y);
3478 }
3479 
3480 /* Generate code to copy Y into X.
3481    Both Y and X must have the same mode, except that
3482    Y can be a constant with VOIDmode.
3483    This mode cannot be BLKmode; use emit_block_move for that.
3484 
3485    Return the last instruction emitted.  */
3486 
3487 rtx
3488 emit_move_insn (rtx x, rtx y)
3489 {
3490   enum machine_mode mode = GET_MODE (x);
3491   rtx y_cst = NULL_RTX;
3492   rtx last_insn, set;
3493 
3494   gcc_assert (mode != BLKmode
3495 	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3496 
3497   if (CONSTANT_P (y))
3498     {
3499       if (optimize
3500 	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3501 	  && (last_insn = compress_float_constant (x, y)))
3502 	return last_insn;
3503 
3504       y_cst = y;
3505 
3506       if (!targetm.legitimate_constant_p (mode, y))
3507 	{
3508 	  y = force_const_mem (mode, y);
3509 
3510 	  /* If the target's cannot_force_const_mem prevented the spill,
3511 	     assume that the target's move expanders will also take care
3512 	     of the non-legitimate constant.  */
3513 	  if (!y)
3514 	    y = y_cst;
3515 	  else
3516 	    y = use_anchored_address (y);
3517 	}
3518     }
3519 
3520   /* If X or Y are memory references, verify that their addresses are valid
3521      for the machine.  */
3522   if (MEM_P (x)
3523       && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3524 					 MEM_ADDR_SPACE (x))
3525 	  && ! push_operand (x, GET_MODE (x))))
3526     x = validize_mem (x);
3527 
3528   if (MEM_P (y)
3529       && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3530 					MEM_ADDR_SPACE (y)))
3531     y = validize_mem (y);
3532 
3533   gcc_assert (mode != BLKmode);
3534 
3535   last_insn = emit_move_insn_1 (x, y);
3536 
3537   if (y_cst && REG_P (x)
3538       && (set = single_set (last_insn)) != NULL_RTX
3539       && SET_DEST (set) == x
3540       && ! rtx_equal_p (y_cst, SET_SRC (set)))
3541     set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3542 
3543   return last_insn;
3544 }
3545 
3546 /* If Y is representable exactly in a narrower mode, and the target can
3547    perform the extension directly from constant or memory, then emit the
3548    move as an extension.  */
3549 
3550 static rtx
3551 compress_float_constant (rtx x, rtx y)
3552 {
3553   enum machine_mode dstmode = GET_MODE (x);
3554   enum machine_mode orig_srcmode = GET_MODE (y);
3555   enum machine_mode srcmode;
3556   REAL_VALUE_TYPE r;
3557   int oldcost, newcost;
3558   bool speed = optimize_insn_for_speed_p ();
3559 
3560   REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3561 
3562   if (targetm.legitimate_constant_p (dstmode, y))
3563     oldcost = set_src_cost (y, speed);
3564   else
3565     oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3566 
3567   for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3568        srcmode != orig_srcmode;
3569        srcmode = GET_MODE_WIDER_MODE (srcmode))
3570     {
3571       enum insn_code ic;
3572       rtx trunc_y, last_insn;
3573 
3574       /* Skip if the target can't extend this way.  */
3575       ic = can_extend_p (dstmode, srcmode, 0);
3576       if (ic == CODE_FOR_nothing)
3577 	continue;
3578 
3579       /* Skip if the narrowed value isn't exact.  */
3580       if (! exact_real_truncate (srcmode, &r))
3581 	continue;
3582 
3583       trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3584 
3585       if (targetm.legitimate_constant_p (srcmode, trunc_y))
3586 	{
3587 	  /* Skip if the target needs extra instructions to perform
3588 	     the extension.  */
3589 	  if (!insn_operand_matches (ic, 1, trunc_y))
3590 	    continue;
3591 	  /* This is valid, but may not be cheaper than the original. */
3592 	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3593 				  speed);
3594 	  if (oldcost < newcost)
3595 	    continue;
3596 	}
3597       else if (float_extend_from_mem[dstmode][srcmode])
3598 	{
3599 	  trunc_y = force_const_mem (srcmode, trunc_y);
3600 	  /* This is valid, but may not be cheaper than the original. */
3601 	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3602 				  speed);
3603 	  if (oldcost < newcost)
3604 	    continue;
3605 	  trunc_y = validize_mem (trunc_y);
3606 	}
3607       else
3608 	continue;
3609 
3610       /* For CSE's benefit, force the compressed constant pool entry
3611 	 into a new pseudo.  This constant may be used in different modes,
3612 	 and if not, combine will put things back together for us.  */
3613       trunc_y = force_reg (srcmode, trunc_y);
3614 
3615       /* If x is a hard register, perform the extension into a pseudo,
3616 	 so that e.g. stack realignment code is aware of it.  */
3617       rtx target = x;
3618       if (REG_P (x) && HARD_REGISTER_P (x))
3619 	target = gen_reg_rtx (dstmode);
3620 
3621       emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3622       last_insn = get_last_insn ();
3623 
3624       if (REG_P (target))
3625 	set_unique_reg_note (last_insn, REG_EQUAL, y);
3626 
3627       if (target != x)
3628 	return emit_move_insn (x, target);
3629       return last_insn;
3630     }
3631 
3632   return NULL_RTX;
3633 }
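
/* Illustrative case (not target-specific): storing the DFmode constant
   1.0 into a register may be rewritten as a float_extend of an SFmode
   constant (or of an SFmode constant-pool slot), provided the target has
   a suitable extendsfdf-style pattern, the truncation to SFmode is
   exact, and the rtx cost of the extension does not exceed that of the
   original DFmode constant.  */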
3634 
3635 /* Pushing data onto the stack.  */
3636 
3637 /* Push a block of length SIZE (perhaps variable)
3638    and return an rtx to address the beginning of the block.
3639    The value may be virtual_outgoing_args_rtx.
3640 
3641    EXTRA is the number of bytes of padding to push in addition to SIZE.
3642    BELOW nonzero means this padding comes at low addresses;
3643    otherwise, the padding comes at high addresses.  */
3644 
3645 rtx
3646 push_block (rtx size, int extra, int below)
3647 {
3648   rtx temp;
3649 
3650   size = convert_modes (Pmode, ptr_mode, size, 1);
3651   if (CONSTANT_P (size))
3652     anti_adjust_stack (plus_constant (Pmode, size, extra));
3653   else if (REG_P (size) && extra == 0)
3654     anti_adjust_stack (size);
3655   else
3656     {
3657       temp = copy_to_mode_reg (Pmode, size);
3658       if (extra != 0)
3659 	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3660 			     temp, 0, OPTAB_LIB_WIDEN);
3661       anti_adjust_stack (temp);
3662     }
3663 
3664 #ifndef STACK_GROWS_DOWNWARD
3665   if (0)
3666 #else
3667   if (1)
3668 #endif
3669     {
3670       temp = virtual_outgoing_args_rtx;
3671       if (extra != 0 && below)
3672 	temp = plus_constant (Pmode, temp, extra);
3673     }
3674   else
3675     {
3676       if (CONST_INT_P (size))
3677 	temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3678 			      -INTVAL (size) - (below ? 0 : extra));
3679       else if (extra != 0 && !below)
3680 	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3681 			     negate_rtx (Pmode, plus_constant (Pmode, size,
3682 							       extra)));
3683       else
3684 	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3685 			     negate_rtx (Pmode, size));
3686     }
3687 
3688   return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3689 }
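
/* A minimal usage sketch (assuming a downward-growing stack and no
   preallocated argument block): push_block (GEN_INT (16), 0, 0) emits an
   anti_adjust_stack of 16 bytes and returns a memory_address form of
   virtual_outgoing_args_rtx as the address of the new block.  */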
3690 
3691 /* A utility routine that returns the base of an auto-inc memory, or NULL.  */
3692 
3693 static rtx
3694 mem_autoinc_base (rtx mem)
3695 {
3696   if (MEM_P (mem))
3697     {
3698       rtx addr = XEXP (mem, 0);
3699       if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3700 	return XEXP (addr, 0);
3701     }
3702   return NULL;
3703 }
3704 
3705 /* A utility routine used here, in reload, and in try_split.  The insns
3706    after PREV up to and including LAST are known to adjust the stack,
3707    with a final value of END_ARGS_SIZE.  Iterate backward from LAST
3708    placing notes as appropriate.  PREV may be NULL, indicating the
3709    entire insn sequence prior to LAST should be scanned.
3710 
3711    The set of allowed stack pointer modifications is small:
3712      (1) One or more auto-inc style memory references (aka pushes),
3713      (2) One or more addition/subtraction with the SP as destination,
3714      (3) A single move insn with the SP as destination,
3715      (4) A call_pop insn,
3716      (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3717 
3718    Insns in the sequence that do not modify the SP are ignored,
3719    except for noreturn calls.
3720 
3721    The return value is the amount of adjustment that can be trivially
3722    verified, via immediate operand or auto-inc.  If the adjustment
3723    cannot be trivially extracted, the return value is INT_MIN.  */
3724 
3725 HOST_WIDE_INT
3726 find_args_size_adjust (rtx insn)
3727 {
3728   rtx dest, set, pat;
3729   int i;
3730 
3731   pat = PATTERN (insn);
3732   set = NULL;
3733 
3734   /* Look for a call_pop pattern.  */
3735   if (CALL_P (insn))
3736     {
3737       /* We have to allow non-call_pop patterns for the case
3738 	 of emit_single_push_insn of a TLS address.  */
3739       if (GET_CODE (pat) != PARALLEL)
3740 	return 0;
3741 
3742       /* All call_pop patterns have a stack pointer adjust in the parallel.
3743 	 The call itself is always first, and the stack adjust is
3744 	 usually last, so search from the end.  */
3745       for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3746 	{
3747 	  set = XVECEXP (pat, 0, i);
3748 	  if (GET_CODE (set) != SET)
3749 	    continue;
3750 	  dest = SET_DEST (set);
3751 	  if (dest == stack_pointer_rtx)
3752 	    break;
3753 	}
3754       /* We'd better have found the stack pointer adjust.  */
3755       if (i == 0)
3756 	return 0;
3757       /* Fall through to process the extracted SET and DEST
3758 	 as if it was a standalone insn.  */
3759     }
3760   else if (GET_CODE (pat) == SET)
3761     set = pat;
3762   else if ((set = single_set (insn)) != NULL)
3763     ;
3764   else if (GET_CODE (pat) == PARALLEL)
3765     {
3766       /* ??? Some older ports use a parallel with a stack adjust
3767 	 and a store for a PUSH_ROUNDING pattern, rather than a
3768 	 PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
3769       /* ??? See h8300 and m68k, pushqi1.  */
3770       for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3771 	{
3772 	  set = XVECEXP (pat, 0, i);
3773 	  if (GET_CODE (set) != SET)
3774 	    continue;
3775 	  dest = SET_DEST (set);
3776 	  if (dest == stack_pointer_rtx)
3777 	    break;
3778 
3779 	  /* We do not expect an auto-inc of the sp in the parallel.  */
3780 	  gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3781 	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3782 			       != stack_pointer_rtx);
3783 	}
3784       if (i < 0)
3785 	return 0;
3786     }
3787   else
3788     return 0;
3789 
3790   dest = SET_DEST (set);
3791 
3792   /* Look for direct modifications of the stack pointer.  */
3793   if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3794     {
3795       /* Look for a trivial adjustment, otherwise assume nothing.  */
3796       /* Note that the SPU restore_stack_block pattern refers to
3797 	 the stack pointer in V4SImode.  Consider that non-trivial.  */
3798       if (SCALAR_INT_MODE_P (GET_MODE (dest))
3799 	  && GET_CODE (SET_SRC (set)) == PLUS
3800 	  && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3801 	  && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3802 	return INTVAL (XEXP (SET_SRC (set), 1));
3803       /* ??? Reload can generate no-op moves, which will be cleaned
3804 	 up later.  Recognize it and continue searching.  */
3805       else if (rtx_equal_p (dest, SET_SRC (set)))
3806 	return 0;
3807       else
3808 	return HOST_WIDE_INT_MIN;
3809     }
3810   else
3811     {
3812       rtx mem, addr;
3813 
3814       /* Otherwise only think about autoinc patterns.  */
3815       if (mem_autoinc_base (dest) == stack_pointer_rtx)
3816 	{
3817 	  mem = dest;
3818 	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3819 			       != stack_pointer_rtx);
3820 	}
3821       else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3822 	mem = SET_SRC (set);
3823       else
3824 	return 0;
3825 
3826       addr = XEXP (mem, 0);
3827       switch (GET_CODE (addr))
3828 	{
3829 	case PRE_INC:
3830 	case POST_INC:
3831 	  return GET_MODE_SIZE (GET_MODE (mem));
3832 	case PRE_DEC:
3833 	case POST_DEC:
3834 	  return -GET_MODE_SIZE (GET_MODE (mem));
3835 	case PRE_MODIFY:
3836 	case POST_MODIFY:
3837 	  addr = XEXP (addr, 1);
3838 	  gcc_assert (GET_CODE (addr) == PLUS);
3839 	  gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3840 	  gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3841 	  return INTVAL (XEXP (addr, 1));
3842 	default:
3843 	  gcc_unreachable ();
3844 	}
3845     }
3846 }
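
/* Examples of the returned adjustment (assuming a 32-bit target):

	(set (reg SP) (plus (reg SP) (const_int -16)))	-> -16
	(set (mem:SI (pre_dec (reg SP))) (reg:SI R))	-> -4
	(set (reg:SI R) (mem:SI (post_inc (reg SP))))	-> 4

   while an SP modification that is not a simple constant adjustment
   (e.g. copying some other register into SP) yields HOST_WIDE_INT_MIN.  */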
3847 
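/* Add REG_ARGS_SIZE notes to the stack-adjusting insns between PREV
   (exclusive) and LAST (inclusive), given that the sequence leaves the
   outgoing args size equal to END_ARGS_SIZE; see the commentary before
   find_args_size_adjust above.  Return the args size at the start of the
   sequence, or INT_MIN if some adjustment could not be determined.  */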
3848 int
3849 fixup_args_size_notes (rtx prev, rtx last, int end_args_size)
3850 {
3851   int args_size = end_args_size;
3852   bool saw_unknown = false;
3853   rtx insn;
3854 
3855   for (insn = last; insn != prev; insn = PREV_INSN (insn))
3856     {
3857       HOST_WIDE_INT this_delta;
3858 
3859       if (!NONDEBUG_INSN_P (insn))
3860 	continue;
3861 
3862       this_delta = find_args_size_adjust (insn);
3863       if (this_delta == 0)
3864 	{
3865 	  if (!CALL_P (insn)
3866 	      || ACCUMULATE_OUTGOING_ARGS
3867 	      || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3868 	    continue;
3869 	}
3870 
3871       gcc_assert (!saw_unknown);
3872       if (this_delta == HOST_WIDE_INT_MIN)
3873 	saw_unknown = true;
3874 
3875       add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
3876 #ifdef STACK_GROWS_DOWNWARD
3877       this_delta = -(unsigned HOST_WIDE_INT) this_delta;
3878 #endif
3879       args_size -= this_delta;
3880     }
3881 
3882   return saw_unknown ? INT_MIN : args_size;
3883 }
3884 
3885 #ifdef PUSH_ROUNDING
3886 /* Emit a single push insn.  */
3887 
3888 static void
3889 emit_single_push_insn_1 (enum machine_mode mode, rtx x, tree type)
3890 {
3891   rtx dest_addr;
3892   unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3893   rtx dest;
3894   enum insn_code icode;
3895 
3896   stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3897   /* If there is a push pattern, use it.  Otherwise try the old way of
3898      throwing a MEM representing the push operation to the move expander.  */
3899   icode = optab_handler (push_optab, mode);
3900   if (icode != CODE_FOR_nothing)
3901     {
3902       struct expand_operand ops[1];
3903 
3904       create_input_operand (&ops[0], x, mode);
3905       if (maybe_expand_insn (icode, 1, ops))
3906 	return;
3907     }
3908   if (GET_MODE_SIZE (mode) == rounded_size)
3909     dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3910   /* If we are to pad downward, adjust the stack pointer first and
3911      then store X into the stack location using an offset.  This is
3912      because emit_move_insn does not know how to pad; it does not have
3913      access to type.  */
3914   else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3915     {
3916       unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3917       HOST_WIDE_INT offset;
3918 
3919       emit_move_insn (stack_pointer_rtx,
3920 		      expand_binop (Pmode,
3921 #ifdef STACK_GROWS_DOWNWARD
3922 				    sub_optab,
3923 #else
3924 				    add_optab,
3925 #endif
3926 				    stack_pointer_rtx,
3927 				    GEN_INT (rounded_size),
3928 				    NULL_RTX, 0, OPTAB_LIB_WIDEN));
3929 
3930       offset = (HOST_WIDE_INT) padding_size;
3931 #ifdef STACK_GROWS_DOWNWARD
3932       if (STACK_PUSH_CODE == POST_DEC)
3933 	/* We have already decremented the stack pointer, so get the
3934 	   previous value.  */
3935 	offset += (HOST_WIDE_INT) rounded_size;
3936 #else
3937       if (STACK_PUSH_CODE == POST_INC)
3938 	/* We have already incremented the stack pointer, so get the
3939 	   previous value.  */
3940 	offset -= (HOST_WIDE_INT) rounded_size;
3941 #endif
3942       dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3943     }
3944   else
3945     {
3946 #ifdef STACK_GROWS_DOWNWARD
3947       /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
3948       dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3949 				GEN_INT (-(HOST_WIDE_INT) rounded_size));
3950 #else
3951       /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
3952       dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3953 				GEN_INT (rounded_size));
3954 #endif
3955       dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3956     }
3957 
3958   dest = gen_rtx_MEM (mode, dest_addr);
3959 
3960   if (type != 0)
3961     {
3962       set_mem_attributes (dest, type, 1);
3963 
3964       if (flag_optimize_sibling_calls)
3965 	/* Function incoming arguments may overlap with sibling call
3966 	   outgoing arguments and we cannot allow reordering of reads
3967 	   from function arguments with stores to outgoing arguments
3968 	   of sibling calls.  */
3969 	set_mem_alias_set (dest, 0);
3970     }
3971   emit_move_insn (dest, x);
3972 }
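
/* A worked example of the downward-padding branch above (assuming a
   hypothetical target where the stack grows downward, STACK_PUSH_CODE is
   PRE_DEC, and PUSH_ROUNDING rounds a 2-byte push up to 4 bytes):
   pushing an HImode argument that is padded downward first subtracts the
   rounded size (4) from the stack pointer and then stores the value at
   (plus SP 2), so the two padding bytes end up below the data, at the
   lower addresses.  */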
3973 
3974 /* Emit and annotate a single push insn.  */
3975 
3976 static void
3977 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3978 {
3979   int delta, old_delta = stack_pointer_delta;
3980   rtx prev = get_last_insn ();
3981   rtx last;
3982 
3983   emit_single_push_insn_1 (mode, x, type);
3984 
3985   last = get_last_insn ();
3986 
3987   /* Notice the common case where we emitted exactly one insn.  */
3988   if (PREV_INSN (last) == prev)
3989     {
3990       add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
3991       return;
3992     }
3993 
3994   delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
3995   gcc_assert (delta == INT_MIN || delta == old_delta);
3996 }
3997 #endif
3998 
3999 /* Generate code to push X onto the stack, assuming it has mode MODE and
4000    type TYPE.
4001    MODE is redundant except when X is a CONST_INT (since they don't
4002    carry mode info).
4003    SIZE is an rtx for the size of data to be copied (in bytes),
4004    needed only if X is BLKmode.
4005 
4006    ALIGN (in bits) is maximum alignment we can assume.
4007 
4008    If PARTIAL and REG are both nonzero, then copy that many of the first
4009    bytes of X into registers starting with REG, and push the rest of X.
4010    The amount of space pushed is decreased by PARTIAL bytes.
4011    REG must be a hard register in this case.
4012    If REG is zero but PARTIAL is not, take all other actions for an
4013    argument partially in registers, but do not actually load any
4014    registers.
4015 
4016    EXTRA is the amount in bytes of extra space to leave next to this arg.
4017    This is ignored if an argument block has already been allocated.
4018 
4019    On a machine that lacks real push insns, ARGS_ADDR is the address of
4020    the bottom of the argument block for this call.  We use indexing off there
4021    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
4022    argument block has not been preallocated.
4023 
4024    ARGS_SO_FAR is the size of args previously pushed for this call.
4025 
4026    REG_PARM_STACK_SPACE is nonzero if functions require stack space
4027    for arguments passed in registers.  If nonzero, it will be the number
4028    of bytes required.  */
4029 
4030 void
4031 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
4032 		unsigned int align, int partial, rtx reg, int extra,
4033 		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4034 		rtx alignment_pad)
4035 {
4036   rtx xinner;
4037   enum direction stack_direction
4038 #ifdef STACK_GROWS_DOWNWARD
4039     = downward;
4040 #else
4041     = upward;
4042 #endif
4043 
4044   /* Decide where to pad the argument: `downward' for below,
4045      `upward' for above, or `none' for don't pad it.
4046      Default is below for small data on big-endian machines; else above.  */
4047   enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4048 
4049   /* Invert direction if stack is post-decrement.
4050      FIXME: why?  */
4051   if (STACK_PUSH_CODE == POST_DEC)
4052     if (where_pad != none)
4053       where_pad = (where_pad == downward ? upward : downward);
4054 
4055   xinner = x;
4056 
4057   if (mode == BLKmode
4058       || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)
4059 	  && type != NULL_TREE))
4060     {
4061       /* Copy a block into the stack, entirely or partially.  */
4062 
4063       rtx temp;
4064       int used;
4065       int offset;
4066       int skip;
4067 
4068       offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4069       used = partial - offset;
4070 
4071       if (mode != BLKmode)
4072 	{
4073 	  /* A value is to be stored in an insufficiently aligned
4074 	     stack slot; copy via a suitably aligned slot if
4075 	     necessary.  */
4076 	  size = GEN_INT (GET_MODE_SIZE (mode));
4077 	  if (!MEM_P (xinner))
4078 	    {
4079 	      temp = assign_temp (type, 1, 1);
4080 	      emit_move_insn (temp, xinner);
4081 	      xinner = temp;
4082 	    }
4083 	}
4084 
4085       gcc_assert (size);
4086 
4087       /* USED is now the # of bytes we need not copy to the stack
4088 	 because registers will take care of them.  */
4089 
4090       if (partial != 0)
4091 	xinner = adjust_address (xinner, BLKmode, used);
4092 
4093       /* If the partial register-part of the arg counts in its stack size,
4094 	 skip the part of stack space corresponding to the registers.
4095 	 Otherwise, start copying to the beginning of the stack space,
4096 	 by setting SKIP to 0.  */
4097       skip = (reg_parm_stack_space == 0) ? 0 : used;
4098 
4099 #ifdef PUSH_ROUNDING
4100       /* Do it with several push insns if that doesn't take lots of insns
4101 	 and if there is no difficulty with push insns that skip bytes
4102 	 on the stack for alignment purposes.  */
4103       if (args_addr == 0
4104 	  && PUSH_ARGS
4105 	  && CONST_INT_P (size)
4106 	  && skip == 0
4107 	  && MEM_ALIGN (xinner) >= align
4108 	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
4109 	  /* Here we avoid the case of a structure whose weak alignment
4110 	     forces many pushes of a small amount of data,
4111 	     and such small pushes do rounding that causes trouble.  */
4112 	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4113 	      || align >= BIGGEST_ALIGNMENT
4114 	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4115 		  == (align / BITS_PER_UNIT)))
4116 	  && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4117 	{
4118 	  /* Push padding now if padding above and stack grows down,
4119 	     or if padding below and stack grows up.
4120 	     But if space already allocated, this has already been done.  */
4121 	  if (extra && args_addr == 0
4122 	      && where_pad != none && where_pad != stack_direction)
4123 	    anti_adjust_stack (GEN_INT (extra));
4124 
4125 	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4126 	}
4127       else
4128 #endif /* PUSH_ROUNDING  */
4129 	{
4130 	  rtx target;
4131 
4132 	  /* Otherwise make space on the stack and copy the data
4133 	     to the address of that space.  */
4134 
4135 	  /* Deduct words put into registers from the size we must copy.  */
4136 	  if (partial != 0)
4137 	    {
4138 	      if (CONST_INT_P (size))
4139 		size = GEN_INT (INTVAL (size) - used);
4140 	      else
4141 		size = expand_binop (GET_MODE (size), sub_optab, size,
4142 				     GEN_INT (used), NULL_RTX, 0,
4143 				     OPTAB_LIB_WIDEN);
4144 	    }
4145 
4146 	  /* Get the address of the stack space.
4147 	     In this case, we do not deal with EXTRA separately.
4148 	     A single stack adjust will do.  */
4149 	  if (! args_addr)
4150 	    {
4151 	      temp = push_block (size, extra, where_pad == downward);
4152 	      extra = 0;
4153 	    }
4154 	  else if (CONST_INT_P (args_so_far))
4155 	    temp = memory_address (BLKmode,
4156 				   plus_constant (Pmode, args_addr,
4157 						  skip + INTVAL (args_so_far)));
4158 	  else
4159 	    temp = memory_address (BLKmode,
4160 				   plus_constant (Pmode,
4161 						  gen_rtx_PLUS (Pmode,
4162 								args_addr,
4163 								args_so_far),
4164 						  skip));
4165 
4166 	  if (!ACCUMULATE_OUTGOING_ARGS)
4167 	    {
4168 	      /* If the source is referenced relative to the stack pointer,
4169 		 copy it to another register to stabilize it.  We do not need
4170 		 to do this if we know that we won't be changing sp.  */
4171 
4172 	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4173 		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4174 		temp = copy_to_reg (temp);
4175 	    }
4176 
4177 	  target = gen_rtx_MEM (BLKmode, temp);
4178 
4179 	  /* We do *not* set_mem_attributes here, because incoming arguments
4180 	     may overlap with sibling call outgoing arguments and we cannot
4181 	     allow reordering of reads from function arguments with stores
4182 	     to outgoing arguments of sibling calls.  We do, however, want
4183 	     to record the alignment of the stack slot.  */
4184 	  /* ALIGN may well be stricter than the alignment of TYPE, e.g. due to
4185 	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
4186 	  set_mem_align (target, align);
4187 
4188 	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4189 	}
4190     }
4191   else if (partial > 0)
4192     {
4193       /* Scalar partly in registers.  */
4194 
4195       int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4196       int i;
4197       int not_stack;
4198       /* # bytes of start of argument
4199 	 that we must make space for but need not store.  */
4200       int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4201       int args_offset = INTVAL (args_so_far);
4202       int skip;
4203 
4204       /* Push padding now if padding above and stack grows down,
4205 	 or if padding below and stack grows up.
4206 	 But if space already allocated, this has already been done.  */
4207       if (extra && args_addr == 0
4208 	  && where_pad != none && where_pad != stack_direction)
4209 	anti_adjust_stack (GEN_INT (extra));
4210 
4211       /* If we make space by pushing it, we might as well push
4212 	 the real data.  Otherwise, we can leave OFFSET nonzero
4213 	 and leave the space uninitialized.  */
4214       if (args_addr == 0)
4215 	offset = 0;
4216 
4217       /* Now NOT_STACK gets the number of words that we don't need to
4218 	 allocate on the stack.  Convert OFFSET to words too.  */
4219       not_stack = (partial - offset) / UNITS_PER_WORD;
4220       offset /= UNITS_PER_WORD;
4221 
4222       /* If the partial register-part of the arg counts in its stack size,
4223 	 skip the part of stack space corresponding to the registers.
4224 	 Otherwise, start copying to the beginning of the stack space,
4225 	 by setting SKIP to 0.  */
4226       skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4227 
4228       if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4229 	x = validize_mem (force_const_mem (mode, x));
4230 
4231       /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4232 	 SUBREGs of such registers are not allowed.  */
4233       if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4234 	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4235 	x = copy_to_reg (x);
4236 
4237       /* Loop over all the words allocated on the stack for this arg.  */
4238       /* We can do it by words, because any scalar bigger than a word
4239 	 has a size that is a multiple of a word.  */
4240 #ifndef PUSH_ARGS_REVERSED
4241       for (i = not_stack; i < size; i++)
4242 #else
4243       for (i = size - 1; i >= not_stack; i--)
4244 #endif
4245 	if (i >= not_stack + offset)
4246 	  emit_push_insn (operand_subword_force (x, i, mode),
4247 			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4248 			  0, args_addr,
4249 			  GEN_INT (args_offset + ((i - not_stack + skip)
4250 						  * UNITS_PER_WORD)),
4251 			  reg_parm_stack_space, alignment_pad);
4252     }
4253   else
4254     {
4255       rtx addr;
4256       rtx dest;
4257 
4258       /* Push padding now if padding above and stack grows down,
4259 	 or if padding below and stack grows up.
4260 	 But if space already allocated, this has already been done.  */
4261       if (extra && args_addr == 0
4262 	  && where_pad != none && where_pad != stack_direction)
4263 	anti_adjust_stack (GEN_INT (extra));
4264 
4265 #ifdef PUSH_ROUNDING
4266       if (args_addr == 0 && PUSH_ARGS)
4267 	emit_single_push_insn (mode, x, type);
4268       else
4269 #endif
4270 	{
4271 	  if (CONST_INT_P (args_so_far))
4272 	    addr
4273 	      = memory_address (mode,
4274 				plus_constant (Pmode, args_addr,
4275 					       INTVAL (args_so_far)));
4276 	  else
4277 	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4278 						       args_so_far));
4279 	  dest = gen_rtx_MEM (mode, addr);
4280 
4281 	  /* We do *not* set_mem_attributes here, because incoming arguments
4282 	     may overlap with sibling call outgoing arguments and we cannot
4283 	     allow reordering of reads from function arguments with stores
4284 	     to outgoing arguments of sibling calls.  We do, however, want
4285 	     to record the alignment of the stack slot.  */
4286 	  /* ALIGN may well be stricter than the alignment of TYPE, e.g. due to
4287 	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
4288 	  set_mem_align (dest, align);
4289 
4290 	  emit_move_insn (dest, x);
4291 	}
4292     }
4293 
4294   /* If part should go in registers, copy that part
4295      into the appropriate registers.  Do this now, at the end,
4296      since mem-to-mem copies above may do function calls.  */
4297   if (partial > 0 && reg != 0)
4298     {
4299       /* Handle calls that pass values in multiple non-contiguous locations.
4300 	 The Irix 6 ABI has examples of this.  */
4301       if (GET_CODE (reg) == PARALLEL)
4302 	emit_group_load (reg, x, type, -1);
4303       else
4304 	{
4305 	  gcc_assert (partial % UNITS_PER_WORD == 0);
4306 	  move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4307 	}
4308     }
4309 
4310   if (extra && args_addr == 0 && where_pad == stack_direction)
4311     anti_adjust_stack (GEN_INT (extra));
4312 
4313   if (alignment_pad && args_addr == 0)
4314     anti_adjust_stack (alignment_pad);
4315 }
4316 
4317 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4318    operations.  */
4319 
4320 static rtx
4321 get_subtarget (rtx x)
4322 {
4323   return (optimize
4324           || x == 0
4325 	   /* Only registers can be subtargets.  */
4326 	   || !REG_P (x)
4327 	   /* Don't use hard regs to avoid extending their life.  */
4328 	   || REGNO (x) < FIRST_PSEUDO_REGISTER
4329 	  ? 0 : x);
4330 }
4331 
4332 /* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
4333    FIELD is a bitfield.  Returns true if the optimization was successful,
4334    and there's nothing else to do.  */
4335 
4336 static bool
4337 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4338 				 unsigned HOST_WIDE_INT bitpos,
4339 				 unsigned HOST_WIDE_INT bitregion_start,
4340 				 unsigned HOST_WIDE_INT bitregion_end,
4341 				 enum machine_mode mode1, rtx str_rtx,
4342 				 tree to, tree src)
4343 {
4344   enum machine_mode str_mode = GET_MODE (str_rtx);
4345   unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4346   tree op0, op1;
4347   rtx value, result;
4348   optab binop;
4349   gimple srcstmt;
4350   enum tree_code code;
4351 
4352   if (mode1 != VOIDmode
4353       || bitsize >= BITS_PER_WORD
4354       || str_bitsize > BITS_PER_WORD
4355       || TREE_SIDE_EFFECTS (to)
4356       || TREE_THIS_VOLATILE (to))
4357     return false;
4358 
4359   STRIP_NOPS (src);
4360   if (TREE_CODE (src) != SSA_NAME)
4361     return false;
4362   if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4363     return false;
4364 
4365   srcstmt = get_gimple_for_ssa_name (src);
4366   if (!srcstmt
4367       || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4368     return false;
4369 
4370   code = gimple_assign_rhs_code (srcstmt);
4371 
4372   op0 = gimple_assign_rhs1 (srcstmt);
4373 
4374   /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4375      to find its initialization.  Hopefully the initialization will
4376      be from a bitfield load.  */
4377   if (TREE_CODE (op0) == SSA_NAME)
4378     {
4379       gimple op0stmt = get_gimple_for_ssa_name (op0);
4380 
4381       /* We want to eventually have OP0 be the same as TO, which
4382 	 should be a bitfield.  */
4383       if (!op0stmt
4384 	  || !is_gimple_assign (op0stmt)
4385 	  || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4386 	return false;
4387       op0 = gimple_assign_rhs1 (op0stmt);
4388     }
4389 
4390   op1 = gimple_assign_rhs2 (srcstmt);
4391 
4392   if (!operand_equal_p (to, op0, 0))
4393     return false;
4394 
4395   if (MEM_P (str_rtx))
4396     {
4397       unsigned HOST_WIDE_INT offset1;
4398 
4399       if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4400 	str_mode = word_mode;
4401       str_mode = get_best_mode (bitsize, bitpos,
4402 				bitregion_start, bitregion_end,
4403 				MEM_ALIGN (str_rtx), str_mode, 0);
4404       if (str_mode == VOIDmode)
4405 	return false;
4406       str_bitsize = GET_MODE_BITSIZE (str_mode);
4407 
4408       offset1 = bitpos;
4409       bitpos %= str_bitsize;
4410       offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4411       str_rtx = adjust_address (str_rtx, str_mode, offset1);
4412     }
4413   else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4414     return false;
4415 
4416   /* If the bit field covers the whole REG/MEM, store_field
4417      will likely generate better code.  */
4418   if (bitsize >= str_bitsize)
4419     return false;
4420 
4421   /* We can't handle fields split across multiple entities.  */
4422   if (bitpos + bitsize > str_bitsize)
4423     return false;
4424 
4425   if (BYTES_BIG_ENDIAN)
4426     bitpos = str_bitsize - bitpos - bitsize;
4427 
4428   switch (code)
4429     {
4430     case PLUS_EXPR:
4431     case MINUS_EXPR:
4432       /* For now, just optimize the case of the topmost bitfield
4433 	 where we don't need to do any masking and also
4434 	 1 bit bitfields where xor can be used.
4435 	 We might win by one instruction for the other bitfields
4436 	 too if insv/extv instructions aren't used, so that
4437 	 can be added later.  */
4438       if (bitpos + bitsize != str_bitsize
4439 	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4440 	break;
4441 
4442       value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4443       value = convert_modes (str_mode,
4444 			     TYPE_MODE (TREE_TYPE (op1)), value,
4445 			     TYPE_UNSIGNED (TREE_TYPE (op1)));
4446 
4447       /* We may be accessing data outside the field, which means
4448 	 we can alias adjacent data.  */
4449       if (MEM_P (str_rtx))
4450 	{
4451 	  str_rtx = shallow_copy_rtx (str_rtx);
4452 	  set_mem_alias_set (str_rtx, 0);
4453 	  set_mem_expr (str_rtx, 0);
4454 	}
4455 
4456       binop = code == PLUS_EXPR ? add_optab : sub_optab;
4457       if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4458 	{
4459 	  value = expand_and (str_mode, value, const1_rtx, NULL);
4460 	  binop = xor_optab;
4461 	}
4462       value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4463       result = expand_binop (str_mode, binop, str_rtx,
4464 			     value, str_rtx, 1, OPTAB_WIDEN);
4465       if (result != str_rtx)
4466 	emit_move_insn (str_rtx, result);
4467       return true;
4468 
4469     case BIT_IOR_EXPR:
4470     case BIT_XOR_EXPR:
4471       if (TREE_CODE (op1) != INTEGER_CST)
4472 	break;
4473       value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4474       value = convert_modes (str_mode,
4475 			     TYPE_MODE (TREE_TYPE (op1)), value,
4476 			     TYPE_UNSIGNED (TREE_TYPE (op1)));
4477 
4478       /* We may be accessing data outside the field, which means
4479 	 we can alias adjacent data.  */
4480       if (MEM_P (str_rtx))
4481 	{
4482 	  str_rtx = shallow_copy_rtx (str_rtx);
4483 	  set_mem_alias_set (str_rtx, 0);
4484 	  set_mem_expr (str_rtx, 0);
4485 	}
4486 
4487       binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4488       if (bitpos + bitsize != str_bitsize)
4489 	{
4490 	  rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1);
4491 	  value = expand_and (str_mode, value, mask, NULL_RTX);
4492 	}
4493       value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4494       result = expand_binop (str_mode, binop, str_rtx,
4495 			     value, str_rtx, 1, OPTAB_WIDEN);
4496       if (result != str_rtx)
4497 	emit_move_insn (str_rtx, result);
4498       return true;
4499 
4500     default:
4501       break;
4502     }
4503 
4504   return false;
4505 }
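
/* Illustrative case (the field names are hypothetical): for a store such
   as "s.f |= 3", where F is a bit field accessed through a word-sized
   STR_RTX, the BIT_IOR_EXPR case above masks the constant to the field
   width, shifts it into position and IORs it into the containing word
   directly, avoiding an extract/modify/reinsert sequence.  */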
4506 
4507 /* In the C++ memory model, consecutive bit fields in a structure are
4508    considered one memory location.
4509 
4510    Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4511    returns the bit range of consecutive bits in which this COMPONENT_REF
4512    belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
4513    and *OFFSET may be adjusted in the process.
4514 
4515    If the access does not need to be restricted, 0 is returned in both
4516    *BITSTART and *BITEND.  */
4517 
4518 static void
4519 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4520 	       unsigned HOST_WIDE_INT *bitend,
4521 	       tree exp,
4522 	       HOST_WIDE_INT *bitpos,
4523 	       tree *offset)
4524 {
4525   HOST_WIDE_INT bitoffset;
4526   tree field, repr;
4527 
4528   gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4529 
4530   field = TREE_OPERAND (exp, 1);
4531   repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4532   /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4533      need to limit the range we can access.  */
4534   if (!repr)
4535     {
4536       *bitstart = *bitend = 0;
4537       return;
4538     }
4539 
4540   /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4541      part of a larger bit field, then the representative does not serve any
4542      useful purpose.  This can occur in Ada.  */
4543   if (handled_component_p (TREE_OPERAND (exp, 0)))
4544     {
4545       enum machine_mode rmode;
4546       HOST_WIDE_INT rbitsize, rbitpos;
4547       tree roffset;
4548       int unsignedp;
4549       int volatilep = 0;
4550       get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4551 			   &roffset, &rmode, &unsignedp, &volatilep, false);
4552       if ((rbitpos % BITS_PER_UNIT) != 0)
4553 	{
4554 	  *bitstart = *bitend = 0;
4555 	  return;
4556 	}
4557     }
4558 
4559   /* Compute the adjustment to bitpos from the offset of the field
4560      relative to the representative.  DECL_FIELD_OFFSET of field and
4561      repr are the same by construction if they are not constants,
4562      see finish_bitfield_layout.  */
4563   if (host_integerp (DECL_FIELD_OFFSET (field), 1)
4564       && host_integerp (DECL_FIELD_OFFSET (repr), 1))
4565     bitoffset = (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
4566 		 - tree_low_cst (DECL_FIELD_OFFSET (repr), 1)) * BITS_PER_UNIT;
4567   else
4568     bitoffset = 0;
4569   bitoffset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
4570 		- tree_low_cst (DECL_FIELD_BIT_OFFSET (repr), 1));
4571 
4572   /* If the adjustment is larger than bitpos, we would have a negative bit
4573      position for the lower bound and this may wreak havoc later.  Adjust
4574      offset and bitpos to make the lower bound non-negative in that case.  */
4575   if (bitoffset > *bitpos)
4576     {
4577       HOST_WIDE_INT adjust = bitoffset - *bitpos;
4578       gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4579 
4580       *bitpos += adjust;
4581       if (*offset == NULL_TREE)
4582 	*offset = size_int (-adjust / BITS_PER_UNIT);
4583       else
4584 	*offset
4585 	  = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4586       *bitstart = 0;
4587     }
4588   else
4589     *bitstart = *bitpos - bitoffset;
4590 
4591   *bitend = *bitstart + tree_low_cst (DECL_SIZE (repr), 1) - 1;
4592 }
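
/* A small example (the layout is illustrative, not target-specific): for

	struct S { int a : 3; int b : 5; char c; };

   A and B share one DECL_BIT_FIELD_REPRESENTATIVE, so a store to either
   of them gets a bit range covering the whole representative (and hence
   both bit fields), while C is a separate memory location and stores to
   it are not restricted by this function.  */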
4593 
4594 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4595    in memory and has non-BLKmode.  DECL_RTL must not be a MEM; if
4596    DECL_RTL was not set yet, return NORTL.  */
4597 
4598 static inline bool
4599 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4600 {
4601   if (TREE_CODE (addr) != ADDR_EXPR)
4602     return false;
4603 
4604   tree base = TREE_OPERAND (addr, 0);
4605 
4606   if (!DECL_P (base)
4607       || TREE_ADDRESSABLE (base)
4608       || DECL_MODE (base) == BLKmode)
4609     return false;
4610 
4611   if (!DECL_RTL_SET_P (base))
4612     return nortl;
4613 
4614   return (!MEM_P (DECL_RTL (base)));
4615 }
4616 
4617 /* Returns true if the MEM_REF REF refers to an object that does not
4618    reside in memory and has non-BLKmode.  */
4619 
4620 static inline bool
4621 mem_ref_refers_to_non_mem_p (tree ref)
4622 {
4623   tree base = TREE_OPERAND (ref, 0);
4624   return addr_expr_of_non_mem_decl_p_1 (base, false);
4625 }
4626 
4627 /* Return TRUE iff OP is an ADDR_EXPR of a DECL that's not
4628    addressable.  This is very much like mem_ref_refers_to_non_mem_p,
4629    but instead of the MEM_REF, it takes its base, and it doesn't
4630    assume a DECL is in memory just because its RTL is not set yet.  */
4631 
4632 bool
4633 addr_expr_of_non_mem_decl_p (tree op)
4634 {
4635   return addr_expr_of_non_mem_decl_p_1 (op, true);
4636 }
4637 
4638 /* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
4639    is true, try generating a nontemporal store.  */
4640 
4641 void
4642 expand_assignment (tree to, tree from, bool nontemporal)
4643 {
4644   rtx to_rtx = 0;
4645   rtx result;
4646   enum machine_mode mode;
4647   unsigned int align;
4648   enum insn_code icode;
4649 
4650   /* Don't crash if the lhs of the assignment was erroneous.  */
4651   if (TREE_CODE (to) == ERROR_MARK)
4652     {
4653       expand_normal (from);
4654       return;
4655     }
4656 
4657   /* Optimize away no-op moves without side-effects.  */
4658   if (operand_equal_p (to, from, 0))
4659     return;
4660 
4661   /* Handle misaligned stores.  */
4662   mode = TYPE_MODE (TREE_TYPE (to));
4663   if ((TREE_CODE (to) == MEM_REF
4664        || TREE_CODE (to) == TARGET_MEM_REF)
4665       && mode != BLKmode
4666       && !mem_ref_refers_to_non_mem_p (to)
4667       && ((align = get_object_alignment (to))
4668 	  < GET_MODE_ALIGNMENT (mode))
4669       && (((icode = optab_handler (movmisalign_optab, mode))
4670 	   != CODE_FOR_nothing)
4671 	  || SLOW_UNALIGNED_ACCESS (mode, align)))
4672     {
4673       rtx reg, mem;
4674 
4675       reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4676       reg = force_not_mem (reg);
4677       mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4678 
4679       if (icode != CODE_FOR_nothing)
4680 	{
4681 	  struct expand_operand ops[2];
4682 
4683 	  create_fixed_operand (&ops[0], mem);
4684 	  create_input_operand (&ops[1], reg, mode);
4685 	  /* The movmisalign<mode> pattern cannot fail, else the assignment
4686 	     would silently be omitted.  */
4687 	  expand_insn (icode, 2, ops);
4688 	}
4689       else
4690 	store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4691       return;
4692     }
4693 
4694   /* Assignment of a structure component needs special treatment
4695      if the structure component's rtx is not simply a MEM.
4696      Assignment of an array element at a constant index, and assignment of
4697      an array element in an unaligned packed structure field, has the same
4698      problem.  Same for (partially) storing into a non-memory object.  */
4699   if (handled_component_p (to)
4700       || (TREE_CODE (to) == MEM_REF
4701 	  && mem_ref_refers_to_non_mem_p (to))
4702       || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4703     {
4704       enum machine_mode mode1;
4705       HOST_WIDE_INT bitsize, bitpos;
4706       unsigned HOST_WIDE_INT bitregion_start = 0;
4707       unsigned HOST_WIDE_INT bitregion_end = 0;
4708       tree offset;
4709       int unsignedp;
4710       int volatilep = 0;
4711       tree tem;
4712 
4713       push_temp_slots ();
4714       tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4715 				 &unsignedp, &volatilep, true);
4716 
4717       /* Make sure bitpos is not negative, it can wreak havoc later.  */
4718       if (bitpos < 0)
4719 	{
4720 	  gcc_assert (offset == NULL_TREE);
4721 	  offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4722 					? 3 : exact_log2 (BITS_PER_UNIT)));
4723 	  bitpos &= BITS_PER_UNIT - 1;
4724 	}
4725 
4726       if (TREE_CODE (to) == COMPONENT_REF
4727 	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4728 	get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4729 
4730       to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4731 
4732       /* If the bitfield is volatile, we want to access it in the
4733 	 field's mode, not the computed mode.
4734 	 If a MEM has VOIDmode (external with incomplete type),
4735 	 use BLKmode for it instead.  */
4736       if (MEM_P (to_rtx))
4737 	{
4738 	  if (volatilep && flag_strict_volatile_bitfields > 0)
4739 	    to_rtx = adjust_address (to_rtx, mode1, 0);
4740 	  else if (GET_MODE (to_rtx) == VOIDmode)
4741 	    to_rtx = adjust_address (to_rtx, BLKmode, 0);
4742 	}
4743 
4744       if (offset != 0)
4745 	{
4746 	  enum machine_mode address_mode;
4747 	  rtx offset_rtx;
4748 
4749 	  if (!MEM_P (to_rtx))
4750 	    {
4751 	      /* We can get constant negative offsets into arrays with broken
4752 		 user code.  Translate this to a trap instead of ICEing.  */
4753 	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4754 	      expand_builtin_trap ();
4755 	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4756 	    }
4757 
4758 	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4759 	  address_mode = get_address_mode (to_rtx);
4760 	  if (GET_MODE (offset_rtx) != address_mode)
4761 	    offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4762 
4763 	  /* A constant address in TO_RTX can have VOIDmode; we must not try
4764 	     to call force_reg in that case, so avoid it.  */
4765 	  if (MEM_P (to_rtx)
4766 	      && GET_MODE (to_rtx) == BLKmode
4767 	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4768 	      && bitsize > 0
4769 	      && (bitpos % bitsize) == 0
4770 	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4771 	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4772 	    {
4773 	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4774 	      bitpos = 0;
4775 	    }
4776 
4777 	  to_rtx = offset_address (to_rtx, offset_rtx,
4778 				   highest_pow2_factor_for_target (to,
4779 				   				   offset));
4780 	}
4781 
4782       /* No action is needed if the target is not a memory and the field
4783 	 lies completely outside that target.  This can occur if the source
4784 	 code contains an out-of-bounds access to a small array.  */
4785       if (!MEM_P (to_rtx)
4786 	  && GET_MODE (to_rtx) != BLKmode
4787 	  && (unsigned HOST_WIDE_INT) bitpos
4788 	     >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4789 	{
4790 	  expand_normal (from);
4791 	  result = NULL;
4792 	}
4793       /* Handle expand_expr of a complex value returning a CONCAT.  */
4794       else if (GET_CODE (to_rtx) == CONCAT)
4795 	{
4796 	  unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4797 	  if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4798 	      && bitpos == 0
4799 	      && bitsize == mode_bitsize)
4800 	    result = store_expr (from, to_rtx, false, nontemporal);
4801 	  else if (bitsize == mode_bitsize / 2
4802 		   && (bitpos == 0 || bitpos == mode_bitsize / 2))
4803 	    result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4804 				 nontemporal);
4805 	  else if (bitpos + bitsize <= mode_bitsize / 2)
4806 	    result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4807 				  bitregion_start, bitregion_end,
4808 				  mode1, from,
4809 				  get_alias_set (to), nontemporal);
4810 	  else if (bitpos >= mode_bitsize / 2)
4811 	    result = store_field (XEXP (to_rtx, 1), bitsize,
4812 				  bitpos - mode_bitsize / 2,
4813 				  bitregion_start, bitregion_end,
4814 				  mode1, from,
4815 				  get_alias_set (to), nontemporal);
4816 	  else if (bitpos == 0 && bitsize == mode_bitsize)
4817 	    {
4818 	      rtx from_rtx;
4819 	      result = expand_normal (from);
4820 	      from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4821 					      TYPE_MODE (TREE_TYPE (from)), 0);
4822 	      emit_move_insn (XEXP (to_rtx, 0),
4823 			      read_complex_part (from_rtx, false));
4824 	      emit_move_insn (XEXP (to_rtx, 1),
4825 			      read_complex_part (from_rtx, true));
4826 	    }
4827 	  else
4828 	    {
4829 	      rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4830 					    GET_MODE_SIZE (GET_MODE (to_rtx)));
4831 	      write_complex_part (temp, XEXP (to_rtx, 0), false);
4832 	      write_complex_part (temp, XEXP (to_rtx, 1), true);
4833 	      result = store_field (temp, bitsize, bitpos,
4834 				    bitregion_start, bitregion_end,
4835 				    mode1, from,
4836 				    get_alias_set (to), nontemporal);
4837 	      emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4838 	      emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4839 	    }
4840 	}
4841       else
4842 	{
4843 	  if (MEM_P (to_rtx))
4844 	    {
4845 	      /* If the field is at offset zero, we could have been given the
4846 		 DECL_RTL of the parent struct.  Don't munge it.  */
4847 	      to_rtx = shallow_copy_rtx (to_rtx);
4848 
4849 	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4850 
4851 	      /* Deal with volatile and readonly fields.  The former is only
4852 		 done for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
4853 	      if (volatilep)
4854 		MEM_VOLATILE_P (to_rtx) = 1;
4855 	    }
4856 
4857 	  if (optimize_bitfield_assignment_op (bitsize, bitpos,
4858 					       bitregion_start, bitregion_end,
4859 					       mode1,
4860 					       to_rtx, to, from))
4861 	    result = NULL;
4862 	  else
4863 	    result = store_field (to_rtx, bitsize, bitpos,
4864 				  bitregion_start, bitregion_end,
4865 				  mode1, from,
4866 				  get_alias_set (to), nontemporal);
4867 	}
4868 
4869       if (result)
4870 	preserve_temp_slots (result);
4871       pop_temp_slots ();
4872       return;
4873     }
4874 
4875   /* If the rhs is a function call and its value is not an aggregate,
4876      call the function before we start to compute the lhs.
4877      This is needed for correct code for cases such as
4878      val = setjmp (buf) on machines where reference to val
4879      requires loading up part of an address in a separate insn.
4880 
4881      Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is a REG,
4882      since it might be a promoted variable where the zero- or sign-extension
4883      needs to be done.  Handling this in the normal way is safe because no
4884      computation is done before the call.  The same is true for SSA names.  */
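/* A minimal source-level illustration of the case described above
   (example only, not taken from GCC itself; the names are hypothetical):

	#include <setjmp.h>

	static jmp_buf buf;

	int
	f (void)
	{
	  int val;
	  val = setjmp (buf);
	  return val;
	}

   On targets where referencing VAL needs part of an address loaded in a
   separate insn, the call on the right-hand side is expanded first, so no
   half-computed lhs is live across the setjmp.  */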
4885   if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4886       && COMPLETE_TYPE_P (TREE_TYPE (from))
4887       && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4888       && ! (((TREE_CODE (to) == VAR_DECL
4889 	      || TREE_CODE (to) == PARM_DECL
4890 	      || TREE_CODE (to) == RESULT_DECL)
4891 	     && REG_P (DECL_RTL (to)))
4892 	    || TREE_CODE (to) == SSA_NAME))
4893     {
4894       rtx value;
4895 
4896       push_temp_slots ();
4897       value = expand_normal (from);
4898       if (to_rtx == 0)
4899 	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4900 
4901       /* Handle calls that return values in multiple non-contiguous locations.
4902 	 The Irix 6 ABI has examples of this.  */
4903       if (GET_CODE (to_rtx) == PARALLEL)
4904 	{
4905 	  if (GET_CODE (value) == PARALLEL)
4906 	    emit_group_move (to_rtx, value);
4907 	  else
4908 	    emit_group_load (to_rtx, value, TREE_TYPE (from),
4909 			     int_size_in_bytes (TREE_TYPE (from)));
4910 	}
4911       else if (GET_CODE (value) == PARALLEL)
4912 	emit_group_store (to_rtx, value, TREE_TYPE (from),
4913 			  int_size_in_bytes (TREE_TYPE (from)));
4914       else if (GET_MODE (to_rtx) == BLKmode)
4915 	{
4916 	  /* Handle calls that return BLKmode values in registers.  */
4917 	  if (REG_P (value))
4918 	    copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
4919 	  else
4920 	    emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4921 	}
4922       else
4923 	{
4924 	  if (POINTER_TYPE_P (TREE_TYPE (to)))
4925 	    value = convert_memory_address_addr_space
4926 		      (GET_MODE (to_rtx), value,
4927 		       TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4928 
4929 	  emit_move_insn (to_rtx, value);
4930 	}
4931       preserve_temp_slots (to_rtx);
4932       pop_temp_slots ();
4933       return;
4934     }
4935 
4936   /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.  */
4937   to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4938 
4939   /* Don't move directly into a return register.  */
4940   if (TREE_CODE (to) == RESULT_DECL
4941       && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4942     {
4943       rtx temp;
4944 
4945       push_temp_slots ();
4946 
4947       /* If the source is itself a return value, it still is in a pseudo at
4948 	 this point so we can move it back to the return register directly.  */
4949       if (REG_P (to_rtx)
4950 	  && TYPE_MODE (TREE_TYPE (from)) == BLKmode
4951 	  && TREE_CODE (from) != CALL_EXPR)
4952 	temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
4953       else
4954 	temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4955 
4956       /* Handle calls that return values in multiple non-contiguous locations.
4957 	 The Irix 6 ABI has examples of this.  */
4958       if (GET_CODE (to_rtx) == PARALLEL)
4959 	{
4960 	  if (GET_CODE (temp) == PARALLEL)
4961 	    emit_group_move (to_rtx, temp);
4962 	  else
4963 	    emit_group_load (to_rtx, temp, TREE_TYPE (from),
4964 			     int_size_in_bytes (TREE_TYPE (from)));
4965 	}
4966       else if (temp)
4967 	emit_move_insn (to_rtx, temp);
4968 
4969       preserve_temp_slots (to_rtx);
4970       pop_temp_slots ();
4971       return;
4972     }
4973 
4974   /* In case we are returning the contents of an object which overlaps
4975      the place the value is being stored, use a safe function when copying
4976      a value through a pointer into a structure value return block.  */
4977   if (TREE_CODE (to) == RESULT_DECL
4978       && TREE_CODE (from) == INDIRECT_REF
4979       && ADDR_SPACE_GENERIC_P
4980 	   (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4981       && refs_may_alias_p (to, from)
4982       && cfun->returns_struct
4983       && !cfun->returns_pcc_struct)
4984     {
4985       rtx from_rtx, size;
4986 
4987       push_temp_slots ();
4988       size = expr_size (from);
4989       from_rtx = expand_normal (from);
4990 
4991       emit_library_call (memmove_libfunc, LCT_NORMAL,
4992 			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4993 			 XEXP (from_rtx, 0), Pmode,
4994 			 convert_to_mode (TYPE_MODE (sizetype),
4995 					  size, TYPE_UNSIGNED (sizetype)),
4996 			 TYPE_MODE (sizetype));
4997 
4998       preserve_temp_slots (to_rtx);
4999       pop_temp_slots ();
5000       return;
5001     }
5002 
5003   /* Compute FROM and store the value in the rtx we got.  */
5004 
5005   push_temp_slots ();
5006   result = store_expr (from, to_rtx, 0, nontemporal);
5007   preserve_temp_slots (result);
5008   pop_temp_slots ();
5009   return;
5010 }
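
/* An illustrative case (not from GCC itself; the names are made up) that
   reaches the bitfield path near the top of expand_assignment:

	struct flags
	{
	  unsigned int a : 3;
	  unsigned int b : 5;
	};

	void
	set_b (struct flags *f)
	{
	  f->b = 9;
	}

   Here get_inner_reference yields a nonzero BITPOS and a VOIDmode MODE1
   for F->B, so the store goes through store_field (or the bitfield
   assignment optimization) instead of a plain emit_move_insn.  */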
5011 
5012 /* Emit a nontemporal store insn that moves FROM to TO.  Return true if this
5013    succeeded, false otherwise.  */
5014 
5015 bool
5016 emit_storent_insn (rtx to, rtx from)
5017 {
5018   struct expand_operand ops[2];
5019   enum machine_mode mode = GET_MODE (to);
5020   enum insn_code code = optab_handler (storent_optab, mode);
5021 
5022   if (code == CODE_FOR_nothing)
5023     return false;
5024 
5025   create_fixed_operand (&ops[0], to);
5026   create_input_operand (&ops[1], from, mode);
5027   return maybe_expand_insn (code, 2, ops);
5028 }
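
/* Sketch of the intended use, mirroring the call in store_expr below:
   try the nontemporal pattern first and fall back to an ordinary move
   when the target provides no storent insn for this mode.

	if (nontemporal && emit_storent_insn (target, temp))
	  ;
	else
	  {
	    temp = force_operand (temp, target);
	    if (temp != target)
	      emit_move_insn (target, temp);
	  }
 */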
5029 
5030 /* Generate code for computing expression EXP,
5031    and storing the value into TARGET.
5032 
5033    If the mode is BLKmode then we may return TARGET itself.
5034    It turns out that in BLKmode it doesn't cause a problem,
5035    because C has no operators that could combine two different
5036    assignments into the same BLKmode object with different values
5037    with no sequence point.  Will other languages need this to
5038    be more thorough?
5039 
5040    If CALL_PARAM_P is nonzero, this is a store into a call param on the
5041    stack, and block moves may need to be treated specially.
5042 
5043    If NONTEMPORAL is true, try using a nontemporal store instruction.  */
5044 
5045 rtx
5046 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5047 {
5048   rtx temp;
5049   rtx alt_rtl = NULL_RTX;
5050   location_t loc = curr_insn_location ();
5051 
5052   if (VOID_TYPE_P (TREE_TYPE (exp)))
5053     {
5054       /* C++ can generate ?: expressions with a throw expression in one
5055 	 branch and an rvalue in the other. Here, we resolve attempts to
5056 	 store the throw expression's nonexistent result.  */
5057       gcc_assert (!call_param_p);
5058       expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5059       return NULL_RTX;
5060     }
5061   if (TREE_CODE (exp) == COMPOUND_EXPR)
5062     {
5063       /* Perform first part of compound expression, then assign from second
5064 	 part.  */
5065       expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5066 		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5067       return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5068 			 nontemporal);
5069     }
5070   else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5071     {
5072       /* For conditional expression, get safe form of the target.  Then
5073 	 test the condition, doing the appropriate assignment on either
5074 	 side.  This avoids the creation of unnecessary temporaries.
5075 	 For non-BLKmode, it is more efficient not to do this.  */
5076 
5077       rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
5078 
5079       do_pending_stack_adjust ();
5080       NO_DEFER_POP;
5081       jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5082       store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
5083 		  nontemporal);
5084       emit_jump_insn (gen_jump (lab2));
5085       emit_barrier ();
5086       emit_label (lab1);
5087       store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
5088 		  nontemporal);
5089       emit_label (lab2);
5090       OK_DEFER_POP;
5091 
5092       return NULL_RTX;
5093     }
5094   else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5095     /* If this is a scalar in a register that is stored in a wider mode
5096        than the declared mode, compute the result into its declared mode
5097        and then convert to the wider mode.  Our value is the computed
5098        expression.  */
5099     {
5100       rtx inner_target = 0;
5101 
5102       /* We can do the conversion inside EXP, which will often result
5103 	 in some optimizations.  Do the conversion in two steps: first
5104 	 change the signedness, if needed, then the extend.  But don't
5105 	 do this if the type of EXP is a subtype of something else
5106 	 since then the conversion might involve more than just
5107 	 converting modes.  */
5108       if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5109 	  && TREE_TYPE (TREE_TYPE (exp)) == 0
5110 	  && GET_MODE_PRECISION (GET_MODE (target))
5111 	     == TYPE_PRECISION (TREE_TYPE (exp)))
5112 	{
5113 	  if (TYPE_UNSIGNED (TREE_TYPE (exp))
5114 	      != SUBREG_PROMOTED_UNSIGNED_P (target))
5115 	    {
5116 	      /* Some types, e.g. Fortran's logical*4, won't have a signed
5117 		 version, so use the mode instead.  */
5118 	      tree ntype
5119 		= (signed_or_unsigned_type_for
5120 		   (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
5121 	      if (ntype == NULL)
5122 		ntype = lang_hooks.types.type_for_mode
5123 		  (TYPE_MODE (TREE_TYPE (exp)),
5124 		   SUBREG_PROMOTED_UNSIGNED_P (target));
5125 
5126 	      exp = fold_convert_loc (loc, ntype, exp);
5127 	    }
5128 
5129 	  exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5130 				  (GET_MODE (SUBREG_REG (target)),
5131 				   SUBREG_PROMOTED_UNSIGNED_P (target)),
5132 				  exp);
5133 
5134 	  inner_target = SUBREG_REG (target);
5135 	}
5136 
5137       temp = expand_expr (exp, inner_target, VOIDmode,
5138 			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5139 
5140       /* If TEMP is a VOIDmode constant, use convert_modes to make
5141 	 sure that we properly convert it.  */
5142       if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5143 	{
5144 	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5145 				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
5146 	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5147 			        GET_MODE (target), temp,
5148 			        SUBREG_PROMOTED_UNSIGNED_P (target));
5149 	}
5150 
5151       convert_move (SUBREG_REG (target), temp,
5152 		    SUBREG_PROMOTED_UNSIGNED_P (target));
5153 
5154       return NULL_RTX;
5155     }
5156   else if ((TREE_CODE (exp) == STRING_CST
5157 	    || (TREE_CODE (exp) == MEM_REF
5158 		&& TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5159 		&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5160 		   == STRING_CST
5161 		&& integer_zerop (TREE_OPERAND (exp, 1))))
5162 	   && !nontemporal && !call_param_p
5163 	   && MEM_P (target))
5164     {
5165       /* Optimize initialization of an array with a STRING_CST.  */
5166       HOST_WIDE_INT exp_len, str_copy_len;
5167       rtx dest_mem;
5168       tree str = TREE_CODE (exp) == STRING_CST
5169 		 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5170 
5171       exp_len = int_expr_size (exp);
5172       if (exp_len <= 0)
5173 	goto normal_expr;
5174 
5175       if (TREE_STRING_LENGTH (str) <= 0)
5176 	goto normal_expr;
5177 
5178       str_copy_len = strlen (TREE_STRING_POINTER (str));
5179       if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5180 	goto normal_expr;
5181 
5182       str_copy_len = TREE_STRING_LENGTH (str);
5183       if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5184 	  && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5185 	{
5186 	  str_copy_len += STORE_MAX_PIECES - 1;
5187 	  str_copy_len &= ~(STORE_MAX_PIECES - 1);
5188 	}
5189       str_copy_len = MIN (str_copy_len, exp_len);
5190       if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5191 				CONST_CAST (char *, TREE_STRING_POINTER (str)),
5192 				MEM_ALIGN (target), false))
5193 	goto normal_expr;
5194 
5195       dest_mem = target;
5196 
5197       dest_mem = store_by_pieces (dest_mem,
5198 				  str_copy_len, builtin_strncpy_read_str,
5199 				  CONST_CAST (char *,
5200 					      TREE_STRING_POINTER (str)),
5201 				  MEM_ALIGN (target), false,
5202 				  exp_len > str_copy_len ? 1 : 0);
5203       if (exp_len > str_copy_len)
5204 	clear_storage (adjust_address (dest_mem, BLKmode, 0),
5205 		       GEN_INT (exp_len - str_copy_len),
5206 		       BLOCK_OP_NORMAL);
5207       return NULL_RTX;
5208     }
5209   else
5210     {
5211       rtx tmp_target;
5212 
5213   normal_expr:
5214       /* If we want to use a nontemporal store, force the value into
5215 	 a register first.  */
5216       tmp_target = nontemporal ? NULL_RTX : target;
5217       temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5218 			       (call_param_p
5219 				? EXPAND_STACK_PARM : EXPAND_NORMAL),
5220 			       &alt_rtl, false);
5221     }
5222 
5223   /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5224      the same as that of TARGET, adjust the constant.  This is needed, for
5225      example, in case it is a CONST_DOUBLE and we want only a word-sized
5226      value.  */
5227   if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5228       && TREE_CODE (exp) != ERROR_MARK
5229       && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5230     temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5231 			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5232 
5233   /* If value was not generated in the target, store it there.
5234      Convert the value to TARGET's type first if necessary and emit the
5235      pending incrementations that have been queued when expanding EXP.
5236      Note that we cannot emit the whole queue blindly because this will
5237      effectively disable the POST_INC optimization later.
5238 
5239      If TEMP and TARGET compare equal according to rtx_equal_p, but
5240      one or both of them are volatile memory refs, we have to distinguish
5241      two cases:
5242      - expand_expr has used TARGET.  In this case, we must not generate
5243        another copy.  This can be detected by TARGET being equal according
5244        to == .
5245      - expand_expr has not used TARGET - that means that the source just
5246        happens to have the same RTX form.  Since temp will have been created
5247        by expand_expr, it will compare unequal according to == .
5248        We must generate a copy in this case, to reach the correct number
5249        of volatile memory references.  */
5250 
5251   if ((! rtx_equal_p (temp, target)
5252        || (temp != target && (side_effects_p (temp)
5253 			      || side_effects_p (target))))
5254       && TREE_CODE (exp) != ERROR_MARK
5255       /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5256 	 but TARGET is not valid memory reference, TEMP will differ
5257 	 from TARGET although it is really the same location.  */
5258       && !(alt_rtl
5259 	   && rtx_equal_p (alt_rtl, target)
5260 	   && !side_effects_p (alt_rtl)
5261 	   && !side_effects_p (target))
5262       /* If there's nothing to copy, don't bother.  Don't call
5263 	 expr_size unless necessary, because some front ends' (e.g. C++)
5264 	 expr_size hook must not be given objects that are not
5265 	 supposed to be bit-copied or bit-initialized.  */
5266       && expr_size (exp) != const0_rtx)
5267     {
5268       if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5269 	{
5270 	  if (GET_MODE (target) == BLKmode)
5271 	    {
5272 	      /* Handle calls that return BLKmode values in registers.  */
5273 	      if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5274 		copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5275 	      else
5276 		store_bit_field (target,
5277 				 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5278 				 0, 0, 0, GET_MODE (temp), temp);
5279 	    }
5280 	  else
5281 	    convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5282 	}
5283 
5284       else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5285 	{
5286 	  /* Handle copying a string constant into an array.  The string
5287 	     constant may be shorter than the array.  So copy just the string's
5288 	     actual length, and clear the rest.  First get the size of the data
5289 	     type of the string, which is actually the size of the target.  */
5290 	  rtx size = expr_size (exp);
5291 
5292 	  if (CONST_INT_P (size)
5293 	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
5294 	    emit_block_move (target, temp, size,
5295 			     (call_param_p
5296 			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5297 	  else
5298 	    {
5299 	      enum machine_mode pointer_mode
5300 		= targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5301 	      enum machine_mode address_mode = get_address_mode (target);
5302 
5303 	      /* Compute the size of the data to copy from the string.  */
5304 	      tree copy_size
5305 		= size_binop_loc (loc, MIN_EXPR,
5306 				  make_tree (sizetype, size),
5307 				  size_int (TREE_STRING_LENGTH (exp)));
5308 	      rtx copy_size_rtx
5309 		= expand_expr (copy_size, NULL_RTX, VOIDmode,
5310 			       (call_param_p
5311 				? EXPAND_STACK_PARM : EXPAND_NORMAL));
5312 	      rtx label = 0;
5313 
5314 	      /* Copy that much.  */
5315 	      copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5316 					       TYPE_UNSIGNED (sizetype));
5317 	      emit_block_move (target, temp, copy_size_rtx,
5318 			       (call_param_p
5319 				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5320 
5321 	      /* Figure out how much is left in TARGET that we have to clear.
5322 		 Do all calculations in pointer_mode.  */
5323 	      if (CONST_INT_P (copy_size_rtx))
5324 		{
5325 		  size = plus_constant (address_mode, size,
5326 					-INTVAL (copy_size_rtx));
5327 		  target = adjust_address (target, BLKmode,
5328 					   INTVAL (copy_size_rtx));
5329 		}
5330 	      else
5331 		{
5332 		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5333 				       copy_size_rtx, NULL_RTX, 0,
5334 				       OPTAB_LIB_WIDEN);
5335 
5336 		  if (GET_MODE (copy_size_rtx) != address_mode)
5337 		    copy_size_rtx = convert_to_mode (address_mode,
5338 						     copy_size_rtx,
5339 						     TYPE_UNSIGNED (sizetype));
5340 
5341 		  target = offset_address (target, copy_size_rtx,
5342 					   highest_pow2_factor (copy_size));
5343 		  label = gen_label_rtx ();
5344 		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5345 					   GET_MODE (size), 0, label);
5346 		}
5347 
5348 	      if (size != const0_rtx)
5349 		clear_storage (target, size, BLOCK_OP_NORMAL);
5350 
5351 	      if (label)
5352 		emit_label (label);
5353 	    }
5354 	}
5355       /* Handle calls that return values in multiple non-contiguous locations.
5356 	 The Irix 6 ABI has examples of this.  */
5357       else if (GET_CODE (target) == PARALLEL)
5358 	{
5359 	  if (GET_CODE (temp) == PARALLEL)
5360 	    emit_group_move (target, temp);
5361 	  else
5362 	    emit_group_load (target, temp, TREE_TYPE (exp),
5363 			     int_size_in_bytes (TREE_TYPE (exp)));
5364 	}
5365       else if (GET_CODE (temp) == PARALLEL)
5366 	emit_group_store (target, temp, TREE_TYPE (exp),
5367 			  int_size_in_bytes (TREE_TYPE (exp)));
5368       else if (GET_MODE (temp) == BLKmode)
5369 	emit_block_move (target, temp, expr_size (exp),
5370 			 (call_param_p
5371 			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5372       /* If we emit a nontemporal store, there is nothing else to do.  */
5373       else if (nontemporal && emit_storent_insn (target, temp))
5374 	;
5375       else
5376 	{
5377 	  temp = force_operand (temp, target);
5378 	  if (temp != target)
5379 	    emit_move_insn (target, temp);
5380 	}
5381     }
5382 
5383   return NULL_RTX;
5384 }
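
/* As an example (illustrative, not from GCC itself), an assignment of a
   string constant into a char array such as

	char buf[16] = "hi";

   can take the STRING_CST path above: the string data is stored by pieces
   (possibly rounded up to a store-by-pieces boundary) and the remaining
   bytes of BUF are cleared with clear_storage.  */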
5385 
5386 /* Return true if field F of structure TYPE is a flexible array.  */
5387 
5388 static bool
5389 flexible_array_member_p (const_tree f, const_tree type)
5390 {
5391   const_tree tf;
5392 
5393   tf = TREE_TYPE (f);
5394   return (DECL_CHAIN (f) == NULL
5395 	  && TREE_CODE (tf) == ARRAY_TYPE
5396 	  && TYPE_DOMAIN (tf)
5397 	  && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5398 	  && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5399 	  && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5400 	  && int_size_in_bytes (type) >= 0);
5401 }
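
/* For instance (example only, not part of GCC), DATA below is recognized
   as a flexible array member:

	struct packet
	{
	  int len;
	  unsigned char data[];
	};

   It is the last field, its array type has a zero lower bound and no upper
   bound, and the enclosing struct still has a computable size.  */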
5402 
5403 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5404    must have in order for it to completely initialize a value of type TYPE.
5405    Return -1 if the number isn't known.
5406 
5407    If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */
5408 
5409 static HOST_WIDE_INT
5410 count_type_elements (const_tree type, bool for_ctor_p)
5411 {
5412   switch (TREE_CODE (type))
5413     {
5414     case ARRAY_TYPE:
5415       {
5416 	tree nelts;
5417 
5418 	nelts = array_type_nelts (type);
5419 	if (nelts && host_integerp (nelts, 1))
5420 	  {
5421 	    unsigned HOST_WIDE_INT n;
5422 
5423 	    n = tree_low_cst (nelts, 1) + 1;
5424 	    if (n == 0 || for_ctor_p)
5425 	      return n;
5426 	    else
5427 	      return n * count_type_elements (TREE_TYPE (type), false);
5428 	  }
5429 	return for_ctor_p ? -1 : 1;
5430       }
5431 
5432     case RECORD_TYPE:
5433       {
5434 	unsigned HOST_WIDE_INT n;
5435 	tree f;
5436 
5437 	n = 0;
5438 	for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5439 	  if (TREE_CODE (f) == FIELD_DECL)
5440 	    {
5441 	      if (!for_ctor_p)
5442 		n += count_type_elements (TREE_TYPE (f), false);
5443 	      else if (!flexible_array_member_p (f, type))
5444 		/* Don't count flexible arrays, which are not supposed
5445 		   to be initialized.  */
5446 		n += 1;
5447 	    }
5448 
5449 	return n;
5450       }
5451 
5452     case UNION_TYPE:
5453     case QUAL_UNION_TYPE:
5454       {
5455 	tree f;
5456 	HOST_WIDE_INT n, m;
5457 
5458 	gcc_assert (!for_ctor_p);
5459 	/* Estimate the number of scalars in each field and pick the
5460 	   maximum.  Other estimates would do instead; the idea is simply
5461 	   to make sure that the estimate is not sensitive to the ordering
5462 	   of the fields.  */
5463 	n = 1;
5464 	for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5465 	  if (TREE_CODE (f) == FIELD_DECL)
5466 	    {
5467 	      m = count_type_elements (TREE_TYPE (f), false);
5468 	      /* If the field doesn't span the whole union, add an extra
5469 		 scalar for the rest.  */
5470 	      if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5471 				    TYPE_SIZE (type)) != 1)
5472 		m++;
5473 	      if (n < m)
5474 		n = m;
5475 	    }
5476 	return n;
5477       }
5478 
5479     case COMPLEX_TYPE:
5480       return 2;
5481 
5482     case VECTOR_TYPE:
5483       return TYPE_VECTOR_SUBPARTS (type);
5484 
5485     case INTEGER_TYPE:
5486     case REAL_TYPE:
5487     case FIXED_POINT_TYPE:
5488     case ENUMERAL_TYPE:
5489     case BOOLEAN_TYPE:
5490     case POINTER_TYPE:
5491     case OFFSET_TYPE:
5492     case REFERENCE_TYPE:
5493     case NULLPTR_TYPE:
5494       return 1;
5495 
5496     case ERROR_MARK:
5497       return 0;
5498 
5499     case VOID_TYPE:
5500     case METHOD_TYPE:
5501     case FUNCTION_TYPE:
5502     case LANG_TYPE:
5503     default:
5504       gcc_unreachable ();
5505     }
5506 }
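
/* For example (illustrative only, not part of GCC), for

	struct s { int a; double b[4]; _Complex float c; };

   count_type_elements returns 1 + 4 + 2 == 7 when !FOR_CTOR_P (an estimate
   of the scalars in the type), and 3 when FOR_CTOR_P (the number of
   top-level elements a complete constructor must supply).  */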
5507 
5508 /* Helper for categorize_ctor_elements.  Identical interface.  */
5509 
5510 static bool
5511 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5512 			    HOST_WIDE_INT *p_init_elts, bool *p_complete)
5513 {
5514   unsigned HOST_WIDE_INT idx;
5515   HOST_WIDE_INT nz_elts, init_elts, num_fields;
5516   tree value, purpose, elt_type;
5517 
5518   /* Whether CTOR is a valid constant initializer, in accordance with what
5519      initializer_constant_valid_p does.  If inferred from the constructor
5520      elements, true until proven otherwise.  */
5521   bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5522   bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5523 
5524   nz_elts = 0;
5525   init_elts = 0;
5526   num_fields = 0;
5527   elt_type = NULL_TREE;
5528 
5529   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5530     {
5531       HOST_WIDE_INT mult = 1;
5532 
5533       if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5534 	{
5535 	  tree lo_index = TREE_OPERAND (purpose, 0);
5536 	  tree hi_index = TREE_OPERAND (purpose, 1);
5537 
5538 	  if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
5539 	    mult = (tree_low_cst (hi_index, 1)
5540 		    - tree_low_cst (lo_index, 1) + 1);
5541 	}
5542       num_fields += mult;
5543       elt_type = TREE_TYPE (value);
5544 
5545       switch (TREE_CODE (value))
5546 	{
5547 	case CONSTRUCTOR:
5548 	  {
5549 	    HOST_WIDE_INT nz = 0, ic = 0;
5550 
5551 	    bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5552 							   p_complete);
5553 
5554 	    nz_elts += mult * nz;
5555  	    init_elts += mult * ic;
5556 
5557 	    if (const_from_elts_p && const_p)
5558 	      const_p = const_elt_p;
5559 	  }
5560 	  break;
5561 
5562 	case INTEGER_CST:
5563 	case REAL_CST:
5564 	case FIXED_CST:
5565 	  if (!initializer_zerop (value))
5566 	    nz_elts += mult;
5567 	  init_elts += mult;
5568 	  break;
5569 
5570 	case STRING_CST:
5571 	  nz_elts += mult * TREE_STRING_LENGTH (value);
5572 	  init_elts += mult * TREE_STRING_LENGTH (value);
5573 	  break;
5574 
5575 	case COMPLEX_CST:
5576 	  if (!initializer_zerop (TREE_REALPART (value)))
5577 	    nz_elts += mult;
5578 	  if (!initializer_zerop (TREE_IMAGPART (value)))
5579 	    nz_elts += mult;
5580 	  init_elts += mult;
5581 	  break;
5582 
5583 	case VECTOR_CST:
5584 	  {
5585 	    unsigned i;
5586 	    for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5587 	      {
5588 		tree v = VECTOR_CST_ELT (value, i);
5589 		if (!initializer_zerop (v))
5590 		  nz_elts += mult;
5591 		init_elts += mult;
5592 	      }
5593 	  }
5594 	  break;
5595 
5596 	default:
5597 	  {
5598 	    HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5599 	    nz_elts += mult * tc;
5600 	    init_elts += mult * tc;
5601 
5602 	    if (const_from_elts_p && const_p)
5603 	      const_p = initializer_constant_valid_p (value, elt_type)
5604 			!= NULL_TREE;
5605 	  }
5606 	  break;
5607 	}
5608     }
5609 
5610   if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5611 						num_fields, elt_type))
5612     *p_complete = false;
5613 
5614   *p_nz_elts += nz_elts;
5615   *p_init_elts += init_elts;
5616 
5617   return const_p;
5618 }
5619 
5620 /* Examine CTOR to discover:
5621    * how many scalar fields are set to nonzero values,
5622      and place it in *P_NZ_ELTS;
5623    * how many scalar fields in total are in CTOR,
5624      and place it in *P_INIT_ELTS;
5625    * whether the constructor is complete -- in the sense that every
5626      meaningful byte is explicitly given a value --
5627      and place it in *P_COMPLETE.
5628 
5629    Return whether or not CTOR is a valid static constant initializer, the same
5630    as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */
5631 
5632 bool
5633 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5634 			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
5635 {
5636   *p_nz_elts = 0;
5637   *p_init_elts = 0;
5638   *p_complete = true;
5639 
5640   return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5641 }
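
/* A small illustration (not from GCC itself); assuming the front end keeps
   all four elements of the initializer

	struct s { int a, b, c, d; };
	struct s v = { 0, 5, 0, 0 };

   this returns *P_NZ_ELTS == 1, *P_INIT_ELTS == 4 and *P_COMPLETE == true,
   which is what mostly_zeros_p and all_zeros_p below consume.  */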
5642 
5643 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5644    of which had type LAST_TYPE.  Each element was itself a complete
5645    initializer, in the sense that every meaningful byte was explicitly
5646    given a value.  Return true if the same is true for the constructor
5647    as a whole.  */
5648 
5649 bool
5650 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5651 			  const_tree last_type)
5652 {
5653   if (TREE_CODE (type) == UNION_TYPE
5654       || TREE_CODE (type) == QUAL_UNION_TYPE)
5655     {
5656       if (num_elts == 0)
5657 	return false;
5658 
5659       gcc_assert (num_elts == 1 && last_type);
5660 
5661       /* ??? We could look at each element of the union, and find the
5662 	 largest element.  Which would avoid comparing the size of the
5663 	 initialized element against any tail padding in the union.
5664 	 Doesn't seem worth the effort...  */
5665       return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5666     }
5667 
5668   return count_type_elements (type, true) == num_elts;
5669 }
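
/* For a union (example only, not part of GCC):

	union u { char c; double d; };

   an initializer of the D member is complete because D spans the whole
   union, while an initializer of only C is not, since the union's tail
   bytes beyond C are never given a value.  */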
5670 
5671 /* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
5672 
5673 static int
5674 mostly_zeros_p (const_tree exp)
5675 {
5676   if (TREE_CODE (exp) == CONSTRUCTOR)
5677     {
5678       HOST_WIDE_INT nz_elts, init_elts;
5679       bool complete_p;
5680 
5681       categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5682       return !complete_p || nz_elts < init_elts / 4;
5683     }
5684 
5685   return initializer_zerop (exp);
5686 }
5687 
5688 /* Return 1 if EXP contains all zeros.  */
5689 
5690 static int
5691 all_zeros_p (const_tree exp)
5692 {
5693   if (TREE_CODE (exp) == CONSTRUCTOR)
5694     {
5695       HOST_WIDE_INT nz_elts, init_elts;
5696       bool complete_p;
5697 
5698       categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5699       return nz_elts == 0;
5700     }
5701 
5702   return initializer_zerop (exp);
5703 }
5704 
5705 /* Helper function for store_constructor.
5706    TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5707    CLEARED is as for store_constructor.
5708    ALIAS_SET is the alias set to use for any stores.
5709 
5710    This provides a recursive shortcut back to store_constructor when it isn't
5711    necessary to go through store_field.  This is so that we can pass through
5712    the cleared field to let store_constructor know that we may not have to
5713    clear a substructure if the outer structure has already been cleared.  */
5714 
5715 static void
5716 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5717 			 HOST_WIDE_INT bitpos, enum machine_mode mode,
5718 			 tree exp, int cleared, alias_set_type alias_set)
5719 {
5720   if (TREE_CODE (exp) == CONSTRUCTOR
5721       /* We can only call store_constructor recursively if the size and
5722 	 bit position are on a byte boundary.  */
5723       && bitpos % BITS_PER_UNIT == 0
5724       && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5725       /* If we have a nonzero bitpos for a register target, then we just
5726 	 let store_field do the bitfield handling.  This is unlikely to
5727 	 generate unnecessary clear instructions anyway.  */
5728       && (bitpos == 0 || MEM_P (target)))
5729     {
5730       if (MEM_P (target))
5731 	target
5732 	  = adjust_address (target,
5733 			    GET_MODE (target) == BLKmode
5734 			    || 0 != (bitpos
5735 				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
5736 			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5737 
5738 
5739       /* Update the alias set, if required.  */
5740       if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5741 	  && MEM_ALIAS_SET (target) != 0)
5742 	{
5743 	  target = copy_rtx (target);
5744 	  set_mem_alias_set (target, alias_set);
5745 	}
5746 
5747       store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5748     }
5749   else
5750     store_field (target, bitsize, bitpos, 0, 0, mode, exp, alias_set, false);
5751 }
5752 
5753 /* Store the value of constructor EXP into the rtx TARGET.
5754    TARGET is either a REG or a MEM; we know it cannot conflict, since
5755    safe_from_p has been called.
5756    CLEARED is true if TARGET is known to have been zeroed.
5757    SIZE is the number of bytes of TARGET we are allowed to modify: this
5758    may not be the same as the size of EXP if we are assigning to a field
5759    which has been packed to exclude padding bits.  */
5760 
5761 static void
5762 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5763 {
5764   tree type = TREE_TYPE (exp);
5765 #ifdef WORD_REGISTER_OPERATIONS
5766   HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5767 #endif
5768 
5769   switch (TREE_CODE (type))
5770     {
5771     case RECORD_TYPE:
5772     case UNION_TYPE:
5773     case QUAL_UNION_TYPE:
5774       {
5775 	unsigned HOST_WIDE_INT idx;
5776 	tree field, value;
5777 
5778 	/* If size is zero or the target is already cleared, do nothing.  */
5779 	if (size == 0 || cleared)
5780 	  cleared = 1;
5781 	/* We either clear the aggregate or indicate the value is dead.  */
5782 	else if ((TREE_CODE (type) == UNION_TYPE
5783 		  || TREE_CODE (type) == QUAL_UNION_TYPE)
5784 		 && ! CONSTRUCTOR_ELTS (exp))
5785 	  /* If the constructor is empty, clear the union.  */
5786 	  {
5787 	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5788 	    cleared = 1;
5789 	  }
5790 
5791 	/* If we are building a static constructor into a register,
5792 	   set the initial value as zero so we can fold the value into
5793 	   a constant.  But if more than one register is involved,
5794 	   this probably loses.  */
5795 	else if (REG_P (target) && TREE_STATIC (exp)
5796 		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5797 	  {
5798 	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5799 	    cleared = 1;
5800 	  }
5801 
5802         /* If the constructor has fewer fields than the structure or
5803 	   if we are initializing the structure to mostly zeros, clear
5804 	   the whole structure first.  Don't do this if TARGET is a
5805 	   register whose mode size isn't equal to SIZE since
5806 	   clear_storage can't handle this case.  */
5807 	else if (size > 0
5808 		 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
5809 		      != fields_length (type))
5810 		     || mostly_zeros_p (exp))
5811 		 && (!REG_P (target)
5812 		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5813 			 == size)))
5814 	  {
5815 	    clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5816 	    cleared = 1;
5817 	  }
5818 
5819 	if (REG_P (target) && !cleared)
5820 	  emit_clobber (target);
5821 
5822 	/* Store each element of the constructor into the
5823 	   corresponding field of TARGET.  */
5824 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5825 	  {
5826 	    enum machine_mode mode;
5827 	    HOST_WIDE_INT bitsize;
5828 	    HOST_WIDE_INT bitpos = 0;
5829 	    tree offset;
5830 	    rtx to_rtx = target;
5831 
5832 	    /* Just ignore missing fields.  We cleared the whole
5833 	       structure, above, if any fields are missing.  */
5834 	    if (field == 0)
5835 	      continue;
5836 
5837 	    if (cleared && initializer_zerop (value))
5838 	      continue;
5839 
5840 	    if (host_integerp (DECL_SIZE (field), 1))
5841 	      bitsize = tree_low_cst (DECL_SIZE (field), 1);
5842 	    else
5843 	      bitsize = -1;
5844 
5845 	    mode = DECL_MODE (field);
5846 	    if (DECL_BIT_FIELD (field))
5847 	      mode = VOIDmode;
5848 
5849 	    offset = DECL_FIELD_OFFSET (field);
5850 	    if (host_integerp (offset, 0)
5851 		&& host_integerp (bit_position (field), 0))
5852 	      {
5853 		bitpos = int_bit_position (field);
5854 		offset = 0;
5855 	      }
5856 	    else
5857 	      bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5858 
5859 	    if (offset)
5860 	      {
5861 	        enum machine_mode address_mode;
5862 		rtx offset_rtx;
5863 
5864 		offset
5865 		  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5866 						    make_tree (TREE_TYPE (exp),
5867 							       target));
5868 
5869 		offset_rtx = expand_normal (offset);
5870 		gcc_assert (MEM_P (to_rtx));
5871 
5872 		address_mode = get_address_mode (to_rtx);
5873 		if (GET_MODE (offset_rtx) != address_mode)
5874 		  offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5875 
5876 		to_rtx = offset_address (to_rtx, offset_rtx,
5877 					 highest_pow2_factor (offset));
5878 	      }
5879 
5880 #ifdef WORD_REGISTER_OPERATIONS
5881 	    /* If this initializes a field that is smaller than a
5882 	       word, at the start of a word, try to widen it to a full
5883 	       word.  This special case allows us to output C++ member
5884 	       function initializations in a form that the optimizers
5885 	       can understand.  */
5886 	    if (REG_P (target)
5887 		&& bitsize < BITS_PER_WORD
5888 		&& bitpos % BITS_PER_WORD == 0
5889 		&& GET_MODE_CLASS (mode) == MODE_INT
5890 		&& TREE_CODE (value) == INTEGER_CST
5891 		&& exp_size >= 0
5892 		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5893 	      {
5894 		tree type = TREE_TYPE (value);
5895 
5896 		if (TYPE_PRECISION (type) < BITS_PER_WORD)
5897 		  {
5898 		    type = lang_hooks.types.type_for_mode
5899 		      (word_mode, TYPE_UNSIGNED (type));
5900 		    value = fold_convert (type, value);
5901 		  }
5902 
5903 		if (BYTES_BIG_ENDIAN)
5904 		  value
5905 		   = fold_build2 (LSHIFT_EXPR, type, value,
5906 				   build_int_cst (type,
5907 						  BITS_PER_WORD - bitsize));
5908 		bitsize = BITS_PER_WORD;
5909 		mode = word_mode;
5910 	      }
5911 #endif
5912 
5913 	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5914 		&& DECL_NONADDRESSABLE_P (field))
5915 	      {
5916 		to_rtx = copy_rtx (to_rtx);
5917 		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5918 	      }
5919 
5920 	    store_constructor_field (to_rtx, bitsize, bitpos, mode,
5921 				     value, cleared,
5922 				     get_alias_set (TREE_TYPE (field)));
5923 	  }
5924 	break;
5925       }
5926     case ARRAY_TYPE:
5927       {
5928 	tree value, index;
5929 	unsigned HOST_WIDE_INT i;
5930 	int need_to_clear;
5931 	tree domain;
5932 	tree elttype = TREE_TYPE (type);
5933 	int const_bounds_p;
5934 	HOST_WIDE_INT minelt = 0;
5935 	HOST_WIDE_INT maxelt = 0;
5936 
5937 	domain = TYPE_DOMAIN (type);
5938 	const_bounds_p = (TYPE_MIN_VALUE (domain)
5939 			  && TYPE_MAX_VALUE (domain)
5940 			  && host_integerp (TYPE_MIN_VALUE (domain), 0)
5941 			  && host_integerp (TYPE_MAX_VALUE (domain), 0));
5942 
5943 	/* If we have constant bounds for the range of the type, get them.  */
5944 	if (const_bounds_p)
5945 	  {
5946 	    minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5947 	    maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5948 	  }
5949 
5950 	/* If the constructor has fewer elements than the array, clear
5951            the whole array first.  Similarly if this is a static
5952            constructor of a non-BLKmode object.  */
5953 	if (cleared)
5954 	  need_to_clear = 0;
5955 	else if (REG_P (target) && TREE_STATIC (exp))
5956 	  need_to_clear = 1;
5957 	else
5958 	  {
5959 	    unsigned HOST_WIDE_INT idx;
5960 	    tree index, value;
5961 	    HOST_WIDE_INT count = 0, zero_count = 0;
5962 	    need_to_clear = ! const_bounds_p;
5963 
5964 	    /* This loop is a more accurate version of the loop in
5965 	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
5966 	       is also needed to check for missing elements.  */
5967 	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5968 	      {
5969 		HOST_WIDE_INT this_node_count;
5970 
5971 		if (need_to_clear)
5972 		  break;
5973 
5974 		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5975 		  {
5976 		    tree lo_index = TREE_OPERAND (index, 0);
5977 		    tree hi_index = TREE_OPERAND (index, 1);
5978 
5979 		    if (! host_integerp (lo_index, 1)
5980 			|| ! host_integerp (hi_index, 1))
5981 		      {
5982 			need_to_clear = 1;
5983 			break;
5984 		      }
5985 
5986 		    this_node_count = (tree_low_cst (hi_index, 1)
5987 				       - tree_low_cst (lo_index, 1) + 1);
5988 		  }
5989 		else
5990 		  this_node_count = 1;
5991 
5992 		count += this_node_count;
5993 		if (mostly_zeros_p (value))
5994 		  zero_count += this_node_count;
5995 	      }
5996 
5997 	    /* Clear the entire array first if there are any missing
5998 	       elements, or if the incidence of zero elements is >=
5999 	       75%.  */
6000 	    if (! need_to_clear
6001 		&& (count < maxelt - minelt + 1
6002 		    || 4 * zero_count >= 3 * count))
6003 	      need_to_clear = 1;
6004 	  }
6005 
6006 	if (need_to_clear && size > 0)
6007 	  {
6008 	    if (REG_P (target))
6009 	      emit_move_insn (target,  CONST0_RTX (GET_MODE (target)));
6010 	    else
6011 	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6012 	    cleared = 1;
6013 	  }
6014 
6015 	if (!cleared && REG_P (target))
6016 	  /* Inform later passes that the old value is dead.  */
6017 	  emit_clobber (target);
6018 
6019 	/* Store each element of the constructor into the
6020 	   corresponding element of TARGET, determined by counting the
6021 	   elements.  */
6022 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6023 	  {
6024 	    enum machine_mode mode;
6025 	    HOST_WIDE_INT bitsize;
6026 	    HOST_WIDE_INT bitpos;
6027 	    rtx xtarget = target;
6028 
6029 	    if (cleared && initializer_zerop (value))
6030 	      continue;
6031 
6032 	    mode = TYPE_MODE (elttype);
6033 	    if (mode == BLKmode)
6034 	      bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
6035 			 ? tree_low_cst (TYPE_SIZE (elttype), 1)
6036 			 : -1);
6037 	    else
6038 	      bitsize = GET_MODE_BITSIZE (mode);
6039 
6040 	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6041 	      {
6042 		tree lo_index = TREE_OPERAND (index, 0);
6043 		tree hi_index = TREE_OPERAND (index, 1);
6044 		rtx index_r, pos_rtx;
6045 		HOST_WIDE_INT lo, hi, count;
6046 		tree position;
6047 
6048 		/* If the range is constant and "small", unroll the loop.  */
6049 		if (const_bounds_p
6050 		    && host_integerp (lo_index, 0)
6051 		    && host_integerp (hi_index, 0)
6052 		    && (lo = tree_low_cst (lo_index, 0),
6053 			hi = tree_low_cst (hi_index, 0),
6054 			count = hi - lo + 1,
6055 			(!MEM_P (target)
6056 			 || count <= 2
6057 			 || (host_integerp (TYPE_SIZE (elttype), 1)
6058 			     && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
6059 				 <= 40 * 8)))))
6060 		  {
6061 		    lo -= minelt;  hi -= minelt;
6062 		    for (; lo <= hi; lo++)
6063 		      {
6064 			bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
6065 
6066 			if (MEM_P (target)
6067 			    && !MEM_KEEP_ALIAS_SET_P (target)
6068 			    && TREE_CODE (type) == ARRAY_TYPE
6069 			    && TYPE_NONALIASED_COMPONENT (type))
6070 			  {
6071 			    target = copy_rtx (target);
6072 			    MEM_KEEP_ALIAS_SET_P (target) = 1;
6073 			  }
6074 
6075 			store_constructor_field
6076 			  (target, bitsize, bitpos, mode, value, cleared,
6077 			   get_alias_set (elttype));
6078 		      }
6079 		  }
6080 		else
6081 		  {
6082 		    rtx loop_start = gen_label_rtx ();
6083 		    rtx loop_end = gen_label_rtx ();
6084 		    tree exit_cond;
6085 
6086 		    expand_normal (hi_index);
6087 
6088 		    index = build_decl (EXPR_LOCATION (exp),
6089 					VAR_DECL, NULL_TREE, domain);
6090 		    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6091 		    SET_DECL_RTL (index, index_r);
6092 		    store_expr (lo_index, index_r, 0, false);
6093 
6094 		    /* Build the head of the loop.  */
6095 		    do_pending_stack_adjust ();
6096 		    emit_label (loop_start);
6097 
6098 		    /* Assign value to element index.  */
6099 		    position =
6100 		      fold_convert (ssizetype,
6101 				    fold_build2 (MINUS_EXPR,
6102 						 TREE_TYPE (index),
6103 						 index,
6104 						 TYPE_MIN_VALUE (domain)));
6105 
6106 		    position =
6107 			size_binop (MULT_EXPR, position,
6108 				    fold_convert (ssizetype,
6109 						  TYPE_SIZE_UNIT (elttype)));
6110 
6111 		    pos_rtx = expand_normal (position);
6112 		    xtarget = offset_address (target, pos_rtx,
6113 					      highest_pow2_factor (position));
6114 		    xtarget = adjust_address (xtarget, mode, 0);
6115 		    if (TREE_CODE (value) == CONSTRUCTOR)
6116 		      store_constructor (value, xtarget, cleared,
6117 					 bitsize / BITS_PER_UNIT);
6118 		    else
6119 		      store_expr (value, xtarget, 0, false);
6120 
6121 		    /* Generate a conditional jump to exit the loop.  */
6122 		    exit_cond = build2 (LT_EXPR, integer_type_node,
6123 					index, hi_index);
6124 		    jumpif (exit_cond, loop_end, -1);
6125 
6126 		    /* Update the loop counter, and jump to the head of
6127 		       the loop.  */
6128 		    expand_assignment (index,
6129 				       build2 (PLUS_EXPR, TREE_TYPE (index),
6130 					       index, integer_one_node),
6131 				       false);
6132 
6133 		    emit_jump (loop_start);
6134 
6135 		    /* Build the end of the loop.  */
6136 		    emit_label (loop_end);
6137 		  }
6138 	      }
6139 	    else if ((index != 0 && ! host_integerp (index, 0))
6140 		     || ! host_integerp (TYPE_SIZE (elttype), 1))
6141 	      {
6142 		tree position;
6143 
6144 		if (index == 0)
6145 		  index = ssize_int (1);
6146 
6147 		if (minelt)
6148 		  index = fold_convert (ssizetype,
6149 					fold_build2 (MINUS_EXPR,
6150 						     TREE_TYPE (index),
6151 						     index,
6152 						     TYPE_MIN_VALUE (domain)));
6153 
6154 		position =
6155 		  size_binop (MULT_EXPR, index,
6156 			      fold_convert (ssizetype,
6157 					    TYPE_SIZE_UNIT (elttype)));
6158 		xtarget = offset_address (target,
6159 					  expand_normal (position),
6160 					  highest_pow2_factor (position));
6161 		xtarget = adjust_address (xtarget, mode, 0);
6162 		store_expr (value, xtarget, 0, false);
6163 	      }
6164 	    else
6165 	      {
6166 		if (index != 0)
6167 		  bitpos = ((tree_low_cst (index, 0) - minelt)
6168 			    * tree_low_cst (TYPE_SIZE (elttype), 1));
6169 		else
6170 		  bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
6171 
6172 		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6173 		    && TREE_CODE (type) == ARRAY_TYPE
6174 		    && TYPE_NONALIASED_COMPONENT (type))
6175 		  {
6176 		    target = copy_rtx (target);
6177 		    MEM_KEEP_ALIAS_SET_P (target) = 1;
6178 		  }
6179 		store_constructor_field (target, bitsize, bitpos, mode, value,
6180 					 cleared, get_alias_set (elttype));
6181 	      }
6182 	  }
6183 	break;
6184       }
6185 
6186     case VECTOR_TYPE:
6187       {
6188 	unsigned HOST_WIDE_INT idx;
6189 	constructor_elt *ce;
6190 	int i;
6191 	int need_to_clear;
6192 	int icode = CODE_FOR_nothing;
6193 	tree elttype = TREE_TYPE (type);
6194 	int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
6195 	enum machine_mode eltmode = TYPE_MODE (elttype);
6196 	HOST_WIDE_INT bitsize;
6197 	HOST_WIDE_INT bitpos;
6198 	rtvec vector = NULL;
6199 	unsigned n_elts;
6200 	alias_set_type alias;
6201 
6202 	gcc_assert (eltmode != BLKmode);
6203 
6204 	n_elts = TYPE_VECTOR_SUBPARTS (type);
6205 	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6206 	  {
6207 	    enum machine_mode mode = GET_MODE (target);
6208 
6209 	    icode = (int) optab_handler (vec_init_optab, mode);
6210 	    if (icode != CODE_FOR_nothing)
6211 	      {
6212 		unsigned int i;
6213 
6214 		vector = rtvec_alloc (n_elts);
6215 		for (i = 0; i < n_elts; i++)
6216 		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6217 	      }
6218 	  }
6219 
6220 	/* If the constructor has fewer elements than the vector,
6221 	   clear the whole vector first.  Similarly if this is a static
6222 	   constructor of a non-BLKmode object.  */
6223 	if (cleared)
6224 	  need_to_clear = 0;
6225 	else if (REG_P (target) && TREE_STATIC (exp))
6226 	  need_to_clear = 1;
6227 	else
6228 	  {
6229 	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6230 	    tree value;
6231 
6232 	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6233 	      {
6234 		int n_elts_here = tree_low_cst
6235 		  (int_const_binop (TRUNC_DIV_EXPR,
6236 				    TYPE_SIZE (TREE_TYPE (value)),
6237 				    TYPE_SIZE (elttype)), 1);
6238 
6239 		count += n_elts_here;
6240 		if (mostly_zeros_p (value))
6241 		  zero_count += n_elts_here;
6242 	      }
6243 
6244 	    /* Clear the entire vector first if there are any missing elements,
6245 	       or if the incidence of zero elements is >= 75%.  */
6246 	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6247 	  }
6248 
6249 	if (need_to_clear && size > 0 && !vector)
6250 	  {
6251 	    if (REG_P (target))
6252 	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6253 	    else
6254 	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6255 	    cleared = 1;
6256 	  }
6257 
6258 	/* Inform later passes that the old value is dead.  */
6259 	if (!cleared && !vector && REG_P (target))
6260 	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6261 
6262         if (MEM_P (target))
6263 	  alias = MEM_ALIAS_SET (target);
6264 	else
6265 	  alias = get_alias_set (elttype);
6266 
6267         /* Store each element of the constructor into the corresponding
6268 	   element of TARGET, determined by counting the elements.  */
6269 	for (idx = 0, i = 0;
6270 	     vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6271 	     idx++, i += bitsize / elt_size)
6272 	  {
6273 	    HOST_WIDE_INT eltpos;
6274 	    tree value = ce->value;
6275 
6276 	    bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
6277 	    if (cleared && initializer_zerop (value))
6278 	      continue;
6279 
6280 	    if (ce->index)
6281 	      eltpos = tree_low_cst (ce->index, 1);
6282 	    else
6283 	      eltpos = i;
6284 
6285 	    if (vector)
6286 	      {
6287 	        /* Vector CONSTRUCTORs should only be built from smaller
6288 		   vectors in the case of BLKmode vectors.  */
6289 		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6290 		RTVEC_ELT (vector, eltpos)
6291 		  = expand_normal (value);
6292 	      }
6293 	    else
6294 	      {
6295 		enum machine_mode value_mode =
6296 		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6297 		  ? TYPE_MODE (TREE_TYPE (value))
6298 		  : eltmode;
6299 		bitpos = eltpos * elt_size;
6300 		store_constructor_field (target, bitsize, bitpos, value_mode,
6301 					 value, cleared, alias);
6302 	      }
6303 	  }
6304 
6305 	if (vector)
6306 	  emit_insn (GEN_FCN (icode)
6307 		     (target,
6308 		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
6309 	break;
6310       }
6311 
6312     default:
6313       gcc_unreachable ();
6314     }
6315 }
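
/* As an illustration (not from GCC itself), a mostly-zero aggregate such as

	int v[100] = { [3] = 7 };

   makes store_constructor clear the whole object once with clear_storage
   and then store only the nonzero element, instead of emitting one store
   per array element.  */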
6316 
6317 /* Store the value of EXP (an expression tree)
6318    into a subfield of TARGET which has mode MODE and occupies
6319    BITSIZE bits, starting BITPOS bits from the start of TARGET.
6320    If MODE is VOIDmode, it means that we are storing into a bit-field.
6321 
6322    BITREGION_START is the bit position of the first bitfield in this region.
6323    BITREGION_END is the bit position of the last bitfield in this region.
6324    These two fields are 0 if the C++ memory model does not apply,
6325    or if we are not interested in keeping track of bitfield regions.
6326 
6327    Always return const0_rtx unless we have something particular to
6328    return.
6329 
6330    ALIAS_SET is the alias set for the destination.  This value will
6331    (in general) be different from that for TARGET, since TARGET is a
6332    reference to the containing structure.
6333 
6334    If NONTEMPORAL is true, try generating a nontemporal store.  */
6335 
6336 static rtx
6337 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6338 	     unsigned HOST_WIDE_INT bitregion_start,
6339 	     unsigned HOST_WIDE_INT bitregion_end,
6340 	     enum machine_mode mode, tree exp,
6341 	     alias_set_type alias_set, bool nontemporal)
6342 {
6343   if (TREE_CODE (exp) == ERROR_MARK)
6344     return const0_rtx;
6345 
6346   /* If we have nothing to store, do nothing unless the expression has
6347      side-effects.  */
6348   if (bitsize == 0)
6349     return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6350 
6351   if (GET_CODE (target) == CONCAT)
6352     {
6353       /* We're storing into a struct containing a single __complex.  */
6354 
6355       gcc_assert (!bitpos);
6356       return store_expr (exp, target, 0, nontemporal);
6357     }
6358 
6359   /* If the structure is in a register or if the component
6360      is a bit field, we cannot use addressing to access it.
6361      Use bit-field techniques or SUBREG to store in it.  */
6362 
6363   if (mode == VOIDmode
6364       || (mode != BLKmode && ! direct_store[(int) mode]
6365 	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6366 	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6367       || REG_P (target)
6368       || GET_CODE (target) == SUBREG
6369       /* If the field isn't aligned enough to store as an ordinary memref,
6370 	 store it as a bit field.  */
6371       || (mode != BLKmode
6372 	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6373 		|| bitpos % GET_MODE_ALIGNMENT (mode))
6374 	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6375 	      || (bitpos % BITS_PER_UNIT != 0)))
6376       || (bitsize >= 0 && mode != BLKmode
6377 	  && GET_MODE_BITSIZE (mode) > bitsize)
6378       /* If the RHS and field are a constant size and the size of the
6379 	 RHS isn't the same size as the bitfield, we must use bitfield
6380 	 operations.  */
6381       || (bitsize >= 0
6382 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6383 	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6384       /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6385          decl, we must use bitfield operations.  */
6386       || (bitsize >= 0
6387 	  && TREE_CODE (exp) == MEM_REF
6388 	  && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6389 	  && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6390 	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
6391 	  && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6392     {
6393       rtx temp;
6394       gimple nop_def;
6395 
6396       /* If EXP is a NOP_EXPR of precision less than its mode, then that
6397 	 implies a mask operation.  If the precision is the same size as
6398 	 the field we're storing into, that mask is redundant.  This is
6399 	 particularly common with bit field assignments generated by the
6400 	 C front end.  */
6401       nop_def = get_def_for_expr (exp, NOP_EXPR);
6402       if (nop_def)
6403 	{
6404 	  tree type = TREE_TYPE (exp);
6405 	  if (INTEGRAL_TYPE_P (type)
6406 	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6407 	      && bitsize == TYPE_PRECISION (type))
6408 	    {
6409 	      tree op = gimple_assign_rhs1 (nop_def);
6410 	      type = TREE_TYPE (op);
6411 	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6412 		exp = op;
6413 	    }
6414 	}
6415 
6416       temp = expand_normal (exp);
6417 
6418       /* If BITSIZE is narrower than the size of the type of EXP
6419 	 we will be narrowing TEMP.  Normally, what's wanted are the
6420 	 low-order bits.  However, if EXP's type is a record and this is
6421 	 a big-endian machine, we want the upper BITSIZE bits.  */
6422       if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6423 	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6424 	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6425 	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6426 			     GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6427 			     NULL_RTX, 1);
6428 
6429       /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
6430       if (mode != VOIDmode && mode != BLKmode
6431 	  && mode != TYPE_MODE (TREE_TYPE (exp)))
6432 	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6433 
6434       /* If TEMP is not a PARALLEL (see below) and its mode and that of TARGET
6435 	 are both BLKmode, both must be in memory and BITPOS must be aligned
6436 	 on a byte boundary.  If so, we simply do a block copy.  Likewise for
6437 	 a BLKmode-like TARGET.  */
6438       if (GET_CODE (temp) != PARALLEL
6439 	  && GET_MODE (temp) == BLKmode
6440 	  && (GET_MODE (target) == BLKmode
6441 	      || (MEM_P (target)
6442 		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6443 		  && (bitpos % BITS_PER_UNIT) == 0
6444 		  && (bitsize % BITS_PER_UNIT) == 0)))
6445 	{
6446 	  gcc_assert (MEM_P (target) && MEM_P (temp)
6447 		      && (bitpos % BITS_PER_UNIT) == 0);
6448 
6449 	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6450 	  emit_block_move (target, temp,
6451 			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6452 				    / BITS_PER_UNIT),
6453 			   BLOCK_OP_NORMAL);
6454 
6455 	  return const0_rtx;
6456 	}
6457 
6458       /* Handle calls that return values in multiple non-contiguous locations.
6459 	 The Irix 6 ABI has examples of this.  */
6460       if (GET_CODE (temp) == PARALLEL)
6461 	{
6462 	  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6463 	  rtx temp_target;
6464 	  if (mode == BLKmode)
6465 	    mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6466 	  temp_target = gen_reg_rtx (mode);
6467 	  emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6468 	  temp = temp_target;
6469 	}
6470       else if (mode == BLKmode)
6471 	{
6472 	  /* Handle calls that return BLKmode values in registers.  */
6473 	  if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6474 	    {
6475 	      rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6476 	      copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6477 	      temp = temp_target;
6478 	    }
6479 	  else
6480 	    {
6481 	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6482 	      rtx temp_target;
6483 	      mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6484 	      temp_target = gen_reg_rtx (mode);
6485 	      temp_target
6486 	        = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6487 				     false, temp_target, mode, mode);
6488 	      temp = temp_target;
6489 	    }
6490 	}
6491 
6492       /* Store the value in the bitfield.  */
6493       store_bit_field (target, bitsize, bitpos,
6494 		       bitregion_start, bitregion_end,
6495 		       mode, temp);
6496 
6497       return const0_rtx;
6498     }
6499   else
6500     {
6501       /* Now build a reference to just the desired component.  */
6502       rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6503 
6504       if (to_rtx == target)
6505 	to_rtx = copy_rtx (to_rtx);
6506 
6507       if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6508 	set_mem_alias_set (to_rtx, alias_set);
6509 
6510       return store_expr (exp, to_rtx, 0, nontemporal);
6511     }
6512 }
6513 
6514 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6515    an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6516    codes and find the ultimate containing object, which we return.
6517 
6518    We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6519    bit position, and *PUNSIGNEDP to the signedness of the field.
6520    If the position of the field is variable, we store a tree
6521    giving the variable offset (in units) in *POFFSET.
6522    This offset is in addition to the bit position.
6523    If the position is not variable, we store 0 in *POFFSET.
6524 
6525    If any of the extraction expressions is volatile,
6526    we store 1 in *PVOLATILEP.  Otherwise we don't change that.
6527 
6528    If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6529    Otherwise, it is a mode that can be used to access the field.
6530 
6531    If the field describes a variable-sized object, *PMODE is set to
6532    BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
6533    this case, but the address of the object can be found.
6534 
6535    If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6536    look through nodes that serve as markers of a greater alignment than
6537    the one that can be deduced from the expression.  These nodes make it
6538    possible for front-ends to prevent temporaries from being created by
6539    the middle-end on alignment considerations.  For that purpose, the
6540    normal operating mode at high-level is to always pass FALSE so that
6541    the ultimate containing object is really returned; moreover, the
6542    associated predicate handled_component_p will always return TRUE
6543    on these nodes, thus indicating that they are essentially handled
6544    by get_inner_reference.  TRUE should only be passed when the caller
6545    is scanning the expression in order to build another representation
6546    and specifically knows how to handle these nodes; as such, this is
6547    the normal operating mode in the RTL expanders.  */
6548 
6549 tree
6550 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6551 		     HOST_WIDE_INT *pbitpos, tree *poffset,
6552 		     enum machine_mode *pmode, int *punsignedp,
6553 		     int *pvolatilep, bool keep_aligning)
6554 {
6555   tree size_tree = 0;
6556   enum machine_mode mode = VOIDmode;
6557   bool blkmode_bitfield = false;
6558   tree offset = size_zero_node;
6559   double_int bit_offset = double_int_zero;
6560 
6561   /* First get the mode, signedness, and size.  We do this from just the
6562      outermost expression.  */
6563   *pbitsize = -1;
6564   if (TREE_CODE (exp) == COMPONENT_REF)
6565     {
6566       tree field = TREE_OPERAND (exp, 1);
6567       size_tree = DECL_SIZE (field);
6568       if (!DECL_BIT_FIELD (field))
6569 	mode = DECL_MODE (field);
6570       else if (DECL_MODE (field) == BLKmode)
6571 	blkmode_bitfield = true;
6572       else if (TREE_THIS_VOLATILE (exp)
6573 	       && flag_strict_volatile_bitfields > 0)
6574 	/* Volatile bitfields should be accessed in the mode of the
6575 	     field's type, not the mode computed based on the bit
6576 	     size.  */
6577 	mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6578 
6579       *punsignedp = DECL_UNSIGNED (field);
6580     }
6581   else if (TREE_CODE (exp) == BIT_FIELD_REF)
6582     {
6583       size_tree = TREE_OPERAND (exp, 1);
6584       *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6585 		     || TYPE_UNSIGNED (TREE_TYPE (exp)));
6586 
6587       /* For vector types, with the correct size of access, use the mode of
6588 	 the inner type.  */
6589       if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6590 	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6591 	  && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6592         mode = TYPE_MODE (TREE_TYPE (exp));
6593     }
6594   else
6595     {
6596       mode = TYPE_MODE (TREE_TYPE (exp));
6597       *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6598 
6599       if (mode == BLKmode)
6600 	size_tree = TYPE_SIZE (TREE_TYPE (exp));
6601       else
6602 	*pbitsize = GET_MODE_BITSIZE (mode);
6603     }
6604 
6605   if (size_tree != 0)
6606     {
6607       if (! host_integerp (size_tree, 1))
6608 	mode = BLKmode, *pbitsize = -1;
6609       else
6610 	*pbitsize = tree_low_cst (size_tree, 1);
6611     }
6612 
6613   /* Compute cumulative bit-offset for nested component-refs and array-refs,
6614      and find the ultimate containing object.  */
6615   while (1)
6616     {
6617       switch (TREE_CODE (exp))
6618 	{
6619 	case BIT_FIELD_REF:
6620 	  bit_offset += tree_to_double_int (TREE_OPERAND (exp, 2));
6621 	  break;
6622 
6623 	case COMPONENT_REF:
6624 	  {
6625 	    tree field = TREE_OPERAND (exp, 1);
6626 	    tree this_offset = component_ref_field_offset (exp);
6627 
6628 	    /* If this field hasn't been filled in yet, don't go past it.
6629 	       This should only happen when folding expressions made during
6630 	       type construction.  */
6631 	    if (this_offset == 0)
6632 	      break;
6633 
6634 	    offset = size_binop (PLUS_EXPR, offset, this_offset);
6635 	    bit_offset += tree_to_double_int (DECL_FIELD_BIT_OFFSET (field));
6636 
6637 	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
6638 	  }
6639 	  break;
6640 
6641 	case ARRAY_REF:
6642 	case ARRAY_RANGE_REF:
6643 	  {
6644 	    tree index = TREE_OPERAND (exp, 1);
6645 	    tree low_bound = array_ref_low_bound (exp);
6646 	    tree unit_size = array_ref_element_size (exp);
6647 
6648 	    /* We assume all arrays have sizes that are a multiple of a byte.
6649 	       First subtract the lower bound, if any, in the type of the
6650 	       index, then convert to sizetype and multiply by the size of
6651 	       the array element.  */
6652 	    if (! integer_zerop (low_bound))
6653 	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6654 				   index, low_bound);
6655 
6656 	    offset = size_binop (PLUS_EXPR, offset,
6657 			         size_binop (MULT_EXPR,
6658 					     fold_convert (sizetype, index),
6659 					     unit_size));
6660 	  }
6661 	  break;
6662 
6663 	case REALPART_EXPR:
6664 	  break;
6665 
6666 	case IMAGPART_EXPR:
6667 	  bit_offset += double_int::from_uhwi (*pbitsize);
6668 	  break;
6669 
6670 	case VIEW_CONVERT_EXPR:
6671 	  if (keep_aligning && STRICT_ALIGNMENT
6672 	      && (TYPE_ALIGN (TREE_TYPE (exp))
6673 	       > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6674 	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6675 		  < BIGGEST_ALIGNMENT)
6676 	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6677 		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6678 	    goto done;
6679 	  break;
6680 
6681 	case MEM_REF:
6682 	  /* Hand back the decl for MEM[&decl, off].  */
6683 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6684 	    {
6685 	      tree off = TREE_OPERAND (exp, 1);
6686 	      if (!integer_zerop (off))
6687 		{
6688 		  double_int boff, coff = mem_ref_offset (exp);
6689 		  boff = coff.alshift (BITS_PER_UNIT == 8
6690 				       ? 3 : exact_log2 (BITS_PER_UNIT),
6691 				       HOST_BITS_PER_DOUBLE_INT);
6692 		  bit_offset += boff;
6693 		}
6694 	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6695 	    }
6696 	  goto done;
6697 
6698 	default:
6699 	  goto done;
6700 	}
6701 
6702       /* If any reference in the chain is volatile, the effect is volatile.  */
6703       if (TREE_THIS_VOLATILE (exp))
6704 	*pvolatilep = 1;
6705 
6706       exp = TREE_OPERAND (exp, 0);
6707     }
6708  done:
6709 
6710   /* If OFFSET is constant, see if we can return the whole thing as a
6711      constant bit position.  Make sure to handle overflow during
6712      this conversion.  */
6713   if (TREE_CODE (offset) == INTEGER_CST)
6714     {
6715       double_int tem = tree_to_double_int (offset);
6716       tem = tem.sext (TYPE_PRECISION (sizetype));
6717       tem = tem.alshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT),
6718 			 HOST_BITS_PER_DOUBLE_INT);
6719       tem += bit_offset;
6720       if (tem.fits_shwi ())
6721 	{
6722 	  *pbitpos = tem.to_shwi ();
6723 	  *poffset = offset = NULL_TREE;
6724 	}
6725     }
6726 
6727   /* Otherwise, split it up.  */
6728   if (offset)
6729     {
6730       /* Avoid returning a negative bitpos as this may wreak havoc later.  */
6731       if (bit_offset.is_negative () || !bit_offset.fits_shwi ())
6732         {
6733 	  double_int mask
6734 	    = double_int::mask (BITS_PER_UNIT == 8
6735 			       ? 3 : exact_log2 (BITS_PER_UNIT));
6736 	  double_int tem = bit_offset.and_not (mask);
6737 	  /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6738 	     Subtract it from BIT_OFFSET and add it (scaled) to OFFSET.  */
6739 	  bit_offset -= tem;
6740 	  tem = tem.arshift (BITS_PER_UNIT == 8
6741 			     ? 3 : exact_log2 (BITS_PER_UNIT),
6742 			     HOST_BITS_PER_DOUBLE_INT);
6743 	  offset = size_binop (PLUS_EXPR, offset,
6744 			       double_int_to_tree (sizetype, tem));
6745 	}
6746 
6747       *pbitpos = bit_offset.to_shwi ();
6748       *poffset = offset;
6749     }
6750 
6751   /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
6752   if (mode == VOIDmode
6753       && blkmode_bitfield
6754       && (*pbitpos % BITS_PER_UNIT) == 0
6755       && (*pbitsize % BITS_PER_UNIT) == 0)
6756     *pmode = BLKmode;
6757   else
6758     *pmode = mode;
6759 
6760   return exp;
6761 }
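
/* Illustrative sketch of a typical caller (not part of the original source;
   the variable names below are hypothetical): decomposing a reference such
   as A.B[I].C into a base object plus displacements.

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     enum machine_mode mode1;
     int unsignedp, volatilep = 0;
     tree base = get_inner_reference (ref, &bitsize, &bitpos, &offset,
				      &mode1, &unsignedp, &volatilep, false);

   On return BASE is the ultimate containing object (e.g. the decl A),
   BITPOS holds the constant part of the displacement in bits, and OFFSET,
   if nonzero, holds the variable part in bytes (e.g. I * element_size).  */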
6762 
6763 /* Return a tree of sizetype representing the size, in bytes, of the element
6764    of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
6765 
6766 tree
6767 array_ref_element_size (tree exp)
6768 {
6769   tree aligned_size = TREE_OPERAND (exp, 3);
6770   tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6771   location_t loc = EXPR_LOCATION (exp);
6772 
6773   /* If a size was specified in the ARRAY_REF, it's the size measured
6774      in alignment units of the element type.  So multiply by that value.  */
6775   if (aligned_size)
6776     {
6777       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6778 	 sizetype from another type of the same width and signedness.  */
6779       if (TREE_TYPE (aligned_size) != sizetype)
6780 	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6781       return size_binop_loc (loc, MULT_EXPR, aligned_size,
6782 			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
6783     }
6784 
6785   /* Otherwise, take the size from that of the element type.  Substitute
6786      any PLACEHOLDER_EXPR that we have.  */
6787   else
6788     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6789 }
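
/* Illustrative example (hypothetical declaration, assuming a 4-byte int):
   for an ARRAY_REF of "int a[10]" with no operand 3, this returns
   TYPE_SIZE_UNIT of the element type, i.e. a sizetype constant 4.  When
   operand 3 is present it is measured in alignment units, so the result
   is aligned_size * TYPE_ALIGN_UNIT (elmt_type), as computed above.  */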
6790 
6791 /* Return a tree representing the lower bound of the array mentioned in
6792    EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
6793 
6794 tree
6795 array_ref_low_bound (tree exp)
6796 {
6797   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6798 
6799   /* If a lower bound is specified in EXP, use it.  */
6800   if (TREE_OPERAND (exp, 2))
6801     return TREE_OPERAND (exp, 2);
6802 
6803   /* Otherwise, if there is a domain type and it has a lower bound, use it,
6804      substituting for a PLACEHOLDER_EXPR as needed.  */
6805   if (domain_type && TYPE_MIN_VALUE (domain_type))
6806     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6807 
6808   /* Otherwise, return a zero of the appropriate type.  */
6809   return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6810 }
6811 
6812 /* Returns true if REF is an array reference to an array at the end of
6813    a structure.  If this is the case, the array may be allocated larger
6814    than its upper bound implies.  */
6815 
6816 bool
6817 array_at_struct_end_p (tree ref)
6818 {
6819   if (TREE_CODE (ref) != ARRAY_REF
6820       && TREE_CODE (ref) != ARRAY_RANGE_REF)
6821     return false;
6822 
6823   while (handled_component_p (ref))
6824     {
6825       /* If the reference chain contains a component reference to a
6826          non-union type and another field follows, the reference
6827 	 is not at the end of a structure.  */
6828       if (TREE_CODE (ref) == COMPONENT_REF
6829 	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
6830 	{
6831 	  tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
6832 	  while (nextf && TREE_CODE (nextf) != FIELD_DECL)
6833 	    nextf = DECL_CHAIN (nextf);
6834 	  if (nextf)
6835 	    return false;
6836 	}
6837 
6838       ref = TREE_OPERAND (ref, 0);
6839     }
6840 
6841   /* If the reference is based on a declared entity, the size of the array
6842      is constrained by its given domain.  */
6843   if (DECL_P (ref))
6844     return false;
6845 
6846   return true;
6847 }
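
/* Illustrative examples (hypothetical declarations):

     struct msg { int len; char data[1]; } *m;
     m->data[i]     -- true: DATA is the last field of MSG, so the array
		       may be allocated larger than its declared bound.

     struct pair { char a[4]; int b; } p;
     p.a[i]         -- false: field B follows A in the record.

     char buf[16];
     buf[i]         -- false: the reference is based on a declared object,
		       so its domain constrains the access.  */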
6848 
6849 /* Return a tree representing the upper bound of the array mentioned in
6850    EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
6851 
6852 tree
6853 array_ref_up_bound (tree exp)
6854 {
6855   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6856 
6857   /* If there is a domain type and it has an upper bound, use it, substituting
6858      for a PLACEHOLDER_EXPR as needed.  */
6859   if (domain_type && TYPE_MAX_VALUE (domain_type))
6860     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6861 
6862   /* Otherwise fail.  */
6863   return NULL_TREE;
6864 }
6865 
6866 /* Return a tree representing the offset, in bytes, of the field referenced
6867    by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */
6868 
6869 tree
6870 component_ref_field_offset (tree exp)
6871 {
6872   tree aligned_offset = TREE_OPERAND (exp, 2);
6873   tree field = TREE_OPERAND (exp, 1);
6874   location_t loc = EXPR_LOCATION (exp);
6875 
6876   /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6877      in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
6878      value.  */
6879   if (aligned_offset)
6880     {
6881       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6882 	 sizetype from another type of the same width and signedness.  */
6883       if (TREE_TYPE (aligned_offset) != sizetype)
6884 	aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6885       return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6886 			     size_int (DECL_OFFSET_ALIGN (field)
6887 				       / BITS_PER_UNIT));
6888     }
6889 
6890   /* Otherwise, take the offset from that of the field.  Substitute
6891      any PLACEHOLDER_EXPR that we have.  */
6892   else
6893     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6894 }
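
/* Illustrative note (hypothetical caller): the full byte position of a
   field in a COMPONENT_REF is split across two decl fields, so a caller
   typically combines

     tree byte_off = component_ref_field_offset (exp);
     tree bit_off  = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (exp, 1));

   get_inner_reference above does exactly this, adding BYTE_OFF to its
   OFFSET accumulator and BIT_OFF to its cumulative bit offset.  */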
6895 
6896 /* Alignment in bits the TARGET of an assignment may be assumed to have.  */
6897 
6898 static unsigned HOST_WIDE_INT
6899 target_align (const_tree target)
6900 {
6901   /* We might have a chain of nested references with intermediate misaligning
6902      bitfield components, so we need to recurse to find out.  */
6903 
6904   unsigned HOST_WIDE_INT this_align, outer_align;
6905 
6906   switch (TREE_CODE (target))
6907     {
6908     case BIT_FIELD_REF:
6909       return 1;
6910 
6911     case COMPONENT_REF:
6912       this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6913       outer_align = target_align (TREE_OPERAND (target, 0));
6914       return MIN (this_align, outer_align);
6915 
6916     case ARRAY_REF:
6917     case ARRAY_RANGE_REF:
6918       this_align = TYPE_ALIGN (TREE_TYPE (target));
6919       outer_align = target_align (TREE_OPERAND (target, 0));
6920       return MIN (this_align, outer_align);
6921 
6922     CASE_CONVERT:
6923     case NON_LVALUE_EXPR:
6924     case VIEW_CONVERT_EXPR:
6925       this_align = TYPE_ALIGN (TREE_TYPE (target));
6926       outer_align = target_align (TREE_OPERAND (target, 0));
6927       return MAX (this_align, outer_align);
6928 
6929     default:
6930       return TYPE_ALIGN (TREE_TYPE (target));
6931     }
6932 }
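
/* Illustrative example (hypothetical declaration, assuming a target where
   int is 32-bit aligned):

     struct S { char c; int i; } s;

   For the assignment target "s.i" the result is
   MIN (DECL_ALIGN (i), target_align (s)) == MIN (32, TYPE_ALIGN (S)),
   while a BIT_FIELD_REF target is pessimized to an alignment of 1 bit.  */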
6933 
6934 
6935 /* Given an rtx VALUE that may contain additions and multiplications, return
6936    an equivalent value that just refers to a register, memory, or constant.
6937    This is done by generating instructions to perform the arithmetic and
6938    returning a pseudo-register containing the value.
6939 
6940    The returned value may be a REG, SUBREG, MEM or constant.  */
6941 
6942 rtx
6943 force_operand (rtx value, rtx target)
6944 {
6945   rtx op1, op2;
6946   /* Use subtarget as the target for operand 0 of a binary operation.  */
6947   rtx subtarget = get_subtarget (target);
6948   enum rtx_code code = GET_CODE (value);
6949 
6950   /* Check for a subreg applied to an expression produced by the loop optimizer.  */
6951   if (code == SUBREG
6952       && !REG_P (SUBREG_REG (value))
6953       && !MEM_P (SUBREG_REG (value)))
6954     {
6955       value
6956 	= simplify_gen_subreg (GET_MODE (value),
6957 			       force_reg (GET_MODE (SUBREG_REG (value)),
6958 					  force_operand (SUBREG_REG (value),
6959 							 NULL_RTX)),
6960 			       GET_MODE (SUBREG_REG (value)),
6961 			       SUBREG_BYTE (value));
6962       code = GET_CODE (value);
6963     }
6964 
6965   /* Check for a PIC address load.  */
6966   if ((code == PLUS || code == MINUS)
6967       && XEXP (value, 0) == pic_offset_table_rtx
6968       && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6969 	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
6970 	  || GET_CODE (XEXP (value, 1)) == CONST))
6971     {
6972       if (!subtarget)
6973 	subtarget = gen_reg_rtx (GET_MODE (value));
6974       emit_move_insn (subtarget, value);
6975       return subtarget;
6976     }
6977 
6978   if (ARITHMETIC_P (value))
6979     {
6980       op2 = XEXP (value, 1);
6981       if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6982 	subtarget = 0;
6983       if (code == MINUS && CONST_INT_P (op2))
6984 	{
6985 	  code = PLUS;
6986 	  op2 = negate_rtx (GET_MODE (value), op2);
6987 	}
6988 
6989       /* Check for an addition with OP2 a constant integer and our first
6990          operand a PLUS of a virtual register and something else.  In that
6991          case, we want to emit the sum of the virtual register and the
6992          constant first and then add the other value.  This allows virtual
6993          register instantiation to simply modify the constant rather than
6994          creating another one around this addition.  */
6995       if (code == PLUS && CONST_INT_P (op2)
6996 	  && GET_CODE (XEXP (value, 0)) == PLUS
6997 	  && REG_P (XEXP (XEXP (value, 0), 0))
6998 	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6999 	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7000 	{
7001 	  rtx temp = expand_simple_binop (GET_MODE (value), code,
7002 					  XEXP (XEXP (value, 0), 0), op2,
7003 					  subtarget, 0, OPTAB_LIB_WIDEN);
7004 	  return expand_simple_binop (GET_MODE (value), code, temp,
7005 				      force_operand (XEXP (XEXP (value,
7006 								 0), 1), 0),
7007 				      target, 0, OPTAB_LIB_WIDEN);
7008 	}
7009 
7010       op1 = force_operand (XEXP (value, 0), subtarget);
7011       op2 = force_operand (op2, NULL_RTX);
7012       switch (code)
7013 	{
7014 	case MULT:
7015 	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
7016 	case DIV:
7017 	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
7018 	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
7019 					target, 1, OPTAB_LIB_WIDEN);
7020 	  else
7021 	    return expand_divmod (0,
7022 				  FLOAT_MODE_P (GET_MODE (value))
7023 				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
7024 				  GET_MODE (value), op1, op2, target, 0);
7025 	case MOD:
7026 	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7027 				target, 0);
7028 	case UDIV:
7029 	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7030 				target, 1);
7031 	case UMOD:
7032 	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7033 				target, 1);
7034 	case ASHIFTRT:
7035 	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
7036 				      target, 0, OPTAB_LIB_WIDEN);
7037 	default:
7038 	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
7039 				      target, 1, OPTAB_LIB_WIDEN);
7040 	}
7041     }
7042   if (UNARY_P (value))
7043     {
7044       if (!target)
7045 	target = gen_reg_rtx (GET_MODE (value));
7046       op1 = force_operand (XEXP (value, 0), NULL_RTX);
7047       switch (code)
7048 	{
7049 	case ZERO_EXTEND:
7050 	case SIGN_EXTEND:
7051 	case TRUNCATE:
7052 	case FLOAT_EXTEND:
7053 	case FLOAT_TRUNCATE:
7054 	  convert_move (target, op1, code == ZERO_EXTEND);
7055 	  return target;
7056 
7057 	case FIX:
7058 	case UNSIGNED_FIX:
7059 	  expand_fix (target, op1, code == UNSIGNED_FIX);
7060 	  return target;
7061 
7062 	case FLOAT:
7063 	case UNSIGNED_FLOAT:
7064 	  expand_float (target, op1, code == UNSIGNED_FLOAT);
7065 	  return target;
7066 
7067 	default:
7068 	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7069 	}
7070     }
7071 
7072 #ifdef INSN_SCHEDULING
7073   /* On machines that have insn scheduling, we want all memory references to be
7074      explicit, so we need to deal with such paradoxical SUBREGs.  */
7075   if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7076     value
7077       = simplify_gen_subreg (GET_MODE (value),
7078 			     force_reg (GET_MODE (SUBREG_REG (value)),
7079 					force_operand (SUBREG_REG (value),
7080 						       NULL_RTX)),
7081 			     GET_MODE (SUBREG_REG (value)),
7082 			     SUBREG_BYTE (value));
7083 #endif
7084 
7085   return value;
7086 }
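
/* Illustrative sketch of a typical use (hypothetical rtl, Pmode address
   arithmetic assumed): flattening an address computation into a register.

     rtx addr = gen_rtx_PLUS (Pmode, base_reg,
			      gen_rtx_MULT (Pmode, index_reg,
					    GEN_INT (4)));
     addr = force_operand (addr, NULL_RTX);

   The multiplication and addition are emitted as insns and ADDR ends up as
   a REG (or a simpler expression) suitable for use as an operand.  */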
7087 
7088 /* Subroutine of expand_expr: return nonzero iff there is no way that
7089    EXP can reference X, which is being modified.  TOP_P is nonzero if this
7090    call is going to be used to determine whether we need a temporary
7091    for EXP, as opposed to a recursive call to this function.
7092 
7093    It is always safe for this routine to return zero since it merely
7094    searches for optimization opportunities.  */
7095 
7096 int
7097 safe_from_p (const_rtx x, tree exp, int top_p)
7098 {
7099   rtx exp_rtl = 0;
7100   int i, nops;
7101 
7102   if (x == 0
7103       /* If EXP has varying size, we MUST use a target since we currently
7104 	 have no way of allocating temporaries of variable size
7105 	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7106 	 So we assume here that something at a higher level has prevented a
7107 	 clash.  This is somewhat bogus, but the best we can do.  Only
7108 	 do this when X is BLKmode and when we are at the top level.  */
7109       || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7110 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7111 	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7112 	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7113 	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7114 	      != INTEGER_CST)
7115 	  && GET_MODE (x) == BLKmode)
7116       /* If X is in the outgoing argument area, it is always safe.  */
7117       || (MEM_P (x)
7118 	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
7119 	      || (GET_CODE (XEXP (x, 0)) == PLUS
7120 		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7121     return 1;
7122 
7123   /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7124      find the underlying pseudo.  */
7125   if (GET_CODE (x) == SUBREG)
7126     {
7127       x = SUBREG_REG (x);
7128       if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7129 	return 0;
7130     }
7131 
7132   /* Now look at our tree code and possibly recurse.  */
7133   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7134     {
7135     case tcc_declaration:
7136       exp_rtl = DECL_RTL_IF_SET (exp);
7137       break;
7138 
7139     case tcc_constant:
7140       return 1;
7141 
7142     case tcc_exceptional:
7143       if (TREE_CODE (exp) == TREE_LIST)
7144 	{
7145 	  while (1)
7146 	    {
7147 	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7148 		return 0;
7149 	      exp = TREE_CHAIN (exp);
7150 	      if (!exp)
7151 		return 1;
7152 	      if (TREE_CODE (exp) != TREE_LIST)
7153 		return safe_from_p (x, exp, 0);
7154 	    }
7155 	}
7156       else if (TREE_CODE (exp) == CONSTRUCTOR)
7157 	{
7158 	  constructor_elt *ce;
7159 	  unsigned HOST_WIDE_INT idx;
7160 
7161 	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7162 	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7163 		|| !safe_from_p (x, ce->value, 0))
7164 	      return 0;
7165 	  return 1;
7166 	}
7167       else if (TREE_CODE (exp) == ERROR_MARK)
7168 	return 1;	/* An already-visited SAVE_EXPR? */
7169       else
7170 	return 0;
7171 
7172     case tcc_statement:
7173       /* The only case we look at here is the DECL_INITIAL inside a
7174 	 DECL_EXPR.  */
7175       return (TREE_CODE (exp) != DECL_EXPR
7176 	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7177 	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7178 	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7179 
7180     case tcc_binary:
7181     case tcc_comparison:
7182       if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7183 	return 0;
7184       /* Fall through.  */
7185 
7186     case tcc_unary:
7187       return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7188 
7189     case tcc_expression:
7190     case tcc_reference:
7191     case tcc_vl_exp:
7192       /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
7193 	 the expression.  If it is set, we conflict iff we are that rtx or
7194 	 both are in memory.  Otherwise, we check all operands of the
7195 	 expression recursively.  */
7196 
7197       switch (TREE_CODE (exp))
7198 	{
7199 	case ADDR_EXPR:
7200 	  /* If the operand is static or we are static, we can't conflict.
7201 	     Likewise if we don't conflict with the operand at all.  */
7202 	  if (staticp (TREE_OPERAND (exp, 0))
7203 	      || TREE_STATIC (exp)
7204 	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7205 	    return 1;
7206 
7207 	  /* Otherwise, the only way this can conflict is if we are taking
7208 	     the address of a DECL whose address is part of X, which is
7209 	     very rare.  */
7210 	  exp = TREE_OPERAND (exp, 0);
7211 	  if (DECL_P (exp))
7212 	    {
7213 	      if (!DECL_RTL_SET_P (exp)
7214 		  || !MEM_P (DECL_RTL (exp)))
7215 		return 0;
7216 	      else
7217 		exp_rtl = XEXP (DECL_RTL (exp), 0);
7218 	    }
7219 	  break;
7220 
7221 	case MEM_REF:
7222 	  if (MEM_P (x)
7223 	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7224 					get_alias_set (exp)))
7225 	    return 0;
7226 	  break;
7227 
7228 	case CALL_EXPR:
7229 	  /* Assume that the call will clobber all hard registers and
7230 	     all of memory.  */
7231 	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7232 	      || MEM_P (x))
7233 	    return 0;
7234 	  break;
7235 
7236 	case WITH_CLEANUP_EXPR:
7237 	case CLEANUP_POINT_EXPR:
7238 	  /* Lowered by gimplify.c.  */
7239 	  gcc_unreachable ();
7240 
7241 	case SAVE_EXPR:
7242 	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7243 
7244 	default:
7245 	  break;
7246 	}
7247 
7248       /* If we have an rtx, we do not need to scan our operands.  */
7249       if (exp_rtl)
7250 	break;
7251 
7252       nops = TREE_OPERAND_LENGTH (exp);
7253       for (i = 0; i < nops; i++)
7254 	if (TREE_OPERAND (exp, i) != 0
7255 	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7256 	  return 0;
7257 
7258       break;
7259 
7260     case tcc_type:
7261       /* Should never get a type here.  */
7262       gcc_unreachable ();
7263     }
7264 
7265   /* If we have an rtl, find any enclosed object.  Then see if we conflict
7266      with it.  */
7267   if (exp_rtl)
7268     {
7269       if (GET_CODE (exp_rtl) == SUBREG)
7270 	{
7271 	  exp_rtl = SUBREG_REG (exp_rtl);
7272 	  if (REG_P (exp_rtl)
7273 	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7274 	    return 0;
7275 	}
7276 
7277       /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
7278 	 are memory and they conflict.  */
7279       return ! (rtx_equal_p (x, exp_rtl)
7280 		|| (MEM_P (x) && MEM_P (exp_rtl)
7281 		    && true_dependence (exp_rtl, VOIDmode, x)));
7282     }
7283 
7284   /* If we reach here, it is safe.  */
7285   return 1;
7286 }
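
/* Illustrative sketch (mirrors the use in expand_operands below): before
   reusing TARGET for the first operand of a binary expression, make sure
   the second operand cannot observe or clobber it.

     if (! safe_from_p (target, exp1, 1))
       target = 0;	/- fall back to a fresh temporary -/

   Returning 0 is always conservative; it only costs a missed reuse.  */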
7287 
7288 
7289 /* Return the highest power of two that EXP is known to be a multiple of.
7290    This is used in updating alignment of MEMs in array references.  */
7291 
7292 unsigned HOST_WIDE_INT
7293 highest_pow2_factor (const_tree exp)
7294 {
7295   unsigned HOST_WIDE_INT c0, c1;
7296 
7297   switch (TREE_CODE (exp))
7298     {
7299     case INTEGER_CST:
7300       /* We can find the lowest bit that's a one.  If the low
7301 	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
7302 	 We need to handle this case since we can find it in a COND_EXPR,
7303 	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
7304 	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
7305 	 later ICE.  */
7306       if (TREE_OVERFLOW (exp))
7307 	return BIGGEST_ALIGNMENT;
7308       else
7309 	{
7310 	  /* Note: tree_low_cst is intentionally not used here,
7311 	     we don't care about the upper bits.  */
7312 	  c0 = TREE_INT_CST_LOW (exp);
7313 	  c0 &= -c0;
7314 	  return c0 ? c0 : BIGGEST_ALIGNMENT;
7315 	}
7316       break;
7317 
7318     case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
7319       c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7320       c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7321       return MIN (c0, c1);
7322 
7323     case MULT_EXPR:
7324       c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7325       c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7326       return c0 * c1;
7327 
7328     case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
7329     case CEIL_DIV_EXPR:
7330       if (integer_pow2p (TREE_OPERAND (exp, 1))
7331 	  && host_integerp (TREE_OPERAND (exp, 1), 1))
7332 	{
7333 	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
7334 	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
7335 	  return MAX (1, c0 / c1);
7336 	}
7337       break;
7338 
7339     case BIT_AND_EXPR:
7340       /* The highest power of two of a bit-and expression is the maximum of
7341 	 that of its operands.  We typically get here for a complex LHS and
7342 	 a constant negative power of two on the RHS to force an explicit
7343 	 alignment, so don't bother looking at the LHS.  */
7344       return highest_pow2_factor (TREE_OPERAND (exp, 1));
7345 
7346     CASE_CONVERT:
7347     case SAVE_EXPR:
7348       return highest_pow2_factor (TREE_OPERAND (exp, 0));
7349 
7350     case COMPOUND_EXPR:
7351       return highest_pow2_factor (TREE_OPERAND (exp, 1));
7352 
7353     case COND_EXPR:
7354       c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
7355       c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
7356       return MIN (c0, c1);
7357 
7358     default:
7359       break;
7360     }
7361 
7362   return 1;
7363 }
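
/* Illustrative worked example (hypothetical trees): for the byte offset
   expression I * 12 + 8,

     highest_pow2_factor (I)	== 1	(default case)
     highest_pow2_factor (12)	== 4	(12 & -12)
     MULT_EXPR:  1 * 4		== 4
     PLUS_EXPR:  MIN (4, 8)	== 4

   so a MEM built from this offset may be marked as 4-byte aligned on top
   of the alignment of its base.  */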
7364 
7365 /* Similar, except that the alignment requirements of TARGET are
7366    taken into account.  Assume it is at least as aligned as its
7367    type, unless it is a COMPONENT_REF in which case the layout of
7368    the structure gives the alignment.  */
7369 
7370 static unsigned HOST_WIDE_INT
7371 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7372 {
7373   unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7374   unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7375 
7376   return MAX (factor, talign);
7377 }
7378 
7379 #ifdef HAVE_conditional_move
7380 /* Convert the tree comparison code TCODE to the rtl one where the
7381    signedness is UNSIGNEDP.  */
7382 
7383 static enum rtx_code
7384 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7385 {
7386   enum rtx_code code;
7387   switch (tcode)
7388     {
7389     case EQ_EXPR:
7390       code = EQ;
7391       break;
7392     case NE_EXPR:
7393       code = NE;
7394       break;
7395     case LT_EXPR:
7396       code = unsignedp ? LTU : LT;
7397       break;
7398     case LE_EXPR:
7399       code = unsignedp ? LEU : LE;
7400       break;
7401     case GT_EXPR:
7402       code = unsignedp ? GTU : GT;
7403       break;
7404     case GE_EXPR:
7405       code = unsignedp ? GEU : GE;
7406       break;
7407     case UNORDERED_EXPR:
7408       code = UNORDERED;
7409       break;
7410     case ORDERED_EXPR:
7411       code = ORDERED;
7412       break;
7413     case UNLT_EXPR:
7414       code = UNLT;
7415       break;
7416     case UNLE_EXPR:
7417       code = UNLE;
7418       break;
7419     case UNGT_EXPR:
7420       code = UNGT;
7421       break;
7422     case UNGE_EXPR:
7423       code = UNGE;
7424       break;
7425     case UNEQ_EXPR:
7426       code = UNEQ;
7427       break;
7428     case LTGT_EXPR:
7429       code = LTGT;
7430       break;
7431 
7432     default:
7433       gcc_unreachable ();
7434     }
7435   return code;
7436 }
7437 #endif
7438 
7439 /* Subroutine of expand_expr.  Expand the two operands of a binary
7440    expression EXP0 and EXP1 placing the results in OP0 and OP1.
7441    The value may be stored in TARGET if TARGET is nonzero.  The
7442    MODIFIER argument is as documented by expand_expr.  */
7443 
7444 static void
7445 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7446 		 enum expand_modifier modifier)
7447 {
7448   if (! safe_from_p (target, exp1, 1))
7449     target = 0;
7450   if (operand_equal_p (exp0, exp1, 0))
7451     {
7452       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7453       *op1 = copy_rtx (*op0);
7454     }
7455   else
7456     {
7457       /* If we need to preserve evaluation order, copy exp0 into its own
7458 	 temporary variable so that it can't be clobbered by exp1.  */
7459       if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7460 	exp0 = save_expr (exp0);
7461       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7462       *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7463     }
7464 }
7465 
7466 
7467 /* Return a MEM that contains constant EXP.  DEFER is as for
7468    output_constant_def and MODIFIER is as for expand_expr.  */
7469 
7470 static rtx
7471 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7472 {
7473   rtx mem;
7474 
7475   mem = output_constant_def (exp, defer);
7476   if (modifier != EXPAND_INITIALIZER)
7477     mem = use_anchored_address (mem);
7478   return mem;
7479 }
7480 
7481 /* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
7482    The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
7483 
7484 static rtx
7485 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
7486 		         enum expand_modifier modifier, addr_space_t as)
7487 {
7488   rtx result, subtarget;
7489   tree inner, offset;
7490   HOST_WIDE_INT bitsize, bitpos;
7491   int volatilep, unsignedp;
7492   enum machine_mode mode1;
7493 
7494   /* If we are taking the address of a constant and are at the top level,
7495      we have to use output_constant_def since we can't call force_const_mem
7496      at top level.  */
7497   /* ??? This should be considered a front-end bug.  We should not be
7498      generating ADDR_EXPR of something that isn't an LVALUE.  The only
7499      exception here is STRING_CST.  */
7500   if (CONSTANT_CLASS_P (exp))
7501     {
7502       result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7503       if (modifier < EXPAND_SUM)
7504 	result = force_operand (result, target);
7505       return result;
7506     }
7507 
7508   /* Everything must be something allowed by is_gimple_addressable.  */
7509   switch (TREE_CODE (exp))
7510     {
7511     case INDIRECT_REF:
7512       /* This case will happen via recursion for &a->b.  */
7513       return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7514 
7515     case MEM_REF:
7516       {
7517 	tree tem = TREE_OPERAND (exp, 0);
7518 	if (!integer_zerop (TREE_OPERAND (exp, 1)))
7519 	  tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7520 	return expand_expr (tem, target, tmode, modifier);
7521       }
7522 
7523     case CONST_DECL:
7524       /* Expand the initializer like constants above.  */
7525       result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7526 					   0, modifier), 0);
7527       if (modifier < EXPAND_SUM)
7528 	result = force_operand (result, target);
7529       return result;
7530 
7531     case REALPART_EXPR:
7532       /* The real part of the complex number is always first, therefore
7533 	 the address is the same as the address of the parent object.  */
7534       offset = 0;
7535       bitpos = 0;
7536       inner = TREE_OPERAND (exp, 0);
7537       break;
7538 
7539     case IMAGPART_EXPR:
7540       /* The imaginary part of the complex number is always second.
7541 	 The expression is therefore always offset by the size of the
7542 	 scalar type.  */
7543       offset = 0;
7544       bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7545       inner = TREE_OPERAND (exp, 0);
7546       break;
7547 
7548     case COMPOUND_LITERAL_EXPR:
7549       /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7550 	 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7551 	 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7552 	 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7553 	 the initializers aren't gimplified.  */
7554       if (COMPOUND_LITERAL_EXPR_DECL (exp)
7555 	  && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7556 	return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7557 					target, tmode, modifier, as);
7558       /* FALLTHRU */
7559     default:
7560       /* If the object is a DECL, then expand it for its rtl.  Don't bypass
7561 	 expand_expr, as that can have various side effects; LABEL_DECLs for
7562 	 example, may not have their DECL_RTL set yet.  Expand the rtl of
7563 	 CONSTRUCTORs too, which should yield a memory reference for the
7564 	 constructor's contents.  Assume language specific tree nodes can
7565 	 be expanded in some interesting way.  */
7566       gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7567       if (DECL_P (exp)
7568 	  || TREE_CODE (exp) == CONSTRUCTOR
7569 	  || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7570 	{
7571 	  result = expand_expr (exp, target, tmode,
7572 				modifier == EXPAND_INITIALIZER
7573 				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7574 
7575 	  /* If the DECL isn't in memory, then the DECL wasn't properly
7576 	     marked TREE_ADDRESSABLE, which will be either a front-end
7577 	     or a tree optimizer bug.  */
7578 
7579 	  if (TREE_ADDRESSABLE (exp)
7580 	      && ! MEM_P (result)
7581 	      && ! targetm.calls.allocate_stack_slots_for_args())
7582 	    {
7583 	      error ("local frame unavailable (naked function?)");
7584 	      return result;
7585 	    }
7586 	  else
7587 	    gcc_assert (MEM_P (result));
7588 	  result = XEXP (result, 0);
7589 
7590 	  /* ??? Is this needed anymore?  */
7591 	  if (DECL_P (exp))
7592 	    TREE_USED (exp) = 1;
7593 
7594 	  if (modifier != EXPAND_INITIALIZER
7595 	      && modifier != EXPAND_CONST_ADDRESS
7596 	      && modifier != EXPAND_SUM)
7597 	    result = force_operand (result, target);
7598 	  return result;
7599 	}
7600 
7601       /* Pass FALSE as the last argument to get_inner_reference although
7602 	 we are expanding to RTL.  The rationale is that we know how to
7603 	 handle "aligning nodes" here: we can just bypass them because
7604 	 they won't change the final object whose address will be returned
7605 	 (they actually exist only for that purpose).  */
7606       inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7607 				   &mode1, &unsignedp, &volatilep, false);
7608       break;
7609     }
7610 
7611   /* We must have made progress.  */
7612   gcc_assert (inner != exp);
7613 
7614   subtarget = offset || bitpos ? NULL_RTX : target;
7615   /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7616      inner alignment, force the inner to be sufficiently aligned.  */
7617   if (CONSTANT_CLASS_P (inner)
7618       && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7619     {
7620       inner = copy_node (inner);
7621       TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7622       TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7623       TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7624     }
7625   result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7626 
7627   if (offset)
7628     {
7629       rtx tmp;
7630 
7631       if (modifier != EXPAND_NORMAL)
7632 	result = force_operand (result, NULL);
7633       tmp = expand_expr (offset, NULL_RTX, tmode,
7634 			 modifier == EXPAND_INITIALIZER
7635 			  ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7636 
7637       result = convert_memory_address_addr_space (tmode, result, as);
7638       tmp = convert_memory_address_addr_space (tmode, tmp, as);
7639 
7640       if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7641 	result = simplify_gen_binary (PLUS, tmode, result, tmp);
7642       else
7643 	{
7644 	  subtarget = bitpos ? NULL_RTX : target;
7645 	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7646 					1, OPTAB_LIB_WIDEN);
7647 	}
7648     }
7649 
7650   if (bitpos)
7651     {
7652       /* Someone beforehand should have rejected taking the address
7653 	 of such an object.  */
7654       gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7655 
7656       result = convert_memory_address_addr_space (tmode, result, as);
7657       result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7658       if (modifier < EXPAND_SUM)
7659 	result = force_operand (result, target);
7660     }
7661 
7662   return result;
7663 }
7664 
7665 /* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
7666    The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
7667 
7668 static rtx
7669 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7670 		       enum expand_modifier modifier)
7671 {
7672   addr_space_t as = ADDR_SPACE_GENERIC;
7673   enum machine_mode address_mode = Pmode;
7674   enum machine_mode pointer_mode = ptr_mode;
7675   enum machine_mode rmode;
7676   rtx result;
7677 
7678   /* Target mode of VOIDmode says "whatever's natural".  */
7679   if (tmode == VOIDmode)
7680     tmode = TYPE_MODE (TREE_TYPE (exp));
7681 
7682   if (POINTER_TYPE_P (TREE_TYPE (exp)))
7683     {
7684       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7685       address_mode = targetm.addr_space.address_mode (as);
7686       pointer_mode = targetm.addr_space.pointer_mode (as);
7687     }
7688 
7689   /* We can get called with some Weird Things if the user does silliness
7690      like "(short) &a".  In that case, convert_memory_address won't do
7691      the right thing, so ignore the given target mode.  */
7692   if (tmode != address_mode && tmode != pointer_mode)
7693     tmode = address_mode;
7694 
7695   result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7696 				    tmode, modifier, as);
7697 
7698   /* Despite expand_expr's claims concerning ignoring TMODE when not
7699      strictly convenient, stuff breaks if we don't honor it.  Note
7700      that combined with the above, we only do this for pointer modes.  */
7701   rmode = GET_MODE (result);
7702   if (rmode == VOIDmode)
7703     rmode = tmode;
7704   if (rmode != tmode)
7705     result = convert_memory_address_addr_space (tmode, result, as);
7706 
7707   return result;
7708 }
7709 
7710 /* Generate code for computing CONSTRUCTOR EXP.
7711    An rtx for the computed value is returned.  If AVOID_TEMP_MEM
7712    is TRUE, NULL is returned instead of creating a temporary variable
7713    in memory, and the caller needs to handle it differently.  */
7714 
7715 static rtx
7716 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7717 		    bool avoid_temp_mem)
7718 {
7719   tree type = TREE_TYPE (exp);
7720   enum machine_mode mode = TYPE_MODE (type);
7721 
7722   /* Try to avoid creating a temporary at all.  This is possible
7723      if all of the initializer is zero.
7724      FIXME: try to handle all [0..255] initializers we can handle
7725      with memset.  */
7726   if (TREE_STATIC (exp)
7727       && !TREE_ADDRESSABLE (exp)
7728       && target != 0 && mode == BLKmode
7729       && all_zeros_p (exp))
7730     {
7731       clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7732       return target;
7733     }
7734 
7735   /* All elts simple constants => refer to a constant in memory.  But
7736      if this is a non-BLKmode mode, let it store a field at a time
7737      since that should make a CONST_INT or CONST_DOUBLE when we
7738      fold.  Likewise, if we have a target we can use, it is best to
7739      store directly into the target unless the type is large enough
7740      that memcpy will be used.  If we are making an initializer and
7741      all operands are constant, put it in memory as well.
7742 
7743      FIXME: Avoid trying to fill vector constructors piece-meal.
7744      Output them with output_constant_def below unless we're sure
7745      they're zeros.  This should go away when vector initializers
7746      are treated like VECTOR_CST instead of arrays.  */
7747   if ((TREE_STATIC (exp)
7748        && ((mode == BLKmode
7749 	    && ! (target != 0 && safe_from_p (target, exp, 1)))
7750 		  || TREE_ADDRESSABLE (exp)
7751 		  || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7752 		      && (! MOVE_BY_PIECES_P
7753 				     (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7754 				      TYPE_ALIGN (type)))
7755 		      && ! mostly_zeros_p (exp))))
7756       || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7757 	  && TREE_CONSTANT (exp)))
7758     {
7759       rtx constructor;
7760 
7761       if (avoid_temp_mem)
7762 	return NULL_RTX;
7763 
7764       constructor = expand_expr_constant (exp, 1, modifier);
7765 
7766       if (modifier != EXPAND_CONST_ADDRESS
7767 	  && modifier != EXPAND_INITIALIZER
7768 	  && modifier != EXPAND_SUM)
7769 	constructor = validize_mem (constructor);
7770 
7771       return constructor;
7772     }
7773 
7774   /* Handle calls that pass values in multiple non-contiguous
7775      locations.  The Irix 6 ABI has examples of this.  */
7776   if (target == 0 || ! safe_from_p (target, exp, 1)
7777       || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7778     {
7779       if (avoid_temp_mem)
7780 	return NULL_RTX;
7781 
7782       target
7783 	= assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7784 						    | (TREE_READONLY (exp)
7785 						       * TYPE_QUAL_CONST))),
7786 		       TREE_ADDRESSABLE (exp), 1);
7787     }
7788 
7789   store_constructor (exp, target, 0, int_expr_size (exp));
7790   return target;
7791 }
7792 
7793 
7794 /* expand_expr: generate code for computing expression EXP.
7795    An rtx for the computed value is returned.  The value is never null.
7796    In the case of a void EXP, const0_rtx is returned.
7797 
7798    The value may be stored in TARGET if TARGET is nonzero.
7799    TARGET is just a suggestion; callers must assume that
7800    the rtx returned may not be the same as TARGET.
7801 
7802    If TARGET is CONST0_RTX, it means that the value will be ignored.
7803 
7804    If TMODE is not VOIDmode, it suggests generating the
7805    result in mode TMODE.  But this is done only when convenient.
7806    Otherwise, TMODE is ignored and the value is generated in its natural mode.
7807    TMODE is just a suggestion; callers must assume that
7808    the rtx returned may not have mode TMODE.
7809 
7810    Note that TARGET may have neither TMODE nor MODE.  In that case, it
7811    probably will not be used.
7812 
7813    If MODIFIER is EXPAND_SUM then when EXP is an addition
7814    we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7815    or a nest of (PLUS ...) and (MINUS ...) where the terms are
7816    products as above, or REG or MEM, or constant.
7817    Ordinarily in such cases we would output mul or add instructions
7818    and then return a pseudo reg containing the sum.
7819 
7820    EXPAND_INITIALIZER is much like EXPAND_SUM except that
7821    it also marks a label as absolutely required (it can't be dead).
7822    It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7823    This is used for outputting expressions used in initializers.
7824 
7825    EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7826    with a constant address even if that address is not normally legitimate.
7827    EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7828 
7829    EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7830    a call parameter.  Such targets require special care as we haven't yet
7831    marked TARGET so that it's safe from being trashed by libcalls.  We
7832    don't want to use TARGET for anything but the final result;
7833    Intermediate values must go elsewhere.   Additionally, calls to
7834    intermediate values must go elsewhere.  Additionally, calls to
7835 
7836    If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7837    address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7838    DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
7839    COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7840    recursively.
7841 
7842    If INNER_REFERENCE_P is true, we are expanding an inner reference.
7843    In this case, we don't adjust a returned MEM rtx that wouldn't be
7844    sufficiently aligned for its mode; instead, it's up to the caller
7845    to deal with it afterwards.  This is used to make sure that unaligned
7846    base objects for which out-of-bounds accesses are supported, for
7847    example record types with trailing arrays, aren't realigned behind
7848    the back of the caller.
7849    The normal operating mode is to pass FALSE for this parameter.  */
7850 
7851 rtx
7852 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7853 		  enum expand_modifier modifier, rtx *alt_rtl,
7854 		  bool inner_reference_p)
7855 {
7856   rtx ret;
7857 
7858   /* Handle ERROR_MARK before anybody tries to access its type.  */
7859   if (TREE_CODE (exp) == ERROR_MARK
7860       || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7861     {
7862       ret = CONST0_RTX (tmode);
7863       return ret ? ret : const0_rtx;
7864     }
7865 
7866   ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
7867 			    inner_reference_p);
7868   return ret;
7869 }
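
/* Illustrative note on the modifiers (hypothetical expression, assuming a
   4-byte int): expanding the address "&a[2]" of a global int array with
   EXPAND_NORMAL typically yields a pseudo REG holding the address, whereas
   EXPAND_SUM or EXPAND_INITIALIZER may return the bare form

     (plus (symbol_ref "a") (const_int 8))

   leaving it to the caller to legitimize or emit it, as described in the
   comment before expand_expr_real above.  */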
7870 
7871 /* Try to expand the conditional expression which is represented by
7872    TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds,
7873    return the RTL register that represents the result.  Otherwise return
7874    NULL_RTX.  */
7875 
7876 static rtx
7877 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
7878 			      tree treeop1 ATTRIBUTE_UNUSED,
7879 			      tree treeop2 ATTRIBUTE_UNUSED)
7880 {
7881 #ifdef HAVE_conditional_move
7882   rtx insn;
7883   rtx op00, op01, op1, op2;
7884   enum rtx_code comparison_code;
7885   enum machine_mode comparison_mode;
7886   gimple srcstmt;
7887   rtx temp;
7888   tree type = TREE_TYPE (treeop1);
7889   int unsignedp = TYPE_UNSIGNED (type);
7890   enum machine_mode mode = TYPE_MODE (type);
7891   enum machine_mode orig_mode = mode;
7892 
7893   /* If we cannot do a conditional move on the mode, try doing it
7894      with the promoted mode. */
7895   if (!can_conditionally_move_p (mode))
7896     {
7897       mode = promote_mode (type, mode, &unsignedp);
7898       if (!can_conditionally_move_p (mode))
7899 	return NULL_RTX;
7900       temp = assign_temp (type, 0, 0); /* Use promoted mode for temp.  */
7901     }
7902   else
7903     temp = assign_temp (type, 0, 1);
7904 
7905   start_sequence ();
7906   expand_operands (treeop1, treeop2,
7907 		   temp, &op1, &op2, EXPAND_NORMAL);
7908 
7909   if (TREE_CODE (treeop0) == SSA_NAME
7910       && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
7911     {
7912       tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
7913       enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
7914       op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
7915       op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
7916       comparison_mode = TYPE_MODE (type);
7917       unsignedp = TYPE_UNSIGNED (type);
7918       comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7919     }
7920   else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
7921     {
7922       tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
7923       enum tree_code cmpcode = TREE_CODE (treeop0);
7924       op00 = expand_normal (TREE_OPERAND (treeop0, 0));
7925       op01 = expand_normal (TREE_OPERAND (treeop0, 1));
7926       unsignedp = TYPE_UNSIGNED (type);
7927       comparison_mode = TYPE_MODE (type);
7928       comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
7929     }
7930   else
7931     {
7932       op00 = expand_normal (treeop0);
7933       op01 = const0_rtx;
7934       comparison_code = NE;
7935       comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
7936     }
7937 
7938   if (GET_MODE (op1) != mode)
7939     op1 = gen_lowpart (mode, op1);
7940 
7941   if (GET_MODE (op2) != mode)
7942     op2 = gen_lowpart (mode, op2);
7943 
7944   /* Try to emit the conditional move.  */
7945   insn = emit_conditional_move (temp, comparison_code,
7946 				op00, op01, comparison_mode,
7947 				op1, op2, mode,
7948 				unsignedp);
7949 
7950   /* If we could do the conditional move, emit the sequence,
7951      and return.  */
7952   if (insn)
7953     {
7954       rtx seq = get_insns ();
7955       end_sequence ();
7956       emit_insn (seq);
7957       return convert_modes (orig_mode, mode, temp, 0);
7958     }
7959 
7960   /* Otherwise discard the sequence and fall back to code with
7961      branches.  */
7962   end_sequence ();
7963 #endif
7964   return NULL_RTX;
7965 }
7966 
7967 rtx
7968 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7969 		    enum expand_modifier modifier)
7970 {
7971   rtx op0, op1, op2, temp;
7972   tree type;
7973   int unsignedp;
7974   enum machine_mode mode;
7975   enum tree_code code = ops->code;
7976   optab this_optab;
7977   rtx subtarget, original_target;
7978   int ignore;
7979   bool reduce_bit_field;
7980   location_t loc = ops->location;
7981   tree treeop0, treeop1, treeop2;
7982 #define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			  \
7983 				 ? reduce_to_bit_field_precision ((expr), \
7984 								  target, \
7985 								  type)	  \
7986 				 : (expr))
7987 
7988   type = ops->type;
7989   mode = TYPE_MODE (type);
7990   unsignedp = TYPE_UNSIGNED (type);
7991 
7992   treeop0 = ops->op0;
7993   treeop1 = ops->op1;
7994   treeop2 = ops->op2;
7995 
7996   /* We should be called only on simple (binary or unary) expressions,
7997      exactly those that are valid in gimple expressions that aren't
7998      GIMPLE_SINGLE_RHS (or invalid).  */
7999   gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8000 	      || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8001 	      || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8002 
8003   ignore = (target == const0_rtx
8004 	    || ((CONVERT_EXPR_CODE_P (code)
8005 		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8006 		&& TREE_CODE (type) == VOID_TYPE));
8007 
8008   /* We should be called only if we need the result.  */
8009   gcc_assert (!ignore);
8010 
8011   /* An operation in what may be a bit-field type needs the
8012      result to be reduced to the precision of the bit-field type,
8013      which is narrower than that of the type's mode.  */
8014   reduce_bit_field = (INTEGRAL_TYPE_P (type)
8015 		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8016 
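  /* For instance, for an unsigned 24-bit integral type whose mode is
     SImode, reduce_to_bit_field_precision masks the 32-bit result back
     down to 24 bits; for a signed type it sign-extends from the sign
     bit of the narrower precision instead.  */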
8017   if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8018     target = 0;
8019 
8020   /* Use subtarget as the target for operand 0 of a binary operation.  */
8021   subtarget = get_subtarget (target);
8022   original_target = target;
8023 
8024   switch (code)
8025     {
8026     case NON_LVALUE_EXPR:
8027     case PAREN_EXPR:
8028     CASE_CONVERT:
8029       if (treeop0 == error_mark_node)
8030 	return const0_rtx;
8031 
8032       if (TREE_CODE (type) == UNION_TYPE)
8033 	{
8034 	  tree valtype = TREE_TYPE (treeop0);
8035 
8036 	  /* If both input and output are BLKmode, this conversion isn't doing
8037 	     anything except possibly changing memory attributes.  */
8038 	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8039 	    {
8040 	      rtx result = expand_expr (treeop0, target, tmode,
8041 					modifier);
8042 
8043 	      result = copy_rtx (result);
8044 	      set_mem_attributes (result, type, 0);
8045 	      return result;
8046 	    }
8047 
8048 	  if (target == 0)
8049 	    {
8050 	      if (TYPE_MODE (type) != BLKmode)
8051 		target = gen_reg_rtx (TYPE_MODE (type));
8052 	      else
8053 		target = assign_temp (type, 1, 1);
8054 	    }
8055 
8056 	  if (MEM_P (target))
8057 	    /* Store data into beginning of memory target.  */
8058 	    store_expr (treeop0,
8059 			adjust_address (target, TYPE_MODE (valtype), 0),
8060 			modifier == EXPAND_STACK_PARM,
8061 			false);
8062 
8063 	  else
8064 	    {
8065 	      gcc_assert (REG_P (target));
8066 
8067 	      /* Store this field into a union of the proper type.  */
8068 	      store_field (target,
8069 			   MIN ((int_size_in_bytes (TREE_TYPE
8070 						    (treeop0))
8071 				 * BITS_PER_UNIT),
8072 				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8073 			   0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8074 	    }
8075 
8076 	  /* Return the entire union.  */
8077 	  return target;
8078 	}
8079 
8080       if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8081 	{
8082 	  op0 = expand_expr (treeop0, target, VOIDmode,
8083 			     modifier);
8084 
8085 	  /* If the signedness of the conversion differs and OP0 is
8086 	     a promoted SUBREG, clear that indication since we now
8087 	     have to do the proper extension.  */
8088 	  if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8089 	      && GET_CODE (op0) == SUBREG)
8090 	    SUBREG_PROMOTED_VAR_P (op0) = 0;
8091 
8092 	  return REDUCE_BIT_FIELD (op0);
8093 	}
8094 
8095       op0 = expand_expr (treeop0, NULL_RTX, mode,
8096 			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8097       if (GET_MODE (op0) == mode)
8098 	;
8099 
8100       /* If OP0 is a constant, just convert it into the proper mode.  */
8101       else if (CONSTANT_P (op0))
8102 	{
8103 	  tree inner_type = TREE_TYPE (treeop0);
8104 	  enum machine_mode inner_mode = GET_MODE (op0);
8105 
8106 	  if (inner_mode == VOIDmode)
8107 	    inner_mode = TYPE_MODE (inner_type);
8108 
8109 	  if (modifier == EXPAND_INITIALIZER)
8110 	    op0 = simplify_gen_subreg (mode, op0, inner_mode,
8111 				       subreg_lowpart_offset (mode,
8112 							      inner_mode));
8113 	  else
8114 	    op0 = convert_modes (mode, inner_mode, op0,
8115 				 TYPE_UNSIGNED (inner_type));
8116 	}
8117 
8118       else if (modifier == EXPAND_INITIALIZER)
8119 	op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8120 
8121       else if (target == 0)
8122 	op0 = convert_to_mode (mode, op0,
8123 			       TYPE_UNSIGNED (TREE_TYPE
8124 					      (treeop0)));
8125       else
8126 	{
8127 	  convert_move (target, op0,
8128 			TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8129 	  op0 = target;
8130 	}
8131 
8132       return REDUCE_BIT_FIELD (op0);
8133 
8134     case ADDR_SPACE_CONVERT_EXPR:
8135       {
8136 	tree treeop0_type = TREE_TYPE (treeop0);
8137 	addr_space_t as_to;
8138 	addr_space_t as_from;
8139 
8140 	gcc_assert (POINTER_TYPE_P (type));
8141 	gcc_assert (POINTER_TYPE_P (treeop0_type));
8142 
8143 	as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8144 	as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8145 
8146         /* Conversions between pointers to the same address space should
8147 	   have been implemented via CONVERT_EXPR / NOP_EXPR.  */
8148 	gcc_assert (as_to != as_from);
8149 
8150         /* Ask target code to handle conversion between pointers
8151 	   to overlapping address spaces.  */
8152 	if (targetm.addr_space.subset_p (as_to, as_from)
8153 	    || targetm.addr_space.subset_p (as_from, as_to))
8154 	  {
8155 	    op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8156 	    op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8157 	    gcc_assert (op0);
8158 	    return op0;
8159 	  }
8160 
8161 	/* For disjoint address spaces, converting anything but
8162 	   a null pointer invokes undefined behaviour.  We simply
8163 	   always return a null pointer here.  */
8164 	return CONST0_RTX (mode);
8165       }
8166 
8167     case POINTER_PLUS_EXPR:
8168       /* Even though the sizetype mode and the pointer's mode can be different,
8169          expand is able to handle this correctly and get the correct result out
8170          of the PLUS_EXPR code.  */
8171       /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8172          if sizetype precision is smaller than pointer precision.  */
8173       if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8174 	treeop1 = fold_convert_loc (loc, type,
8175 				    fold_convert_loc (loc, ssizetype,
8176 						      treeop1));
8177       /* If sizetype precision is larger than pointer precision, truncate the
8178 	 offset to have matching modes.  */
8179       else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8180 	treeop1 = fold_convert_loc (loc, type, treeop1);
8181 
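      /* Fall through.  */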
8182     case PLUS_EXPR:
8183       /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8184 	 something else, make sure we add the register to the constant and
8185 	 then to the other thing.  This case can occur during strength
8186 	 reduction and doing it this way will produce better code if the
8187 	 frame pointer or argument pointer is eliminated.
8188 
8189 	 fold-const.c will ensure that the constant is always in the inner
8190 	 PLUS_EXPR, so the only case we need to do anything about is if
8191 	 sp, ap, or fp is our second argument, in which case we must swap
8192 	 the innermost first argument and our second argument.  */
8193 
8194       if (TREE_CODE (treeop0) == PLUS_EXPR
8195 	  && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8196 	  && TREE_CODE (treeop1) == VAR_DECL
8197 	  && (DECL_RTL (treeop1) == frame_pointer_rtx
8198 	      || DECL_RTL (treeop1) == stack_pointer_rtx
8199 	      || DECL_RTL (treeop1) == arg_pointer_rtx))
8200 	{
8201 	  gcc_unreachable ();
8202 	}
8203 
8204       /* If the result is to be ptr_mode and we are adding an integer to
8205 	 something, we might be forming a constant.  So try to use
8206 	 plus_constant.  If it produces a sum and we can't accept it,
8207 	 use force_operand.  This allows P = &ARR[const] to generate
8208 	 efficient code on machines where a SYMBOL_REF is not a valid
8209 	 address.
8210 
8211 	 If this is an EXPAND_SUM call, always return the sum.  */
8212       if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8213 	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8214 	{
8215 	  if (modifier == EXPAND_STACK_PARM)
8216 	    target = 0;
8217 	  if (TREE_CODE (treeop0) == INTEGER_CST
8218 	      && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8219 	      && TREE_CONSTANT (treeop1))
8220 	    {
8221 	      rtx constant_part;
8222 
8223 	      op1 = expand_expr (treeop1, subtarget, VOIDmode,
8224 				 EXPAND_SUM);
8225 	      /* Use immed_double_const to ensure that the constant is
8226 		 truncated according to the mode of OP1, then sign extended
8227 		 to a HOST_WIDE_INT.  Using the constant directly can result
8228 		 in non-canonical RTL in a 64x32 cross compile.  */
8229 	      constant_part
8230 		= immed_double_const (TREE_INT_CST_LOW (treeop0),
8231 				      (HOST_WIDE_INT) 0,
8232 				      TYPE_MODE (TREE_TYPE (treeop1)));
8233 	      op1 = plus_constant (mode, op1, INTVAL (constant_part));
8234 	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8235 		op1 = force_operand (op1, target);
8236 	      return REDUCE_BIT_FIELD (op1);
8237 	    }
8238 
8239 	  else if (TREE_CODE (treeop1) == INTEGER_CST
8240 		   && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8241 		   && TREE_CONSTANT (treeop0))
8242 	    {
8243 	      rtx constant_part;
8244 
8245 	      op0 = expand_expr (treeop0, subtarget, VOIDmode,
8246 				 (modifier == EXPAND_INITIALIZER
8247 				 ? EXPAND_INITIALIZER : EXPAND_SUM));
8248 	      if (! CONSTANT_P (op0))
8249 		{
8250 		  op1 = expand_expr (treeop1, NULL_RTX,
8251 				     VOIDmode, modifier);
8252 		  /* Return a PLUS if modifier says it's OK.  */
8253 		  if (modifier == EXPAND_SUM
8254 		      || modifier == EXPAND_INITIALIZER)
8255 		    return simplify_gen_binary (PLUS, mode, op0, op1);
8256 		  goto binop2;
8257 		}
8258 	      /* Use immed_double_const to ensure that the constant is
8259 		 truncated according to the mode of OP0, then sign extended
8260 		 to a HOST_WIDE_INT.  Using the constant directly can result
8261 		 in non-canonical RTL in a 64x32 cross compile.  */
8262 	      constant_part
8263 		= immed_double_const (TREE_INT_CST_LOW (treeop1),
8264 				      (HOST_WIDE_INT) 0,
8265 				      TYPE_MODE (TREE_TYPE (treeop0)));
8266 	      op0 = plus_constant (mode, op0, INTVAL (constant_part));
8267 	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8268 		op0 = force_operand (op0, target);
8269 	      return REDUCE_BIT_FIELD (op0);
8270 	    }
8271 	}
8272 
8273       /* Use TER to expand pointer addition of a negated value
8274 	 as pointer subtraction.  */
8275       if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8276 	   || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8277 	       && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8278 	  && TREE_CODE (treeop1) == SSA_NAME
8279 	  && TYPE_MODE (TREE_TYPE (treeop0))
8280 	     == TYPE_MODE (TREE_TYPE (treeop1)))
8281 	{
8282 	  gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8283 	  if (def)
8284 	    {
8285 	      treeop1 = gimple_assign_rhs1 (def);
8286 	      code = MINUS_EXPR;
8287 	      goto do_minus;
8288 	    }
8289 	}
8290 
8291       /* No sense saving up arithmetic to be done
8292 	 if it's all in the wrong mode to form part of an address.
8293 	 And force_operand won't know whether to sign-extend or
8294 	 zero-extend.  */
8295       if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8296 	  || mode != ptr_mode)
8297 	{
8298 	  expand_operands (treeop0, treeop1,
8299 			   subtarget, &op0, &op1, EXPAND_NORMAL);
8300 	  if (op0 == const0_rtx)
8301 	    return op1;
8302 	  if (op1 == const0_rtx)
8303 	    return op0;
8304 	  goto binop2;
8305 	}
8306 
8307       expand_operands (treeop0, treeop1,
8308 		       subtarget, &op0, &op1, modifier);
8309       return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8310 
8311     case MINUS_EXPR:
8312     do_minus:
8313       /* For initializers, we are allowed to return a MINUS of two
8314 	 symbolic constants.  Here we handle all cases when both operands
8315 	 are constant.  */
8316       /* Handle difference of two symbolic constants,
8317 	 for the sake of an initializer.  */
8318       if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8319 	  && really_constant_p (treeop0)
8320 	  && really_constant_p (treeop1))
8321 	{
8322 	  expand_operands (treeop0, treeop1,
8323 			   NULL_RTX, &op0, &op1, modifier);
8324 
8325 	  /* If the last operand is a CONST_INT, use plus_constant of
8326 	     the negated constant.  Else make the MINUS.  */
8327 	  if (CONST_INT_P (op1))
8328 	    return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8329 						    -INTVAL (op1)));
8330 	  else
8331 	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8332 	}
8333 
8334       /* No sense saving up arithmetic to be done
8335 	 if it's all in the wrong mode to form part of an address.
8336 	 And force_operand won't know whether to sign-extend or
8337 	 zero-extend.  */
8338       if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8339 	  || mode != ptr_mode)
8340 	goto binop;
8341 
8342       expand_operands (treeop0, treeop1,
8343 		       subtarget, &op0, &op1, modifier);
8344 
8345       /* Convert A - const to A + (-const).  */
8346       if (CONST_INT_P (op1))
8347 	{
8348 	  op1 = negate_rtx (mode, op1);
8349 	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8350 	}
8351 
8352       goto binop2;
8353 
8354     case WIDEN_MULT_PLUS_EXPR:
8355     case WIDEN_MULT_MINUS_EXPR:
8356       expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8357       op2 = expand_normal (treeop2);
8358       target = expand_widen_pattern_expr (ops, op0, op1, op2,
8359 					  target, unsignedp);
8360       return target;
8361 
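    /* For example (sketch), a widening multiply such as
	 (long long) i * (long long) j
       with 32-bit int operands can be expanded directly through
       smul_widen_optab / umul_widen_optab instead of extending both
       operands and doing a full-width multiply.  */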
8362     case WIDEN_MULT_EXPR:
8363       /* If first operand is constant, swap them.
8364 	 Thus the following special case checks need only
8365 	 check the second operand.  */
8366       if (TREE_CODE (treeop0) == INTEGER_CST)
8367 	{
8368 	  tree t1 = treeop0;
8369 	  treeop0 = treeop1;
8370 	  treeop1 = t1;
8371 	}
8372 
8373       /* First, check if we have a multiplication of one signed and one
8374 	 unsigned operand.  */
8375       if (TREE_CODE (treeop1) != INTEGER_CST
8376 	  && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8377 	      != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8378 	{
8379 	  enum machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8380 	  this_optab = usmul_widen_optab;
8381 	  if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8382 		!= CODE_FOR_nothing)
8383 	    {
8384 	      if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8385 		expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8386 				 EXPAND_NORMAL);
8387 	      else
8388 		expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8389 				 EXPAND_NORMAL);
8390 	      /* op0 and op1 might still be constant, despite the above
8391 		 != INTEGER_CST check.  Handle it.  */
8392 	      if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8393 		{
8394 		  op0 = convert_modes (innermode, mode, op0, true);
8395 		  op1 = convert_modes (innermode, mode, op1, false);
8396 		  return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8397 							target, unsignedp));
8398 		}
8399 	      goto binop3;
8400 	    }
8401 	}
8402       /* Check for a multiplication with matching signedness.  */
8403       else if ((TREE_CODE (treeop1) == INTEGER_CST
8404 		&& int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8405 	       || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8406 		   == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8407 	{
8408 	  tree op0type = TREE_TYPE (treeop0);
8409 	  enum machine_mode innermode = TYPE_MODE (op0type);
8410 	  bool zextend_p = TYPE_UNSIGNED (op0type);
8411 	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8412 	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8413 
8414 	  if (TREE_CODE (treeop0) != INTEGER_CST)
8415 	    {
8416 	      if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8417 		    != CODE_FOR_nothing)
8418 		{
8419 		  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8420 				   EXPAND_NORMAL);
8421 		  /* op0 and op1 might still be constant, despite the above
8422 		     != INTEGER_CST check.  Handle it.  */
8423 		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8424 		    {
8425 		     widen_mult_const:
8426 		      op0 = convert_modes (innermode, mode, op0, zextend_p);
8427 		      op1
8428 			= convert_modes (innermode, mode, op1,
8429 					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8430 		      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8431 							    target,
8432 							    unsignedp));
8433 		    }
8434 		  temp = expand_widening_mult (mode, op0, op1, target,
8435 					       unsignedp, this_optab);
8436 		  return REDUCE_BIT_FIELD (temp);
8437 		}
8438 	      if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8439 		    != CODE_FOR_nothing
8440 		  && innermode == word_mode)
8441 		{
8442 		  rtx htem, hipart;
8443 		  op0 = expand_normal (treeop0);
8444 		  if (TREE_CODE (treeop1) == INTEGER_CST)
8445 		    op1 = convert_modes (innermode, mode,
8446 					 expand_normal (treeop1),
8447 					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8448 		  else
8449 		    op1 = expand_normal (treeop1);
8450 		  /* op0 and op1 might still be constant, despite the above
8451 		     != INTEGER_CST check.  Handle it.  */
8452 		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8453 		    goto widen_mult_const;
8454 		  temp = expand_binop (mode, other_optab, op0, op1, target,
8455 				       unsignedp, OPTAB_LIB_WIDEN);
8456 		  hipart = gen_highpart (innermode, temp);
8457 		  htem = expand_mult_highpart_adjust (innermode, hipart,
8458 						      op0, op1, hipart,
8459 						      zextend_p);
8460 		  if (htem != hipart)
8461 		    emit_move_insn (hipart, htem);
8462 		  return REDUCE_BIT_FIELD (temp);
8463 		}
8464 	    }
8465 	}
8466       treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8467       treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8468       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8469       return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8470 
8471     case FMA_EXPR:
8472       {
8473 	optab opt = fma_optab;
8474 	gimple def0, def2;
8475 
8476 	/* If there is no insn for FMA, emit it as __builtin_fma{,f,l}
8477 	   call.  */
8478 	if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8479 	  {
8480 	    tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8481 	    tree call_expr;
8482 
8483 	    gcc_assert (fn != NULL_TREE);
8484 	    call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8485 	    return expand_builtin (call_expr, target, subtarget, mode, false);
8486 	  }
8487 
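	/* Look for negated operands so that the fused negate forms
	   (FNMA, FMS, FNMS) can be used directly when the target
	   provides them.  */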
8488 	def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8489 	def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8490 
8491 	op0 = op2 = NULL;
8492 
8493 	if (def0 && def2
8494 	    && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8495 	  {
8496 	    opt = fnms_optab;
8497 	    op0 = expand_normal (gimple_assign_rhs1 (def0));
8498 	    op2 = expand_normal (gimple_assign_rhs1 (def2));
8499 	  }
8500 	else if (def0
8501 		 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8502 	  {
8503 	    opt = fnma_optab;
8504 	    op0 = expand_normal (gimple_assign_rhs1 (def0));
8505 	  }
8506 	else if (def2
8507 		 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8508 	  {
8509 	    opt = fms_optab;
8510 	    op2 = expand_normal (gimple_assign_rhs1 (def2));
8511 	  }
8512 
8513 	if (op0 == NULL)
8514 	  op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8515 	if (op2 == NULL)
8516 	  op2 = expand_normal (treeop2);
8517 	op1 = expand_normal (treeop1);
8518 
8519 	return expand_ternary_op (TYPE_MODE (type), opt,
8520 				  op0, op1, op2, target, 0);
8521       }
8522 
8523     case MULT_EXPR:
8524       /* If this is a fixed-point operation, then we cannot use the code
8525 	 below because "expand_mult" doesn't support sat/no-sat fixed-point
8526          multiplications.   */
8527       if (ALL_FIXED_POINT_MODE_P (mode))
8528 	goto binop;
8529 
8530       /* If first operand is constant, swap them.
8531 	 Thus the following special case checks need only
8532 	 check the second operand.  */
8533       if (TREE_CODE (treeop0) == INTEGER_CST)
8534 	{
8535 	  tree t1 = treeop0;
8536 	  treeop0 = treeop1;
8537 	  treeop1 = t1;
8538 	}
8539 
8540       /* Attempt to return something suitable for generating an
8541 	 indexed address, for machines that support that.  */
8542 
8543       if (modifier == EXPAND_SUM && mode == ptr_mode
8544 	  && host_integerp (treeop1, 0))
8545 	{
8546 	  tree exp1 = treeop1;
8547 
8548 	  op0 = expand_expr (treeop0, subtarget, VOIDmode,
8549 			     EXPAND_SUM);
8550 
8551 	  if (!REG_P (op0))
8552 	    op0 = force_operand (op0, NULL_RTX);
8553 	  if (!REG_P (op0))
8554 	    op0 = copy_to_mode_reg (mode, op0);
8555 
8556 	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8557 			       gen_int_mode (tree_low_cst (exp1, 0),
8558 					     TYPE_MODE (TREE_TYPE (exp1)))));
8559 	}
8560 
8561       if (modifier == EXPAND_STACK_PARM)
8562 	target = 0;
8563 
8564       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8565       return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8566 
8567     case TRUNC_DIV_EXPR:
8568     case FLOOR_DIV_EXPR:
8569     case CEIL_DIV_EXPR:
8570     case ROUND_DIV_EXPR:
8571     case EXACT_DIV_EXPR:
8572       /* If this is a fixed-point operation, then we cannot use the code
8573 	 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8574          divisions.   */
8575       if (ALL_FIXED_POINT_MODE_P (mode))
8576 	goto binop;
8577 
8578       if (modifier == EXPAND_STACK_PARM)
8579 	target = 0;
8580       /* Possible optimization: compute the dividend with EXPAND_SUM
8581 	 then, if the divisor is constant, we can optimize the case
8582 	 where some terms of the dividend have coefficients divisible by it.  */
8583       expand_operands (treeop0, treeop1,
8584 		       subtarget, &op0, &op1, EXPAND_NORMAL);
8585       return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8586 
8587     case RDIV_EXPR:
8588       goto binop;
8589 
8590     case MULT_HIGHPART_EXPR:
8591       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8592       temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8593       gcc_assert (temp);
8594       return temp;
8595 
8596     case TRUNC_MOD_EXPR:
8597     case FLOOR_MOD_EXPR:
8598     case CEIL_MOD_EXPR:
8599     case ROUND_MOD_EXPR:
8600       if (modifier == EXPAND_STACK_PARM)
8601 	target = 0;
8602       expand_operands (treeop0, treeop1,
8603 		       subtarget, &op0, &op1, EXPAND_NORMAL);
8604       return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8605 
8606     case FIXED_CONVERT_EXPR:
8607       op0 = expand_normal (treeop0);
8608       if (target == 0 || modifier == EXPAND_STACK_PARM)
8609 	target = gen_reg_rtx (mode);
8610 
8611       if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8612 	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8613           || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8614 	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8615       else
8616 	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8617       return target;
8618 
8619     case FIX_TRUNC_EXPR:
8620       op0 = expand_normal (treeop0);
8621       if (target == 0 || modifier == EXPAND_STACK_PARM)
8622 	target = gen_reg_rtx (mode);
8623       expand_fix (target, op0, unsignedp);
8624       return target;
8625 
8626     case FLOAT_EXPR:
8627       op0 = expand_normal (treeop0);
8628       if (target == 0 || modifier == EXPAND_STACK_PARM)
8629 	target = gen_reg_rtx (mode);
8630       /* expand_float can't figure out what to do if FROM has VOIDmode.
8631 	 So give it the correct mode.  With -O, cse will optimize this.  */
8632       if (GET_MODE (op0) == VOIDmode)
8633 	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8634 				op0);
8635       expand_float (target, op0,
8636 		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8637       return target;
8638 
8639     case NEGATE_EXPR:
8640       op0 = expand_expr (treeop0, subtarget,
8641 			 VOIDmode, EXPAND_NORMAL);
8642       if (modifier == EXPAND_STACK_PARM)
8643 	target = 0;
8644       temp = expand_unop (mode,
8645       			  optab_for_tree_code (NEGATE_EXPR, type,
8646 					       optab_default),
8647 			  op0, target, 0);
8648       gcc_assert (temp);
8649       return REDUCE_BIT_FIELD (temp);
8650 
8651     case ABS_EXPR:
8652       op0 = expand_expr (treeop0, subtarget,
8653 			 VOIDmode, EXPAND_NORMAL);
8654       if (modifier == EXPAND_STACK_PARM)
8655 	target = 0;
8656 
8657       /* ABS_EXPR is not valid for complex arguments.  */
8658       gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8659 		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8660 
8661       /* Unsigned abs is simply the operand.  Testing here means we don't
8662 	 risk generating incorrect code below.  */
8663       if (TYPE_UNSIGNED (type))
8664 	return op0;
8665 
8666       return expand_abs (mode, op0, target, unsignedp,
8667 			 safe_from_p (target, treeop0, 1));
8668 
8669     case MAX_EXPR:
8670     case MIN_EXPR:
8671       target = original_target;
8672       if (target == 0
8673 	  || modifier == EXPAND_STACK_PARM
8674 	  || (MEM_P (target) && MEM_VOLATILE_P (target))
8675 	  || GET_MODE (target) != mode
8676 	  || (REG_P (target)
8677 	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
8678 	target = gen_reg_rtx (mode);
8679       expand_operands (treeop0, treeop1,
8680 		       target, &op0, &op1, EXPAND_NORMAL);
8681 
8682       /* First try to do it with a special MIN or MAX instruction.
8683 	 If that does not win, use a conditional jump to select the proper
8684 	 value.  */
8685       this_optab = optab_for_tree_code (code, type, optab_default);
8686       temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8687 			   OPTAB_WIDEN);
8688       if (temp != 0)
8689 	return temp;
8690 
8691       /* At this point, a MEM target is no longer useful; we will get better
8692 	 code without it.  */
8693 
8694       if (! REG_P (target))
8695 	target = gen_reg_rtx (mode);
8696 
8697       /* If op1 was placed in target, swap op0 and op1.  */
8698       if (target != op0 && target == op1)
8699 	{
8700 	  temp = op0;
8701 	  op0 = op1;
8702 	  op1 = temp;
8703 	}
8704 
8705       /* We generate better code and avoid problems with op1 mentioning
8706 	 target by forcing op1 into a pseudo if it isn't a constant.  */
8707       if (! CONSTANT_P (op1))
8708 	op1 = force_reg (mode, op1);
8709 
8710       {
8711 	enum rtx_code comparison_code;
8712 	rtx cmpop1 = op1;
8713 
8714 	if (code == MAX_EXPR)
8715 	  comparison_code = unsignedp ? GEU : GE;
8716 	else
8717 	  comparison_code = unsignedp ? LEU : LE;
8718 
8719 	/* Canonicalize to comparisons against 0.  */
8720 	if (op1 == const1_rtx)
8721 	  {
8722 	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8723 	       or (a != 0 ? a : 1) for unsigned.
8724 	       For MIN we are safe converting (a <= 1 ? a : 1)
8725 	       into (a <= 0 ? a : 1)  */
8726 	    cmpop1 = const0_rtx;
8727 	    if (code == MAX_EXPR)
8728 	      comparison_code = unsignedp ? NE : GT;
8729 	  }
8730 	if (op1 == constm1_rtx && !unsignedp)
8731 	  {
8732 	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8733 	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8734 	    cmpop1 = const0_rtx;
8735 	    if (code == MIN_EXPR)
8736 	      comparison_code = LT;
8737 	  }
8738 #ifdef HAVE_conditional_move
8739 	/* Use a conditional move if possible.  */
8740 	if (can_conditionally_move_p (mode))
8741 	  {
8742 	    rtx insn;
8743 
8744 	    /* ??? Same problem as in expmed.c: emit_conditional_move
8745 	       forces a stack adjustment via compare_from_rtx, and we
8746 	       lose the stack adjustment if the sequence we are about
8747 	       to create is discarded.  */
8748 	    do_pending_stack_adjust ();
8749 
8750 	    start_sequence ();
8751 
8752 	    /* Try to emit the conditional move.  */
8753 	    insn = emit_conditional_move (target, comparison_code,
8754 					  op0, cmpop1, mode,
8755 					  op0, op1, mode,
8756 					  unsignedp);
8757 
8758 	    /* If we could do the conditional move, emit the sequence,
8759 	       and return.  */
8760 	    if (insn)
8761 	      {
8762 		rtx seq = get_insns ();
8763 		end_sequence ();
8764 		emit_insn (seq);
8765 		return target;
8766 	      }
8767 
8768 	    /* Otherwise discard the sequence and fall back to code with
8769 	       branches.  */
8770 	    end_sequence ();
8771 	  }
8772 #endif
8773 	if (target != op0)
8774 	  emit_move_insn (target, op0);
8775 
8776 	temp = gen_label_rtx ();
8777 	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8778 				 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8779 				 -1);
8780       }
8781       emit_move_insn (target, op1);
8782       emit_label (temp);
8783       return target;
8784 
8785     case BIT_NOT_EXPR:
8786       op0 = expand_expr (treeop0, subtarget,
8787 			 VOIDmode, EXPAND_NORMAL);
8788       if (modifier == EXPAND_STACK_PARM)
8789 	target = 0;
8790       /* In case we have to reduce the result to bit-field precision
8791 	 for an unsigned bit-field, expand this as XOR with a proper constant
8792 	 instead.  */
8793       if (reduce_bit_field && TYPE_UNSIGNED (type))
8794 	temp = expand_binop (mode, xor_optab, op0,
8795 			     immed_double_int_const
8796 			       (double_int::mask (TYPE_PRECISION (type)), mode),
8797 			     target, 1, OPTAB_LIB_WIDEN);
8798       else
8799 	temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8800       gcc_assert (temp);
8801       return temp;
8802 
8803       /* ??? Can optimize bitwise operations with one arg constant.
8804 	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8805 	 and (a bitwise1 b) bitwise2 b (etc)
8806 	 but that is probably not worthwhile.  */
8807 
8808     case BIT_AND_EXPR:
8809     case BIT_IOR_EXPR:
8810     case BIT_XOR_EXPR:
8811       goto binop;
8812 
8813     case LROTATE_EXPR:
8814     case RROTATE_EXPR:
8815       gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8816 		  || (GET_MODE_PRECISION (TYPE_MODE (type))
8817 		      == TYPE_PRECISION (type)));
8818       /* fall through */
8819 
8820     case LSHIFT_EXPR:
8821     case RSHIFT_EXPR:
8822       /* If this is a fixed-point operation, then we cannot use the code
8823 	 below because "expand_shift" doesn't support sat/no-sat fixed-point
8824          shifts.   */
8825       if (ALL_FIXED_POINT_MODE_P (mode))
8826 	goto binop;
8827 
8828       if (! safe_from_p (subtarget, treeop1, 1))
8829 	subtarget = 0;
8830       if (modifier == EXPAND_STACK_PARM)
8831 	target = 0;
8832       op0 = expand_expr (treeop0, subtarget,
8833 			 VOIDmode, EXPAND_NORMAL);
8834       temp = expand_variable_shift (code, mode, op0, treeop1, target,
8835 				    unsignedp);
8836       if (code == LSHIFT_EXPR)
8837 	temp = REDUCE_BIT_FIELD (temp);
8838       return temp;
8839 
8840       /* Could determine the answer when only additive constants differ.  Also,
8841 	 the addition of one can be handled by changing the condition.  */
8842     case LT_EXPR:
8843     case LE_EXPR:
8844     case GT_EXPR:
8845     case GE_EXPR:
8846     case EQ_EXPR:
8847     case NE_EXPR:
8848     case UNORDERED_EXPR:
8849     case ORDERED_EXPR:
8850     case UNLT_EXPR:
8851     case UNLE_EXPR:
8852     case UNGT_EXPR:
8853     case UNGE_EXPR:
8854     case UNEQ_EXPR:
8855     case LTGT_EXPR:
8856       temp = do_store_flag (ops,
8857 			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8858 			    tmode != VOIDmode ? tmode : mode);
8859       if (temp)
8860 	return temp;
8861 
8862       /* Use a compare and a jump for BLKmode comparisons, or for function
8863 	 type comparisons if HAVE_canonicalize_funcptr_for_compare.  */
8864 
8865       if ((target == 0
8866 	   || modifier == EXPAND_STACK_PARM
8867 	   || ! safe_from_p (target, treeop0, 1)
8868 	   || ! safe_from_p (target, treeop1, 1)
8869 	   /* Make sure we don't have a hard reg (such as function's return
8870 	      value) live across basic blocks, if not optimizing.  */
8871 	   || (!optimize && REG_P (target)
8872 	       && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8873 	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8874 
8875       emit_move_insn (target, const0_rtx);
8876 
8877       op1 = gen_label_rtx ();
8878       jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8879 
8880       if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8881 	emit_move_insn (target, constm1_rtx);
8882       else
8883 	emit_move_insn (target, const1_rtx);
8884 
8885       emit_label (op1);
8886       return target;
8887 
8888     case COMPLEX_EXPR:
8889       /* Get the rtx code of the operands.  */
8890       op0 = expand_normal (treeop0);
8891       op1 = expand_normal (treeop1);
8892 
8893       if (!target)
8894 	target = gen_reg_rtx (TYPE_MODE (type));
8895       else
8896 	/* If target overlaps with op1, then either we need to force
8897 	   op1 into a pseudo (if target also overlaps with op0),
8898 	   or write the complex parts in reverse order.  */
8899 	switch (GET_CODE (target))
8900 	  {
8901 	  case CONCAT:
8902 	    if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
8903 	      {
8904 		if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
8905 		  {
8906 		  complex_expr_force_op1:
8907 		    temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
8908 		    emit_move_insn (temp, op1);
8909 		    op1 = temp;
8910 		    break;
8911 		  }
8912 	      complex_expr_swap_order:
8913 		/* Move the imaginary (op1) and real (op0) parts to their
8914 		   location.  */
8915 		write_complex_part (target, op1, true);
8916 		write_complex_part (target, op0, false);
8917 
8918 		return target;
8919 	      }
8920 	    break;
8921 	  case MEM:
8922 	    temp = adjust_address_nv (target,
8923 				      GET_MODE_INNER (GET_MODE (target)), 0);
8924 	    if (reg_overlap_mentioned_p (temp, op1))
8925 	      {
8926 		enum machine_mode imode = GET_MODE_INNER (GET_MODE (target));
8927 		temp = adjust_address_nv (target, imode,
8928 					  GET_MODE_SIZE (imode));
8929 		if (reg_overlap_mentioned_p (temp, op0))
8930 		  goto complex_expr_force_op1;
8931 		goto complex_expr_swap_order;
8932 	      }
8933 	    break;
8934 	  default:
8935 	    if (reg_overlap_mentioned_p (target, op1))
8936 	      {
8937 		if (reg_overlap_mentioned_p (target, op0))
8938 		  goto complex_expr_force_op1;
8939 		goto complex_expr_swap_order;
8940 	      }
8941 	    break;
8942 	  }
8943 
8944       /* Move the real (op0) and imaginary (op1) parts to their location.  */
8945       write_complex_part (target, op0, false);
8946       write_complex_part (target, op1, true);
8947 
8948       return target;
8949 
8950     case WIDEN_SUM_EXPR:
8951       {
8952         tree oprnd0 = treeop0;
8953         tree oprnd1 = treeop1;
8954 
8955         expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8956         target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8957                                             target, unsignedp);
8958         return target;
8959       }
8960 
8961     case REDUC_MAX_EXPR:
8962     case REDUC_MIN_EXPR:
8963     case REDUC_PLUS_EXPR:
8964       {
8965         op0 = expand_normal (treeop0);
8966         this_optab = optab_for_tree_code (code, type, optab_default);
8967         temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8968         gcc_assert (temp);
8969         return temp;
8970       }
8971 
8972     case VEC_LSHIFT_EXPR:
8973     case VEC_RSHIFT_EXPR:
8974       {
8975 	target = expand_vec_shift_expr (ops, target);
8976 	return target;
8977       }
8978 
8979     case VEC_UNPACK_HI_EXPR:
8980     case VEC_UNPACK_LO_EXPR:
8981       {
8982 	op0 = expand_normal (treeop0);
8983 	temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8984 					  target, unsignedp);
8985 	gcc_assert (temp);
8986 	return temp;
8987       }
8988 
8989     case VEC_UNPACK_FLOAT_HI_EXPR:
8990     case VEC_UNPACK_FLOAT_LO_EXPR:
8991       {
8992 	op0 = expand_normal (treeop0);
8993 	/* The signedness is determined from input operand.  */
8994 	temp = expand_widen_pattern_expr
8995 	  (ops, op0, NULL_RTX, NULL_RTX,
8996 	   target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8997 
8998 	gcc_assert (temp);
8999 	return temp;
9000       }
9001 
9002     case VEC_WIDEN_MULT_HI_EXPR:
9003     case VEC_WIDEN_MULT_LO_EXPR:
9004     case VEC_WIDEN_MULT_EVEN_EXPR:
9005     case VEC_WIDEN_MULT_ODD_EXPR:
9006     case VEC_WIDEN_LSHIFT_HI_EXPR:
9007     case VEC_WIDEN_LSHIFT_LO_EXPR:
9008       expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9009       target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9010 					  target, unsignedp);
9011       gcc_assert (target);
9012       return target;
9013 
9014     case VEC_PACK_TRUNC_EXPR:
9015     case VEC_PACK_SAT_EXPR:
9016     case VEC_PACK_FIX_TRUNC_EXPR:
9017       mode = TYPE_MODE (TREE_TYPE (treeop0));
9018       goto binop;
9019 
9020     case VEC_PERM_EXPR:
9021       expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9022       op2 = expand_normal (treeop2);
9023 
9024       /* Careful here: if the target doesn't support integral vector modes,
9025 	 a constant selection vector could wind up smooshed into a normal
9026 	 integral constant.  */
9027       if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9028 	{
9029 	  tree sel_type = TREE_TYPE (treeop2);
9030 	  enum machine_mode vmode
9031 	    = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9032 			       TYPE_VECTOR_SUBPARTS (sel_type));
9033 	  gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9034 	  op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9035 	  gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9036 	}
9037       else
9038         gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9039 
9040       temp = expand_vec_perm (mode, op0, op1, op2, target);
9041       gcc_assert (temp);
9042       return temp;
9043 
9044     case DOT_PROD_EXPR:
9045       {
9046 	tree oprnd0 = treeop0;
9047 	tree oprnd1 = treeop1;
9048 	tree oprnd2 = treeop2;
9049 	rtx op2;
9050 
9051 	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9052 	op2 = expand_normal (oprnd2);
9053 	target = expand_widen_pattern_expr (ops, op0, op1, op2,
9054 					    target, unsignedp);
9055 	return target;
9056       }
9057 
9058     case REALIGN_LOAD_EXPR:
9059       {
9060         tree oprnd0 = treeop0;
9061         tree oprnd1 = treeop1;
9062         tree oprnd2 = treeop2;
9063         rtx op2;
9064 
9065         this_optab = optab_for_tree_code (code, type, optab_default);
9066         expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9067         op2 = expand_normal (oprnd2);
9068         temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9069 				  target, unsignedp);
9070         gcc_assert (temp);
9071         return temp;
9072       }
9073 
9074     case COND_EXPR:
9075       /* A COND_EXPR with its type being VOID_TYPE represents a
9076 	 conditional jump and is handled in
9077 	 expand_gimple_cond_expr.  */
9078       gcc_assert (!VOID_TYPE_P (type));
9079 
9080       /* Note that COND_EXPRs whose type is a structure or union
9081 	 are required to be constructed to contain assignments of
9082 	 a temporary variable, so that we can evaluate them here
9083 	 for side effect only.  If type is void, we must do likewise.  */
9084 
9085       gcc_assert (!TREE_ADDRESSABLE (type)
9086 		  && !ignore
9087 		  && TREE_TYPE (treeop1) != void_type_node
9088 		  && TREE_TYPE (treeop2) != void_type_node);
9089 
9090       temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9091       if (temp)
9092 	return temp;
9093 
9094       /* If we are not to produce a result, we have no target.  Otherwise,
9095 	 if a target was specified use it; it will not be used as an
9096 	 intermediate target unless it is safe.  If no target, use a
9097 	 temporary.  */
9098 
9099       if (modifier != EXPAND_STACK_PARM
9100 	  && original_target
9101 	  && safe_from_p (original_target, treeop0, 1)
9102 	  && GET_MODE (original_target) == mode
9103 	  && !MEM_P (original_target))
9104 	temp = original_target;
9105       else
9106 	temp = assign_temp (type, 0, 1);
9107 
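      /* The fallback below expands roughly as

	   if (!treeop0) goto op0;
	   temp = treeop1;  goto op1;
	 op0:
	   temp = treeop2;
	 op1:

	 using the two labels generated here.  */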
9108       do_pending_stack_adjust ();
9109       NO_DEFER_POP;
9110       op0 = gen_label_rtx ();
9111       op1 = gen_label_rtx ();
9112       jumpifnot (treeop0, op0, -1);
9113       store_expr (treeop1, temp,
9114 		  modifier == EXPAND_STACK_PARM,
9115 		  false);
9116 
9117       emit_jump_insn (gen_jump (op1));
9118       emit_barrier ();
9119       emit_label (op0);
9120       store_expr (treeop2, temp,
9121 		  modifier == EXPAND_STACK_PARM,
9122 		  false);
9123 
9124       emit_label (op1);
9125       OK_DEFER_POP;
9126       return temp;
9127 
9128     case VEC_COND_EXPR:
9129       target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9130       return target;
9131 
9132     default:
9133       gcc_unreachable ();
9134     }
9135 
9136   /* Here to do an ordinary binary operator.  */
9137  binop:
9138   expand_operands (treeop0, treeop1,
9139 		   subtarget, &op0, &op1, EXPAND_NORMAL);
9140  binop2:
9141   this_optab = optab_for_tree_code (code, type, optab_default);
9142  binop3:
9143   if (modifier == EXPAND_STACK_PARM)
9144     target = 0;
9145   temp = expand_binop (mode, this_optab, op0, op1, target,
9146 		       unsignedp, OPTAB_LIB_WIDEN);
9147   gcc_assert (temp);
9148   /* Bitwise operations do not need bit-field reduction as we expect their
9149      operands to be properly truncated.  */
9150   if (code == BIT_XOR_EXPR
9151       || code == BIT_AND_EXPR
9152       || code == BIT_IOR_EXPR)
9153     return temp;
9154   return REDUCE_BIT_FIELD (temp);
9155 }
9156 #undef REDUCE_BIT_FIELD
9157 
9158 rtx
9159 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
9160 		    enum expand_modifier modifier, rtx *alt_rtl,
9161 		    bool inner_reference_p)
9162 {
9163   rtx op0, op1, temp, decl_rtl;
9164   tree type;
9165   int unsignedp;
9166   enum machine_mode mode;
9167   enum tree_code code = TREE_CODE (exp);
9168   rtx subtarget, original_target;
9169   int ignore;
9170   tree context;
9171   bool reduce_bit_field;
9172   location_t loc = EXPR_LOCATION (exp);
9173   struct separate_ops ops;
9174   tree treeop0, treeop1, treeop2;
9175   tree ssa_name = NULL_TREE;
9176   gimple g;
9177 
9178   type = TREE_TYPE (exp);
9179   mode = TYPE_MODE (type);
9180   unsignedp = TYPE_UNSIGNED (type);
9181 
9182   treeop0 = treeop1 = treeop2 = NULL_TREE;
9183   if (!VL_EXP_CLASS_P (exp))
9184     switch (TREE_CODE_LENGTH (code))
9185       {
9186 	default:
9187 	case 3: treeop2 = TREE_OPERAND (exp, 2);
9188 	case 2: treeop1 = TREE_OPERAND (exp, 1);
9189 	case 1: treeop0 = TREE_OPERAND (exp, 0);
9190 	case 0: break;
9191       }
9192   ops.code = code;
9193   ops.type = type;
9194   ops.op0 = treeop0;
9195   ops.op1 = treeop1;
9196   ops.op2 = treeop2;
9197   ops.location = loc;
9198 
9199   ignore = (target == const0_rtx
9200 	    || ((CONVERT_EXPR_CODE_P (code)
9201 		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9202 		&& TREE_CODE (type) == VOID_TYPE));
9203 
9204   /* An operation in what may be a bit-field type needs the
9205      result to be reduced to the precision of the bit-field type,
9206      which is narrower than that of the type's mode.  */
9207   reduce_bit_field = (!ignore
9208 		      && INTEGRAL_TYPE_P (type)
9209 		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9210 
9211   /* If we are going to ignore this result, we need only do something
9212      if there is a side-effect somewhere in the expression.  If there
9213      is, short-circuit the most common cases here.  Note that we must
9214      not call expand_expr with anything but const0_rtx in case this
9215      is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
9216 
9217   if (ignore)
9218     {
9219       if (! TREE_SIDE_EFFECTS (exp))
9220 	return const0_rtx;
9221 
9222       /* Ensure we reference a volatile object even if value is ignored, but
9223 	 don't do this if all we are doing is taking its address.  */
9224       if (TREE_THIS_VOLATILE (exp)
9225 	  && TREE_CODE (exp) != FUNCTION_DECL
9226 	  && mode != VOIDmode && mode != BLKmode
9227 	  && modifier != EXPAND_CONST_ADDRESS)
9228 	{
9229 	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9230 	  if (MEM_P (temp))
9231 	    copy_to_reg (temp);
9232 	  return const0_rtx;
9233 	}
9234 
9235       if (TREE_CODE_CLASS (code) == tcc_unary
9236 	  || code == BIT_FIELD_REF
9237 	  || code == COMPONENT_REF
9238 	  || code == INDIRECT_REF)
9239 	return expand_expr (treeop0, const0_rtx, VOIDmode,
9240 			    modifier);
9241 
9242       else if (TREE_CODE_CLASS (code) == tcc_binary
9243 	       || TREE_CODE_CLASS (code) == tcc_comparison
9244 	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9245 	{
9246 	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9247 	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9248 	  return const0_rtx;
9249 	}
9250 
9251       target = 0;
9252     }
9253 
9254   if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9255     target = 0;
9256 
9257   /* Use subtarget as the target for operand 0 of a binary operation.  */
9258   subtarget = get_subtarget (target);
9259   original_target = target;
9260 
9261   switch (code)
9262     {
9263     case LABEL_DECL:
9264       {
9265 	tree function = decl_function_context (exp);
9266 
9267 	temp = label_rtx (exp);
9268 	temp = gen_rtx_LABEL_REF (Pmode, temp);
9269 
9270 	if (function != current_function_decl
9271 	    && function != 0)
9272 	  LABEL_REF_NONLOCAL_P (temp) = 1;
9273 
9274 	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9275 	return temp;
9276       }
9277 
9278     case SSA_NAME:
9279       /* ??? ivopts calls the expander without any preparation from
9280          out-of-ssa.  So fake instructions as if this were an access to the
9281 	 base variable.  This unnecessarily allocates a pseudo; see how we can
9282 	 reuse it, if partition base vars have it set already.  */
9283       if (!currently_expanding_to_rtl)
9284 	{
9285 	  tree var = SSA_NAME_VAR (exp);
9286 	  if (var && DECL_RTL_SET_P (var))
9287 	    return DECL_RTL (var);
9288 	  return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9289 			      LAST_VIRTUAL_REGISTER + 1);
9290 	}
9291 
9292       g = get_gimple_for_ssa_name (exp);
9293       /* For EXPAND_INITIALIZER try harder to get something simpler.  */
9294       if (g == NULL
9295 	  && modifier == EXPAND_INITIALIZER
9296 	  && !SSA_NAME_IS_DEFAULT_DEF (exp)
9297 	  && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9298 	  && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9299 	g = SSA_NAME_DEF_STMT (exp);
9300       if (g)
9301 	{
9302 	  rtx r;
9303 	  location_t saved_loc = curr_insn_location ();
9304 
9305 	  set_curr_insn_location (gimple_location (g));
9306 	  r = expand_expr_real (gimple_assign_rhs_to_tree (g), target,
9307 				tmode, modifier, NULL, inner_reference_p);
9308 	  set_curr_insn_location (saved_loc);
9309 	  if (REG_P (r) && !REG_EXPR (r))
9310 	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9311 	  return r;
9312 	}
9313 
9314       ssa_name = exp;
9315       decl_rtl = get_rtx_for_ssa_name (ssa_name);
9316       exp = SSA_NAME_VAR (ssa_name);
9317       goto expand_decl_rtl;
9318 
9319     case PARM_DECL:
9320     case VAR_DECL:
9321       /* If a static var's type was incomplete when the decl was written,
9322 	 but the type is complete now, lay out the decl now.  */
9323       if (DECL_SIZE (exp) == 0
9324 	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9325 	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9326 	layout_decl (exp, 0);
9327 
9328       /* ... fall through ...  */
9329 
9330     case FUNCTION_DECL:
9331     case RESULT_DECL:
9332       decl_rtl = DECL_RTL (exp);
9333     expand_decl_rtl:
9334       gcc_assert (decl_rtl);
9335       decl_rtl = copy_rtx (decl_rtl);
9336       /* Record writes to register variables.  */
9337       if (modifier == EXPAND_WRITE
9338 	  && REG_P (decl_rtl)
9339 	  && HARD_REGISTER_P (decl_rtl))
9340         add_to_hard_reg_set (&crtl->asm_clobbers,
9341 			     GET_MODE (decl_rtl), REGNO (decl_rtl));
9342 
9343       /* Ensure the variable is marked as used even if it doesn't go through
9344 	 a parser.  If it hasn't been used yet, write out an external
9345 	 definition.  */
9346       TREE_USED (exp) = 1;
9347 
9348       /* Show we haven't gotten RTL for this yet.  */
9349       temp = 0;
9350 
9351       /* Variables inherited from containing functions should have
9352 	 been lowered by this point.  */
9353       context = decl_function_context (exp);
9354       gcc_assert (!context
9355 		  || context == current_function_decl
9356 		  || TREE_STATIC (exp)
9357 		  || DECL_EXTERNAL (exp)
9358 		  /* ??? C++ creates functions that are not TREE_STATIC.  */
9359 		  || TREE_CODE (exp) == FUNCTION_DECL);
9360 
9361       /* This is the case of an array whose size is to be determined
9362 	 from its initializer, while the initializer is still being parsed.
9363 	 ??? We aren't parsing while expanding anymore.  */
9364 
9365       if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9366 	temp = validize_mem (decl_rtl);
9367 
9368       /* If DECL_RTL is memory, we are in the normal case and the
9369 	 address is not valid, get the address into a register.  */
9370 
9371       else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9372 	{
9373 	  if (alt_rtl)
9374 	    *alt_rtl = decl_rtl;
9375 	  decl_rtl = use_anchored_address (decl_rtl);
9376 	  if (modifier != EXPAND_CONST_ADDRESS
9377 	      && modifier != EXPAND_SUM
9378 	      && !memory_address_addr_space_p (DECL_MODE (exp),
9379 					       XEXP (decl_rtl, 0),
9380 					       MEM_ADDR_SPACE (decl_rtl)))
9381 	    temp = replace_equiv_address (decl_rtl,
9382 					  copy_rtx (XEXP (decl_rtl, 0)));
9383 	}
9384 
9385       /* If we got something, return it.  But first, set the alignment
9386 	 if the address is a register.  */
9387       if (temp != 0)
9388 	{
9389 	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9390 	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9391 
9392 	  return temp;
9393 	}
9394 
9395       /* If the mode of DECL_RTL does not match that of the decl,
9396 	 there are two cases: we are dealing with a BLKmode value
9397 	 that is returned in a register, or we are dealing with
9398 	 a promoted value.  In the latter case, return a SUBREG
9399 	 of the wanted mode, but mark it so that we know that it
9400 	 was already extended.  */
9401       if (REG_P (decl_rtl)
9402 	  && DECL_MODE (exp) != BLKmode
9403 	  && GET_MODE (decl_rtl) != DECL_MODE (exp))
9404 	{
9405 	  enum machine_mode pmode;
9406 
9407 	  /* Get the signedness to be used for this variable.  Ensure we get
9408 	     the same mode we got when the variable was declared.  */
9409 	  if (code == SSA_NAME
9410 	      && (g = SSA_NAME_DEF_STMT (ssa_name))
9411 	      && gimple_code (g) == GIMPLE_CALL)
9412 	    {
9413 	      gcc_assert (!gimple_call_internal_p (g));
9414 	      pmode = promote_function_mode (type, mode, &unsignedp,
9415 					     gimple_call_fntype (g),
9416 					     2);
9417 	    }
9418 	  else
9419 	    pmode = promote_decl_mode (exp, &unsignedp);
9420 	  gcc_assert (GET_MODE (decl_rtl) == pmode);
9421 
9422 	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
9423 	  SUBREG_PROMOTED_VAR_P (temp) = 1;
9424 	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
9425 	  return temp;
9426 	}
9427 
9428       return decl_rtl;
9429 
9430     case INTEGER_CST:
9431       temp = immed_double_const (TREE_INT_CST_LOW (exp),
9432 				 TREE_INT_CST_HIGH (exp), mode);
9433 
9434       return temp;
9435 
9436     case VECTOR_CST:
9437       {
9438 	tree tmp = NULL_TREE;
9439 	if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9440 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9441 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9442 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9443 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9444 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9445 	  return const_vector_from_tree (exp);
9446 	if (GET_MODE_CLASS (mode) == MODE_INT)
9447 	  {
9448 	    tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9449 	    if (type_for_mode)
9450 	      tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9451 	  }
9452 	if (!tmp)
9453 	  {
9454 	    vec<constructor_elt, va_gc> *v;
9455 	    unsigned i;
9456 	    vec_alloc (v, VECTOR_CST_NELTS (exp));
9457 	    for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9458 	      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9459 	    tmp = build_constructor (type, v);
9460 	  }
9461 	return expand_expr (tmp, ignore ? const0_rtx : target,
9462 			    tmode, modifier);
9463       }
9464 
9465     case CONST_DECL:
9466       return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9467 
9468     case REAL_CST:
9469       /* If optimized, generate immediate CONST_DOUBLE
9470 	 which will be turned into memory by reload if necessary.
9471 
9472 	 We used to force a register so that loop.c could see it.  But
9473 	 this does not allow gen_* patterns to perform optimizations with
9474 	 the constants.  It also produces two insns in cases like "x = 1.0;".
9475 	 On most machines, floating-point constants are not permitted in
9476 	 many insns, so we'd end up copying it to a register in any case.
9477 
9478 	 Now, we do the copying in expand_binop, if appropriate.  */
9479       return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9480 					   TYPE_MODE (TREE_TYPE (exp)));
9481 
9482     case FIXED_CST:
9483       return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9484 					   TYPE_MODE (TREE_TYPE (exp)));
9485 
9486     case COMPLEX_CST:
9487       /* Handle evaluating a complex constant in a CONCAT target.  */
9488       if (original_target && GET_CODE (original_target) == CONCAT)
9489 	{
9490 	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9491 	  rtx rtarg, itarg;
9492 
9493 	  rtarg = XEXP (original_target, 0);
9494 	  itarg = XEXP (original_target, 1);
9495 
9496 	  /* Move the real and imaginary parts separately.  */
9497 	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9498 	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9499 
9500 	  if (op0 != rtarg)
9501 	    emit_move_insn (rtarg, op0);
9502 	  if (op1 != itarg)
9503 	    emit_move_insn (itarg, op1);
9504 
9505 	  return original_target;
9506 	}
9507 
9508       /* ... fall through ...  */
9509 
9510     case STRING_CST:
9511       temp = expand_expr_constant (exp, 1, modifier);
9512 
9513       /* temp contains a constant address.
9514 	 On RISC machines where a constant address isn't valid,
9515 	 make some insns to get that address into a register.  */
9516       if (modifier != EXPAND_CONST_ADDRESS
9517 	  && modifier != EXPAND_INITIALIZER
9518 	  && modifier != EXPAND_SUM
9519 	  && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9520 					    MEM_ADDR_SPACE (temp)))
9521 	return replace_equiv_address (temp,
9522 				      copy_rtx (XEXP (temp, 0)));
9523       return temp;
9524 
9525     case SAVE_EXPR:
9526       {
9527 	tree val = treeop0;
9528 	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9529 				      inner_reference_p);
9530 
9531 	if (!SAVE_EXPR_RESOLVED_P (exp))
9532 	  {
9533 	    /* We can indeed still hit this case, typically via builtin
9534 	       expanders calling save_expr immediately before expanding
9535 	       something.  Assume this means that we only have to deal
9536 	       with non-BLKmode values.  */
9537 	    gcc_assert (GET_MODE (ret) != BLKmode);
9538 
9539 	    val = build_decl (curr_insn_location (),
9540 			      VAR_DECL, NULL, TREE_TYPE (exp));
9541 	    DECL_ARTIFICIAL (val) = 1;
9542 	    DECL_IGNORED_P (val) = 1;
9543 	    treeop0 = val;
9544 	    TREE_OPERAND (exp, 0) = treeop0;
9545 	    SAVE_EXPR_RESOLVED_P (exp) = 1;
9546 
9547 	    if (!CONSTANT_P (ret))
9548 	      ret = copy_to_reg (ret);
9549 	    SET_DECL_RTL (val, ret);
9550 	  }
9551 
9552         return ret;
9553       }
9554 
9555 
9556     case CONSTRUCTOR:
9557       /* If we don't need the result, just ensure we evaluate any
9558 	 subexpressions.  */
9559       if (ignore)
9560 	{
9561 	  unsigned HOST_WIDE_INT idx;
9562 	  tree value;
9563 
9564 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9565 	    expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9566 
9567 	  return const0_rtx;
9568 	}
9569 
9570       return expand_constructor (exp, target, modifier, false);
9571 
9572     case TARGET_MEM_REF:
9573       {
9574 	addr_space_t as
9575 	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9576 	struct mem_address addr;
9577 	enum insn_code icode;
9578 	unsigned int align;
9579 
9580 	get_address_description (exp, &addr);
9581 	op0 = addr_for_mem_ref (&addr, as, true);
9582 	op0 = memory_address_addr_space (mode, op0, as);
9583 	temp = gen_rtx_MEM (mode, op0);
9584 	set_mem_attributes (temp, exp, 0);
9585 	set_mem_addr_space (temp, as);
9586 	align = get_object_alignment (exp);
9587 	if (modifier != EXPAND_WRITE
9588 	    && modifier != EXPAND_MEMORY
9589 	    && mode != BLKmode
9590 	    && align < GET_MODE_ALIGNMENT (mode)
9591 	    /* If the target does not have special handling for unaligned
9592 	       loads of this mode, it can use regular moves for them.  */
9593 	    && ((icode = optab_handler (movmisalign_optab, mode))
9594 		!= CODE_FOR_nothing))
9595 	  {
9596 	    struct expand_operand ops[2];
9597 
9598 	    /* We've already validated the memory, and we're creating a
9599 	       new pseudo destination.  The predicates really can't fail,
9600 	       nor can the generator.  */
9601 	    create_output_operand (&ops[0], NULL_RTX, mode);
9602 	    create_fixed_operand (&ops[1], temp);
9603 	    expand_insn (icode, 2, ops);
9604 	    return ops[0].value;
9605 	  }
9606 	return temp;
9607       }
9608 
9609     case MEM_REF:
9610       {
9611 	addr_space_t as
9612 	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9613 	enum machine_mode address_mode;
9614 	tree base = TREE_OPERAND (exp, 0);
9615 	gimple def_stmt;
9616 	enum insn_code icode;
9617 	unsigned align;
9618 	/* Handle expansion of non-aliased memory with non-BLKmode.  That
9619 	   might end up in a register.  */
9620 	if (mem_ref_refers_to_non_mem_p (exp))
9621 	  {
9622 	    HOST_WIDE_INT offset = mem_ref_offset (exp).low;
9623 	    tree bit_offset;
9624 	    tree bftype;
9625 	    base = TREE_OPERAND (base, 0);
9626 	    if (offset == 0
9627 		&& host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
9628 		&& (GET_MODE_BITSIZE (DECL_MODE (base))
9629 		    == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
9630 	      return expand_expr (build1 (VIEW_CONVERT_EXPR,
9631 					  TREE_TYPE (exp), base),
9632 				  target, tmode, modifier);
9633 	    bit_offset = bitsize_int (offset * BITS_PER_UNIT);
9634 	    bftype = TREE_TYPE (base);
9635 	    if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
9636 	      bftype = TREE_TYPE (exp);
9637 	    else
9638 	      {
9639 		temp = assign_stack_temp (DECL_MODE (base),
9640 					  GET_MODE_SIZE (DECL_MODE (base)));
9641 		store_expr (base, temp, 0, false);
9642 		temp = adjust_address (temp, BLKmode, offset);
9643 		set_mem_size (temp, int_size_in_bytes (TREE_TYPE (exp)));
9644 		return temp;
9645 	      }
9646 	    return expand_expr (build3 (BIT_FIELD_REF, bftype,
9647 					base,
9648 					TYPE_SIZE (TREE_TYPE (exp)),
9649 					bit_offset),
9650 				target, tmode, modifier);
9651 	  }
9652 	address_mode = targetm.addr_space.address_mode (as);
9653 	base = TREE_OPERAND (exp, 0);
9654 	if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9655 	  {
9656 	    tree mask = gimple_assign_rhs2 (def_stmt);
9657 	    base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9658 			   gimple_assign_rhs1 (def_stmt), mask);
9659 	    TREE_OPERAND (exp, 0) = base;
9660 	  }
9661 	align = get_object_alignment (exp);
9662 	op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9663 	op0 = memory_address_addr_space (address_mode, op0, as);
9664 	if (!integer_zerop (TREE_OPERAND (exp, 1)))
9665 	  {
9666 	    rtx off
9667 	      = immed_double_int_const (mem_ref_offset (exp), address_mode);
9668 	    op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9669 	  }
9670 	op0 = memory_address_addr_space (mode, op0, as);
9671 	temp = gen_rtx_MEM (mode, op0);
9672 	set_mem_attributes (temp, exp, 0);
9673 	set_mem_addr_space (temp, as);
9674 	if (TREE_THIS_VOLATILE (exp))
9675 	  MEM_VOLATILE_P (temp) = 1;
9676 	if (modifier != EXPAND_WRITE
9677 	    && modifier != EXPAND_MEMORY
9678 	    && !inner_reference_p
9679 	    && mode != BLKmode
9680 	    && align < GET_MODE_ALIGNMENT (mode))
9681 	  {
9682 	    if ((icode = optab_handler (movmisalign_optab, mode))
9683 		!= CODE_FOR_nothing)
9684 	      {
9685 		struct expand_operand ops[2];
9686 
9687 		/* We've already validated the memory, and we're creating a
9688 		   new pseudo destination.  The predicates really can't fail,
9689 		   nor can the generator.  */
9690 		create_output_operand (&ops[0], NULL_RTX, mode);
9691 		create_fixed_operand (&ops[1], temp);
9692 		expand_insn (icode, 2, ops);
9693 		return ops[0].value;
9694 	      }
9695 	    else if (SLOW_UNALIGNED_ACCESS (mode, align))
9696 	      temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9697 					0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9698 					true, (modifier == EXPAND_STACK_PARM
9699 					       ? NULL_RTX : target),
9700 					mode, mode);
9701 	  }
9702 	return temp;
9703       }
9704 
9705     case ARRAY_REF:
9706 
9707       {
9708 	tree array = treeop0;
9709 	tree index = treeop1;
9710 
9711 	/* Fold an expression like: "foo"[2].
9712 	   This is not done in fold so it won't happen inside &.
9713 	   Don't fold if this is for wide characters since it's too
9714 	   difficult to do correctly and this is a very rare case.  */
9715 
9716 	if (modifier != EXPAND_CONST_ADDRESS
9717 	    && modifier != EXPAND_INITIALIZER
9718 	    && modifier != EXPAND_MEMORY)
9719 	  {
9720 	    tree t = fold_read_from_constant_string (exp);
9721 
9722 	    if (t)
9723 	      return expand_expr (t, target, tmode, modifier);
9724 	  }
9725 
9726 	/* If this is a constant index into a constant array,
9727 	   just get the value from the array.  Handle both the cases when
9728 	   we have an explicit constructor and when our operand is a variable
9729 	   that was declared const.  */
9730 
9731 	if (modifier != EXPAND_CONST_ADDRESS
9732 	    && modifier != EXPAND_INITIALIZER
9733 	    && modifier != EXPAND_MEMORY
9734 	    && TREE_CODE (array) == CONSTRUCTOR
9735 	    && ! TREE_SIDE_EFFECTS (array)
9736 	    && TREE_CODE (index) == INTEGER_CST)
9737 	  {
9738 	    unsigned HOST_WIDE_INT ix;
9739 	    tree field, value;
9740 
9741 	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9742 				      field, value)
9743 	      if (tree_int_cst_equal (field, index))
9744 		{
9745 		  if (!TREE_SIDE_EFFECTS (value))
9746 		    return expand_expr (fold (value), target, tmode, modifier);
9747 		  break;
9748 		}
9749 	  }
9750 
9751 	else if (optimize >= 1
9752 		 && modifier != EXPAND_CONST_ADDRESS
9753 		 && modifier != EXPAND_INITIALIZER
9754 		 && modifier != EXPAND_MEMORY
9755 		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
9756 		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
9757 		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
9758 		 && const_value_known_p (array))
9759 	  {
9760 	    if (TREE_CODE (index) == INTEGER_CST)
9761 	      {
9762 		tree init = DECL_INITIAL (array);
9763 
9764 		if (TREE_CODE (init) == CONSTRUCTOR)
9765 		  {
9766 		    unsigned HOST_WIDE_INT ix;
9767 		    tree field, value;
9768 
9769 		    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
9770 					      field, value)
9771 		      if (tree_int_cst_equal (field, index))
9772 			{
9773 			  if (TREE_SIDE_EFFECTS (value))
9774 			    break;
9775 
9776 			  if (TREE_CODE (value) == CONSTRUCTOR)
9777 			    {
9778 			      /* If VALUE is a CONSTRUCTOR, this
9779 				 optimization is only useful if
9780 				 this doesn't store the CONSTRUCTOR
9781 				 into memory.  If it does, it is more
9782 				 efficient to just load the data from
9783 				 the array directly.  */
9784 			      rtx ret = expand_constructor (value, target,
9785 							    modifier, true);
9786 			      if (ret == NULL_RTX)
9787 				break;
9788 			    }
9789 
9790 			  return expand_expr (fold (value), target, tmode,
9791 					      modifier);
9792 			}
9793 		  }
9794 		else if (TREE_CODE (init) == STRING_CST)
9795 		  {
9796 		    tree index1 = index;
9797 		    tree low_bound = array_ref_low_bound (exp);
9798 		    index1 = fold_convert_loc (loc, sizetype,
9799 					       treeop1);
9800 
9801 		    /* Optimize the special-case of a zero lower bound.
9802 
9803 		       We convert the low_bound to sizetype to avoid some problems
9804 		       with constant folding.  (E.g. suppose the lower bound is 1,
9805 		       and its mode is QI.  Without the conversion, (ARRAY
9806 		       +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
9807 		       +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
9808 
9809 		    if (! integer_zerop (low_bound))
9810 		      index1 = size_diffop_loc (loc, index1,
9811 					    fold_convert_loc (loc, sizetype,
9812 							      low_bound));
9813 
9814 		    if (0 > compare_tree_int (index1,
9815 					      TREE_STRING_LENGTH (init)))
9816 		      {
9817 			tree type = TREE_TYPE (TREE_TYPE (init));
9818 			enum machine_mode mode = TYPE_MODE (type);
9819 
9820 			if (GET_MODE_CLASS (mode) == MODE_INT
9821 			    && GET_MODE_SIZE (mode) == 1)
9822 			  return gen_int_mode (TREE_STRING_POINTER (init)
9823 					       [TREE_INT_CST_LOW (index1)],
9824 					       mode);
9825 		      }
9826 		  }
9827 	      }
9828 	  }
9829       }
9830       goto normal_inner_ref;
9831 
9832     case COMPONENT_REF:
9833       /* If the operand is a CONSTRUCTOR, we can just extract the
9834 	 appropriate field if it is present.  */
9835       if (TREE_CODE (treeop0) == CONSTRUCTOR)
9836 	{
9837 	  unsigned HOST_WIDE_INT idx;
9838 	  tree field, value;
9839 
9840 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9841 				    idx, field, value)
9842 	    if (field == treeop1
9843 		/* We can normally use the value of the field in the
9844 		   CONSTRUCTOR.  However, if this is a bitfield in
9845 		   an integral mode that we can fit in a HOST_WIDE_INT,
9846 		   we must mask only the number of bits in the bitfield,
9847 		   since this is done implicitly by the constructor.  If
9848 		   the bitfield does not meet either of those conditions,
9849 		   we can't do this optimization.  */
9850 		&& (! DECL_BIT_FIELD (field)
9851 		    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9852 			&& (GET_MODE_PRECISION (DECL_MODE (field))
9853 			    <= HOST_BITS_PER_WIDE_INT))))
9854 	      {
9855 		if (DECL_BIT_FIELD (field)
9856 		    && modifier == EXPAND_STACK_PARM)
9857 		  target = 0;
9858 		op0 = expand_expr (value, target, tmode, modifier);
9859 		if (DECL_BIT_FIELD (field))
9860 		  {
9861 		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9862 		    enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9863 
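		    /* For example, with a 3-bit bitfield: an unsigned value is
		       masked with (1 << 3) - 1 = 7 below, while a signed value
		       in SImode is shifted left by 29 and then arithmetically
		       right by 29 so the field's sign bit is propagated.  */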
9864 		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
9865 		      {
9866 			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
9867 			op0 = expand_and (imode, op0, op1, target);
9868 		      }
9869 		    else
9870 		      {
9871 			int count = GET_MODE_PRECISION (imode) - bitsize;
9872 
9873 			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9874 					    target, 0);
9875 			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9876 					    target, 0);
9877 		      }
9878 		  }
9879 
9880 		return op0;
9881 	      }
9882 	}
9883       goto normal_inner_ref;
9884 
9885     case BIT_FIELD_REF:
9886     case ARRAY_RANGE_REF:
9887     normal_inner_ref:
9888       {
9889 	enum machine_mode mode1, mode2;
9890 	HOST_WIDE_INT bitsize, bitpos;
9891 	tree offset;
9892 	int volatilep = 0, must_force_mem;
9893 	bool packedp = false;
9894 	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9895 					&mode1, &unsignedp, &volatilep, true);
9896 	rtx orig_op0, memloc;
9897 	bool mem_attrs_from_type = false;
9898 
9899 	/* If we got back the original object, something is wrong.  Perhaps
9900 	   we are evaluating an expression too early.  In any event, don't
9901 	   infinitely recurse.  */
9902 	gcc_assert (tem != exp);
9903 
9904 	if (TYPE_PACKED (TREE_TYPE (TREE_OPERAND (exp, 0)))
9905 	    || (TREE_CODE (TREE_OPERAND (exp, 1)) == FIELD_DECL
9906 		&& DECL_PACKED (TREE_OPERAND (exp, 1))))
9907 	  packedp = true;
9908 
9909 	/* If TEM's type is a union of variable size, pass TARGET to the inner
9910 	   computation, since it will need a temporary and TARGET is known
9911 	   to suffice.  This occurs in unchecked conversion in Ada.  */
9912 	orig_op0 = op0
9913 	  = expand_expr_real (tem,
9914 			      (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9915 			       && COMPLETE_TYPE_P (TREE_TYPE (tem))
9916 			       && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9917 				   != INTEGER_CST)
9918 			       && modifier != EXPAND_STACK_PARM
9919 			       ? target : NULL_RTX),
9920 			      VOIDmode,
9921 			      (modifier == EXPAND_INITIALIZER
9922 			       || modifier == EXPAND_CONST_ADDRESS
9923 			       || modifier == EXPAND_STACK_PARM)
9924 			      ? modifier : EXPAND_NORMAL,
9925 			      NULL, true);
9926 
9927 
9928 	/* If the bitfield is volatile, we want to access it in the
9929 	   field's mode, not the computed mode.
9930 	   If a MEM has VOIDmode (external with incomplete type),
9931 	   use BLKmode for it instead.  */
9932 	if (MEM_P (op0))
9933 	  {
9934 	    if (volatilep && flag_strict_volatile_bitfields > 0)
9935 	      op0 = adjust_address (op0, mode1, 0);
9936 	    else if (GET_MODE (op0) == VOIDmode)
9937 	      op0 = adjust_address (op0, BLKmode, 0);
9938 	  }
9939 
9940 	mode2
9941 	  = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9942 
9943 	/* If we have either an offset, a BLKmode result, or a reference
9944 	   outside the underlying object, we must force it to memory.
9945 	   Such a case can occur in Ada if we have unchecked conversion
9946 	   of an expression from a scalar type to an aggregate type or
9947 	   for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9948 	   passed a partially uninitialized object or a view-conversion
9949 	   to a larger size.  */
9950 	must_force_mem = (offset
9951 			  || mode1 == BLKmode
9952 			  || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9953 
9954 	/* Handle CONCAT first.  */
9955 	if (GET_CODE (op0) == CONCAT && !must_force_mem)
9956 	  {
9957 	    if (bitpos == 0
9958 		&& bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9959 	      return op0;
9960 	    if (bitpos == 0
9961 		&& bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9962 		&& bitsize)
9963 	      {
9964 		op0 = XEXP (op0, 0);
9965 		mode2 = GET_MODE (op0);
9966 	      }
9967 	    else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9968 		     && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9969 		     && bitpos
9970 		     && bitsize)
9971 	      {
9972 		op0 = XEXP (op0, 1);
9973 		bitpos = 0;
9974 		mode2 = GET_MODE (op0);
9975 	      }
9976 	    else
9977 	      /* Otherwise force into memory.  */
9978 	      must_force_mem = 1;
9979 	  }
9980 
9981 	/* If this is a constant, put it in a register if it is a legitimate
9982 	   constant and we don't need a memory reference.  */
9983 	if (CONSTANT_P (op0)
9984 	    && mode2 != BLKmode
9985 	    && targetm.legitimate_constant_p (mode2, op0)
9986 	    && !must_force_mem)
9987 	  op0 = force_reg (mode2, op0);
9988 
9989 	/* Otherwise, if this is a constant, try to force it to the constant
9990 	   pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
9991 	   is a legitimate constant.  */
9992 	else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9993 	  op0 = validize_mem (memloc);
9994 
9995 	/* Otherwise, if this is a constant or the object is not in memory
9996 	   and need be, put it there.  */
9997 	else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9998 	  {
9999 	    tree nt = build_qualified_type (TREE_TYPE (tem),
10000 					    (TYPE_QUALS (TREE_TYPE (tem))
10001 					     | TYPE_QUAL_CONST));
10002 	    memloc = assign_temp (nt, 1, 1);
10003 	    emit_move_insn (memloc, op0);
10004 	    op0 = memloc;
10005 	    mem_attrs_from_type = true;
10006 	  }
10007 
10008 	if (offset)
10009 	  {
10010 	    enum machine_mode address_mode;
10011 	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10012 					  EXPAND_SUM);
10013 
10014 	    gcc_assert (MEM_P (op0));
10015 
10016 	    address_mode = get_address_mode (op0);
10017 	    if (GET_MODE (offset_rtx) != address_mode)
10018 	      offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10019 
10020 	    if (GET_MODE (op0) == BLKmode
10021 		/* A constant address in OP0 can have VOIDmode; we must
10022 		   not try to call force_reg in that case.  */
10023 		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
10024 		&& bitsize != 0
10025 		&& (bitpos % bitsize) == 0
10026 		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10027 		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
10028 	      {
10029 		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10030 		bitpos = 0;
10031 	      }
10032 
10033 	    op0 = offset_address (op0, offset_rtx,
10034 				  highest_pow2_factor (offset));
10035 	  }
10036 
10037 	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10038 	   record its alignment as BIGGEST_ALIGNMENT.  */
10039 	if (MEM_P (op0) && bitpos == 0 && offset != 0
10040 	    && is_aligning_offset (offset, tem))
10041 	  set_mem_align (op0, BIGGEST_ALIGNMENT);
10042 
10043 	/* Don't forget about volatility even if this is a bitfield.  */
10044 	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10045 	  {
10046 	    if (op0 == orig_op0)
10047 	      op0 = copy_rtx (op0);
10048 
10049 	    MEM_VOLATILE_P (op0) = 1;
10050 	  }
10051 
10052 	/* In cases where an aligned union has an unaligned object
10053 	   as a field, we might be extracting a BLKmode value from
10054 	   an integer-mode (e.g., SImode) object.  Handle this case
10055 	   by doing the extract into an object as wide as the field
10056 	   (which we know to be the width of a basic mode), then
10057 	   storing into memory, and changing the mode to BLKmode.  */
10058 	if (mode1 == VOIDmode
10059 	    || REG_P (op0) || GET_CODE (op0) == SUBREG
10060 	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
10061 		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10062 		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10063 		&& modifier != EXPAND_CONST_ADDRESS
10064 		&& modifier != EXPAND_INITIALIZER
10065 		&& modifier != EXPAND_MEMORY)
10066 	    /* If the field is volatile, we always want an aligned
10067 	       access.  Do this in the following two situations:
10068 	       1. the access is not already naturally
10069 	       aligned, otherwise "normal" (non-bitfield) volatile fields
10070 	       become non-addressable.
10071 	       2. the bitsize is narrower than the access size; we need
10072 	       to extract bitfields from the access.  */
10073 	    || (volatilep && flag_strict_volatile_bitfields > 0
10074 		&& (bitpos % GET_MODE_ALIGNMENT (mode) != 0
10075 		    || (mode1 != BLKmode
10076 		        && bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)))
10077 	    /* If the field isn't aligned enough to fetch as a memref,
10078 	       fetch it as a bit field.  */
10079 	    || (mode1 != BLKmode
10080 		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10081 		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10082 		      || (MEM_P (op0)
10083 			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10084 			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10085 		     && ((modifier == EXPAND_CONST_ADDRESS
10086 			  || modifier == EXPAND_INITIALIZER)
10087 			 ? STRICT_ALIGNMENT
10088 			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10089 		    || (bitpos % BITS_PER_UNIT != 0)))
10090 	    /* If the type and the field are a constant size and the
10091 	       size of the type isn't the same size as the bitfield,
10092 	       we must use bitfield operations.  */
10093 	    || (bitsize >= 0
10094 		&& TYPE_SIZE (TREE_TYPE (exp))
10095 		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10096 		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10097 					  bitsize)))
10098 	  {
10099 	    enum machine_mode ext_mode = mode;
10100 
10101 	    if (ext_mode == BLKmode
10102 		&& ! (target != 0 && MEM_P (op0)
10103 		      && MEM_P (target)
10104 		      && bitpos % BITS_PER_UNIT == 0))
10105 	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10106 
10107 	    if (ext_mode == BLKmode)
10108 	      {
10109 		if (target == 0)
10110 		  target = assign_temp (type, 1, 1);
10111 
10112 		if (bitsize == 0)
10113 		  return target;
10114 
10115 		/* In this case, BITPOS must start at a byte boundary and
10116 		   TARGET, if specified, must be a MEM.  */
10117 		gcc_assert (MEM_P (op0)
10118 			    && (!target || MEM_P (target))
10119 			    && !(bitpos % BITS_PER_UNIT));
10120 
10121 		emit_block_move (target,
10122 				 adjust_address (op0, VOIDmode,
10123 						 bitpos / BITS_PER_UNIT),
10124 				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10125 					  / BITS_PER_UNIT),
10126 				 (modifier == EXPAND_STACK_PARM
10127 				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10128 
10129 		return target;
10130 	      }
10131 
10132 	    op0 = validize_mem (op0);
10133 
10134 	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10135 	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10136 
10137 	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp, packedp,
10138 				     (modifier == EXPAND_STACK_PARM
10139 				      ? NULL_RTX : target),
10140 				     ext_mode, ext_mode);
10141 
10142 	    /* If the result is a record type and BITSIZE is narrower than
10143 	       the mode of OP0, an integral mode, and this is a big endian
10144 	       machine, we must put the field into the high-order bits.  */
10145 	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10146 		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10147 		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10148 	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10149 				  GET_MODE_BITSIZE (GET_MODE (op0))
10150 				  - bitsize, op0, 1);
10151 
10152 	    /* If the result type is BLKmode, store the data into a temporary
10153 	       of the appropriate type, but with the mode corresponding to the
10154 	       mode for the data we have (op0's mode).  It's tempting to make
10155 	       this a constant type, since we know it's only being stored once,
10156 	       but that can cause problems if we are taking the address of this
10157 	       COMPONENT_REF because the MEM of any reference via that address
10158 	       will have flags corresponding to the type, which will not
10159 	       necessarily be constant.  */
10160 	    if (mode == BLKmode)
10161 	      {
10162 		rtx new_rtx;
10163 
10164 		new_rtx = assign_stack_temp_for_type (ext_mode,
10165 						   GET_MODE_BITSIZE (ext_mode),
10166 						   type);
10167 		emit_move_insn (new_rtx, op0);
10168 		op0 = copy_rtx (new_rtx);
10169 		PUT_MODE (op0, BLKmode);
10170 	      }
10171 
10172 	    return op0;
10173 	  }
10174 
10175 	/* If the result is BLKmode, use that to access the object
10176 	   now as well.  */
10177 	if (mode == BLKmode)
10178 	  mode1 = BLKmode;
10179 
10180 	/* Get a reference to just this component.  */
10181 	if (modifier == EXPAND_CONST_ADDRESS
10182 	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10183 	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10184 	else
10185 	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10186 
10187 	if (op0 == orig_op0)
10188 	  op0 = copy_rtx (op0);
10189 
10190 	/* If op0 is a temporary because of forcing to memory, pass only the
10191 	   type to set_mem_attributes so that the original expression is never
10192 	   marked as ADDRESSABLE through MEM_EXPR of the temporary.  */
10193 	if (mem_attrs_from_type)
10194 	  set_mem_attributes (op0, type, 0);
10195 	else
10196 	  set_mem_attributes (op0, exp, 0);
10197 
10198 	if (REG_P (XEXP (op0, 0)))
10199 	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10200 
10201 	MEM_VOLATILE_P (op0) |= volatilep;
10202 	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10203 	    || modifier == EXPAND_CONST_ADDRESS
10204 	    || modifier == EXPAND_INITIALIZER)
10205 	  return op0;
10206 	else if (target == 0)
10207 	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10208 
10209 	convert_move (target, op0, unsignedp);
10210 	return target;
10211       }
10212 
10213     case OBJ_TYPE_REF:
10214       return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10215 
10216     case CALL_EXPR:
10217       /* All valid uses of __builtin_va_arg_pack () are removed during
10218 	 inlining.  */
10219       if (CALL_EXPR_VA_ARG_PACK (exp))
10220 	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10221       {
10222 	tree fndecl = get_callee_fndecl (exp), attr;
10223 
10224 	if (fndecl
10225 	    && (attr = lookup_attribute ("error",
10226 					 DECL_ATTRIBUTES (fndecl))) != NULL)
10227 	  error ("%Kcall to %qs declared with attribute error: %s",
10228 		 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10229 		 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10230 	if (fndecl
10231 	    && (attr = lookup_attribute ("warning",
10232 					 DECL_ATTRIBUTES (fndecl))) != NULL)
10233 	  warning_at (tree_nonartificial_location (exp),
10234 		      0, "%Kcall to %qs declared with attribute warning: %s",
10235 		      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10236 		      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10237 
10238 	/* Check for a built-in function.  */
10239 	if (fndecl && DECL_BUILT_IN (fndecl))
10240 	  {
10241 	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10242 	    return expand_builtin (exp, target, subtarget, tmode, ignore);
10243 	  }
10244       }
10245       return expand_call (exp, target, ignore);
10246 
10247     case VIEW_CONVERT_EXPR:
10248       op0 = NULL_RTX;
10249 
10250       /* If we are converting to BLKmode, try to avoid an intermediate
10251 	 temporary by fetching an inner memory reference.  */
10252       if (mode == BLKmode
10253 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10254 	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10255 	  && handled_component_p (treeop0))
10256       {
10257 	enum machine_mode mode1;
10258 	HOST_WIDE_INT bitsize, bitpos;
10259 	tree offset;
10260 	int unsignedp;
10261 	int volatilep = 0;
10262 	tree tem
10263 	  = get_inner_reference (treeop0, &bitsize, &bitpos,
10264 				 &offset, &mode1, &unsignedp, &volatilep,
10265 				 true);
10266 	rtx orig_op0;
10267 
10268 	/* ??? We should work harder and deal with non-zero offsets.  */
10269 	if (!offset
10270 	    && (bitpos % BITS_PER_UNIT) == 0
10271 	    && bitsize >= 0
10272 	    && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
10273 	  {
10274 	    /* See the normal_inner_ref case for the rationale.  */
10275 	    orig_op0
10276 	      = expand_expr_real (tem,
10277 				  (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10278 				   && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10279 				       != INTEGER_CST)
10280 				   && modifier != EXPAND_STACK_PARM
10281 				   ? target : NULL_RTX),
10282 				  VOIDmode,
10283 				  (modifier == EXPAND_INITIALIZER
10284 				   || modifier == EXPAND_CONST_ADDRESS
10285 				   || modifier == EXPAND_STACK_PARM)
10286 				  ? modifier : EXPAND_NORMAL,
10287 				  NULL, true);
10288 
10289 	    if (MEM_P (orig_op0))
10290 	      {
10291 		op0 = orig_op0;
10292 
10293 		/* Get a reference to just this component.  */
10294 		if (modifier == EXPAND_CONST_ADDRESS
10295 		    || modifier == EXPAND_SUM
10296 		    || modifier == EXPAND_INITIALIZER)
10297 		  op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10298 		else
10299 		  op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10300 
10301 		if (op0 == orig_op0)
10302 		  op0 = copy_rtx (op0);
10303 
10304 		set_mem_attributes (op0, treeop0, 0);
10305 		if (REG_P (XEXP (op0, 0)))
10306 		  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10307 
10308 		MEM_VOLATILE_P (op0) |= volatilep;
10309 	      }
10310 	  }
10311       }
10312 
10313       if (!op0)
10314 	op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10315 				NULL, inner_reference_p);
10316 
10317       /* If the input and output modes are both the same, we are done.  */
10318       if (mode == GET_MODE (op0))
10319 	;
10320       /* If neither mode is BLKmode, and both modes are the same size
10321 	 then we can use gen_lowpart.  */
10322       else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10323 	       && (GET_MODE_PRECISION (mode)
10324 		   == GET_MODE_PRECISION (GET_MODE (op0)))
10325 	       && !COMPLEX_MODE_P (GET_MODE (op0)))
10326 	{
10327 	  if (GET_CODE (op0) == SUBREG)
10328 	    op0 = force_reg (GET_MODE (op0), op0);
10329 	  temp = gen_lowpart_common (mode, op0);
10330 	  if (temp)
10331 	    op0 = temp;
10332 	  else
10333 	    {
10334 	      if (!REG_P (op0) && !MEM_P (op0))
10335 		op0 = force_reg (GET_MODE (op0), op0);
10336 	      op0 = gen_lowpart (mode, op0);
10337 	    }
10338 	}
10339       /* If both types are integral, convert from one mode to the other.  */
10340       else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10341 	op0 = convert_modes (mode, GET_MODE (op0), op0,
10342 			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10343       /* As a last resort, spill op0 to memory, and reload it in a
10344 	 different mode.  */
10345       else if (!MEM_P (op0))
10346 	{
10347 	  /* If the operand is not a MEM, force it into memory.  Since we
10348 	     are going to be changing the mode of the MEM, don't call
10349 	     force_const_mem for constants because we don't allow pool
10350 	     constants to change mode.  */
10351 	  tree inner_type = TREE_TYPE (treeop0);
10352 
10353 	  gcc_assert (!TREE_ADDRESSABLE (exp));
10354 
10355 	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10356 	    target
10357 	      = assign_stack_temp_for_type
10358 		(TYPE_MODE (inner_type),
10359 		 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10360 
10361 	  emit_move_insn (target, op0);
10362 	  op0 = target;
10363 	}
10364 
10365       /* At this point, OP0 is in the correct mode.  If the output type is
10366 	 such that the operand is known to be aligned, indicate that it is.
10367 	 Otherwise, we need only be concerned about alignment for non-BLKmode
10368 	 results.  */
10369       if (MEM_P (op0))
10370 	{
10371 	  enum insn_code icode;
10372 
10373 	  if (TYPE_ALIGN_OK (type))
10374 	    {
10375 	      /* ??? Copying the MEM without substantially changing it might
10376 		 run afoul of the code handling volatile memory references in
10377 		 store_expr, which assumes that TARGET is returned unmodified
10378 		 if it has been used.  */
10379 	      op0 = copy_rtx (op0);
10380 	      set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10381 	    }
10382 	  else if (modifier != EXPAND_WRITE
10383 		   && modifier != EXPAND_MEMORY
10384 		   && !inner_reference_p
10385 		   && mode != BLKmode
10386 		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10387 	    {
10388 	      /* If the target does have special handling for unaligned
10389 		 loads of this mode, then use it.  */
10390 	      if ((icode = optab_handler (movmisalign_optab, mode))
10391 		  != CODE_FOR_nothing)
10392 		{
10393 		  rtx reg, insn;
10394 
10395 		  op0 = adjust_address (op0, mode, 0);
10396 		  /* We've already validated the memory, and we're creating a
10397 		     new pseudo destination.  The predicates really can't
10398 		     fail.  */
10399 		  reg = gen_reg_rtx (mode);
10400 
10401 		  /* Nor can the insn generator.  */
10402 		  insn = GEN_FCN (icode) (reg, op0);
10403 		  emit_insn (insn);
10404 		  return reg;
10405 		}
10406 	      else if (STRICT_ALIGNMENT)
10407 		{
10408 		  tree inner_type = TREE_TYPE (treeop0);
10409 		  HOST_WIDE_INT temp_size
10410 		    = MAX (int_size_in_bytes (inner_type),
10411 			   (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10412 		  rtx new_rtx
10413 		    = assign_stack_temp_for_type (mode, temp_size, type);
10414 		  rtx new_with_op0_mode
10415 		    = adjust_address (new_rtx, GET_MODE (op0), 0);
10416 
10417 		  gcc_assert (!TREE_ADDRESSABLE (exp));
10418 
10419 		  if (GET_MODE (op0) == BLKmode)
10420 		    emit_block_move (new_with_op0_mode, op0,
10421 				     GEN_INT (GET_MODE_SIZE (mode)),
10422 				     (modifier == EXPAND_STACK_PARM
10423 				      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10424 		  else
10425 		    emit_move_insn (new_with_op0_mode, op0);
10426 
10427 		  op0 = new_rtx;
10428 		}
10429 	    }
10430 
10431 	  op0 = adjust_address (op0, mode, 0);
10432 	}
10433 
10434       return op0;
10435 
10436     case MODIFY_EXPR:
10437       {
10438 	tree lhs = treeop0;
10439 	tree rhs = treeop1;
10440 	gcc_assert (ignore);
10441 
10442 	/* Check for |= or &= of a bitfield of size one into another bitfield
10443 	   of size 1.  In this case, (unless we need the result of the
10444 	   assignment) we can do this more efficiently with a
10445 	   test followed by an assignment, if necessary.
10446 
10447 	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
10448 	   things change so we do, this code should be enhanced to
10449 	   support it.  */
10450 	if (TREE_CODE (lhs) == COMPONENT_REF
10451 	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
10452 		|| TREE_CODE (rhs) == BIT_AND_EXPR)
10453 	    && TREE_OPERAND (rhs, 0) == lhs
10454 	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10455 	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10456 	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10457 	  {
10458 	    rtx label = gen_label_rtx ();
10459 	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10460 	    do_jump (TREE_OPERAND (rhs, 1),
10461 		     value ? label : 0,
10462 		     value ? 0 : label, -1);
10463 	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10464 			       false);
10465 	    do_pending_stack_adjust ();
10466 	    emit_label (label);
10467 	    return const0_rtx;
10468 	  }
10469 
10470 	expand_assignment (lhs, rhs, false);
10471 	return const0_rtx;
10472       }
10473 
10474     case ADDR_EXPR:
10475       return expand_expr_addr_expr (exp, target, tmode, modifier);
10476 
10477     case REALPART_EXPR:
10478       op0 = expand_normal (treeop0);
10479       return read_complex_part (op0, false);
10480 
10481     case IMAGPART_EXPR:
10482       op0 = expand_normal (treeop0);
10483       return read_complex_part (op0, true);
10484 
10485     case RETURN_EXPR:
10486     case LABEL_EXPR:
10487     case GOTO_EXPR:
10488     case SWITCH_EXPR:
10489     case ASM_EXPR:
10490       /* Expanded in cfgexpand.c.  */
10491       gcc_unreachable ();
10492 
10493     case TRY_CATCH_EXPR:
10494     case CATCH_EXPR:
10495     case EH_FILTER_EXPR:
10496     case TRY_FINALLY_EXPR:
10497       /* Lowered by tree-eh.c.  */
10498       gcc_unreachable ();
10499 
10500     case WITH_CLEANUP_EXPR:
10501     case CLEANUP_POINT_EXPR:
10502     case TARGET_EXPR:
10503     case CASE_LABEL_EXPR:
10504     case VA_ARG_EXPR:
10505     case BIND_EXPR:
10506     case INIT_EXPR:
10507     case CONJ_EXPR:
10508     case COMPOUND_EXPR:
10509     case PREINCREMENT_EXPR:
10510     case PREDECREMENT_EXPR:
10511     case POSTINCREMENT_EXPR:
10512     case POSTDECREMENT_EXPR:
10513     case LOOP_EXPR:
10514     case EXIT_EXPR:
10515     case COMPOUND_LITERAL_EXPR:
10516       /* Lowered by gimplify.c.  */
10517       gcc_unreachable ();
10518 
10519     case FDESC_EXPR:
10520       /* Function descriptors are not valid except as
10521 	 initialization constants, and should not be expanded.  */
10522       gcc_unreachable ();
10523 
10524     case WITH_SIZE_EXPR:
10525       /* WITH_SIZE_EXPR expands to its first argument.  The caller should
10526 	 have pulled out the size to use in whatever context it needed.  */
10527       return expand_expr_real (treeop0, original_target, tmode,
10528 			       modifier, alt_rtl, inner_reference_p);
10529 
10530     default:
10531       return expand_expr_real_2 (&ops, target, tmode, modifier);
10532     }
10533 }
10534 
10535 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10536    signedness of TYPE), possibly returning the result in TARGET.  */
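/* For instance, reducing an SImode value to a signed 5-bit precision shifts
   it left by 27 and then arithmetically right by 27, while an unsigned 5-bit
   precision simply masks the value with 0x1f.  */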
10537 static rtx
10538 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10539 {
10540   HOST_WIDE_INT prec = TYPE_PRECISION (type);
10541   if (target && GET_MODE (target) != GET_MODE (exp))
10542     target = 0;
10543   /* For constant values, reduce using build_int_cst_type. */
10544   if (CONST_INT_P (exp))
10545     {
10546       HOST_WIDE_INT value = INTVAL (exp);
10547       tree t = build_int_cst_type (type, value);
10548       return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10549     }
10550   else if (TYPE_UNSIGNED (type))
10551     {
10552       rtx mask = immed_double_int_const (double_int::mask (prec),
10553 					 GET_MODE (exp));
10554       return expand_and (GET_MODE (exp), exp, mask, target);
10555     }
10556   else
10557     {
10558       int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10559       exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10560 			  exp, count, target, 0);
10561       return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10562 			   exp, count, target, 0);
10563     }
10564 }
10565 
10566 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10567    when applied to the address of EXP produces an address known to be
10568    aligned more than BIGGEST_ALIGNMENT.  */
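/* The pattern recognized below is essentially (- &EXP) & (N - 1), possibly
   wrapped in conversions, where N is a power of two and N - 1 exceeds
   BIGGEST_ALIGNMENT / BITS_PER_UNIT; adding such an offset to the address
   of EXP rounds it up to an N-byte boundary.  */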
10569 
10570 static int
10571 is_aligning_offset (const_tree offset, const_tree exp)
10572 {
10573   /* Strip off any conversions.  */
10574   while (CONVERT_EXPR_P (offset))
10575     offset = TREE_OPERAND (offset, 0);
10576 
10577   /* We must now have a BIT_AND_EXPR with a constant that is one less than
10578      a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
10579   if (TREE_CODE (offset) != BIT_AND_EXPR
10580       || !host_integerp (TREE_OPERAND (offset, 1), 1)
10581       || compare_tree_int (TREE_OPERAND (offset, 1),
10582 			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10583       || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
10584     return 0;
10585 
10586   /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10587      It must be NEGATE_EXPR.  Then strip any more conversions.  */
10588   offset = TREE_OPERAND (offset, 0);
10589   while (CONVERT_EXPR_P (offset))
10590     offset = TREE_OPERAND (offset, 0);
10591 
10592   if (TREE_CODE (offset) != NEGATE_EXPR)
10593     return 0;
10594 
10595   offset = TREE_OPERAND (offset, 0);
10596   while (CONVERT_EXPR_P (offset))
10597     offset = TREE_OPERAND (offset, 0);
10598 
10599   /* This must now be the address of EXP.  */
10600   return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10601 }
10602 
10603 /* Return the tree node if ARG corresponds to a string constant or zero
10604    if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
10605    in bytes within the string that ARG is accessing.  The type of the
10606    offset will be `sizetype'.  */
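/* For example, given &"abcd"[1], or the equivalent POINTER_PLUS_EXPR of the
   string's address and 1, the STRING_CST for "abcd" is returned and
   *PTR_OFFSET is set to 1.  A VAR_DECL whose initializer is a string literal
   is handled the same way, with the initializer returned instead.  */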
10607 
10608 tree
10609 string_constant (tree arg, tree *ptr_offset)
10610 {
10611   tree array, offset, lower_bound;
10612   STRIP_NOPS (arg);
10613 
10614   if (TREE_CODE (arg) == ADDR_EXPR)
10615     {
10616       if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10617 	{
10618 	  *ptr_offset = size_zero_node;
10619 	  return TREE_OPERAND (arg, 0);
10620 	}
10621       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10622 	{
10623 	  array = TREE_OPERAND (arg, 0);
10624 	  offset = size_zero_node;
10625 	}
10626       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10627 	{
10628 	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10629 	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10630 	  if (TREE_CODE (array) != STRING_CST
10631 	      && TREE_CODE (array) != VAR_DECL)
10632 	    return 0;
10633 
10634 	  /* Check if the array has a nonzero lower bound.  */
10635 	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10636 	  if (!integer_zerop (lower_bound))
10637 	    {
10638 	      /* If the offset and base aren't both constants, return 0.  */
10639 	      if (TREE_CODE (lower_bound) != INTEGER_CST)
10640 	        return 0;
10641 	      if (TREE_CODE (offset) != INTEGER_CST)
10642 		return 0;
10643 	      /* Adjust offset by the lower bound.  */
10644 	      offset = size_diffop (fold_convert (sizetype, offset),
10645 				    fold_convert (sizetype, lower_bound));
10646 	    }
10647 	}
10648       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10649 	{
10650 	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10651 	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10652 	  if (TREE_CODE (array) != ADDR_EXPR)
10653 	    return 0;
10654 	  array = TREE_OPERAND (array, 0);
10655 	  if (TREE_CODE (array) != STRING_CST
10656 	      && TREE_CODE (array) != VAR_DECL)
10657 	    return 0;
10658 	}
10659       else
10660 	return 0;
10661     }
10662   else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10663     {
10664       tree arg0 = TREE_OPERAND (arg, 0);
10665       tree arg1 = TREE_OPERAND (arg, 1);
10666 
10667       STRIP_NOPS (arg0);
10668       STRIP_NOPS (arg1);
10669 
10670       if (TREE_CODE (arg0) == ADDR_EXPR
10671 	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10672 	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10673 	{
10674 	  array = TREE_OPERAND (arg0, 0);
10675 	  offset = arg1;
10676 	}
10677       else if (TREE_CODE (arg1) == ADDR_EXPR
10678 	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10679 		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10680 	{
10681 	  array = TREE_OPERAND (arg1, 0);
10682 	  offset = arg0;
10683 	}
10684       else
10685 	return 0;
10686     }
10687   else
10688     return 0;
10689 
10690   if (TREE_CODE (array) == STRING_CST)
10691     {
10692       *ptr_offset = fold_convert (sizetype, offset);
10693       return array;
10694     }
10695   else if (TREE_CODE (array) == VAR_DECL
10696 	   || TREE_CODE (array) == CONST_DECL)
10697     {
10698       int length;
10699 
10700       /* Variables initialized to string literals can be handled too.  */
10701       if (!const_value_known_p (array)
10702 	  || !DECL_INITIAL (array)
10703 	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
10704 	return 0;
10705 
10706       /* Avoid const char foo[4] = "abcde";  */
10707       if (DECL_SIZE_UNIT (array) == NULL_TREE
10708 	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10709 	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
10710 	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10711 	return 0;
10712 
10713       /* If the variable is bigger than the string literal, OFFSET must be
10714 	 constant and inside the bounds of the string literal.  */
10715       offset = fold_convert (sizetype, offset);
10716       if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10717 	  && (! host_integerp (offset, 1)
10718 	      || compare_tree_int (offset, length) >= 0))
10719 	return 0;
10720 
10721       *ptr_offset = offset;
10722       return DECL_INITIAL (array);
10723     }
10724 
10725   return 0;
10726 }
10727 
10728 /* Generate code to calculate OPS, an exploded expression,
10729    using a store-flag instruction, and return an rtx for the result.
10730    OPS reflects a comparison.
10731 
10732    If TARGET is nonzero, store the result there if convenient.
10733 
10734    Return zero if there is no suitable set-flag instruction
10735    available on this machine.
10736 
10737    Once expand_expr has been called on the arguments of the comparison,
10738    we are committed to doing the store flag, since it is not safe to
10739    re-evaluate the expression.  We emit the store-flag insn by calling
10740    emit_store_flag, but only expand the arguments if we have a reason
10741    to believe that emit_store_flag will be successful.  If we think that
10742    it will, but it isn't, we have to simulate the store-flag with a
10743    set/jump/set sequence.  */
10744 
10745 static rtx
10746 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
10747 {
10748   enum rtx_code code;
10749   tree arg0, arg1, type;
10750   tree tem;
10751   enum machine_mode operand_mode;
10752   int unsignedp;
10753   rtx op0, op1;
10754   rtx subtarget = target;
10755   location_t loc = ops->location;
10756 
10757   arg0 = ops->op0;
10758   arg1 = ops->op1;
10759 
10760   /* Don't crash if the comparison was erroneous.  */
10761   if (arg0 == error_mark_node || arg1 == error_mark_node)
10762     return const0_rtx;
10763 
10764   type = TREE_TYPE (arg0);
10765   operand_mode = TYPE_MODE (type);
10766   unsignedp = TYPE_UNSIGNED (type);
10767 
10768   /* We won't bother with BLKmode store-flag operations because it would mean
10769      passing a lot of information to emit_store_flag.  */
10770   if (operand_mode == BLKmode)
10771     return 0;
10772 
10773   /* We won't bother with store-flag operations involving function pointers
10774      when function pointers must be canonicalized before comparisons.  */
10775 #ifdef HAVE_canonicalize_funcptr_for_compare
10776   if (HAVE_canonicalize_funcptr_for_compare
10777       && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10778 	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10779 	       == FUNCTION_TYPE))
10780 	  || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10781 	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10782 		  == FUNCTION_TYPE))))
10783     return 0;
10784 #endif
10785 
10786   STRIP_NOPS (arg0);
10787   STRIP_NOPS (arg1);
10788 
10789   /* For vector typed comparisons emit code to generate the desired
10790      all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
10791      expander for this.  */
10792   if (TREE_CODE (ops->type) == VECTOR_TYPE)
10793     {
10794       tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
10795       tree if_true = constant_boolean_node (true, ops->type);
10796       tree if_false = constant_boolean_node (false, ops->type);
10797       return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
10798     }
10799 
10800   /* Get the rtx comparison code to use.  We know that EXP is a comparison
10801      operation of some type.  Some comparisons against 1 and -1 can be
10802      converted to comparisons with zero.  Do so here so that the tests
10803      below will be aware that we have a comparison with zero.   These
10804      tests will not catch constants in the first operand, but constants
10805      are rarely passed as the first operand.  */
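  /* For instance, x < 1 becomes x <= 0, x >= 1 becomes x > 0, and for
     signed operands x <= -1 becomes x < 0 and x > -1 becomes x >= 0.  */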
10806 
10807   switch (ops->code)
10808     {
10809     case EQ_EXPR:
10810       code = EQ;
10811       break;
10812     case NE_EXPR:
10813       code = NE;
10814       break;
10815     case LT_EXPR:
10816       if (integer_onep (arg1))
10817 	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10818       else
10819 	code = unsignedp ? LTU : LT;
10820       break;
10821     case LE_EXPR:
10822       if (! unsignedp && integer_all_onesp (arg1))
10823 	arg1 = integer_zero_node, code = LT;
10824       else
10825 	code = unsignedp ? LEU : LE;
10826       break;
10827     case GT_EXPR:
10828       if (! unsignedp && integer_all_onesp (arg1))
10829 	arg1 = integer_zero_node, code = GE;
10830       else
10831 	code = unsignedp ? GTU : GT;
10832       break;
10833     case GE_EXPR:
10834       if (integer_onep (arg1))
10835 	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10836       else
10837 	code = unsignedp ? GEU : GE;
10838       break;
10839 
10840     case UNORDERED_EXPR:
10841       code = UNORDERED;
10842       break;
10843     case ORDERED_EXPR:
10844       code = ORDERED;
10845       break;
10846     case UNLT_EXPR:
10847       code = UNLT;
10848       break;
10849     case UNLE_EXPR:
10850       code = UNLE;
10851       break;
10852     case UNGT_EXPR:
10853       code = UNGT;
10854       break;
10855     case UNGE_EXPR:
10856       code = UNGE;
10857       break;
10858     case UNEQ_EXPR:
10859       code = UNEQ;
10860       break;
10861     case LTGT_EXPR:
10862       code = LTGT;
10863       break;
10864 
10865     default:
10866       gcc_unreachable ();
10867     }
10868 
10869   /* Put a constant second.  */
10870   if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10871       || TREE_CODE (arg0) == FIXED_CST)
10872     {
10873       tem = arg0; arg0 = arg1; arg1 = tem;
10874       code = swap_condition (code);
10875     }
10876 
10877   /* If this is an equality or inequality test of a single bit, we can
10878      do this by shifting the bit being tested to the low-order bit and
10879      masking the result with the constant 1.  If the condition was EQ,
10880      we xor it with 1.  This does not require an scc insn and is faster
10881      than an scc insn even if we have it.
10882 
10883      The code to make this transformation was moved into fold_single_bit_test,
10884      so we just call into the folder and expand its result.  */
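  /* For example, assuming arg0 is defined by x & 8, the test (x & 8) != 0
     is in effect expanded as (x >> 3) & 1, and (x & 8) == 0 as
     ((x >> 3) & 1) ^ 1.  */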
10885 
10886   if ((code == NE || code == EQ)
10887       && integer_zerop (arg1)
10888       && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
10889     {
10890       gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
10891       if (srcstmt
10892 	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
10893 	{
10894 	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
10895 	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10896 	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
10897 				       gimple_assign_rhs1 (srcstmt),
10898 				       gimple_assign_rhs2 (srcstmt));
10899 	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
10900 	  if (temp)
10901 	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
10902 	}
10903     }
10904 
10905   if (! get_subtarget (target)
10906       || GET_MODE (subtarget) != operand_mode)
10907     subtarget = 0;
10908 
10909   expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10910 
10911   if (target == 0)
10912     target = gen_reg_rtx (mode);
10913 
10914   /* Try a cstore if possible.  */
10915   return emit_store_flag_force (target, code, op0, op1,
10916 				operand_mode, unsignedp,
10917 				(TYPE_PRECISION (ops->type) == 1
10918 				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
10919 }
10920 
10921 
10922 /* Stubs in case we haven't got a casesi insn.  */
10923 #ifndef HAVE_casesi
10924 # define HAVE_casesi 0
10925 # define gen_casesi(a, b, c, d, e) (0)
10926 # define CODE_FOR_casesi CODE_FOR_nothing
10927 #endif
10928 
10929 /* Attempt to generate a casesi instruction.  Returns 1 if successful,
10930    0 otherwise (i.e. if there is no casesi instruction).
10931 
10932    DEFAULT_PROBABILITY is the probability of jumping to the default
10933    label.  */
10934 int
10935 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10936 	    rtx table_label, rtx default_label, rtx fallback_label,
10937             int default_probability)
10938 {
10939   struct expand_operand ops[5];
10940   enum machine_mode index_mode = SImode;
10941   rtx op1, op2, index;
10942 
10943   if (! HAVE_casesi)
10944     return 0;
10945 
10946   /* Convert the index to SImode.  */
10947   if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10948     {
10949       enum machine_mode omode = TYPE_MODE (index_type);
10950       rtx rangertx = expand_normal (range);
10951 
10952       /* We must handle the endpoints in the original mode.  */
10953       index_expr = build2 (MINUS_EXPR, index_type,
10954 			   index_expr, minval);
10955       minval = integer_zero_node;
10956       index = expand_normal (index_expr);
10957       if (default_label)
10958         emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10959 				 omode, 1, default_label,
10960                                  default_probability);
10961       /* Now we can safely truncate.  */
10962       index = convert_to_mode (index_mode, index, 0);
10963     }
10964   else
10965     {
10966       if (TYPE_MODE (index_type) != index_mode)
10967 	{
10968 	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
10969 	  index_expr = fold_convert (index_type, index_expr);
10970 	}
10971 
10972       index = expand_normal (index_expr);
10973     }
10974 
10975   do_pending_stack_adjust ();
10976 
10977   op1 = expand_normal (minval);
10978   op2 = expand_normal (range);
10979 
10980   create_input_operand (&ops[0], index, index_mode);
10981   create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
10982   create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
10983   create_fixed_operand (&ops[3], table_label);
10984   create_fixed_operand (&ops[4], (default_label
10985 				  ? default_label
10986 				  : fallback_label));
10987   expand_jump_insn (CODE_FOR_casesi, 5, ops);
10988   return 1;
10989 }
10990 
10991 /* Attempt to generate a tablejump instruction; same concept.  */
10992 #ifndef HAVE_tablejump
10993 #define HAVE_tablejump 0
10994 #define gen_tablejump(x, y) (0)
10995 #endif
10996 
10997 /* Subroutine of the next function.
10998 
10999    INDEX is the value being switched on, with the lowest value
11000    in the table already subtracted.
11001    MODE is its expected mode (needed if INDEX is constant).
11002    RANGE is the length of the jump table.
11003    TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11004 
11005    DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11006    index value is out of range.
11007    DEFAULT_PROBABILITY is the probability of jumping to
11008    the default label.  */
11009 
11010 static void
11011 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
11012 	      rtx default_label, int default_probability)
11013 {
11014   rtx temp, vector;
11015 
11016   if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11017     cfun->cfg->max_jumptable_ents = INTVAL (range);
11018 
11019   /* Do an unsigned comparison (in the proper mode) between the index
11020      expression and the value which represents the length of the range.
11021      Since we just finished subtracting the lower bound of the range
11022      from the index expression, this comparison allows us to simultaneously
11023      check that the original index expression value is both greater than
11024      or equal to the minimum value of the range and less than or equal to
11025      the maximum value of the range.  */
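
  /* Worked example (added for clarity, not in the original sources):
     for case values 3 through 7, RANGE is 4 and INDEX has already had
     3 subtracted.  An original value of 5 leaves INDEX == 2, which is
     not above 4, so execution falls through; original values of 2 and
     9 leave INDEX == (unsigned) -1 and 6 respectively, both of which
     the single GTU comparison below sends to DEFAULT_LABEL.  */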
11026 
11027   if (default_label)
11028     emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11029 			     default_label, default_probability);
11030 
11031 
11032   /* If index is in range, it must fit in Pmode.
11033      Convert to Pmode so we can index with it.  */
11034   if (mode != Pmode)
11035     index = convert_to_mode (Pmode, index, 1);
11036 
11037 	  /* Don't let a MEM slip through, because then the INDEX that comes
11038 	     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11039 	     and break_out_memory_refs will go to work on it and mess it up.  */
11040 #ifdef PIC_CASE_VECTOR_ADDRESS
11041   if (flag_pic && !REG_P (index))
11042     index = copy_to_mode_reg (Pmode, index);
11043 #endif
11044 
11045   /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11046      GET_MODE_SIZE, because this indicates how large insns are.  The other
11047      uses should all be Pmode, because they are addresses.  This code
11048      could fail if addresses and insns are not the same size.  */
11049   index = gen_rtx_PLUS (Pmode,
11050 			gen_rtx_MULT (Pmode, index,
11051 				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
11052 			gen_rtx_LABEL_REF (Pmode, table_label));
11053 #ifdef PIC_CASE_VECTOR_ADDRESS
11054   if (flag_pic)
11055     index = PIC_CASE_VECTOR_ADDRESS (index);
11056   else
11057 #endif
11058     index = memory_address (CASE_VECTOR_MODE, index);
11059   temp = gen_reg_rtx (CASE_VECTOR_MODE);
11060   vector = gen_const_mem (CASE_VECTOR_MODE, index);
11061   convert_move (temp, vector, 0);
11062 
11063   emit_jump_insn (gen_tablejump (temp, table_label));
11064 
11065   /* If we are generating PIC code or if the table is PC-relative, the
11066      table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
11067   if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11068     emit_barrier ();
11069 }
11070 
11071 int
11072 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11073 	       rtx table_label, rtx default_label, int default_probability)
11074 {
11075   rtx index;
11076 
11077   if (! HAVE_tablejump)
11078     return 0;
11079 
11080   index_expr = fold_build2 (MINUS_EXPR, index_type,
11081 			    fold_convert (index_type, index_expr),
11082 			    fold_convert (index_type, minval));
11083   index = expand_normal (index_expr);
11084   do_pending_stack_adjust ();
11085 
11086   do_tablejump (index, TYPE_MODE (index_type),
11087 		convert_modes (TYPE_MODE (index_type),
11088 			       TYPE_MODE (TREE_TYPE (range)),
11089 			       expand_normal (range),
11090 			       TYPE_UNSIGNED (TREE_TYPE (range))),
11091 		table_label, default_label, default_probability);
11092   return 1;
11093 }
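
#if 0
/* Standalone sketch (added for illustration, not GCC code) of the
   run-time behavior that try_tablejump and do_tablejump open-code:
   subtract the lowest case value, do one unsigned bounds check, and
   index a table of targets.  All names below are hypothetical.  */
#include <stdio.h>

static const char *
table_dispatch (long value, long minval, unsigned long range,
		const char *const *table, const char *default_label)
{
  /* Equivalent of the MINUS_EXPR built in try_tablejump.  */
  unsigned long index = (unsigned long) (value - minval);

  /* Equivalent of the single GTU comparison in do_tablejump.  */
  if (index > range)
    return default_label;

  /* Equivalent of the indirect jump through the case vector.  */
  return table[index];
}

int
main (void)
{
  static const char *const targets[] = { "L3", "L4", "L5", "L6", "L7" };

  printf ("%s\n", table_dispatch (5, 3, 4, targets, "Ldefault"));  /* L5 */
  printf ("%s\n", table_dispatch (2, 3, 4, targets, "Ldefault"));  /* Ldefault */
  return 0;
}
#endif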
11094 
11095 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
11096 static rtx
11097 const_vector_from_tree (tree exp)
11098 {
11099   rtvec v;
11100   unsigned i;
11101   int units;
11102   tree elt;
11103   enum machine_mode inner, mode;
11104 
11105   mode = TYPE_MODE (TREE_TYPE (exp));
11106 
11107   if (initializer_zerop (exp))
11108     return CONST0_RTX (mode);
11109 
11110   units = GET_MODE_NUNITS (mode);
11111   inner = GET_MODE_INNER (mode);
11112 
11113   v = rtvec_alloc (units);
11114 
11115   for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11116     {
11117       elt = VECTOR_CST_ELT (exp, i);
11118 
11119       if (TREE_CODE (elt) == REAL_CST)
11120 	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11121 							 inner);
11122       else if (TREE_CODE (elt) == FIXED_CST)
11123 	RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11124 							 inner);
11125       else
11126 	RTVEC_ELT (v, i) = immed_double_int_const (tree_to_double_int (elt),
11127 						   inner);
11128     }
11129 
11130   return gen_rtx_CONST_VECTOR (mode, v);
11131 }
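
/* Example (added for clarity, not in the original sources): for a
   V4SImode VECTOR_CST with elements {1, 2, 3, 4} the function above
   returns

	(const_vector:V4SI [(const_int 1) (const_int 2)
			    (const_int 3) (const_int 4)])

   while an all-zeros vector constant is caught early and shared as
   CONST0_RTX (V4SImode).  */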
11132 
11133 /* Build a decl for a personality function given a language prefix.  */
11134 
11135 tree
11136 build_personality_function (const char *lang)
11137 {
11138   const char *unwind_and_version;
11139   tree decl, type;
11140   char *name;
11141 
11142   switch (targetm_common.except_unwind_info (&global_options))
11143     {
11144     case UI_NONE:
11145       return NULL;
11146     case UI_SJLJ:
11147       unwind_and_version = "_sj0";
11148       break;
11149     case UI_DWARF2:
11150     case UI_TARGET:
11151       unwind_and_version = "_v0";
11152       break;
11153     case UI_SEH:
11154       unwind_and_version = "_seh0";
11155       break;
11156     default:
11157       gcc_unreachable ();
11158     }
11159 
11160   name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11161 
11162   type = build_function_type_list (integer_type_node, integer_type_node,
11163 				   long_long_unsigned_type_node,
11164 				   ptr_type_node, ptr_type_node, NULL_TREE);
11165   decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11166 		     get_identifier (name), type);
11167   DECL_ARTIFICIAL (decl) = 1;
11168   DECL_EXTERNAL (decl) = 1;
11169   TREE_PUBLIC (decl) = 1;
11170 
11171   /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
11172      are the flags assigned by targetm.encode_section_info.  */
11173   SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11174 
11175   return decl;
11176 }
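
/* Example (added for clarity, not in the original sources): a call such
   as build_personality_function ("gxx") produces an extern FUNCTION_DECL
   named "__gxx_personality_v0" when DWARF2 or target-specific unwind
   info is in use, "__gxx_personality_sj0" under SJLJ exceptions, and
   "__gxx_personality_seh0" under SEH.  */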
11177 
11178 /* Extract the personality function of DECL and return the corresponding
11179    libfunc.  */
11180 
11181 rtx
11182 get_personality_function (tree decl)
11183 {
11184   tree personality = DECL_FUNCTION_PERSONALITY (decl);
11185   enum eh_personality_kind pk;
11186 
11187   pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11188   if (pk == eh_personality_none)
11189     return NULL;
11190 
11191   if (!personality
11192       && pk == eh_personality_any)
11193     personality = lang_hooks.eh_personality ();
11194 
11195   if (pk == eh_personality_lang)
11196     gcc_assert (personality != NULL_TREE);
11197 
11198   return XEXP (DECL_RTL (personality), 0);
11199 }
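
/* Example (added for clarity, not in the original sources): a function
   whose body needs no exception handling is eh_personality_none and
   yields NULL here; one marked eh_personality_any uses
   lang_hooks.eh_personality () when no personality is recorded on the
   decl, e.g. the generic "__gcc_personality_v0" for C; and
   eh_personality_lang requires DECL_FUNCTION_PERSONALITY to have been
   set by the front end.  */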
11200 
11201 #include "gt-expr.h"
11202