xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/expr.c (revision 07ece4eabb6d327c320416d49d51617a7c0fb3be)
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2    Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3    2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4    Free Software Foundation, Inc.
5 
6 This file is part of GCC.
7 
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12 
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16 for more details.
17 
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3.  If not see
20 <http://www.gnu.org/licenses/>.  */
21 
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "machmode.h"
27 #include "real.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "flags.h"
31 #include "regs.h"
32 #include "hard-reg-set.h"
33 #include "except.h"
34 #include "function.h"
35 #include "insn-config.h"
36 #include "insn-attr.h"
37 /* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
38 #include "expr.h"
39 #include "optabs.h"
40 #include "libfuncs.h"
41 #include "recog.h"
42 #include "reload.h"
43 #include "output.h"
44 #include "typeclass.h"
45 #include "toplev.h"
46 #include "ggc.h"
47 #include "langhooks.h"
48 #include "intl.h"
49 #include "tm_p.h"
50 #include "tree-iterator.h"
51 #include "tree-pass.h"
52 #include "tree-flow.h"
53 #include "target.h"
54 #include "timevar.h"
55 #include "df.h"
56 #include "diagnostic.h"
57 #include "ssaexpand.h"
58 
59 /* Decide whether a function's arguments should be processed
60    from first to last or from last to first.
61 
62    They should if the stack and args grow in opposite directions, but
63    only if we have push insns.  */
64 
65 #ifdef PUSH_ROUNDING
66 
67 #ifndef PUSH_ARGS_REVERSED
68 #if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
69 #define PUSH_ARGS_REVERSED	/* If it's last to first.  */
70 #endif
71 #endif
72 
73 #endif
74 
75 #ifndef STACK_PUSH_CODE
76 #ifdef STACK_GROWS_DOWNWARD
77 #define STACK_PUSH_CODE PRE_DEC
78 #else
79 #define STACK_PUSH_CODE PRE_INC
80 #endif
81 #endif
82 
83 
84 /* If this is nonzero, we do not bother generating VOLATILE
85    around volatile memory references, and we are willing to
86    output indirect addresses.  If cse is to follow, we reject
87    indirect addresses so a useful potential cse is generated;
88    if it is used only once, instruction combination will produce
89    the same indirect address eventually.  */
90 int cse_not_expected;
91 
92 /* This structure is used by move_by_pieces to describe the move to
93    be performed.  */
94 struct move_by_pieces_d
95 {
96   rtx to;
97   rtx to_addr;
98   int autinc_to;
99   int explicit_inc_to;
100   rtx from;
101   rtx from_addr;
102   int autinc_from;
103   int explicit_inc_from;
104   unsigned HOST_WIDE_INT len;
105   HOST_WIDE_INT offset;
106   int reverse;
107 };
108 
109 /* This structure is used by store_by_pieces to describe the clear to
110    be performed.  */
111 
112 struct store_by_pieces_d
113 {
114   rtx to;
115   rtx to_addr;
116   int autinc_to;
117   int explicit_inc_to;
118   unsigned HOST_WIDE_INT len;
119   HOST_WIDE_INT offset;
120   rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
121   void *constfundata;
122   int reverse;
123 };
124 
125 static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
126 						     unsigned int,
127 						     unsigned int);
128 static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
129 			      struct move_by_pieces_d *);
130 static bool block_move_libcall_safe_for_call_parm (void);
131 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT);
132 static tree emit_block_move_libcall_fn (int);
133 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
134 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
135 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
136 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
137 static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
138 			       struct store_by_pieces_d *);
139 static tree clear_storage_libcall_fn (int);
140 static rtx compress_float_constant (rtx, rtx);
141 static rtx get_subtarget (rtx);
142 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
143 				     HOST_WIDE_INT, enum machine_mode,
144 				     tree, tree, int, alias_set_type);
145 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
146 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
147 			tree, tree, alias_set_type, bool);
148 
149 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
150 
151 static int is_aligning_offset (const_tree, const_tree);
152 static void expand_operands (tree, tree, rtx, rtx*, rtx*,
153 			     enum expand_modifier);
154 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
155 static rtx do_store_flag (sepops, rtx, enum machine_mode);
156 #ifdef PUSH_ROUNDING
157 static void emit_single_push_insn (enum machine_mode, rtx, tree);
158 #endif
159 static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
160 static rtx const_vector_from_tree (tree);
161 static void write_complex_part (rtx, rtx, bool);
162 
163 /* Record for each mode whether we can move a register directly to or
164    from an object of that mode in memory.  If we can't, we won't try
165    to use that mode directly when accessing a field of that mode.  */
166 
167 static char direct_load[NUM_MACHINE_MODES];
168 static char direct_store[NUM_MACHINE_MODES];
169 
170 /* Record for each mode whether we can float-extend from memory.  */
171 
172 static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
173 
174 /* This macro is used to determine whether move_by_pieces should be called
175    to perform a structure copy.  */
176 #ifndef MOVE_BY_PIECES_P
177 #define MOVE_BY_PIECES_P(SIZE, ALIGN) \
178   (move_by_pieces_ninsns (SIZE, ALIGN, MOVE_MAX_PIECES + 1) \
179    < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
180 #endif
181 
182 /* This macro is used to determine whether clear_by_pieces should be
183    called to clear storage.  */
184 #ifndef CLEAR_BY_PIECES_P
185 #define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
186   (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
187    < (unsigned int) CLEAR_RATIO (optimize_insn_for_speed_p ()))
188 #endif
189 
190 /* This macro is used to determine whether store_by_pieces should be
191    called to "memset" storage with byte values other than zero.  */
192 #ifndef SET_BY_PIECES_P
193 #define SET_BY_PIECES_P(SIZE, ALIGN) \
194   (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
195    < (unsigned int) SET_RATIO (optimize_insn_for_speed_p ()))
196 #endif
197 
198 /* This macro is used to determine whether store_by_pieces should be
199    called to "memcpy" storage when the source is a constant string.  */
200 #ifndef STORE_BY_PIECES_P
201 #define STORE_BY_PIECES_P(SIZE, ALIGN) \
202   (move_by_pieces_ninsns (SIZE, ALIGN, STORE_MAX_PIECES + 1) \
203    < (unsigned int) MOVE_RATIO (optimize_insn_for_speed_p ()))
204 #endif
205 
206 /* This array records the insn_code of insns to perform block moves.  */
207 enum insn_code movmem_optab[NUM_MACHINE_MODES];
208 
209 /* This array records the insn_code of insns to perform block sets.  */
210 enum insn_code setmem_optab[NUM_MACHINE_MODES];
211 
212 /* These arrays record the insn_code of three different kinds of insns
213    to perform block compares.  */
214 enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
215 enum insn_code cmpstrn_optab[NUM_MACHINE_MODES];
216 enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
217 
218 /* Synchronization primitives.  */
219 enum insn_code sync_add_optab[NUM_MACHINE_MODES];
220 enum insn_code sync_sub_optab[NUM_MACHINE_MODES];
221 enum insn_code sync_ior_optab[NUM_MACHINE_MODES];
222 enum insn_code sync_and_optab[NUM_MACHINE_MODES];
223 enum insn_code sync_xor_optab[NUM_MACHINE_MODES];
224 enum insn_code sync_nand_optab[NUM_MACHINE_MODES];
225 enum insn_code sync_old_add_optab[NUM_MACHINE_MODES];
226 enum insn_code sync_old_sub_optab[NUM_MACHINE_MODES];
227 enum insn_code sync_old_ior_optab[NUM_MACHINE_MODES];
228 enum insn_code sync_old_and_optab[NUM_MACHINE_MODES];
229 enum insn_code sync_old_xor_optab[NUM_MACHINE_MODES];
230 enum insn_code sync_old_nand_optab[NUM_MACHINE_MODES];
231 enum insn_code sync_new_add_optab[NUM_MACHINE_MODES];
232 enum insn_code sync_new_sub_optab[NUM_MACHINE_MODES];
233 enum insn_code sync_new_ior_optab[NUM_MACHINE_MODES];
234 enum insn_code sync_new_and_optab[NUM_MACHINE_MODES];
235 enum insn_code sync_new_xor_optab[NUM_MACHINE_MODES];
236 enum insn_code sync_new_nand_optab[NUM_MACHINE_MODES];
237 enum insn_code sync_compare_and_swap[NUM_MACHINE_MODES];
238 enum insn_code sync_lock_test_and_set[NUM_MACHINE_MODES];
239 enum insn_code sync_lock_release[NUM_MACHINE_MODES];
240 
241 /* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */
242 
243 #ifndef SLOW_UNALIGNED_ACCESS
244 #define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
245 #endif
246 
247 /* This is run to set up which modes can be used
248    directly in memory and to initialize the block move optab.  It is run
249    at the beginning of compilation and when the target is reinitialized.  */
250 
251 void
252 init_expr_target (void)
253 {
254   rtx insn, pat;
255   enum machine_mode mode;
256   int num_clobbers;
257   rtx mem, mem1;
258   rtx reg;
259 
260   /* Try indexing by frame ptr and try by stack ptr.
261      It is known that on the Convex the stack ptr isn't a valid index.
262      With luck, one or the other is valid on any machine.  */
263   mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
264   mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
265 
266   /* A scratch register we can modify in-place below to avoid
267      useless RTL allocations.  */
268   reg = gen_rtx_REG (VOIDmode, -1);
269 
270   insn = rtx_alloc (INSN);
271   pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
272   PATTERN (insn) = pat;
273 
274   for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
275        mode = (enum machine_mode) ((int) mode + 1))
276     {
277       int regno;
278 
279       direct_load[(int) mode] = direct_store[(int) mode] = 0;
280       PUT_MODE (mem, mode);
281       PUT_MODE (mem1, mode);
282       PUT_MODE (reg, mode);
283 
284       /* See if there is some register that can be used in this mode and
285 	 directly loaded or stored from memory.  */
286 
287       if (mode != VOIDmode && mode != BLKmode)
288 	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
289 	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
290 	     regno++)
291 	  {
292 	    if (! HARD_REGNO_MODE_OK (regno, mode))
293 	      continue;
294 
295 	    SET_REGNO (reg, regno);
296 
297 	    SET_SRC (pat) = mem;
298 	    SET_DEST (pat) = reg;
299 	    if (recog (pat, insn, &num_clobbers) >= 0)
300 	      direct_load[(int) mode] = 1;
301 
302 	    SET_SRC (pat) = mem1;
303 	    SET_DEST (pat) = reg;
304 	    if (recog (pat, insn, &num_clobbers) >= 0)
305 	      direct_load[(int) mode] = 1;
306 
307 	    SET_SRC (pat) = reg;
308 	    SET_DEST (pat) = mem;
309 	    if (recog (pat, insn, &num_clobbers) >= 0)
310 	      direct_store[(int) mode] = 1;
311 
312 	    SET_SRC (pat) = reg;
313 	    SET_DEST (pat) = mem1;
314 	    if (recog (pat, insn, &num_clobbers) >= 0)
315 	      direct_store[(int) mode] = 1;
316 	  }
317     }
318 
319   mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
320 
321   for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
322        mode = GET_MODE_WIDER_MODE (mode))
323     {
324       enum machine_mode srcmode;
325       for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
326 	   srcmode = GET_MODE_WIDER_MODE (srcmode))
327 	{
328 	  enum insn_code ic;
329 
330 	  ic = can_extend_p (mode, srcmode, 0);
331 	  if (ic == CODE_FOR_nothing)
332 	    continue;
333 
334 	  PUT_MODE (mem, srcmode);
335 
336 	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
337 	    float_extend_from_mem[mode][srcmode] = true;
338 	}
339     }
340 }
341 
342 /* This is run at the start of compiling a function.  */
343 
344 void
345 init_expr (void)
346 {
347   memset (&crtl->expr, 0, sizeof (crtl->expr));
348 }
349 
350 /* Copy data from FROM to TO, where the machine modes are not the same.
351    Both modes may be integer, or both may be floating, or both may be
352    fixed-point.
353    UNSIGNEDP should be nonzero if FROM is an unsigned type.
354    This causes zero-extension instead of sign-extension.  */
355 
356 void
357 convert_move (rtx to, rtx from, int unsignedp)
358 {
359   enum machine_mode to_mode = GET_MODE (to);
360   enum machine_mode from_mode = GET_MODE (from);
361   int to_real = SCALAR_FLOAT_MODE_P (to_mode);
362   int from_real = SCALAR_FLOAT_MODE_P (from_mode);
363   enum insn_code code;
364   rtx libcall;
365 
366   /* rtx code for making an equivalent value.  */
367   enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
368 			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
369 
370 
371   gcc_assert (to_real == from_real);
372   gcc_assert (to_mode != BLKmode);
373   gcc_assert (from_mode != BLKmode);
374 
375   /* If the source and destination are already the same, then there's
376      nothing to do.  */
377   if (to == from)
378     return;
379 
380   /* If FROM is a SUBREG that indicates that we have already done at least
381      the required extension, strip it.  We don't handle such SUBREGs as
382      TO here.  */
383 
384   if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
385       && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
386 	  >= GET_MODE_SIZE (to_mode))
387       && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
388     from = gen_lowpart (to_mode, from), from_mode = to_mode;
389 
390   gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
391 
392   if (to_mode == from_mode
393       || (from_mode == VOIDmode && CONSTANT_P (from)))
394     {
395       emit_move_insn (to, from);
396       return;
397     }
398 
399   if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
400     {
401       gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
402 
403       if (VECTOR_MODE_P (to_mode))
404 	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
405       else
406 	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
407 
408       emit_move_insn (to, from);
409       return;
410     }
411 
412   if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
413     {
414       convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
415       convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
416       return;
417     }
418 
419   if (to_real)
420     {
421       rtx value, insns;
422       convert_optab tab;
423 
424       gcc_assert ((GET_MODE_PRECISION (from_mode)
425 		   != GET_MODE_PRECISION (to_mode))
426 		  || (DECIMAL_FLOAT_MODE_P (from_mode)
427 		      != DECIMAL_FLOAT_MODE_P (to_mode)));
428 
429       if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
430 	/* Conversion between decimal float and binary float, same size.  */
431 	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
432       else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
433 	tab = sext_optab;
434       else
435 	tab = trunc_optab;
436 
437       /* Try converting directly if the insn is supported.  */
438 
439       code = convert_optab_handler (tab, to_mode, from_mode)->insn_code;
440       if (code != CODE_FOR_nothing)
441 	{
442 	  emit_unop_insn (code, to, from,
443 			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
444 	  return;
445 	}
446 
447       /* Otherwise use a libcall.  */
448       libcall = convert_optab_libfunc (tab, to_mode, from_mode);
449 
450       /* Is this conversion implemented yet?  */
451       gcc_assert (libcall);
452 
453       start_sequence ();
454       value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
455 				       1, from, from_mode);
456       insns = get_insns ();
457       end_sequence ();
458       emit_libcall_block (insns, to, value,
459 			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
460 								       from)
461 			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
462       return;
463     }
464 
465   /* Handle pointer conversion.  */			/* SPEE 900220.  */
466   /* Targets are expected to provide conversion insns between PxImode and
467      xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
468   if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
469     {
470       enum machine_mode full_mode
471 	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
472 
473       gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code
474 		  != CODE_FOR_nothing);
475 
476       if (full_mode != from_mode)
477 	from = convert_to_mode (full_mode, from, unsignedp);
478       emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode)->insn_code,
479 		      to, from, UNKNOWN);
480       return;
481     }
482   if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
483     {
484       rtx new_from;
485       enum machine_mode full_mode
486 	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
487 
488       gcc_assert (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code
489 		  != CODE_FOR_nothing);
490 
491       if (to_mode == full_mode)
492 	{
493 	  emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
494 			  to, from, UNKNOWN);
495 	  return;
496 	}
497 
498       new_from = gen_reg_rtx (full_mode);
499       emit_unop_insn (convert_optab_handler (sext_optab, full_mode, from_mode)->insn_code,
500 		      new_from, from, UNKNOWN);
501 
502       /* else proceed to integer conversions below.  */
503       from_mode = full_mode;
504       from = new_from;
505     }
506 
507    /* Make sure both are fixed-point modes or both are not.  */
508    gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
509 	       ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
510    if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
511     {
512       /* If we widen from_mode to to_mode and they are in the same class,
513 	 we won't saturate the result.
514 	 Otherwise, always saturate the result to play safe.  */
515       if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
516 	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
517 	expand_fixed_convert (to, from, 0, 0);
518       else
519 	expand_fixed_convert (to, from, 0, 1);
520       return;
521     }
522 
523   /* Now both modes are integers.  */
524 
525   /* Handle expanding beyond a word.  */
526   if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
527       && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
528     {
529       rtx insns;
530       rtx lowpart;
531       rtx fill_value;
532       rtx lowfrom;
533       int i;
534       enum machine_mode lowpart_mode;
535       int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
536 
537       /* Try converting directly if the insn is supported.  */
538       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
539 	  != CODE_FOR_nothing)
540 	{
541 	  /* If FROM is a SUBREG, put it into a register.  Do this
542 	     so that we always generate the same set of insns for
543 	     better cse'ing; if an intermediate assignment occurred,
544 	     we won't be doing the operation directly on the SUBREG.  */
545 	  if (optimize > 0 && GET_CODE (from) == SUBREG)
546 	    from = force_reg (from_mode, from);
547 	  emit_unop_insn (code, to, from, equiv_code);
548 	  return;
549 	}
550       /* Next, try converting via full word.  */
551       else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
552 	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
553 		   != CODE_FOR_nothing))
554 	{
555 	  rtx word_to = gen_reg_rtx (word_mode);
556 	  if (REG_P (to))
557 	    {
558 	      if (reg_overlap_mentioned_p (to, from))
559 		from = force_reg (from_mode, from);
560 	      emit_clobber (to);
561 	    }
562 	  convert_move (word_to, from, unsignedp);
563 	  emit_unop_insn (code, to, word_to, equiv_code);
564 	  return;
565 	}
566 
567       /* No special multiword conversion insn; do it by hand.  */
568       start_sequence ();
569 
570       /* Since we will turn this into a no conflict block, we must ensure
571 	 that the source does not overlap the target.  */
572 
573       if (reg_overlap_mentioned_p (to, from))
574 	from = force_reg (from_mode, from);
575 
576       /* Get a copy of FROM widened to a word, if necessary.  */
577       if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
578 	lowpart_mode = word_mode;
579       else
580 	lowpart_mode = from_mode;
581 
582       lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
583 
584       lowpart = gen_lowpart (lowpart_mode, to);
585       emit_move_insn (lowpart, lowfrom);
586 
587       /* Compute the value to put in each remaining word.  */
588       if (unsignedp)
589 	fill_value = const0_rtx;
590       else
591 	fill_value = emit_store_flag (gen_reg_rtx (word_mode),
592 				      LT, lowfrom, const0_rtx,
593 				      VOIDmode, 0, -1);
594 
595       /* Fill the remaining words.  */
596       for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
597 	{
598 	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
599 	  rtx subword = operand_subword (to, index, 1, to_mode);
600 
601 	  gcc_assert (subword);
602 
603 	  if (fill_value != subword)
604 	    emit_move_insn (subword, fill_value);
605 	}
606 
607       insns = get_insns ();
608       end_sequence ();
609 
610       emit_insn (insns);
611       return;
612     }
613 
614   /* Truncating multi-word to a word or less.  */
615   if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
616       && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
617     {
618       if (!((MEM_P (from)
619 	     && ! MEM_VOLATILE_P (from)
620 	     && direct_load[(int) to_mode]
621 	     && ! mode_dependent_address_p (XEXP (from, 0)))
622 	    || REG_P (from)
623 	    || GET_CODE (from) == SUBREG))
624 	from = force_reg (from_mode, from);
625       convert_move (to, gen_lowpart (word_mode, from), 0);
626       return;
627     }
628 
629   /* Now follow all the conversions between integers
630      no more than a word long.  */
631 
632   /* For truncation, usually we can just refer to FROM in a narrower mode.  */
633   if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
634       && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
635 				GET_MODE_BITSIZE (from_mode)))
636     {
637       if (!((MEM_P (from)
638 	     && ! MEM_VOLATILE_P (from)
639 	     && direct_load[(int) to_mode]
640 	     && ! mode_dependent_address_p (XEXP (from, 0)))
641 	    || REG_P (from)
642 	    || GET_CODE (from) == SUBREG))
643 	from = force_reg (from_mode, from);
644       if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
645 	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
646 	from = copy_to_reg (from);
647       emit_move_insn (to, gen_lowpart (to_mode, from));
648       return;
649     }
650 
651   /* Handle extension.  */
652   if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
653     {
654       /* Convert directly if that works.  */
655       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
656 	  != CODE_FOR_nothing)
657 	{
658 	  emit_unop_insn (code, to, from, equiv_code);
659 	  return;
660 	}
661       else
662 	{
663 	  enum machine_mode intermediate;
664 	  rtx tmp;
665 	  tree shift_amount;
666 
667 	  /* Search for a mode to convert via.  */
668 	  for (intermediate = from_mode; intermediate != VOIDmode;
669 	       intermediate = GET_MODE_WIDER_MODE (intermediate))
670 	    if (((can_extend_p (to_mode, intermediate, unsignedp)
671 		  != CODE_FOR_nothing)
672 		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
673 		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
674 					       GET_MODE_BITSIZE (intermediate))))
675 		&& (can_extend_p (intermediate, from_mode, unsignedp)
676 		    != CODE_FOR_nothing))
677 	      {
678 		convert_move (to, convert_to_mode (intermediate, from,
679 						   unsignedp), unsignedp);
680 		return;
681 	      }
682 
683 	  /* No suitable intermediate mode.
684 	     Generate what we need with	shifts.  */
685 	  shift_amount = build_int_cst (NULL_TREE,
686 					GET_MODE_BITSIZE (to_mode)
687 					- GET_MODE_BITSIZE (from_mode));
688 	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
689 	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
690 			      to, unsignedp);
691 	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
692 			      to, unsignedp);
693 	  if (tmp != to)
694 	    emit_move_insn (to, tmp);
695 	  return;
696 	}
697     }
698 
699   /* Support special truncate insns for certain modes.  */
700   if (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code != CODE_FOR_nothing)
701     {
702       emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode)->insn_code,
703 		      to, from, UNKNOWN);
704       return;
705     }
706 
707   /* Handle truncation of volatile memrefs, and so on;
708      the things that couldn't be truncated directly,
709      and for which there was no special instruction.
710 
711      ??? Code above formerly short-circuited this, for most integer
712      mode pairs, with a force_reg in from_mode followed by a recursive
713      call to this routine.  Appears always to have been wrong.  */
714   if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
715     {
716       rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
717       emit_move_insn (to, temp);
718       return;
719     }
720 
721   /* Mode combination is not recognized.  */
722   gcc_unreachable ();
723 }
724 
725 /* Return an rtx for a value that would result
726    from converting X to mode MODE.
727    Both X and MODE may be floating, or both integer.
728    UNSIGNEDP is nonzero if X is an unsigned value.
729    This can be done by referring to a part of X in place
730    or by copying to a new temporary with conversion.  */
731 
732 rtx
733 convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
734 {
735   return convert_modes (mode, VOIDmode, x, unsignedp);
736 }
737 
738 /* Return an rtx for a value that would result
739    from converting X from mode OLDMODE to mode MODE.
740    Both modes may be floating, or both integer.
741    UNSIGNEDP is nonzero if X is an unsigned value.
742 
743    This can be done by referring to a part of X in place
744    or by copying to a new temporary with conversion.
745 
746    You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
747 
748 rtx
749 convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
750 {
751   rtx temp;
752 
753   /* If FROM is a SUBREG that indicates that we have already done at least
754      the required extension, strip it.  */
755 
756   if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
757       && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
758       && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
759     x = gen_lowpart (mode, x);
760 
761   if (GET_MODE (x) != VOIDmode)
762     oldmode = GET_MODE (x);
763 
764   if (mode == oldmode)
765     return x;
766 
767   /* There is one case that we must handle specially: If we are converting
768      a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
769      we are to interpret the constant as unsigned, gen_lowpart will do
770      the wrong if the constant appears negative.  What we want to do is
771      make the high-order word of the constant zero, not all ones.  */
772 
773   if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
774       && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
775       && CONST_INT_P (x) && INTVAL (x) < 0)
776     {
777       HOST_WIDE_INT val = INTVAL (x);
778 
779       if (oldmode != VOIDmode
780 	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
781 	{
782 	  int width = GET_MODE_BITSIZE (oldmode);
783 
784 	  /* We need to zero extend VAL.  */
785 	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
786 	}
787 
788       return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
789     }
790 
791   /* We can do this with a gen_lowpart if both desired and current modes
792      are integer, and this is either a constant integer, a register, or a
793      non-volatile MEM.  Except for the constant case where MODE is no
794      wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */
795 
796   if ((CONST_INT_P (x)
797        && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
798       || (GET_MODE_CLASS (mode) == MODE_INT
799 	  && GET_MODE_CLASS (oldmode) == MODE_INT
800 	  && (GET_CODE (x) == CONST_DOUBLE
801 	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
802 		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
803 		       && direct_load[(int) mode])
804 		      || (REG_P (x)
805 			  && (! HARD_REGISTER_P (x)
806 			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
807 			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
808 						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
809     {
810       /* ?? If we don't know OLDMODE, we have to assume here that
811 	 X does not need sign- or zero-extension.   This may not be
812 	 the case, but it's the best we can do.  */
813       if (CONST_INT_P (x) && oldmode != VOIDmode
814 	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
815 	{
816 	  HOST_WIDE_INT val = INTVAL (x);
817 	  int width = GET_MODE_BITSIZE (oldmode);
818 
819 	  /* We must sign or zero-extend in this case.  Start by
820 	     zero-extending, then sign extend if we need to.  */
821 	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
822 	  if (! unsignedp
823 	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
824 	    val |= (HOST_WIDE_INT) (-1) << width;
825 
826 	  return gen_int_mode (val, mode);
827 	}
828 
829       return gen_lowpart (mode, x);
830     }
831 
832   /* Converting from integer constant into mode is always equivalent to an
833      subreg operation.  */
834   if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
835     {
836       gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
837       return simplify_gen_subreg (mode, x, oldmode, 0);
838     }
839 
840   temp = gen_reg_rtx (mode);
841   convert_move (temp, x, unsignedp);
842   return temp;
843 }
844 
845 /* STORE_MAX_PIECES is the number of bytes at a time that we can
846    store efficiently.  Due to internal GCC limitations, this is
847    MOVE_MAX_PIECES limited by the number of bytes GCC can represent
848    for an immediate constant.  */
849 
850 #define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
851 
852 /* Determine whether the LEN bytes can be moved by using several move
853    instructions.  Return nonzero if a call to move_by_pieces should
854    succeed.  */
855 
856 int
857 can_move_by_pieces (unsigned HOST_WIDE_INT len,
858 		    unsigned int align ATTRIBUTE_UNUSED)
859 {
860   return MOVE_BY_PIECES_P (len, align);
861 }
862 
863 /* Generate several move instructions to copy LEN bytes from block FROM to
864    block TO.  (These are MEM rtx's with BLKmode).
865 
866    If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
867    used to push FROM to the stack.
868 
869    ALIGN is maximum stack alignment we can assume.
870 
871    If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
872    mempcpy, and if ENDP is 2 return memory the end minus one byte ala
873    stpcpy.  */
874 
875 rtx
876 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
877 		unsigned int align, int endp)
878 {
879   struct move_by_pieces_d data;
880   enum machine_mode to_addr_mode, from_addr_mode
881     = targetm.addr_space.address_mode (MEM_ADDR_SPACE (from));
882   rtx to_addr, from_addr = XEXP (from, 0);
883   unsigned int max_size = MOVE_MAX_PIECES + 1;
884   enum machine_mode mode = VOIDmode, tmode;
885   enum insn_code icode;
886 
887   align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
888 
889   data.offset = 0;
890   data.from_addr = from_addr;
891   if (to)
892     {
893       to_addr_mode = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
894       to_addr = XEXP (to, 0);
895       data.to = to;
896       data.autinc_to
897 	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
898 	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
899       data.reverse
900 	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
901     }
902   else
903     {
904       to_addr_mode = VOIDmode;
905       to_addr = NULL_RTX;
906       data.to = NULL_RTX;
907       data.autinc_to = 1;
908 #ifdef STACK_GROWS_DOWNWARD
909       data.reverse = 1;
910 #else
911       data.reverse = 0;
912 #endif
913     }
914   data.to_addr = to_addr;
915   data.from = from;
916   data.autinc_from
917     = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
918        || GET_CODE (from_addr) == POST_INC
919        || GET_CODE (from_addr) == POST_DEC);
920 
921   data.explicit_inc_from = 0;
922   data.explicit_inc_to = 0;
923   if (data.reverse) data.offset = len;
924   data.len = len;
925 
926   /* If copying requires more than two move insns,
927      copy addresses to registers (to make displacements shorter)
928      and use post-increment if available.  */
929   if (!(data.autinc_from && data.autinc_to)
930       && move_by_pieces_ninsns (len, align, max_size) > 2)
931     {
932       /* Find the mode of the largest move...  */
933       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
934 	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
935 	if (GET_MODE_SIZE (tmode) < max_size)
936 	  mode = tmode;
937 
938       if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
939 	{
940 	  data.from_addr = copy_to_mode_reg (from_addr_mode,
941 					     plus_constant (from_addr, len));
942 	  data.autinc_from = 1;
943 	  data.explicit_inc_from = -1;
944 	}
945       if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
946 	{
947 	  data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
948 	  data.autinc_from = 1;
949 	  data.explicit_inc_from = 1;
950 	}
951       if (!data.autinc_from && CONSTANT_P (from_addr))
952 	data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
953       if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
954 	{
955 	  data.to_addr = copy_to_mode_reg (to_addr_mode,
956 					   plus_constant (to_addr, len));
957 	  data.autinc_to = 1;
958 	  data.explicit_inc_to = -1;
959 	}
960       if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
961 	{
962 	  data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
963 	  data.autinc_to = 1;
964 	  data.explicit_inc_to = 1;
965 	}
966       if (!data.autinc_to && CONSTANT_P (to_addr))
967 	data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
968     }
969 
970   tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
971   if (align >= GET_MODE_ALIGNMENT (tmode))
972     align = GET_MODE_ALIGNMENT (tmode);
973   else
974     {
975       enum machine_mode xmode;
976 
977       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
978 	   tmode != VOIDmode;
979 	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
980 	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
981 	    || SLOW_UNALIGNED_ACCESS (tmode, align))
982 	  break;
983 
984       align = MAX (align, GET_MODE_ALIGNMENT (xmode));
985     }
986 
987   /* First move what we can in the largest integer mode, then go to
988      successively smaller modes.  */
989 
990   while (max_size > 1)
991     {
992       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
993 	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
994 	if (GET_MODE_SIZE (tmode) < max_size)
995 	  mode = tmode;
996 
997       if (mode == VOIDmode)
998 	break;
999 
1000       icode = optab_handler (mov_optab, mode)->insn_code;
1001       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1002 	move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1003 
1004       max_size = GET_MODE_SIZE (mode);
1005     }
1006 
1007   /* The code above should have handled everything.  */
1008   gcc_assert (!data.len);
1009 
1010   if (endp)
1011     {
1012       rtx to1;
1013 
1014       gcc_assert (!data.reverse);
1015       if (data.autinc_to)
1016 	{
1017 	  if (endp == 2)
1018 	    {
1019 	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
1020 		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
1021 	      else
1022 		data.to_addr = copy_to_mode_reg (to_addr_mode,
1023 						 plus_constant (data.to_addr,
1024 								-1));
1025 	    }
1026 	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
1027 					   data.offset);
1028 	}
1029       else
1030 	{
1031 	  if (endp == 2)
1032 	    --data.offset;
1033 	  to1 = adjust_address (data.to, QImode, data.offset);
1034 	}
1035       return to1;
1036     }
1037   else
1038     return data.to;
1039 }
1040 
1041 /* Return number of insns required to move L bytes by pieces.
1042    ALIGN (in bits) is maximum alignment we can assume.  */
1043 
1044 static unsigned HOST_WIDE_INT
1045 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
1046 		       unsigned int max_size)
1047 {
1048   unsigned HOST_WIDE_INT n_insns = 0;
1049   enum machine_mode tmode;
1050 
1051   tmode = mode_for_size (MOVE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
1052   if (align >= GET_MODE_ALIGNMENT (tmode))
1053     align = GET_MODE_ALIGNMENT (tmode);
1054   else
1055     {
1056       enum machine_mode tmode, xmode;
1057 
1058       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
1059 	   tmode != VOIDmode;
1060 	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
1061 	if (GET_MODE_SIZE (tmode) > MOVE_MAX_PIECES
1062 	    || SLOW_UNALIGNED_ACCESS (tmode, align))
1063 	  break;
1064 
1065       align = MAX (align, GET_MODE_ALIGNMENT (xmode));
1066     }
1067 
1068   while (max_size > 1)
1069     {
1070       enum machine_mode mode = VOIDmode;
1071       enum insn_code icode;
1072 
1073       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1074 	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1075 	if (GET_MODE_SIZE (tmode) < max_size)
1076 	  mode = tmode;
1077 
1078       if (mode == VOIDmode)
1079 	break;
1080 
1081       icode = optab_handler (mov_optab, mode)->insn_code;
1082       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1083 	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1084 
1085       max_size = GET_MODE_SIZE (mode);
1086     }
1087 
1088   gcc_assert (!l);
1089   return n_insns;
1090 }
1091 
1092 /* Subroutine of move_by_pieces.  Move as many bytes as appropriate
1093    with move instructions for mode MODE.  GENFUN is the gen_... function
1094    to make a move insn for that mode.  DATA has all the other info.  */
1095 
1096 static void
1097 move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
1098 		  struct move_by_pieces_d *data)
1099 {
1100   unsigned int size = GET_MODE_SIZE (mode);
1101   rtx to1 = NULL_RTX, from1;
1102 
1103   while (data->len >= size)
1104     {
1105       if (data->reverse)
1106 	data->offset -= size;
1107 
1108       if (data->to)
1109 	{
1110 	  if (data->autinc_to)
1111 	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1112 					     data->offset);
1113 	  else
1114 	    to1 = adjust_address (data->to, mode, data->offset);
1115 	}
1116 
1117       if (data->autinc_from)
1118 	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1119 					   data->offset);
1120       else
1121 	from1 = adjust_address (data->from, mode, data->offset);
1122 
1123       if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1124 	emit_insn (gen_add2_insn (data->to_addr,
1125 				  GEN_INT (-(HOST_WIDE_INT)size)));
1126       if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1127 	emit_insn (gen_add2_insn (data->from_addr,
1128 				  GEN_INT (-(HOST_WIDE_INT)size)));
1129 
1130       if (data->to)
1131 	emit_insn ((*genfun) (to1, from1));
1132       else
1133 	{
1134 #ifdef PUSH_ROUNDING
1135 	  emit_single_push_insn (mode, from1, NULL);
1136 #else
1137 	  gcc_unreachable ();
1138 #endif
1139 	}
1140 
1141       if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1142 	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1143       if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1144 	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1145 
1146       if (! data->reverse)
1147 	data->offset += size;
1148 
1149       data->len -= size;
1150     }
1151 }
1152 
1153 /* Emit code to move a block Y to a block X.  This may be done with
1154    string-move instructions, with multiple scalar move instructions,
1155    or with a library call.
1156 
1157    Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1158    SIZE is an rtx that says how long they are.
1159    ALIGN is the maximum alignment we can assume they have.
1160    METHOD describes what kind of copy this is, and what mechanisms may be used.
1161 
1162    Return the address of the new block, if memcpy is called and returns it,
1163    0 otherwise.  */
1164 
1165 rtx
1166 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1167 		       unsigned int expected_align, HOST_WIDE_INT expected_size)
1168 {
1169   bool may_use_call;
1170   rtx retval = 0;
1171   unsigned int align;
1172 
1173   switch (method)
1174     {
1175     case BLOCK_OP_NORMAL:
1176     case BLOCK_OP_TAILCALL:
1177       may_use_call = true;
1178       break;
1179 
1180     case BLOCK_OP_CALL_PARM:
1181       may_use_call = block_move_libcall_safe_for_call_parm ();
1182 
1183       /* Make inhibit_defer_pop nonzero around the library call
1184 	 to force it to pop the arguments right away.  */
1185       NO_DEFER_POP;
1186       break;
1187 
1188     case BLOCK_OP_NO_LIBCALL:
1189       may_use_call = false;
1190       break;
1191 
1192     default:
1193       gcc_unreachable ();
1194     }
1195 
1196   align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1197   gcc_assert (align >= BITS_PER_UNIT);
1198 
1199   gcc_assert (MEM_P (x));
1200   gcc_assert (MEM_P (y));
1201   gcc_assert (size);
1202 
1203   /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1204      block copy is more efficient for other large modes, e.g. DCmode.  */
1205   x = adjust_address (x, BLKmode, 0);
1206   y = adjust_address (y, BLKmode, 0);
1207 
1208   /* Set MEM_SIZE as appropriate for this block copy.  The main place this
1209      can be incorrect is coming from __builtin_memcpy.  */
1210   if (CONST_INT_P (size))
1211     {
1212       if (INTVAL (size) == 0)
1213 	return 0;
1214 
1215       x = shallow_copy_rtx (x);
1216       y = shallow_copy_rtx (y);
1217       set_mem_size (x, size);
1218       set_mem_size (y, size);
1219     }
1220 
1221   if (CONST_INT_P (size) && MOVE_BY_PIECES_P (INTVAL (size), align))
1222     move_by_pieces (x, y, INTVAL (size), align, 0);
1223   else if (emit_block_move_via_movmem (x, y, size, align,
1224 				       expected_align, expected_size))
1225     ;
1226   else if (may_use_call
1227 	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1228 	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1229     retval = emit_block_move_via_libcall (x, y, size,
1230 					  method == BLOCK_OP_TAILCALL);
1231   else
1232     emit_block_move_via_loop (x, y, size, align);
1233 
1234   if (method == BLOCK_OP_CALL_PARM)
1235     OK_DEFER_POP;
1236 
1237   return retval;
1238 }
1239 
1240 rtx
1241 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1242 {
1243   return emit_block_move_hints (x, y, size, method, 0, -1);
1244 }
1245 
1246 /* A subroutine of emit_block_move.  Returns true if calling the
1247    block move libcall will not clobber any parameters which may have
1248    already been placed on the stack.  */
1249 
1250 static bool
1251 block_move_libcall_safe_for_call_parm (void)
1252 {
1253 #if defined (REG_PARM_STACK_SPACE)
1254   tree fn;
1255 #endif
1256 
1257   /* If arguments are pushed on the stack, then they're safe.  */
1258   if (PUSH_ARGS)
1259     return true;
1260 
1261   /* If registers go on the stack anyway, any argument is sure to clobber
1262      an outgoing argument.  */
1263 #if defined (REG_PARM_STACK_SPACE)
1264   fn = emit_block_move_libcall_fn (false);
1265   if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1266       && REG_PARM_STACK_SPACE (fn) != 0)
1267     return false;
1268 #endif
1269 
1270   /* If any argument goes in memory, then it might clobber an outgoing
1271      argument.  */
1272   {
1273     CUMULATIVE_ARGS args_so_far;
1274     tree fn, arg;
1275 
1276     fn = emit_block_move_libcall_fn (false);
1277     INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
1278 
1279     arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1280     for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1281       {
1282 	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1283 	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1284 	if (!tmp || !REG_P (tmp))
1285 	  return false;
1286 	if (targetm.calls.arg_partial_bytes (&args_so_far, mode, NULL, 1))
1287 	  return false;
1288 	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1289       }
1290   }
1291   return true;
1292 }
1293 
1294 /* A subroutine of emit_block_move.  Expand a movmem pattern;
1295    return true if successful.  */
1296 
1297 static bool
1298 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1299 			    unsigned int expected_align, HOST_WIDE_INT expected_size)
1300 {
1301   rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1302   int save_volatile_ok = volatile_ok;
1303   enum machine_mode mode;
1304 
1305   if (expected_align < align)
1306     expected_align = align;
1307 
1308   /* Since this is a move insn, we don't care about volatility.  */
1309   volatile_ok = 1;
1310 
1311   /* Try the most limited insn first, because there's no point
1312      including more than one in the machine description unless
1313      the more limited one has some advantage.  */
1314 
1315   for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1316        mode = GET_MODE_WIDER_MODE (mode))
1317     {
1318       enum insn_code code = movmem_optab[(int) mode];
1319       insn_operand_predicate_fn pred;
1320 
1321       if (code != CODE_FOR_nothing
1322 	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1323 	     here because if SIZE is less than the mode mask, as it is
1324 	     returned by the macro, it will definitely be less than the
1325 	     actual mode mask.  */
1326 	  && ((CONST_INT_P (size)
1327 	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
1328 		   <= (GET_MODE_MASK (mode) >> 1)))
1329 	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1330 	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1331 	      || (*pred) (x, BLKmode))
1332 	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1333 	      || (*pred) (y, BLKmode))
1334 	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1335 	      || (*pred) (opalign, VOIDmode)))
1336 	{
1337 	  rtx op2;
1338 	  rtx last = get_last_insn ();
1339 	  rtx pat;
1340 
1341 	  op2 = convert_to_mode (mode, size, 1);
1342 	  pred = insn_data[(int) code].operand[2].predicate;
1343 	  if (pred != 0 && ! (*pred) (op2, mode))
1344 	    op2 = copy_to_mode_reg (mode, op2);
1345 
1346 	  /* ??? When called via emit_block_move_for_call, it'd be
1347 	     nice if there were some way to inform the backend, so
1348 	     that it doesn't fail the expansion because it thinks
1349 	     emitting the libcall would be more efficient.  */
1350 
1351 	  if (insn_data[(int) code].n_operands == 4)
1352 	    pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1353 	  else
1354 	    pat = GEN_FCN ((int) code) (x, y, op2, opalign,
1355 					GEN_INT (expected_align
1356 						 / BITS_PER_UNIT),
1357 					GEN_INT (expected_size));
1358 	  if (pat)
1359 	    {
1360 	      emit_insn (pat);
1361 	      volatile_ok = save_volatile_ok;
1362 	      return true;
1363 	    }
1364 	  else
1365 	    delete_insns_since (last);
1366 	}
1367     }
1368 
1369   volatile_ok = save_volatile_ok;
1370   return false;
1371 }
1372 
1373 /* A subroutine of emit_block_move.  Expand a call to memcpy.
1374    Return the return value from memcpy, 0 otherwise.  */
1375 
1376 rtx
1377 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1378 {
1379   rtx dst_addr, src_addr;
1380   tree call_expr, fn, src_tree, dst_tree, size_tree;
1381   enum machine_mode size_mode;
1382   rtx retval;
1383 
1384   /* Emit code to copy the addresses of DST and SRC and SIZE into new
1385      pseudos.  We can then place those new pseudos into a VAR_DECL and
1386      use them later.  */
1387 
1388   dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1389   src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1390 
1391   dst_addr = convert_memory_address (ptr_mode, dst_addr);
1392   src_addr = convert_memory_address (ptr_mode, src_addr);
1393 
1394   dst_tree = make_tree (ptr_type_node, dst_addr);
1395   src_tree = make_tree (ptr_type_node, src_addr);
1396 
1397   size_mode = TYPE_MODE (sizetype);
1398 
1399   size = convert_to_mode (size_mode, size, 1);
1400   size = copy_to_mode_reg (size_mode, size);
1401 
1402   /* It is incorrect to use the libcall calling conventions to call
1403      memcpy in this context.  This could be a user call to memcpy and
1404      the user may wish to examine the return value from memcpy.  For
1405      targets where libcalls and normal calls have different conventions
1406      for returning pointers, we could end up generating incorrect code.  */
1407 
1408   size_tree = make_tree (sizetype, size);
1409 
1410   fn = emit_block_move_libcall_fn (true);
1411   call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1412   CALL_EXPR_TAILCALL (call_expr) = tailcall;
1413 
1414   retval = expand_normal (call_expr);
1415 
1416   return retval;
1417 }
1418 
1419 /* A subroutine of emit_block_move_via_libcall.  Create the tree node
1420    for the function we use for block copies.  The first time FOR_CALL
1421    is true, we call assemble_external.  */
1422 
1423 static GTY(()) tree block_move_fn;
1424 
1425 void
1426 init_block_move_fn (const char *asmspec)
1427 {
1428   if (!block_move_fn)
1429     {
1430       tree args, fn;
1431 
1432       fn = get_identifier ("memcpy");
1433       args = build_function_type_list (ptr_type_node, ptr_type_node,
1434 				       const_ptr_type_node, sizetype,
1435 				       NULL_TREE);
1436 
1437       fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1438       DECL_EXTERNAL (fn) = 1;
1439       TREE_PUBLIC (fn) = 1;
1440       DECL_ARTIFICIAL (fn) = 1;
1441       TREE_NOTHROW (fn) = 1;
1442       DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1443       DECL_VISIBILITY_SPECIFIED (fn) = 1;
1444 
1445       block_move_fn = fn;
1446     }
1447 
1448   if (asmspec)
1449     set_user_assembler_name (block_move_fn, asmspec);
1450 }
1451 
1452 static tree
1453 emit_block_move_libcall_fn (int for_call)
1454 {
1455   static bool emitted_extern;
1456 
1457   if (!block_move_fn)
1458     init_block_move_fn (NULL);
1459 
1460   if (for_call && !emitted_extern)
1461     {
1462       emitted_extern = true;
1463       make_decl_rtl (block_move_fn);
1464       assemble_external (block_move_fn);
1465     }
1466 
1467   return block_move_fn;
1468 }
1469 
1470 /* A subroutine of emit_block_move.  Copy the data via an explicit
1471    loop.  This is used only when libcalls are forbidden.  */
1472 /* ??? It'd be nice to copy in hunks larger than QImode.  */
1473 
1474 static void
1475 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1476 			  unsigned int align ATTRIBUTE_UNUSED)
1477 {
1478   rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
1479   enum machine_mode x_addr_mode
1480     = targetm.addr_space.address_mode (MEM_ADDR_SPACE (x));
1481   enum machine_mode y_addr_mode
1482     = targetm.addr_space.address_mode (MEM_ADDR_SPACE (y));
1483   enum machine_mode iter_mode;
1484 
1485   iter_mode = GET_MODE (size);
1486   if (iter_mode == VOIDmode)
1487     iter_mode = word_mode;
1488 
1489   top_label = gen_label_rtx ();
1490   cmp_label = gen_label_rtx ();
1491   iter = gen_reg_rtx (iter_mode);
1492 
1493   emit_move_insn (iter, const0_rtx);
1494 
1495   x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1496   y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1497   do_pending_stack_adjust ();
1498 
1499   emit_jump (cmp_label);
1500   emit_label (top_label);
1501 
1502   tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1503   x_addr = gen_rtx_PLUS (x_addr_mode, x_addr, tmp);
1504 
1505   if (x_addr_mode != y_addr_mode)
1506     tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1507   y_addr = gen_rtx_PLUS (y_addr_mode, y_addr, tmp);
1508 
1509   x = change_address (x, QImode, x_addr);
1510   y = change_address (y, QImode, y_addr);
1511 
1512   emit_move_insn (x, y);
1513 
1514   tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1515 			     true, OPTAB_LIB_WIDEN);
1516   if (tmp != iter)
1517     emit_move_insn (iter, tmp);
1518 
1519   emit_label (cmp_label);
1520 
1521   emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1522 			   true, top_label);
1523 }
1524 
1525 /* Copy all or part of a value X into registers starting at REGNO.
1526    The number of registers to be filled is NREGS.  */
1527 
1528 void
1529 move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
1530 {
1531   int i;
1532 #ifdef HAVE_load_multiple
1533   rtx pat;
1534   rtx last;
1535 #endif
1536 
1537   if (nregs == 0)
1538     return;
1539 
1540   if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1541     x = validize_mem (force_const_mem (mode, x));
1542 
1543   /* See if the machine can do this with a load multiple insn.  */
1544 #ifdef HAVE_load_multiple
1545   if (HAVE_load_multiple)
1546     {
1547       last = get_last_insn ();
1548       pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1549 			       GEN_INT (nregs));
1550       if (pat)
1551 	{
1552 	  emit_insn (pat);
1553 	  return;
1554 	}
1555       else
1556 	delete_insns_since (last);
1557     }
1558 #endif
1559 
1560   for (i = 0; i < nregs; i++)
1561     emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1562 		    operand_subword_force (x, i, mode));
1563 }
1564 
1565 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1566    The number of registers to be filled is NREGS.  */
1567 
1568 void
1569 move_block_from_reg (int regno, rtx x, int nregs)
1570 {
1571   int i;
1572 
1573   if (nregs == 0)
1574     return;
1575 
1576   /* See if the machine can do this with a store multiple insn.  */
1577 #ifdef HAVE_store_multiple
1578   if (HAVE_store_multiple)
1579     {
1580       rtx last = get_last_insn ();
1581       rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1582 				    GEN_INT (nregs));
1583       if (pat)
1584 	{
1585 	  emit_insn (pat);
1586 	  return;
1587 	}
1588       else
1589 	delete_insns_since (last);
1590     }
1591 #endif
1592 
1593   for (i = 0; i < nregs; i++)
1594     {
1595       rtx tem = operand_subword (x, i, 1, BLKmode);
1596 
1597       gcc_assert (tem);
1598 
1599       emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1600     }
1601 }
1602 
1603 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1604    ORIG, where ORIG is a non-consecutive group of registers represented by
1605    a PARALLEL.  The clone is identical to the original except in that the
1606    original set of registers is replaced by a new set of pseudo registers.
1607    The new set has the same modes as the original set.  */
1608 
1609 rtx
1610 gen_group_rtx (rtx orig)
1611 {
1612   int i, length;
1613   rtx *tmps;
1614 
1615   gcc_assert (GET_CODE (orig) == PARALLEL);
1616 
1617   length = XVECLEN (orig, 0);
1618   tmps = XALLOCAVEC (rtx, length);
1619 
1620   /* Skip a NULL entry in first slot.  */
1621   i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1622 
1623   if (i)
1624     tmps[0] = 0;
1625 
1626   for (; i < length; i++)
1627     {
1628       enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1629       rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1630 
1631       tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1632     }
1633 
1634   return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1635 }
1636 
1637 /* A subroutine of emit_group_load.  Arguments as for emit_group_load,
1638    except that values are placed in TMPS[i], and must later be moved
1639    into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */
1640 
1641 static void
1642 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1643 {
1644   rtx src;
1645   int start, i;
1646   enum machine_mode m = GET_MODE (orig_src);
1647 
1648   gcc_assert (GET_CODE (dst) == PARALLEL);
1649 
1650   if (m != VOIDmode
1651       && !SCALAR_INT_MODE_P (m)
1652       && !MEM_P (orig_src)
1653       && GET_CODE (orig_src) != CONCAT)
1654     {
1655       enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1656       if (imode == BLKmode)
1657 	src = assign_stack_temp (GET_MODE (orig_src), ssize, 0);
1658       else
1659 	src = gen_reg_rtx (imode);
1660       if (imode != BLKmode)
1661 	src = gen_lowpart (GET_MODE (orig_src), src);
1662       emit_move_insn (src, orig_src);
1663       /* ...and back again.  */
1664       if (imode != BLKmode)
1665 	src = gen_lowpart (imode, src);
1666       emit_group_load_1 (tmps, dst, src, type, ssize);
1667       return;
1668     }
1669 
1670   /* Check for a NULL entry, used to indicate that the parameter goes
1671      both on the stack and in registers.  */
1672   if (XEXP (XVECEXP (dst, 0, 0), 0))
1673     start = 0;
1674   else
1675     start = 1;
1676 
1677   /* Process the pieces.  */
1678   for (i = start; i < XVECLEN (dst, 0); i++)
1679     {
1680       enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1681       HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1682       unsigned int bytelen = GET_MODE_SIZE (mode);
1683       int shift = 0;
1684 
1685       /* Handle trailing fragments that run over the size of the struct.  */
1686       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1687 	{
1688 	  /* Arrange to shift the fragment to where it belongs.
1689 	     extract_bit_field loads to the lsb of the reg.  */
1690 	  if (
1691 #ifdef BLOCK_REG_PADDING
1692 	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1693 	      == (BYTES_BIG_ENDIAN ? upward : downward)
1694 #else
1695 	      BYTES_BIG_ENDIAN
1696 #endif
1697 	      )
1698 	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1699 	  bytelen = ssize - bytepos;
1700 	  gcc_assert (bytelen > 0);
1701 	}
1702 
1703       /* If we won't be loading directly from memory, protect the real source
1704 	 from strange tricks we might play; but make sure that the source can
1705 	 be loaded directly into the destination.  */
1706       src = orig_src;
1707       if (!MEM_P (orig_src)
1708 	  && (!CONSTANT_P (orig_src)
1709 	      || (GET_MODE (orig_src) != mode
1710 		  && GET_MODE (orig_src) != VOIDmode)))
1711 	{
1712 	  if (GET_MODE (orig_src) == VOIDmode)
1713 	    src = gen_reg_rtx (mode);
1714 	  else
1715 	    src = gen_reg_rtx (GET_MODE (orig_src));
1716 
1717 	  emit_move_insn (src, orig_src);
1718 	}
1719 
1720       /* Optimize the access just a bit.  */
1721       if (MEM_P (src)
1722 	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1723 	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1724 	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1725 	  && bytelen == GET_MODE_SIZE (mode))
1726 	{
1727 	  tmps[i] = gen_reg_rtx (mode);
1728 	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1729 	}
1730       else if (COMPLEX_MODE_P (mode)
1731 	       && GET_MODE (src) == mode
1732 	       && bytelen == GET_MODE_SIZE (mode))
1733 	/* Let emit_move_complex do the bulk of the work.  */
1734 	tmps[i] = src;
1735       else if (GET_CODE (src) == CONCAT)
1736 	{
1737 	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1738 	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1739 
1740 	  if ((bytepos == 0 && bytelen == slen0)
1741 	      || (bytepos != 0 && bytepos + bytelen <= slen))
1742 	    {
1743 	      /* The following assumes that the concatenated objects all
1744 		 have the same size.  In this case, a simple calculation
1745 		 can be used to determine the object and the bit field
1746 		 to be extracted.  */
1747 	      tmps[i] = XEXP (src, bytepos / slen0);
1748 	      if (! CONSTANT_P (tmps[i])
1749 		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1750 		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1751 					     (bytepos % slen0) * BITS_PER_UNIT,
1752 					     1, NULL_RTX, mode, mode);
1753 	    }
1754 	  else
1755 	    {
1756 	      rtx mem;
1757 
1758 	      gcc_assert (!bytepos);
1759 	      mem = assign_stack_temp (GET_MODE (src), slen, 0);
1760 	      emit_move_insn (mem, src);
1761 	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1762 					   0, 1, NULL_RTX, mode, mode);
1763 	    }
1764 	}
1765       /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1766 	 SIMD register, which is currently broken.  While we get GCC
1767 	 to emit proper RTL for these cases, let's dump to memory.  */
1768       else if (VECTOR_MODE_P (GET_MODE (dst))
1769 	       && REG_P (src))
1770 	{
1771 	  int slen = GET_MODE_SIZE (GET_MODE (src));
1772 	  rtx mem;
1773 
1774 	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
1775 	  emit_move_insn (mem, src);
1776 	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
1777 	}
1778       else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1779                && XVECLEN (dst, 0) > 1)
1780         tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1781       else if (CONSTANT_P (src))
1782 	{
1783 	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1784 
1785 	  if (len == ssize)
1786 	    tmps[i] = src;
1787 	  else
1788 	    {
1789 	      rtx first, second;
1790 
1791 	      gcc_assert (2 * len == ssize);
1792 	      split_double (src, &first, &second);
1793 	      if (i)
1794 		tmps[i] = second;
1795 	      else
1796 		tmps[i] = first;
1797 	    }
1798 	}
1799       else if (REG_P (src) && GET_MODE (src) == mode)
1800 	tmps[i] = src;
1801       else
1802 	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1803 				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1804 				     mode, mode);
1805 
1806       if (shift)
1807 	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1808 				build_int_cst (NULL_TREE, shift), tmps[i], 0);
1809     }
1810 }
1811 
1812 /* Emit code to move a block SRC of type TYPE to a block DST,
1813    where DST is non-consecutive registers represented by a PARALLEL.
1814    SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1815    if not known.  */
1816 
1817 void
1818 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1819 {
1820   rtx *tmps;
1821   int i;
1822 
1823   tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1824   emit_group_load_1 (tmps, dst, src, type, ssize);
1825 
1826   /* Copy the extracted pieces into the proper (probable) hard regs.  */
1827   for (i = 0; i < XVECLEN (dst, 0); i++)
1828     {
1829       rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1830       if (d == NULL)
1831 	continue;
1832       emit_move_insn (d, tmps[i]);
1833     }
1834 }
1835 
1836 /* Similar, but load SRC into new pseudos in a format that looks like
1837    PARALLEL.  This can later be fed to emit_group_move to get things
1838    in the right place.  */
1839 
1840 rtx
1841 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1842 {
1843   rtvec vec;
1844   int i;
1845 
1846   vec = rtvec_alloc (XVECLEN (parallel, 0));
1847   emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1848 
1849   /* Convert the vector to look just like the original PARALLEL, except
1850      with the computed values.  */
1851   for (i = 0; i < XVECLEN (parallel, 0); i++)
1852     {
1853       rtx e = XVECEXP (parallel, 0, i);
1854       rtx d = XEXP (e, 0);
1855 
1856       if (d)
1857 	{
1858 	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1859 	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1860 	}
1861       RTVEC_ELT (vec, i) = e;
1862     }
1863 
1864   return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1865 }
1866 
1867 /* Emit code to move a block SRC to block DST, where SRC and DST are
1868    non-consecutive groups of registers, each represented by a PARALLEL.  */
1869 
1870 void
1871 emit_group_move (rtx dst, rtx src)
1872 {
1873   int i;
1874 
1875   gcc_assert (GET_CODE (src) == PARALLEL
1876 	      && GET_CODE (dst) == PARALLEL
1877 	      && XVECLEN (src, 0) == XVECLEN (dst, 0));
1878 
1879   /* Skip first entry if NULL.  */
1880   for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1881     emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1882 		    XEXP (XVECEXP (src, 0, i), 0));
1883 }
1884 
1885 /* Move a group of registers represented by a PARALLEL into pseudos.  */
1886 
1887 rtx
1888 emit_group_move_into_temps (rtx src)
1889 {
1890   rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1891   int i;
1892 
1893   for (i = 0; i < XVECLEN (src, 0); i++)
1894     {
1895       rtx e = XVECEXP (src, 0, i);
1896       rtx d = XEXP (e, 0);
1897 
1898       if (d)
1899 	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1900       RTVEC_ELT (vec, i) = e;
1901     }
1902 
1903   return gen_rtx_PARALLEL (GET_MODE (src), vec);
1904 }
1905 
1906 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1907    where SRC is non-consecutive registers represented by a PARALLEL.
1908    SSIZE represents the total size of block ORIG_DST, or -1 if not
1909    known.  */
1910 
1911 void
1912 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1913 {
1914   rtx *tmps, dst;
1915   int start, finish, i;
1916   enum machine_mode m = GET_MODE (orig_dst);
1917 
1918   gcc_assert (GET_CODE (src) == PARALLEL);
1919 
1920   if (!SCALAR_INT_MODE_P (m)
1921       && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1922     {
1923       enum machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1924       if (imode == BLKmode)
1925         dst = assign_stack_temp (GET_MODE (orig_dst), ssize, 0);
1926       else
1927         dst = gen_reg_rtx (imode);
1928       emit_group_store (dst, src, type, ssize);
1929       if (imode != BLKmode)
1930         dst = gen_lowpart (GET_MODE (orig_dst), dst);
1931       emit_move_insn (orig_dst, dst);
1932       return;
1933     }
1934 
1935   /* Check for a NULL entry, used to indicate that the parameter goes
1936      both on the stack and in registers.  */
1937   if (XEXP (XVECEXP (src, 0, 0), 0))
1938     start = 0;
1939   else
1940     start = 1;
1941   finish = XVECLEN (src, 0);
1942 
1943   tmps = XALLOCAVEC (rtx, finish);
1944 
1945   /* Copy the (probable) hard regs into pseudos.  */
1946   for (i = start; i < finish; i++)
1947     {
1948       rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1949       if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1950 	{
1951 	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
1952 	  emit_move_insn (tmps[i], reg);
1953 	}
1954       else
1955 	tmps[i] = reg;
1956     }
1957 
1958   /* If we won't be storing directly into memory, protect the real destination
1959      from strange tricks we might play.  */
1960   dst = orig_dst;
1961   if (GET_CODE (dst) == PARALLEL)
1962     {
1963       rtx temp;
1964 
1965       /* We can get a PARALLEL dst if there is a conditional expression in
1966 	 a return statement.  In that case, the dst and src are the same,
1967 	 so no action is necessary.  */
1968       if (rtx_equal_p (dst, src))
1969 	return;
1970 
1971       /* It is unclear if we can ever reach here, but we may as well handle
1972 	 it.  Allocate a temporary, and split this into a store/load to/from
1973 	 the temporary.  */
1974 
1975       temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1976       emit_group_store (temp, src, type, ssize);
1977       emit_group_load (dst, temp, type, ssize);
1978       return;
1979     }
1980   else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1981     {
1982       enum machine_mode outer = GET_MODE (dst);
1983       enum machine_mode inner;
1984       HOST_WIDE_INT bytepos;
1985       bool done = false;
1986       rtx temp;
1987 
1988       if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1989 	dst = gen_reg_rtx (outer);
1990 
1991       /* Make life a bit easier for combine.  */
1992       /* If the first element of the vector is the low part
1993 	 of the destination mode, use a paradoxical subreg to
1994 	 initialize the destination.  */
1995       if (start < finish)
1996 	{
1997 	  inner = GET_MODE (tmps[start]);
1998 	  bytepos = subreg_lowpart_offset (inner, outer);
1999 	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
2000 	    {
2001 	      temp = simplify_gen_subreg (outer, tmps[start],
2002 					  inner, 0);
2003 	      if (temp)
2004 		{
2005 		  emit_move_insn (dst, temp);
2006 		  done = true;
2007 		  start++;
2008 		}
2009 	    }
2010 	}
2011 
2012       /* If the first element wasn't the low part, try the last.  */
2013       if (!done
2014 	  && start < finish - 1)
2015 	{
2016 	  inner = GET_MODE (tmps[finish - 1]);
2017 	  bytepos = subreg_lowpart_offset (inner, outer);
2018 	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2019 	    {
2020 	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
2021 					  inner, 0);
2022 	      if (temp)
2023 		{
2024 		  emit_move_insn (dst, temp);
2025 		  done = true;
2026 		  finish--;
2027 		}
2028 	    }
2029 	}
2030 
2031       /* Otherwise, simply initialize the result to zero.  */
2032       if (!done)
2033         emit_move_insn (dst, CONST0_RTX (outer));
2034     }
2035 
2036   /* Process the pieces.  */
2037   for (i = start; i < finish; i++)
2038     {
2039       HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2040       enum machine_mode mode = GET_MODE (tmps[i]);
2041       unsigned int bytelen = GET_MODE_SIZE (mode);
2042       unsigned int adj_bytelen = bytelen;
2043       rtx dest = dst;
2044 
2045       /* Handle trailing fragments that run over the size of the struct.  */
2046       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2047 	adj_bytelen = ssize - bytepos;
2048 
2049       if (GET_CODE (dst) == CONCAT)
2050 	{
2051 	  if (bytepos + adj_bytelen
2052 	      <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2053 	    dest = XEXP (dst, 0);
2054 	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2055 	    {
2056 	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2057 	      dest = XEXP (dst, 1);
2058 	    }
2059 	  else
2060 	    {
2061 	      enum machine_mode dest_mode = GET_MODE (dest);
2062 	      enum machine_mode tmp_mode = GET_MODE (tmps[i]);
2063 
2064 	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2065 
2066 	      if (GET_MODE_ALIGNMENT (dest_mode)
2067 		  >= GET_MODE_ALIGNMENT (tmp_mode))
2068 		{
2069 		  dest = assign_stack_temp (dest_mode,
2070 					    GET_MODE_SIZE (dest_mode),
2071 					    0);
2072 		  emit_move_insn (adjust_address (dest,
2073 						  tmp_mode,
2074 						  bytepos),
2075 				  tmps[i]);
2076 		  dst = dest;
2077 		}
2078 	      else
2079 		{
2080 		  dest = assign_stack_temp (tmp_mode,
2081 					    GET_MODE_SIZE (tmp_mode),
2082 					    0);
2083 		  emit_move_insn (dest, tmps[i]);
2084 		  dst = adjust_address (dest, dest_mode, bytepos);
2085 		}
2086 	      break;
2087 	    }
2088 	}
2089 
2090       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2091 	{
2092 	  /* store_bit_field always takes its value from the lsb.
2093 	     Move the fragment to the lsb if it's not already there.  */
2094 	  if (
2095 #ifdef BLOCK_REG_PADDING
2096 	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2097 	      == (BYTES_BIG_ENDIAN ? upward : downward)
2098 #else
2099 	      BYTES_BIG_ENDIAN
2100 #endif
2101 	      )
2102 	    {
2103 	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2104 	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2105 				      build_int_cst (NULL_TREE, shift),
2106 				      tmps[i], 0);
2107 	    }
2108 	  bytelen = adj_bytelen;
2109 	}
2110 
2111       /* Optimize the access just a bit.  */
2112       if (MEM_P (dest)
2113 	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2114 	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2115 	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2116 	  && bytelen == GET_MODE_SIZE (mode))
2117 	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2118       else
2119 	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2120 			 mode, tmps[i]);
2121     }
2122 
2123   /* Copy from the pseudo into the (probable) hard reg.  */
2124   if (orig_dst != dst)
2125     emit_move_insn (orig_dst, dst);
2126 }
2127 
2128 /* Generate code to copy a BLKmode object of TYPE out of a
2129    set of registers starting with SRCREG into TGTBLK.  If TGTBLK
2130    is null, a stack temporary is created.  TGTBLK is returned.
2131 
2132    The purpose of this routine is to handle functions that return
2133    BLKmode structures in registers.  Some machines (the PA for example)
2134    want to return all small structures in registers regardless of the
2135    structure's alignment.  */
2136 
2137 rtx
2138 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2139 {
2140   unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2141   rtx src = NULL, dst = NULL;
2142   unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2143   unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2144   enum machine_mode copy_mode;
2145 
2146   if (tgtblk == 0)
2147     {
2148       tgtblk = assign_temp (build_qualified_type (type,
2149 						  (TYPE_QUALS (type)
2150 						   | TYPE_QUAL_CONST)),
2151 			    0, 1, 1);
2152       preserve_temp_slots (tgtblk);
2153     }
2154 
2155   /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2156      into a new pseudo which is a full word.  */
2157 
2158   if (GET_MODE (srcreg) != BLKmode
2159       && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2160     srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2161 
2162   /* If the structure doesn't take up a whole number of words, see whether
2163      SRCREG is padded on the left or on the right.  If it's on the left,
2164      set PADDING_CORRECTION to the number of bits to skip.
2165 
2166      In most ABIs, the structure will be returned at the least end of
2167      the register, which translates to right padding on little-endian
2168      targets and left padding on big-endian targets.  The opposite
2169      holds if the structure is returned at the most significant
2170      end of the register.  */
2171   if (bytes % UNITS_PER_WORD != 0
2172       && (targetm.calls.return_in_msb (type)
2173 	  ? !BYTES_BIG_ENDIAN
2174 	  : BYTES_BIG_ENDIAN))
2175     padding_correction
2176       = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2177 
2178   /* Copy the structure BITSIZE bits at a time.  If the target lives in
2179      memory, take care of not reading/writing past its end by selecting
2180      a copy mode suited to BITSIZE.  This should always be possible given
2181      how it is computed.
2182 
2183      We could probably emit more efficient code for machines which do not use
2184      strict alignment, but it doesn't seem worth the effort at the current
2185      time.  */
2186 
2187   copy_mode = word_mode;
2188   if (MEM_P (tgtblk))
2189     {
2190       enum machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2191       if (mem_mode != BLKmode)
2192 	copy_mode = mem_mode;
2193     }
2194 
2195   for (bitpos = 0, xbitpos = padding_correction;
2196        bitpos < bytes * BITS_PER_UNIT;
2197        bitpos += bitsize, xbitpos += bitsize)
2198     {
2199       /* We need a new source operand each time xbitpos is on a
2200 	 word boundary and when xbitpos == padding_correction
2201 	 (the first time through).  */
2202       if (xbitpos % BITS_PER_WORD == 0
2203 	  || xbitpos == padding_correction)
2204 	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2205 				     GET_MODE (srcreg));
2206 
2207       /* We need a new destination operand each time bitpos is on
2208 	 a word boundary.  */
2209       if (bitpos % BITS_PER_WORD == 0)
2210 	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2211 
2212       /* Use xbitpos for the source extraction (right justified) and
2213 	 bitpos for the destination store (left justified).  */
2214       store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, copy_mode,
2215 		       extract_bit_field (src, bitsize,
2216 					  xbitpos % BITS_PER_WORD, 1,
2217 					  NULL_RTX, copy_mode, copy_mode));
2218     }
2219 
2220   return tgtblk;
2221 }
2222 
2223 /* Copy BLKmode value SRC into a register of mode MODE.  Return the
2224    register if it contains any data, otherwise return null.
2225 
2226    This is used on targets that return BLKmode values in registers.  */
2227 
2228 rtx
2229 copy_blkmode_to_reg (enum machine_mode mode, tree src)
2230 {
2231   int i, n_regs;
2232   unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2233   unsigned int bitsize;
2234   rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2235   enum machine_mode dst_mode;
2236 
2237   gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2238 
2239   x = expand_normal (src);
2240 
2241   bytes = int_size_in_bytes (TREE_TYPE (src));
2242   if (bytes == 0)
2243     return NULL_RTX;
2244 
2245   /* If the structure doesn't take up a whole number of words, see
2246      whether the register value should be padded on the left or on
2247      the right.  Set PADDING_CORRECTION to the number of padding
2248      bits needed on the left side.
2249 
2250      In most ABIs, the structure will be returned at the least end of
2251      the register, which translates to right padding on little-endian
2252      targets and left padding on big-endian targets.  The opposite
2253      holds if the structure is returned at the most significant
2254      end of the register.  */
2255   if (bytes % UNITS_PER_WORD != 0
2256       && (targetm.calls.return_in_msb (TREE_TYPE (src))
2257 	  ? !BYTES_BIG_ENDIAN
2258 	  : BYTES_BIG_ENDIAN))
2259     padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2260 					   * BITS_PER_UNIT));
2261 
2262   n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2263   dst_words = XALLOCAVEC (rtx, n_regs);
2264   bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2265 
2266   /* Copy the structure BITSIZE bits at a time.  */
2267   for (bitpos = 0, xbitpos = padding_correction;
2268        bitpos < bytes * BITS_PER_UNIT;
2269        bitpos += bitsize, xbitpos += bitsize)
2270     {
2271       /* We need a new destination pseudo each time xbitpos is
2272 	 on a word boundary and when xbitpos == padding_correction
2273 	 (the first time through).  */
2274       if (xbitpos % BITS_PER_WORD == 0
2275 	  || xbitpos == padding_correction)
2276 	{
2277 	  /* Generate an appropriate register.  */
2278 	  dst_word = gen_reg_rtx (word_mode);
2279 	  dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2280 
2281 	  /* Clear the destination before we move anything into it.  */
2282 	  emit_move_insn (dst_word, CONST0_RTX (word_mode));
2283 	}
2284 
2285       /* We need a new source operand each time bitpos is on a word
2286 	 boundary.  */
2287       if (bitpos % BITS_PER_WORD == 0)
2288 	src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2289 
2290       /* Use bitpos for the source extraction (left justified) and
2291 	 xbitpos for the destination store (right justified).  */
2292       store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD, word_mode,
2293 		       extract_bit_field (src_word, bitsize,
2294 					  bitpos % BITS_PER_WORD, 1,
2295 					  NULL_RTX, word_mode, word_mode));
2296     }
2297 
2298   if (mode == BLKmode)
2299     {
2300       /* Find the smallest integer mode large enough to hold the
2301 	 entire structure.  */
2302       for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2303 	   mode != VOIDmode;
2304 	   mode = GET_MODE_WIDER_MODE (mode))
2305 	/* Have we found a large enough mode?  */
2306 	if (GET_MODE_SIZE (mode) >= bytes)
2307 	  break;
2308 
2309       /* A suitable mode should have been found.  */
2310       gcc_assert (mode != VOIDmode);
2311     }
2312 
2313   if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2314     dst_mode = word_mode;
2315   else
2316     dst_mode = mode;
2317   dst = gen_reg_rtx (dst_mode);
2318 
2319   for (i = 0; i < n_regs; i++)
2320     emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2321 
2322   if (mode != dst_mode)
2323     dst = gen_lowpart (mode, dst);
2324 
2325   return dst;
2326 }
2327 
2328 /* Add a USE expression for REG to the (possibly empty) list pointed
2329    to by CALL_FUSAGE.  REG must denote a hard register.  */
2330 
2331 void
2332 use_reg (rtx *call_fusage, rtx reg)
2333 {
2334   gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2335 
2336   *call_fusage
2337     = gen_rtx_EXPR_LIST (VOIDmode,
2338 			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2339 }
2340 
2341 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2342    starting at REGNO.  All of these registers must be hard registers.  */
2343 
2344 void
2345 use_regs (rtx *call_fusage, int regno, int nregs)
2346 {
2347   int i;
2348 
2349   gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2350 
2351   for (i = 0; i < nregs; i++)
2352     use_reg (call_fusage, regno_reg_rtx[regno + i]);
2353 }
2354 
2355 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2356    PARALLEL REGS.  This is for calls that pass values in multiple
2357    non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2358 
2359 void
2360 use_group_regs (rtx *call_fusage, rtx regs)
2361 {
2362   int i;
2363 
2364   for (i = 0; i < XVECLEN (regs, 0); i++)
2365     {
2366       rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2367 
2368       /* A NULL entry means the parameter goes both on the stack and in
2369 	 registers.  This can also be a MEM for targets that pass values
2370 	 partially on the stack and partially in registers.  */
2371       if (reg != 0 && REG_P (reg))
2372 	use_reg (call_fusage, reg);
2373     }
2374 }
2375 
2376 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2377    assigment and the code of the expresion on the RHS is CODE.  Return
2378    NULL otherwise.  */
2379 
2380 static gimple
2381 get_def_for_expr (tree name, enum tree_code code)
2382 {
2383   gimple def_stmt;
2384 
2385   if (TREE_CODE (name) != SSA_NAME)
2386     return NULL;
2387 
2388   def_stmt = get_gimple_for_ssa_name (name);
2389   if (!def_stmt
2390       || gimple_assign_rhs_code (def_stmt) != code)
2391     return NULL;
2392 
2393   return def_stmt;
2394 }
2395 
2396 
2397 /* Determine whether the LEN bytes generated by CONSTFUN can be
2398    stored to memory using several move instructions.  CONSTFUNDATA is
2399    a pointer which will be passed as argument in every CONSTFUN call.
2400    ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
2401    a memset operation and false if it's a copy of a constant string.
2402    Return nonzero if a call to store_by_pieces should succeed.  */
2403 
2404 int
2405 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2406 		     rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2407 		     void *constfundata, unsigned int align, bool memsetp)
2408 {
2409   unsigned HOST_WIDE_INT l;
2410   unsigned int max_size;
2411   HOST_WIDE_INT offset = 0;
2412   enum machine_mode mode, tmode;
2413   enum insn_code icode;
2414   int reverse;
2415   rtx cst;
2416 
2417   if (len == 0)
2418     return 1;
2419 
2420   if (! (memsetp
2421 	 ? SET_BY_PIECES_P (len, align)
2422 	 : STORE_BY_PIECES_P (len, align)))
2423     return 0;
2424 
2425   tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2426   if (align >= GET_MODE_ALIGNMENT (tmode))
2427     align = GET_MODE_ALIGNMENT (tmode);
2428   else
2429     {
2430       enum machine_mode xmode;
2431 
2432       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2433 	   tmode != VOIDmode;
2434 	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2435 	if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2436 	    || SLOW_UNALIGNED_ACCESS (tmode, align))
2437 	  break;
2438 
2439       align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2440     }
2441 
2442   /* We would first store what we can in the largest integer mode, then go to
2443      successively smaller modes.  */
2444 
2445   for (reverse = 0;
2446        reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2447        reverse++)
2448     {
2449       l = len;
2450       mode = VOIDmode;
2451       max_size = STORE_MAX_PIECES + 1;
2452       while (max_size > 1)
2453 	{
2454 	  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2455 	       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2456 	    if (GET_MODE_SIZE (tmode) < max_size)
2457 	      mode = tmode;
2458 
2459 	  if (mode == VOIDmode)
2460 	    break;
2461 
2462 	  icode = optab_handler (mov_optab, mode)->insn_code;
2463 	  if (icode != CODE_FOR_nothing
2464 	      && align >= GET_MODE_ALIGNMENT (mode))
2465 	    {
2466 	      unsigned int size = GET_MODE_SIZE (mode);
2467 
2468 	      while (l >= size)
2469 		{
2470 		  if (reverse)
2471 		    offset -= size;
2472 
2473 		  cst = (*constfun) (constfundata, offset, mode);
2474 		  if (!LEGITIMATE_CONSTANT_P (cst))
2475 		    return 0;
2476 
2477 		  if (!reverse)
2478 		    offset += size;
2479 
2480 		  l -= size;
2481 		}
2482 	    }
2483 
2484 	  max_size = GET_MODE_SIZE (mode);
2485 	}
2486 
2487       /* The code above should have handled everything.  */
2488       gcc_assert (!l);
2489     }
2490 
2491   return 1;
2492 }
2493 
2494 /* Generate several move instructions to store LEN bytes generated by
2495    CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
2496    pointer which will be passed as argument in every CONSTFUN call.
2497    ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
2498    a memset operation and false if it's a copy of a constant string.
2499    If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2500    mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2501    stpcpy.  */
2502 
2503 rtx
2504 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2505 		 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2506 		 void *constfundata, unsigned int align, bool memsetp, int endp)
2507 {
2508   enum machine_mode to_addr_mode
2509     = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to));
2510   struct store_by_pieces_d data;
2511 
2512   if (len == 0)
2513     {
2514       gcc_assert (endp != 2);
2515       return to;
2516     }
2517 
2518   gcc_assert (memsetp
2519 	      ? SET_BY_PIECES_P (len, align)
2520 	      : STORE_BY_PIECES_P (len, align));
2521   data.constfun = constfun;
2522   data.constfundata = constfundata;
2523   data.len = len;
2524   data.to = to;
2525   store_by_pieces_1 (&data, align);
2526   if (endp)
2527     {
2528       rtx to1;
2529 
2530       gcc_assert (!data.reverse);
2531       if (data.autinc_to)
2532 	{
2533 	  if (endp == 2)
2534 	    {
2535 	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2536 		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2537 	      else
2538 		data.to_addr = copy_to_mode_reg (to_addr_mode,
2539 						 plus_constant (data.to_addr,
2540 								-1));
2541 	    }
2542 	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2543 					   data.offset);
2544 	}
2545       else
2546 	{
2547 	  if (endp == 2)
2548 	    --data.offset;
2549 	  to1 = adjust_address (data.to, QImode, data.offset);
2550 	}
2551       return to1;
2552     }
2553   else
2554     return data.to;
2555 }
2556 
2557 /* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
2558    rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2559 
2560 static void
2561 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2562 {
2563   struct store_by_pieces_d data;
2564 
2565   if (len == 0)
2566     return;
2567 
2568   data.constfun = clear_by_pieces_1;
2569   data.constfundata = NULL;
2570   data.len = len;
2571   data.to = to;
2572   store_by_pieces_1 (&data, align);
2573 }
2574 
2575 /* Callback routine for clear_by_pieces.
2576    Return const0_rtx unconditionally.  */
2577 
2578 static rtx
2579 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2580 		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2581 		   enum machine_mode mode ATTRIBUTE_UNUSED)
2582 {
2583   return const0_rtx;
2584 }
2585 
2586 /* Subroutine of clear_by_pieces and store_by_pieces.
2587    Generate several move instructions to store LEN bytes of block TO.  (A MEM
2588    rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2589 
2590 static void
2591 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2592 		   unsigned int align ATTRIBUTE_UNUSED)
2593 {
2594   enum machine_mode to_addr_mode
2595     = targetm.addr_space.address_mode (MEM_ADDR_SPACE (data->to));
2596   rtx to_addr = XEXP (data->to, 0);
2597   unsigned int max_size = STORE_MAX_PIECES + 1;
2598   enum machine_mode mode = VOIDmode, tmode;
2599   enum insn_code icode;
2600 
2601   data->offset = 0;
2602   data->to_addr = to_addr;
2603   data->autinc_to
2604     = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2605        || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2606 
2607   data->explicit_inc_to = 0;
2608   data->reverse
2609     = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2610   if (data->reverse)
2611     data->offset = data->len;
2612 
2613   /* If storing requires more than two move insns,
2614      copy addresses to registers (to make displacements shorter)
2615      and use post-increment if available.  */
2616   if (!data->autinc_to
2617       && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2618     {
2619       /* Determine the main mode we'll be using.  */
2620       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2621 	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2622 	if (GET_MODE_SIZE (tmode) < max_size)
2623 	  mode = tmode;
2624 
2625       if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2626 	{
2627 	  data->to_addr = copy_to_mode_reg (to_addr_mode,
2628 					    plus_constant (to_addr, data->len));
2629 	  data->autinc_to = 1;
2630 	  data->explicit_inc_to = -1;
2631 	}
2632 
2633       if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2634 	  && ! data->autinc_to)
2635 	{
2636 	  data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2637 	  data->autinc_to = 1;
2638 	  data->explicit_inc_to = 1;
2639 	}
2640 
2641       if ( !data->autinc_to && CONSTANT_P (to_addr))
2642 	data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2643     }
2644 
2645   tmode = mode_for_size (STORE_MAX_PIECES * BITS_PER_UNIT, MODE_INT, 1);
2646   if (align >= GET_MODE_ALIGNMENT (tmode))
2647     align = GET_MODE_ALIGNMENT (tmode);
2648   else
2649     {
2650       enum machine_mode xmode;
2651 
2652       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
2653 	   tmode != VOIDmode;
2654 	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
2655 	if (GET_MODE_SIZE (tmode) > STORE_MAX_PIECES
2656 	    || SLOW_UNALIGNED_ACCESS (tmode, align))
2657 	  break;
2658 
2659       align = MAX (align, GET_MODE_ALIGNMENT (xmode));
2660     }
2661 
2662   /* First store what we can in the largest integer mode, then go to
2663      successively smaller modes.  */
2664 
2665   while (max_size > 1)
2666     {
2667       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2668 	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2669 	if (GET_MODE_SIZE (tmode) < max_size)
2670 	  mode = tmode;
2671 
2672       if (mode == VOIDmode)
2673 	break;
2674 
2675       icode = optab_handler (mov_optab, mode)->insn_code;
2676       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2677 	store_by_pieces_2 (GEN_FCN (icode), mode, data);
2678 
2679       max_size = GET_MODE_SIZE (mode);
2680     }
2681 
2682   /* The code above should have handled everything.  */
2683   gcc_assert (!data->len);
2684 }
2685 
2686 /* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
2687    with move instructions for mode MODE.  GENFUN is the gen_... function
2688    to make a move insn for that mode.  DATA has all the other info.  */
2689 
2690 static void
2691 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2692 		   struct store_by_pieces_d *data)
2693 {
2694   unsigned int size = GET_MODE_SIZE (mode);
2695   rtx to1, cst;
2696 
2697   while (data->len >= size)
2698     {
2699       if (data->reverse)
2700 	data->offset -= size;
2701 
2702       if (data->autinc_to)
2703 	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2704 					 data->offset);
2705       else
2706 	to1 = adjust_address (data->to, mode, data->offset);
2707 
2708       if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2709 	emit_insn (gen_add2_insn (data->to_addr,
2710 				  GEN_INT (-(HOST_WIDE_INT) size)));
2711 
2712       cst = (*data->constfun) (data->constfundata, data->offset, mode);
2713       emit_insn ((*genfun) (to1, cst));
2714 
2715       if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2716 	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2717 
2718       if (! data->reverse)
2719 	data->offset += size;
2720 
2721       data->len -= size;
2722     }
2723 }
2724 
2725 /* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2726    its length in bytes.  */
2727 
2728 rtx
2729 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2730 		     unsigned int expected_align, HOST_WIDE_INT expected_size)
2731 {
2732   enum machine_mode mode = GET_MODE (object);
2733   unsigned int align;
2734 
2735   gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2736 
2737   /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2738      just move a zero.  Otherwise, do this a piece at a time.  */
2739   if (mode != BLKmode
2740       && CONST_INT_P (size)
2741       && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2742     {
2743       rtx zero = CONST0_RTX (mode);
2744       if (zero != NULL)
2745 	{
2746 	  emit_move_insn (object, zero);
2747 	  return NULL;
2748 	}
2749 
2750       if (COMPLEX_MODE_P (mode))
2751 	{
2752 	  zero = CONST0_RTX (GET_MODE_INNER (mode));
2753 	  if (zero != NULL)
2754 	    {
2755 	      write_complex_part (object, zero, 0);
2756 	      write_complex_part (object, zero, 1);
2757 	      return NULL;
2758 	    }
2759 	}
2760     }
2761 
2762   if (size == const0_rtx)
2763     return NULL;
2764 
2765   align = MEM_ALIGN (object);
2766 
2767   if (CONST_INT_P (size)
2768       && CLEAR_BY_PIECES_P (INTVAL (size), align))
2769     clear_by_pieces (object, INTVAL (size), align);
2770   else if (set_storage_via_setmem (object, size, const0_rtx, align,
2771 				   expected_align, expected_size))
2772     ;
2773   else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2774     return set_storage_via_libcall (object, size, const0_rtx,
2775 				    method == BLOCK_OP_TAILCALL);
2776   else
2777     gcc_unreachable ();
2778 
2779   return NULL;
2780 }
2781 
2782 rtx
2783 clear_storage (rtx object, rtx size, enum block_op_methods method)
2784 {
2785   return clear_storage_hints (object, size, method, 0, -1);
2786 }
2787 
2788 
2789 /* A subroutine of clear_storage.  Expand a call to memset.
2790    Return the return value of memset, 0 otherwise.  */
2791 
2792 rtx
2793 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2794 {
2795   tree call_expr, fn, object_tree, size_tree, val_tree;
2796   enum machine_mode size_mode;
2797   rtx retval;
2798 
2799   /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
2800      place those into new pseudos into a VAR_DECL and use them later.  */
2801 
2802   object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2803 
2804   size_mode = TYPE_MODE (sizetype);
2805   size = convert_to_mode (size_mode, size, 1);
2806   size = copy_to_mode_reg (size_mode, size);
2807 
2808   /* It is incorrect to use the libcall calling conventions to call
2809      memset in this context.  This could be a user call to memset and
2810      the user may wish to examine the return value from memset.  For
2811      targets where libcalls and normal calls have different conventions
2812      for returning pointers, we could end up generating incorrect code.  */
2813 
2814   object_tree = make_tree (ptr_type_node, object);
2815   if (!CONST_INT_P (val))
2816     val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2817   size_tree = make_tree (sizetype, size);
2818   val_tree = make_tree (integer_type_node, val);
2819 
2820   fn = clear_storage_libcall_fn (true);
2821   call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2822   CALL_EXPR_TAILCALL (call_expr) = tailcall;
2823 
2824   retval = expand_normal (call_expr);
2825 
2826   return retval;
2827 }
2828 
2829 /* A subroutine of set_storage_via_libcall.  Create the tree node
2830    for the function we use for block clears.  The first time FOR_CALL
2831    is true, we call assemble_external.  */
2832 
2833 tree block_clear_fn;
2834 
2835 void
2836 init_block_clear_fn (const char *asmspec)
2837 {
2838   if (!block_clear_fn)
2839     {
2840       tree fn, args;
2841 
2842       fn = get_identifier ("memset");
2843       args = build_function_type_list (ptr_type_node, ptr_type_node,
2844 				       integer_type_node, sizetype,
2845 				       NULL_TREE);
2846 
2847       fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2848       DECL_EXTERNAL (fn) = 1;
2849       TREE_PUBLIC (fn) = 1;
2850       DECL_ARTIFICIAL (fn) = 1;
2851       TREE_NOTHROW (fn) = 1;
2852       DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2853       DECL_VISIBILITY_SPECIFIED (fn) = 1;
2854 
2855       block_clear_fn = fn;
2856     }
2857 
2858   if (asmspec)
2859     set_user_assembler_name (block_clear_fn, asmspec);
2860 }
2861 
2862 static tree
2863 clear_storage_libcall_fn (int for_call)
2864 {
2865   static bool emitted_extern;
2866 
2867   if (!block_clear_fn)
2868     init_block_clear_fn (NULL);
2869 
2870   if (for_call && !emitted_extern)
2871     {
2872       emitted_extern = true;
2873       make_decl_rtl (block_clear_fn);
2874       assemble_external (block_clear_fn);
2875     }
2876 
2877   return block_clear_fn;
2878 }
2879 
2880 /* Expand a setmem pattern; return true if successful.  */
2881 
2882 bool
2883 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2884 			unsigned int expected_align, HOST_WIDE_INT expected_size)
2885 {
2886   /* Try the most limited insn first, because there's no point
2887      including more than one in the machine description unless
2888      the more limited one has some advantage.  */
2889 
2890   rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2891   enum machine_mode mode;
2892 
2893   if (expected_align < align)
2894     expected_align = align;
2895 
2896   for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2897        mode = GET_MODE_WIDER_MODE (mode))
2898     {
2899       enum insn_code code = setmem_optab[(int) mode];
2900       insn_operand_predicate_fn pred;
2901 
2902       if (code != CODE_FOR_nothing
2903 	  /* We don't need MODE to be narrower than
2904 	     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2905 	     the mode mask, as it is returned by the macro, it will
2906 	     definitely be less than the actual mode mask.  */
2907 	  && ((CONST_INT_P (size)
2908 	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
2909 		   <= (GET_MODE_MASK (mode) >> 1)))
2910 	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2911 	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2912 	      || (*pred) (object, BLKmode))
2913 	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
2914 	      || (*pred) (opalign, VOIDmode)))
2915 	{
2916 	  rtx opsize, opchar;
2917 	  enum machine_mode char_mode;
2918 	  rtx last = get_last_insn ();
2919 	  rtx pat;
2920 
2921 	  opsize = convert_to_mode (mode, size, 1);
2922 	  pred = insn_data[(int) code].operand[1].predicate;
2923 	  if (pred != 0 && ! (*pred) (opsize, mode))
2924 	    opsize = copy_to_mode_reg (mode, opsize);
2925 
2926 	  opchar = val;
2927 	  char_mode = insn_data[(int) code].operand[2].mode;
2928 	  if (char_mode != VOIDmode)
2929 	    {
2930 	      opchar = convert_to_mode (char_mode, opchar, 1);
2931 	      pred = insn_data[(int) code].operand[2].predicate;
2932 	      if (pred != 0 && ! (*pred) (opchar, char_mode))
2933 		opchar = copy_to_mode_reg (char_mode, opchar);
2934 	    }
2935 
2936 	  if (insn_data[(int) code].n_operands == 4)
2937 	    pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign);
2938 	  else
2939 	    pat = GEN_FCN ((int) code) (object, opsize, opchar, opalign,
2940 					GEN_INT (expected_align
2941 						 / BITS_PER_UNIT),
2942 					GEN_INT (expected_size));
2943 	  if (pat)
2944 	    {
2945 	      emit_insn (pat);
2946 	      return true;
2947 	    }
2948 	  else
2949 	    delete_insns_since (last);
2950 	}
2951     }
2952 
2953   return false;
2954 }
2955 
2956 
2957 /* Write to one of the components of the complex value CPLX.  Write VAL to
2958    the real part if IMAG_P is false, and the imaginary part if its true.  */
2959 
2960 static void
2961 write_complex_part (rtx cplx, rtx val, bool imag_p)
2962 {
2963   enum machine_mode cmode;
2964   enum machine_mode imode;
2965   unsigned ibitsize;
2966 
2967   if (GET_CODE (cplx) == CONCAT)
2968     {
2969       emit_move_insn (XEXP (cplx, imag_p), val);
2970       return;
2971     }
2972 
2973   cmode = GET_MODE (cplx);
2974   imode = GET_MODE_INNER (cmode);
2975   ibitsize = GET_MODE_BITSIZE (imode);
2976 
2977   /* For MEMs simplify_gen_subreg may generate an invalid new address
2978      because, e.g., the original address is considered mode-dependent
2979      by the target, which restricts simplify_subreg from invoking
2980      adjust_address_nv.  Instead of preparing fallback support for an
2981      invalid address, we call adjust_address_nv directly.  */
2982   if (MEM_P (cplx))
2983     {
2984       emit_move_insn (adjust_address_nv (cplx, imode,
2985 					 imag_p ? GET_MODE_SIZE (imode) : 0),
2986 		      val);
2987       return;
2988     }
2989 
2990   /* If the sub-object is at least word sized, then we know that subregging
2991      will work.  This special case is important, since store_bit_field
2992      wants to operate on integer modes, and there's rarely an OImode to
2993      correspond to TCmode.  */
2994   if (ibitsize >= BITS_PER_WORD
2995       /* For hard regs we have exact predicates.  Assume we can split
2996 	 the original object if it spans an even number of hard regs.
2997 	 This special case is important for SCmode on 64-bit platforms
2998 	 where the natural size of floating-point regs is 32-bit.  */
2999       || (REG_P (cplx)
3000 	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3001 	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3002     {
3003       rtx part = simplify_gen_subreg (imode, cplx, cmode,
3004 				      imag_p ? GET_MODE_SIZE (imode) : 0);
3005       if (part)
3006         {
3007 	  emit_move_insn (part, val);
3008 	  return;
3009 	}
3010       else
3011 	/* simplify_gen_subreg may fail for sub-word MEMs.  */
3012 	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3013     }
3014 
3015   store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, imode, val);
3016 }
3017 
3018 /* Extract one of the components of the complex value CPLX.  Extract the
3019    real part if IMAG_P is false, and the imaginary part if it's true.  */
3020 
3021 static rtx
3022 read_complex_part (rtx cplx, bool imag_p)
3023 {
3024   enum machine_mode cmode, imode;
3025   unsigned ibitsize;
3026 
3027   if (GET_CODE (cplx) == CONCAT)
3028     return XEXP (cplx, imag_p);
3029 
3030   cmode = GET_MODE (cplx);
3031   imode = GET_MODE_INNER (cmode);
3032   ibitsize = GET_MODE_BITSIZE (imode);
3033 
3034   /* Special case reads from complex constants that got spilled to memory.  */
3035   if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3036     {
3037       tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3038       if (decl && TREE_CODE (decl) == COMPLEX_CST)
3039 	{
3040 	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3041 	  if (CONSTANT_CLASS_P (part))
3042 	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3043 	}
3044     }
3045 
3046   /* For MEMs simplify_gen_subreg may generate an invalid new address
3047      because, e.g., the original address is considered mode-dependent
3048      by the target, which restricts simplify_subreg from invoking
3049      adjust_address_nv.  Instead of preparing fallback support for an
3050      invalid address, we call adjust_address_nv directly.  */
3051   if (MEM_P (cplx))
3052     return adjust_address_nv (cplx, imode,
3053 			      imag_p ? GET_MODE_SIZE (imode) : 0);
3054 
3055   /* If the sub-object is at least word sized, then we know that subregging
3056      will work.  This special case is important, since extract_bit_field
3057      wants to operate on integer modes, and there's rarely an OImode to
3058      correspond to TCmode.  */
3059   if (ibitsize >= BITS_PER_WORD
3060       /* For hard regs we have exact predicates.  Assume we can split
3061 	 the original object if it spans an even number of hard regs.
3062 	 This special case is important for SCmode on 64-bit platforms
3063 	 where the natural size of floating-point regs is 32-bit.  */
3064       || (REG_P (cplx)
3065 	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3066 	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3067     {
3068       rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3069 				     imag_p ? GET_MODE_SIZE (imode) : 0);
3070       if (ret)
3071         return ret;
3072       else
3073 	/* simplify_gen_subreg may fail for sub-word MEMs.  */
3074 	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3075     }
3076 
3077   return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3078 			    true, NULL_RTX, imode, imode);
3079 }
3080 
3081 /* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
3082    NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
3083    represented in NEW_MODE.  If FORCE is true, this will never happen, as
3084    we'll force-create a SUBREG if needed.  */
3085 
3086 static rtx
3087 emit_move_change_mode (enum machine_mode new_mode,
3088 		       enum machine_mode old_mode, rtx x, bool force)
3089 {
3090   rtx ret;
3091 
3092   if (push_operand (x, GET_MODE (x)))
3093     {
3094       ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3095       MEM_COPY_ATTRIBUTES (ret, x);
3096     }
3097   else if (MEM_P (x))
3098     {
3099       /* We don't have to worry about changing the address since the
3100 	 size in bytes is supposed to be the same.  */
3101       if (reload_in_progress)
3102 	{
3103 	  /* Copy the MEM to change the mode and move any
3104 	     substitutions from the old MEM to the new one.  */
3105 	  ret = adjust_address_nv (x, new_mode, 0);
3106 	  copy_replacements (x, ret);
3107 	}
3108       else
3109 	ret = adjust_address (x, new_mode, 0);
3110     }
3111   else
3112     {
3113       /* Note that we do want simplify_subreg's behavior of validating
3114 	 that the new mode is ok for a hard register.  If we were to use
3115 	 simplify_gen_subreg, we would create the subreg, but would
3116 	 probably run into the target not being able to implement it.  */
3117       /* Except, of course, when FORCE is true, when this is exactly what
3118 	 we want.  Which is needed for CCmodes on some targets.  */
3119       if (force)
3120 	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3121       else
3122 	ret = simplify_subreg (new_mode, x, old_mode, 0);
3123     }
3124 
3125   return ret;
3126 }
3127 
3128 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
3129    an integer mode of the same size as MODE.  Returns the instruction
3130    emitted, or NULL if such a move could not be generated.  */
3131 
3132 static rtx
3133 emit_move_via_integer (enum machine_mode mode, rtx x, rtx y, bool force)
3134 {
3135   enum machine_mode imode;
3136   enum insn_code code;
3137 
3138   /* There must exist a mode of the exact size we require.  */
3139   imode = int_mode_for_mode (mode);
3140   if (imode == BLKmode)
3141     return NULL_RTX;
3142 
3143   /* The target must support moves in this mode.  */
3144   code = optab_handler (mov_optab, imode)->insn_code;
3145   if (code == CODE_FOR_nothing)
3146     return NULL_RTX;
3147 
3148   x = emit_move_change_mode (imode, mode, x, force);
3149   if (x == NULL_RTX)
3150     return NULL_RTX;
3151   y = emit_move_change_mode (imode, mode, y, force);
3152   if (y == NULL_RTX)
3153     return NULL_RTX;
3154   return emit_insn (GEN_FCN (code) (x, y));
3155 }
3156 
3157 /* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
3158    Return an equivalent MEM that does not use an auto-increment.  */
3159 
3160 static rtx
3161 emit_move_resolve_push (enum machine_mode mode, rtx x)
3162 {
3163   enum rtx_code code = GET_CODE (XEXP (x, 0));
3164   HOST_WIDE_INT adjust;
3165   rtx temp;
3166 
3167   adjust = GET_MODE_SIZE (mode);
3168 #ifdef PUSH_ROUNDING
3169   adjust = PUSH_ROUNDING (adjust);
3170 #endif
3171   if (code == PRE_DEC || code == POST_DEC)
3172     adjust = -adjust;
3173   else if (code == PRE_MODIFY || code == POST_MODIFY)
3174     {
3175       rtx expr = XEXP (XEXP (x, 0), 1);
3176       HOST_WIDE_INT val;
3177 
3178       gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3179       gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3180       val = INTVAL (XEXP (expr, 1));
3181       if (GET_CODE (expr) == MINUS)
3182 	val = -val;
3183       gcc_assert (adjust == val || adjust == -val);
3184       adjust = val;
3185     }
3186 
3187   /* Do not use anti_adjust_stack, since we don't want to update
3188      stack_pointer_delta.  */
3189   temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3190 			      GEN_INT (adjust), stack_pointer_rtx,
3191 			      0, OPTAB_LIB_WIDEN);
3192   if (temp != stack_pointer_rtx)
3193     emit_move_insn (stack_pointer_rtx, temp);
3194 
3195   switch (code)
3196     {
3197     case PRE_INC:
3198     case PRE_DEC:
3199     case PRE_MODIFY:
3200       temp = stack_pointer_rtx;
3201       break;
3202     case POST_INC:
3203     case POST_DEC:
3204     case POST_MODIFY:
3205       temp = plus_constant (stack_pointer_rtx, -adjust);
3206       break;
3207     default:
3208       gcc_unreachable ();
3209     }
3210 
3211   return replace_equiv_address (x, temp);
3212 }
3213 
3214 /* A subroutine of emit_move_complex.  Generate a move from Y into X.
3215    X is known to satisfy push_operand, and MODE is known to be complex.
3216    Returns the last instruction emitted.  */
3217 
3218 rtx
3219 emit_move_complex_push (enum machine_mode mode, rtx x, rtx y)
3220 {
3221   enum machine_mode submode = GET_MODE_INNER (mode);
3222   bool imag_first;
3223 
3224 #ifdef PUSH_ROUNDING
3225   unsigned int submodesize = GET_MODE_SIZE (submode);
3226 
3227   /* In case we output to the stack, but the size is smaller than the
3228      machine can push exactly, we need to use move instructions.  */
3229   if (PUSH_ROUNDING (submodesize) != submodesize)
3230     {
3231       x = emit_move_resolve_push (mode, x);
3232       return emit_move_insn (x, y);
3233     }
3234 #endif
3235 
3236   /* Note that the real part always precedes the imag part in memory
3237      regardless of machine's endianness.  */
3238   switch (GET_CODE (XEXP (x, 0)))
3239     {
3240     case PRE_DEC:
3241     case POST_DEC:
3242       imag_first = true;
3243       break;
3244     case PRE_INC:
3245     case POST_INC:
3246       imag_first = false;
3247       break;
3248     default:
3249       gcc_unreachable ();
3250     }
3251 
3252   emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3253 		  read_complex_part (y, imag_first));
3254   return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3255 			 read_complex_part (y, !imag_first));
3256 }
3257 
3258 /* A subroutine of emit_move_complex.  Perform the move from Y to X
3259    via two moves of the parts.  Returns the last instruction emitted.  */
3260 
3261 rtx
3262 emit_move_complex_parts (rtx x, rtx y)
3263 {
3264   /* Show the output dies here.  This is necessary for SUBREGs
3265      of pseudos since we cannot track their lifetimes correctly;
3266      hard regs shouldn't appear here except as return values.  */
3267   if (!reload_completed && !reload_in_progress
3268       && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3269     emit_clobber (x);
3270 
3271   write_complex_part (x, read_complex_part (y, false), false);
3272   write_complex_part (x, read_complex_part (y, true), true);
3273 
3274   return get_last_insn ();
3275 }
3276 
3277 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3278    MODE is known to be complex.  Returns the last instruction emitted.  */
3279 
3280 static rtx
3281 emit_move_complex (enum machine_mode mode, rtx x, rtx y)
3282 {
3283   bool try_int;
3284 
3285   /* Need to take special care for pushes, to maintain proper ordering
3286      of the data, and possibly extra padding.  */
3287   if (push_operand (x, mode))
3288     return emit_move_complex_push (mode, x, y);
3289 
3290   /* See if we can coerce the target into moving both values at once.  */
3291 
3292   /* Move floating point as parts.  */
3293   if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3294       && optab_handler (mov_optab, GET_MODE_INNER (mode))->insn_code != CODE_FOR_nothing)
3295     try_int = false;
3296   /* Not possible if the values are inherently not adjacent.  */
3297   else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3298     try_int = false;
3299   /* Is possible if both are registers (or subregs of registers).  */
3300   else if (register_operand (x, mode) && register_operand (y, mode))
3301     try_int = true;
3302   /* If one of the operands is a memory, and alignment constraints
3303      are friendly enough, we may be able to do combined memory operations.
3304      We do not attempt this if Y is a constant because that combination is
3305      usually better with the by-parts thing below.  */
3306   else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3307 	   && (!STRICT_ALIGNMENT
3308 	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3309     try_int = true;
3310   else
3311     try_int = false;
3312 
3313   if (try_int)
3314     {
3315       rtx ret;
3316 
3317       /* For memory to memory moves, optimal behavior can be had with the
3318 	 existing block move logic.  */
3319       if (MEM_P (x) && MEM_P (y))
3320 	{
3321 	  emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3322 			   BLOCK_OP_NO_LIBCALL);
3323 	  return get_last_insn ();
3324 	}
3325 
3326       ret = emit_move_via_integer (mode, x, y, true);
3327       if (ret)
3328 	return ret;
3329     }
3330 
3331   return emit_move_complex_parts (x, y);
3332 }
3333 
3334 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3335    MODE is known to be MODE_CC.  Returns the last instruction emitted.  */
3336 
3337 static rtx
3338 emit_move_ccmode (enum machine_mode mode, rtx x, rtx y)
3339 {
3340   rtx ret;
3341 
3342   /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
3343   if (mode != CCmode)
3344     {
3345       enum insn_code code = optab_handler (mov_optab, CCmode)->insn_code;
3346       if (code != CODE_FOR_nothing)
3347 	{
3348 	  x = emit_move_change_mode (CCmode, mode, x, true);
3349 	  y = emit_move_change_mode (CCmode, mode, y, true);
3350 	  return emit_insn (GEN_FCN (code) (x, y));
3351 	}
3352     }
3353 
3354   /* Otherwise, find the MODE_INT mode of the same width.  */
3355   ret = emit_move_via_integer (mode, x, y, false);
3356   gcc_assert (ret != NULL);
3357   return ret;
3358 }
3359 
3360 /* Return true if word I of OP lies entirely in the
3361    undefined bits of a paradoxical subreg.  */
3362 
3363 static bool
3364 undefined_operand_subword_p (const_rtx op, int i)
3365 {
3366   enum machine_mode innermode, innermostmode;
3367   int offset;
3368   if (GET_CODE (op) != SUBREG)
3369     return false;
3370   innermode = GET_MODE (op);
3371   innermostmode = GET_MODE (SUBREG_REG (op));
3372   offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3373   /* The SUBREG_BYTE represents offset, as if the value were stored in
3374      memory, except for a paradoxical subreg where we define
3375      SUBREG_BYTE to be 0; undo this exception as in
3376      simplify_subreg.  */
3377   if (SUBREG_BYTE (op) == 0
3378       && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3379     {
3380       int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3381       if (WORDS_BIG_ENDIAN)
3382 	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3383       if (BYTES_BIG_ENDIAN)
3384 	offset += difference % UNITS_PER_WORD;
3385     }
3386   if (offset >= GET_MODE_SIZE (innermostmode)
3387       || offset <= -GET_MODE_SIZE (word_mode))
3388     return true;
3389   return false;
3390 }
3391 
3392 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3393    MODE is any multi-word or full-word mode that lacks a move_insn
3394    pattern.  Note that you will get better code if you define such
3395    patterns, even if they must turn into multiple assembler instructions.  */
3396 
3397 static rtx
3398 emit_move_multi_word (enum machine_mode mode, rtx x, rtx y)
3399 {
3400   rtx last_insn = 0;
3401   rtx seq, inner;
3402   bool need_clobber;
3403   int i;
3404 
3405   gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3406 
3407   /* If X is a push on the stack, do the push now and replace
3408      X with a reference to the stack pointer.  */
3409   if (push_operand (x, mode))
3410     x = emit_move_resolve_push (mode, x);
3411 
3412   /* If we are in reload, see if either operand is a MEM whose address
3413      is scheduled for replacement.  */
3414   if (reload_in_progress && MEM_P (x)
3415       && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3416     x = replace_equiv_address_nv (x, inner);
3417   if (reload_in_progress && MEM_P (y)
3418       && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3419     y = replace_equiv_address_nv (y, inner);
3420 
3421   start_sequence ();
3422 
3423   need_clobber = false;
3424   for (i = 0;
3425        i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3426        i++)
3427     {
3428       rtx xpart = operand_subword (x, i, 1, mode);
3429       rtx ypart;
3430 
3431       /* Do not generate code for a move if it would come entirely
3432 	 from the undefined bits of a paradoxical subreg.  */
3433       if (undefined_operand_subword_p (y, i))
3434 	continue;
3435 
3436       ypart = operand_subword (y, i, 1, mode);
3437 
3438       /* If we can't get a part of Y, put Y into memory if it is a
3439 	 constant.  Otherwise, force it into a register.  Then we must
3440 	 be able to get a part of Y.  */
3441       if (ypart == 0 && CONSTANT_P (y))
3442 	{
3443 	  y = use_anchored_address (force_const_mem (mode, y));
3444 	  ypart = operand_subword (y, i, 1, mode);
3445 	}
3446       else if (ypart == 0)
3447 	ypart = operand_subword_force (y, i, mode);
3448 
3449       gcc_assert (xpart && ypart);
3450 
3451       need_clobber |= (GET_CODE (xpart) == SUBREG);
3452 
3453       last_insn = emit_move_insn (xpart, ypart);
3454     }
3455 
3456   seq = get_insns ();
3457   end_sequence ();
3458 
3459   /* Show the output dies here.  This is necessary for SUBREGs
3460      of pseudos since we cannot track their lifetimes correctly;
3461      hard regs shouldn't appear here except as return values.
3462      We never want to emit such a clobber after reload.  */
3463   if (x != y
3464       && ! (reload_in_progress || reload_completed)
3465       && need_clobber != 0)
3466     emit_clobber (x);
3467 
3468   emit_insn (seq);
3469 
3470   return last_insn;
3471 }
3472 
3473 /* Low level part of emit_move_insn.
3474    Called just like emit_move_insn, but assumes X and Y
3475    are basically valid.  */
3476 
3477 rtx
3478 emit_move_insn_1 (rtx x, rtx y)
3479 {
3480   enum machine_mode mode = GET_MODE (x);
3481   enum insn_code code;
3482 
3483   gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3484 
3485   code = optab_handler (mov_optab, mode)->insn_code;
3486   if (code != CODE_FOR_nothing)
3487     return emit_insn (GEN_FCN (code) (x, y));
3488 
3489   /* Expand complex moves by moving real part and imag part.  */
3490   if (COMPLEX_MODE_P (mode))
3491     return emit_move_complex (mode, x, y);
3492 
3493   if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3494       || ALL_FIXED_POINT_MODE_P (mode))
3495     {
3496       rtx result = emit_move_via_integer (mode, x, y, true);
3497 
3498       /* If we can't find an integer mode, use multi words.  */
3499       if (result)
3500 	return result;
3501       else
3502 	return emit_move_multi_word (mode, x, y);
3503     }
3504 
3505   if (GET_MODE_CLASS (mode) == MODE_CC)
3506     return emit_move_ccmode (mode, x, y);
3507 
3508   /* Try using a move pattern for the corresponding integer mode.  This is
3509      only safe when simplify_subreg can convert MODE constants into integer
3510      constants.  At present, it can only do this reliably if the value
3511      fits within a HOST_WIDE_INT.  */
3512   if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3513     {
3514       rtx ret = emit_move_via_integer (mode, x, y, false);
3515       if (ret)
3516 	return ret;
3517     }
3518 
3519   return emit_move_multi_word (mode, x, y);
3520 }
3521 
3522 /* Generate code to copy Y into X.
3523    Both Y and X must have the same mode, except that
3524    Y can be a constant with VOIDmode.
3525    This mode cannot be BLKmode; use emit_block_move for that.
3526 
3527    Return the last instruction emitted.  */
3528 
3529 rtx
3530 emit_move_insn (rtx x, rtx y)
3531 {
3532   enum machine_mode mode = GET_MODE (x);
3533   rtx y_cst = NULL_RTX;
3534   rtx last_insn, set;
3535 
3536   gcc_assert (mode != BLKmode
3537 	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3538 
3539   if (CONSTANT_P (y))
3540     {
3541       if (optimize
3542 	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3543 	  && (last_insn = compress_float_constant (x, y)))
3544 	return last_insn;
3545 
3546       y_cst = y;
3547 
3548       if (!LEGITIMATE_CONSTANT_P (y))
3549 	{
3550 	  y = force_const_mem (mode, y);
3551 
3552 	  /* If the target's cannot_force_const_mem prevented the spill,
3553 	     assume that the target's move expanders will also take care
3554 	     of the non-legitimate constant.  */
3555 	  if (!y)
3556 	    y = y_cst;
3557 	  else
3558 	    y = use_anchored_address (y);
3559 	}
3560     }
3561 
3562   /* If X or Y are memory references, verify that their addresses are valid
3563      for the machine.  */
3564   if (MEM_P (x)
3565       && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3566 					 MEM_ADDR_SPACE (x))
3567 	  && ! push_operand (x, GET_MODE (x))))
3568     x = validize_mem (x);
3569 
3570   if (MEM_P (y)
3571       && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3572 					MEM_ADDR_SPACE (y)))
3573     y = validize_mem (y);
3574 
3575   gcc_assert (mode != BLKmode);
3576 
3577   last_insn = emit_move_insn_1 (x, y);
3578 
3579   if (y_cst && REG_P (x)
3580       && (set = single_set (last_insn)) != NULL_RTX
3581       && SET_DEST (set) == x
3582       && ! rtx_equal_p (y_cst, SET_SRC (set)))
3583     set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3584 
3585   return last_insn;
3586 }
3587 
3588 /* If Y is representable exactly in a narrower mode, and the target can
3589    perform the extension directly from constant or memory, then emit the
3590    move as an extension.  */
3591 
3592 static rtx
3593 compress_float_constant (rtx x, rtx y)
3594 {
3595   enum machine_mode dstmode = GET_MODE (x);
3596   enum machine_mode orig_srcmode = GET_MODE (y);
3597   enum machine_mode srcmode;
3598   REAL_VALUE_TYPE r;
3599   int oldcost, newcost;
3600   bool speed = optimize_insn_for_speed_p ();
3601 
3602   REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3603 
3604   if (LEGITIMATE_CONSTANT_P (y))
3605     oldcost = rtx_cost (y, SET, speed);
3606   else
3607     oldcost = rtx_cost (force_const_mem (dstmode, y), SET, speed);
3608 
3609   for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3610        srcmode != orig_srcmode;
3611        srcmode = GET_MODE_WIDER_MODE (srcmode))
3612     {
3613       enum insn_code ic;
3614       rtx trunc_y, last_insn;
3615 
3616       /* Skip if the target can't extend this way.  */
3617       ic = can_extend_p (dstmode, srcmode, 0);
3618       if (ic == CODE_FOR_nothing)
3619 	continue;
3620 
3621       /* Skip if the narrowed value isn't exact.  */
3622       if (! exact_real_truncate (srcmode, &r))
3623 	continue;
3624 
3625       trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3626 
3627       if (LEGITIMATE_CONSTANT_P (trunc_y))
3628 	{
3629 	  /* Skip if the target needs extra instructions to perform
3630 	     the extension.  */
3631 	  if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3632 	    continue;
3633 	  /* This is valid, but may not be cheaper than the original. */
3634 	  newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3635 	  if (oldcost < newcost)
3636 	    continue;
3637 	}
3638       else if (float_extend_from_mem[dstmode][srcmode])
3639 	{
3640 	  trunc_y = force_const_mem (srcmode, trunc_y);
3641 	  /* This is valid, but may not be cheaper than the original. */
3642 	  newcost = rtx_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y), SET, speed);
3643 	  if (oldcost < newcost)
3644 	    continue;
3645 	  trunc_y = validize_mem (trunc_y);
3646 	}
3647       else
3648 	continue;
3649 
3650       /* For CSE's benefit, force the compressed constant pool entry
3651 	 into a new pseudo.  This constant may be used in different modes,
3652 	 and if not, combine will put things back together for us.  */
3653       trunc_y = force_reg (srcmode, trunc_y);
3654       emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3655       last_insn = get_last_insn ();
3656 
3657       if (REG_P (x))
3658 	set_unique_reg_note (last_insn, REG_EQUAL, y);
3659 
3660       return last_insn;
3661     }
3662 
3663   return NULL_RTX;
3664 }
3665 
3666 /* Pushing data onto the stack.  */
3667 
3668 /* Push a block of length SIZE (perhaps variable)
3669    and return an rtx to address the beginning of the block.
3670    The value may be virtual_outgoing_args_rtx.
3671 
3672    EXTRA is the number of bytes of padding to push in addition to SIZE.
3673    BELOW nonzero means this padding comes at low addresses;
3674    otherwise, the padding comes at high addresses.  */
3675 
3676 rtx
3677 push_block (rtx size, int extra, int below)
3678 {
3679   rtx temp;
3680 
3681   size = convert_modes (Pmode, ptr_mode, size, 1);
3682   if (CONSTANT_P (size))
3683     anti_adjust_stack (plus_constant (size, extra));
3684   else if (REG_P (size) && extra == 0)
3685     anti_adjust_stack (size);
3686   else
3687     {
3688       temp = copy_to_mode_reg (Pmode, size);
3689       if (extra != 0)
3690 	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3691 			     temp, 0, OPTAB_LIB_WIDEN);
3692       anti_adjust_stack (temp);
3693     }
3694 
3695 #ifndef STACK_GROWS_DOWNWARD
3696   if (0)
3697 #else
3698   if (1)
3699 #endif
3700     {
3701       temp = virtual_outgoing_args_rtx;
3702       if (extra != 0 && below)
3703 	temp = plus_constant (temp, extra);
3704     }
3705   else
3706     {
3707       if (CONST_INT_P (size))
3708 	temp = plus_constant (virtual_outgoing_args_rtx,
3709 			      -INTVAL (size) - (below ? 0 : extra));
3710       else if (extra != 0 && !below)
3711 	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3712 			     negate_rtx (Pmode, plus_constant (size, extra)));
3713       else
3714 	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3715 			     negate_rtx (Pmode, size));
3716     }
3717 
3718   return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3719 }
3720 
3721 #ifdef PUSH_ROUNDING
3722 
3723 /* Emit single push insn.  */
3724 
3725 static void
3726 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3727 {
3728   rtx dest_addr;
3729   unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3730   rtx dest;
3731   enum insn_code icode;
3732   insn_operand_predicate_fn pred;
3733 
3734   stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3735   /* If there is push pattern, use it.  Otherwise try old way of throwing
3736      MEM representing push operation to move expander.  */
3737   icode = optab_handler (push_optab, mode)->insn_code;
3738   if (icode != CODE_FOR_nothing)
3739     {
3740       if (((pred = insn_data[(int) icode].operand[0].predicate)
3741 	   && !((*pred) (x, mode))))
3742 	x = force_reg (mode, x);
3743       emit_insn (GEN_FCN (icode) (x));
3744       return;
3745     }
3746   if (GET_MODE_SIZE (mode) == rounded_size)
3747     dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3748   /* If we are to pad downward, adjust the stack pointer first and
3749      then store X into the stack location using an offset.  This is
3750      because emit_move_insn does not know how to pad; it does not have
3751      access to type.  */
3752   else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3753     {
3754       unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3755       HOST_WIDE_INT offset;
3756 
3757       emit_move_insn (stack_pointer_rtx,
3758 		      expand_binop (Pmode,
3759 #ifdef STACK_GROWS_DOWNWARD
3760 				    sub_optab,
3761 #else
3762 				    add_optab,
3763 #endif
3764 				    stack_pointer_rtx,
3765 				    GEN_INT (rounded_size),
3766 				    NULL_RTX, 0, OPTAB_LIB_WIDEN));
3767 
3768       offset = (HOST_WIDE_INT) padding_size;
3769 #ifdef STACK_GROWS_DOWNWARD
3770       if (STACK_PUSH_CODE == POST_DEC)
3771 	/* We have already decremented the stack pointer, so get the
3772 	   previous value.  */
3773 	offset += (HOST_WIDE_INT) rounded_size;
3774 #else
3775       if (STACK_PUSH_CODE == POST_INC)
3776 	/* We have already incremented the stack pointer, so get the
3777 	   previous value.  */
3778 	offset -= (HOST_WIDE_INT) rounded_size;
3779 #endif
3780       dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3781     }
3782   else
3783     {
3784 #ifdef STACK_GROWS_DOWNWARD
3785       /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
3786       dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3787 				GEN_INT (-(HOST_WIDE_INT) rounded_size));
3788 #else
3789       /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
3790       dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3791 				GEN_INT (rounded_size));
3792 #endif
3793       dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3794     }
3795 
3796   dest = gen_rtx_MEM (mode, dest_addr);
3797 
3798   if (type != 0)
3799     {
3800       set_mem_attributes (dest, type, 1);
3801 
3802       if (flag_optimize_sibling_calls)
3803 	/* Function incoming arguments may overlap with sibling call
3804 	   outgoing arguments and we cannot allow reordering of reads
3805 	   from function arguments with stores to outgoing arguments
3806 	   of sibling calls.  */
3807 	set_mem_alias_set (dest, 0);
3808     }
3809   emit_move_insn (dest, x);
3810 }
3811 #endif
3812 
3813 /* Generate code to push X onto the stack, assuming it has mode MODE and
3814    type TYPE.
3815    MODE is redundant except when X is a CONST_INT (since they don't
3816    carry mode info).
3817    SIZE is an rtx for the size of data to be copied (in bytes),
3818    needed only if X is BLKmode.
3819 
3820    ALIGN (in bits) is maximum alignment we can assume.
3821 
3822    If PARTIAL and REG are both nonzero, then copy that many of the first
3823    bytes of X into registers starting with REG, and push the rest of X.
3824    The amount of space pushed is decreased by PARTIAL bytes.
3825    REG must be a hard register in this case.
3826    If REG is zero but PARTIAL is not, take any all others actions for an
3827    argument partially in registers, but do not actually load any
3828    registers.
3829 
3830    EXTRA is the amount in bytes of extra space to leave next to this arg.
3831    This is ignored if an argument block has already been allocated.
3832 
3833    On a machine that lacks real push insns, ARGS_ADDR is the address of
3834    the bottom of the argument block for this call.  We use indexing off there
3835    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when a
3836    argument block has not been preallocated.
3837 
3838    ARGS_SO_FAR is the size of args previously pushed for this call.
3839 
3840    REG_PARM_STACK_SPACE is nonzero if functions require stack space
3841    for arguments passed in registers.  If nonzero, it will be the number
3842    of bytes required.  */
3843 
3844 void
3845 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3846 		unsigned int align, int partial, rtx reg, int extra,
3847 		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3848 		rtx alignment_pad)
3849 {
3850   rtx xinner;
3851   enum direction stack_direction
3852 #ifdef STACK_GROWS_DOWNWARD
3853     = downward;
3854 #else
3855     = upward;
3856 #endif
3857 
3858   /* Decide where to pad the argument: `downward' for below,
3859      `upward' for above, or `none' for don't pad it.
3860      Default is below for small data on big-endian machines; else above.  */
3861   enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3862 
3863   /* Invert direction if stack is post-decrement.
3864      FIXME: why?  */
3865   if (STACK_PUSH_CODE == POST_DEC)
3866     if (where_pad != none)
3867       where_pad = (where_pad == downward ? upward : downward);
3868 
3869   xinner = x;
3870 
3871   if (mode == BLKmode
3872       || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)
3873 	  && type != NULL_TREE))
3874     {
3875       /* Copy a block into the stack, entirely or partially.  */
3876 
3877       rtx temp;
3878       int used;
3879       int offset;
3880       int skip;
3881 
3882       offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
3883       used = partial - offset;
3884 
3885       if (mode != BLKmode)
3886 	{
3887 	  /* A value is to be stored in an insufficiently aligned
3888 	     stack slot; copy via a suitably aligned slot if
3889 	     necessary.  */
3890 	  size = GEN_INT (GET_MODE_SIZE (mode));
3891 	  if (!MEM_P (xinner))
3892 	    {
3893 	      temp = assign_temp (type, 0, 1, 1);
3894 	      emit_move_insn (temp, xinner);
3895 	      xinner = temp;
3896 	    }
3897 	}
3898 
3899       gcc_assert (size);
3900 
3901       /* USED is now the # of bytes we need not copy to the stack
3902 	 because registers will take care of them.  */
3903 
3904       if (partial != 0)
3905 	xinner = adjust_address (xinner, BLKmode, used);
3906 
3907       /* If the partial register-part of the arg counts in its stack size,
3908 	 skip the part of stack space corresponding to the registers.
3909 	 Otherwise, start copying to the beginning of the stack space,
3910 	 by setting SKIP to 0.  */
3911       skip = (reg_parm_stack_space == 0) ? 0 : used;
3912 
3913 #ifdef PUSH_ROUNDING
3914       /* Do it with several push insns if that doesn't take lots of insns
3915 	 and if there is no difficulty with push insns that skip bytes
3916 	 on the stack for alignment purposes.  */
3917       if (args_addr == 0
3918 	  && PUSH_ARGS
3919 	  && CONST_INT_P (size)
3920 	  && skip == 0
3921 	  && MEM_ALIGN (xinner) >= align
3922 	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3923 	  /* Here we avoid the case of a structure whose weak alignment
3924 	     forces many pushes of a small amount of data,
3925 	     and such small pushes do rounding that causes trouble.  */
3926 	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3927 	      || align >= BIGGEST_ALIGNMENT
3928 	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3929 		  == (align / BITS_PER_UNIT)))
3930 	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3931 	{
3932 	  /* Push padding now if padding above and stack grows down,
3933 	     or if padding below and stack grows up.
3934 	     But if space already allocated, this has already been done.  */
3935 	  if (extra && args_addr == 0
3936 	      && where_pad != none && where_pad != stack_direction)
3937 	    anti_adjust_stack (GEN_INT (extra));
3938 
3939 	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3940 	}
3941       else
3942 #endif /* PUSH_ROUNDING  */
3943 	{
3944 	  rtx target;
3945 
3946 	  /* Otherwise make space on the stack and copy the data
3947 	     to the address of that space.  */
3948 
3949 	  /* Deduct words put into registers from the size we must copy.  */
3950 	  if (partial != 0)
3951 	    {
3952 	      if (CONST_INT_P (size))
3953 		size = GEN_INT (INTVAL (size) - used);
3954 	      else
3955 		size = expand_binop (GET_MODE (size), sub_optab, size,
3956 				     GEN_INT (used), NULL_RTX, 0,
3957 				     OPTAB_LIB_WIDEN);
3958 	    }
3959 
3960 	  /* Get the address of the stack space.
3961 	     In this case, we do not deal with EXTRA separately.
3962 	     A single stack adjust will do.  */
3963 	  if (! args_addr)
3964 	    {
3965 	      temp = push_block (size, extra, where_pad == downward);
3966 	      extra = 0;
3967 	    }
3968 	  else if (CONST_INT_P (args_so_far))
3969 	    temp = memory_address (BLKmode,
3970 				   plus_constant (args_addr,
3971 						  skip + INTVAL (args_so_far)));
3972 	  else
3973 	    temp = memory_address (BLKmode,
3974 				   plus_constant (gen_rtx_PLUS (Pmode,
3975 								args_addr,
3976 								args_so_far),
3977 						  skip));
3978 
3979 	  if (!ACCUMULATE_OUTGOING_ARGS)
3980 	    {
3981 	      /* If the source is referenced relative to the stack pointer,
3982 		 copy it to another register to stabilize it.  We do not need
3983 		 to do this if we know that we won't be changing sp.  */
3984 
3985 	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3986 		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3987 		temp = copy_to_reg (temp);
3988 	    }
3989 
3990 	  target = gen_rtx_MEM (BLKmode, temp);
3991 
3992 	  /* We do *not* set_mem_attributes here, because incoming arguments
3993 	     may overlap with sibling call outgoing arguments and we cannot
3994 	     allow reordering of reads from function arguments with stores
3995 	     to outgoing arguments of sibling calls.  We do, however, want
3996 	     to record the alignment of the stack slot.  */
3997 	  /* ALIGN may well be better aligned than TYPE, e.g. due to
3998 	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
3999 	  set_mem_align (target, align);
4000 
4001 	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4002 	}
4003     }
4004   else if (partial > 0)
4005     {
4006       /* Scalar partly in registers.  */
4007 
4008       int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4009       int i;
4010       int not_stack;
4011       /* # bytes of start of argument
4012 	 that we must make space for but need not store.  */
4013       int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4014       int args_offset = INTVAL (args_so_far);
4015       int skip;
4016 
4017       /* Push padding now if padding above and stack grows down,
4018 	 or if padding below and stack grows up.
4019 	 But if space already allocated, this has already been done.  */
4020       if (extra && args_addr == 0
4021 	  && where_pad != none && where_pad != stack_direction)
4022 	anti_adjust_stack (GEN_INT (extra));
4023 
4024       /* If we make space by pushing it, we might as well push
4025 	 the real data.  Otherwise, we can leave OFFSET nonzero
4026 	 and leave the space uninitialized.  */
4027       if (args_addr == 0)
4028 	offset = 0;
4029 
4030       /* Now NOT_STACK gets the number of words that we don't need to
4031 	 allocate on the stack.  Convert OFFSET to words too.  */
4032       not_stack = (partial - offset) / UNITS_PER_WORD;
4033       offset /= UNITS_PER_WORD;
4034 
4035       /* If the partial register-part of the arg counts in its stack size,
4036 	 skip the part of stack space corresponding to the registers.
4037 	 Otherwise, start copying to the beginning of the stack space,
4038 	 by setting SKIP to 0.  */
4039       skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4040 
4041       if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
4042 	x = validize_mem (force_const_mem (mode, x));
4043 
4044       /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4045 	 SUBREGs of such registers are not allowed.  */
4046       if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4047 	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4048 	x = copy_to_reg (x);
4049 
4050       /* Loop over all the words allocated on the stack for this arg.  */
4051       /* We can do it by words, because any scalar bigger than a word
4052 	 has a size a multiple of a word.  */
4053 #ifndef PUSH_ARGS_REVERSED
4054       for (i = not_stack; i < size; i++)
4055 #else
4056       for (i = size - 1; i >= not_stack; i--)
4057 #endif
4058 	if (i >= not_stack + offset)
4059 	  emit_push_insn (operand_subword_force (x, i, mode),
4060 			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4061 			  0, args_addr,
4062 			  GEN_INT (args_offset + ((i - not_stack + skip)
4063 						  * UNITS_PER_WORD)),
4064 			  reg_parm_stack_space, alignment_pad);
4065     }
4066   else
4067     {
4068       rtx addr;
4069       rtx dest;
4070 
4071       /* Push padding now if padding above and stack grows down,
4072 	 or if padding below and stack grows up.
4073 	 But if space already allocated, this has already been done.  */
4074       if (extra && args_addr == 0
4075 	  && where_pad != none && where_pad != stack_direction)
4076 	anti_adjust_stack (GEN_INT (extra));
4077 
4078 #ifdef PUSH_ROUNDING
4079       if (args_addr == 0 && PUSH_ARGS)
4080 	emit_single_push_insn (mode, x, type);
4081       else
4082 #endif
4083 	{
4084 	  if (CONST_INT_P (args_so_far))
4085 	    addr
4086 	      = memory_address (mode,
4087 				plus_constant (args_addr,
4088 					       INTVAL (args_so_far)));
4089 	  else
4090 	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4091 						       args_so_far));
4092 	  dest = gen_rtx_MEM (mode, addr);
4093 
4094 	  /* We do *not* set_mem_attributes here, because incoming arguments
4095 	     may overlap with sibling call outgoing arguments and we cannot
4096 	     allow reordering of reads from function arguments with stores
4097 	     to outgoing arguments of sibling calls.  We do, however, want
4098 	     to record the alignment of the stack slot.  */
4099 	  /* ALIGN may well be better aligned than TYPE, e.g. due to
4100 	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
4101 	  set_mem_align (dest, align);
4102 
4103 	  emit_move_insn (dest, x);
4104 	}
4105     }
4106 
4107   /* If part should go in registers, copy that part
4108      into the appropriate registers.  Do this now, at the end,
4109      since mem-to-mem copies above may do function calls.  */
4110   if (partial > 0 && reg != 0)
4111     {
4112       /* Handle calls that pass values in multiple non-contiguous locations.
4113 	 The Irix 6 ABI has examples of this.  */
4114       if (GET_CODE (reg) == PARALLEL)
4115 	emit_group_load (reg, x, type, -1);
4116       else
4117 	{
4118 	  gcc_assert (partial % UNITS_PER_WORD == 0);
4119 	  move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4120 	}
4121     }
4122 
4123   if (extra && args_addr == 0 && where_pad == stack_direction)
4124     anti_adjust_stack (GEN_INT (extra));
4125 
4126   if (alignment_pad && args_addr == 0)
4127     anti_adjust_stack (alignment_pad);
4128 }
4129 
4130 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4131    operations.  */
4132 
4133 static rtx
4134 get_subtarget (rtx x)
4135 {
4136   return (optimize
4137           || x == 0
4138 	   /* Only registers can be subtargets.  */
4139 	   || !REG_P (x)
4140 	   /* Don't use hard regs to avoid extending their life.  */
4141 	   || REGNO (x) < FIRST_PSEUDO_REGISTER
4142 	  ? 0 : x);
4143 }
4144 
4145 /* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
4146    FIELD is a bitfield.  Returns true if the optimization was successful,
4147    and there's nothing else to do.  */
4148 
4149 static bool
4150 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4151 				 unsigned HOST_WIDE_INT bitpos,
4152 				 enum machine_mode mode1, rtx str_rtx,
4153 				 tree to, tree src)
4154 {
4155   enum machine_mode str_mode = GET_MODE (str_rtx);
4156   unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4157   tree op0, op1;
4158   rtx value, result;
4159   optab binop;
4160 
4161   if (mode1 != VOIDmode
4162       || bitsize >= BITS_PER_WORD
4163       || str_bitsize > BITS_PER_WORD
4164       || TREE_SIDE_EFFECTS (to)
4165       || TREE_THIS_VOLATILE (to))
4166     return false;
4167 
4168   STRIP_NOPS (src);
4169   if (!BINARY_CLASS_P (src)
4170       || TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4171     return false;
4172 
4173   op0 = TREE_OPERAND (src, 0);
4174   op1 = TREE_OPERAND (src, 1);
4175   STRIP_NOPS (op0);
4176 
4177   if (!operand_equal_p (to, op0, 0))
4178     return false;
4179 
4180   if (MEM_P (str_rtx))
4181     {
4182       unsigned HOST_WIDE_INT offset1;
4183 
4184       if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4185 	str_mode = word_mode;
4186       str_mode = get_best_mode (bitsize, bitpos,
4187 				MEM_ALIGN (str_rtx), str_mode, 0);
4188       if (str_mode == VOIDmode)
4189 	return false;
4190       str_bitsize = GET_MODE_BITSIZE (str_mode);
4191 
4192       offset1 = bitpos;
4193       bitpos %= str_bitsize;
4194       offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4195       str_rtx = adjust_address (str_rtx, str_mode, offset1);
4196     }
4197   else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4198     return false;
4199 
4200   /* If the bit field covers the whole REG/MEM, store_field
4201      will likely generate better code.  */
4202   if (bitsize >= str_bitsize)
4203     return false;
4204 
4205   /* We can't handle fields split across multiple entities.  */
4206   if (bitpos + bitsize > str_bitsize)
4207     return false;
4208 
4209   if (BYTES_BIG_ENDIAN)
4210     bitpos = str_bitsize - bitpos - bitsize;
4211 
4212   switch (TREE_CODE (src))
4213     {
4214     case PLUS_EXPR:
4215     case MINUS_EXPR:
4216       /* For now, just optimize the case of the topmost bitfield
4217 	 where we don't need to do any masking and also
4218 	 1 bit bitfields where xor can be used.
4219 	 We might win by one instruction for the other bitfields
4220 	 too if insv/extv instructions aren't used, so that
4221 	 can be added later.  */
4222       if (bitpos + bitsize != str_bitsize
4223 	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4224 	break;
4225 
4226       value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4227       value = convert_modes (str_mode,
4228 			     TYPE_MODE (TREE_TYPE (op1)), value,
4229 			     TYPE_UNSIGNED (TREE_TYPE (op1)));
4230 
4231       /* We may be accessing data outside the field, which means
4232 	 we can alias adjacent data.  */
4233       if (MEM_P (str_rtx))
4234 	{
4235 	  str_rtx = shallow_copy_rtx (str_rtx);
4236 	  set_mem_alias_set (str_rtx, 0);
4237 	  set_mem_expr (str_rtx, 0);
4238 	}
4239 
4240       binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
4241       if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4242 	{
4243 	  value = expand_and (str_mode, value, const1_rtx, NULL);
4244 	  binop = xor_optab;
4245 	}
4246       value = expand_shift (LSHIFT_EXPR, str_mode, value,
4247 			    build_int_cst (NULL_TREE, bitpos),
4248 			    NULL_RTX, 1);
4249       result = expand_binop (str_mode, binop, str_rtx,
4250 			     value, str_rtx, 1, OPTAB_WIDEN);
4251       if (result != str_rtx)
4252 	emit_move_insn (str_rtx, result);
4253       return true;
4254 
4255     case BIT_IOR_EXPR:
4256     case BIT_XOR_EXPR:
4257       if (TREE_CODE (op1) != INTEGER_CST)
4258 	break;
4259       value = expand_expr (op1, NULL_RTX, GET_MODE (str_rtx), EXPAND_NORMAL);
4260       value = convert_modes (GET_MODE (str_rtx),
4261 			     TYPE_MODE (TREE_TYPE (op1)), value,
4262 			     TYPE_UNSIGNED (TREE_TYPE (op1)));
4263 
4264       /* We may be accessing data outside the field, which means
4265 	 we can alias adjacent data.  */
4266       if (MEM_P (str_rtx))
4267 	{
4268 	  str_rtx = shallow_copy_rtx (str_rtx);
4269 	  set_mem_alias_set (str_rtx, 0);
4270 	  set_mem_expr (str_rtx, 0);
4271 	}
4272 
4273       binop = TREE_CODE (src) == BIT_IOR_EXPR ? ior_optab : xor_optab;
4274       if (bitpos + bitsize != GET_MODE_BITSIZE (GET_MODE (str_rtx)))
4275 	{
4276 	  rtx mask = GEN_INT (((unsigned HOST_WIDE_INT) 1 << bitsize)
4277 			      - 1);
4278 	  value = expand_and (GET_MODE (str_rtx), value, mask,
4279 			      NULL_RTX);
4280 	}
4281       value = expand_shift (LSHIFT_EXPR, GET_MODE (str_rtx), value,
4282 			    build_int_cst (NULL_TREE, bitpos),
4283 			    NULL_RTX, 1);
4284       result = expand_binop (GET_MODE (str_rtx), binop, str_rtx,
4285 			     value, str_rtx, 1, OPTAB_WIDEN);
4286       if (result != str_rtx)
4287 	emit_move_insn (str_rtx, result);
4288       return true;
4289 
4290     default:
4291       break;
4292     }
4293 
4294   return false;
4295 }
4296 
4297 
4298 /* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
4299    is true, try generating a nontemporal store.  */
4300 
4301 void
4302 expand_assignment (tree to, tree from, bool nontemporal)
4303 {
4304   rtx to_rtx = 0;
4305   rtx result;
4306 
4307   /* Don't crash if the lhs of the assignment was erroneous.  */
4308   if (TREE_CODE (to) == ERROR_MARK)
4309     {
4310       result = expand_normal (from);
4311       return;
4312     }
4313 
4314   /* Optimize away no-op moves without side-effects.  */
4315   if (operand_equal_p (to, from, 0))
4316     return;
4317 
4318   /* Assignment of a structure component needs special treatment
4319      if the structure component's rtx is not simply a MEM.
4320      Assignment of an array element at a constant index, and assignment of
4321      an array element in an unaligned packed structure field, has the same
4322      problem.  */
4323   if (handled_component_p (to)
4324       || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4325     {
4326       enum machine_mode mode1;
4327       HOST_WIDE_INT bitsize, bitpos;
4328       tree offset;
4329       int unsignedp;
4330       int volatilep = 0;
4331       tree tem;
4332 
4333       push_temp_slots ();
4334       tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4335 				 &unsignedp, &volatilep, true);
4336 
4337       /* If we are going to use store_bit_field and extract_bit_field,
4338 	 make sure to_rtx will be safe for multiple use.  */
4339 
4340       to_rtx = expand_normal (tem);
4341 
4342       if (offset != 0)
4343 	{
4344 	  enum machine_mode address_mode;
4345 	  rtx offset_rtx;
4346 
4347 	  if (!MEM_P (to_rtx))
4348 	    {
4349 	      /* We can get constant negative offsets into arrays with broken
4350 		 user code.  Translate this to a trap instead of ICEing.  */
4351 	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4352 	      expand_builtin_trap ();
4353 	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4354 	    }
4355 
4356 	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4357 	  address_mode
4358 	    = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
4359 	  if (GET_MODE (offset_rtx) != address_mode)
4360 	    offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4361 
4362 	  /* A constant address in TO_RTX can have VOIDmode, we must not try
4363 	     to call force_reg for that case.  Avoid that case.  */
4364 	  if (MEM_P (to_rtx)
4365 	      && GET_MODE (to_rtx) == BLKmode
4366 	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4367 	      && bitsize > 0
4368 	      && (bitpos % bitsize) == 0
4369 	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4370 	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4371 	    {
4372 	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4373 	      bitpos = 0;
4374 	    }
4375 
4376 	  to_rtx = offset_address (to_rtx, offset_rtx,
4377 				   highest_pow2_factor_for_target (to,
4378 				   				   offset));
4379 	}
4380 
4381       /* No action is needed if the target is not a memory and the field
4382 	 lies completely outside that target.  This can occur if the source
4383 	 code contains an out-of-bounds access to a small array.  */
4384       if (!MEM_P (to_rtx)
4385 	  && GET_MODE (to_rtx) != BLKmode
4386 	  && (unsigned HOST_WIDE_INT) bitpos
4387 	     >= GET_MODE_BITSIZE (GET_MODE (to_rtx)))
4388 	{
4389 	  expand_normal (from);
4390 	  result = NULL;
4391 	}
4392       /* Handle expand_expr of a complex value returning a CONCAT.  */
4393       else if (GET_CODE (to_rtx) == CONCAT)
4394 	{
4395 	  if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from))))
4396 	    {
4397 	      gcc_assert (bitpos == 0);
4398 	      result = store_expr (from, to_rtx, false, nontemporal);
4399 	    }
4400 	  else
4401 	    {
4402 	      gcc_assert (bitpos == 0 || bitpos == GET_MODE_BITSIZE (mode1));
4403 	      result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4404 				   nontemporal);
4405 	    }
4406 	}
4407       else
4408 	{
4409 	  if (MEM_P (to_rtx))
4410 	    {
4411 	      /* If the field is at offset zero, we could have been given the
4412 		 DECL_RTX of the parent struct.  Don't munge it.  */
4413 	      to_rtx = shallow_copy_rtx (to_rtx);
4414 
4415 	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4416 
4417 	      /* Deal with volatile and readonly fields.  The former is only
4418 		 done for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
4419 	      if (volatilep)
4420 		MEM_VOLATILE_P (to_rtx) = 1;
4421 	      if (component_uses_parent_alias_set (to))
4422 		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4423 	    }
4424 
4425 	  if (optimize_bitfield_assignment_op (bitsize, bitpos, mode1,
4426 					       to_rtx, to, from))
4427 	    result = NULL;
4428 	  else
4429 	    result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4430 				  TREE_TYPE (tem), get_alias_set (to),
4431 				  nontemporal);
4432 	}
4433 
4434       if (result)
4435 	preserve_temp_slots (result);
4436       free_temp_slots ();
4437       pop_temp_slots ();
4438       return;
4439     }
4440 
4441    else if (TREE_CODE (to) == MISALIGNED_INDIRECT_REF)
4442      {
4443        addr_space_t as = ADDR_SPACE_GENERIC;
4444        enum machine_mode mode, op_mode1;
4445        enum insn_code icode;
4446        rtx reg, addr, mem, insn;
4447 
4448        if (POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (to, 0))))
4449 	 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (to, 0))));
4450 
4451        reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4452        reg = force_not_mem (reg);
4453 
4454        mode = TYPE_MODE (TREE_TYPE (to));
4455        addr = expand_expr (TREE_OPERAND (to, 0), NULL_RTX, VOIDmode,
4456                          EXPAND_SUM);
4457        addr = memory_address_addr_space (mode, addr, as);
4458        mem = gen_rtx_MEM (mode, addr);
4459 
4460        set_mem_attributes (mem, to, 0);
4461        set_mem_addr_space (mem, as);
4462 
4463        icode = movmisalign_optab->handlers[mode].insn_code;
4464        gcc_assert (icode != CODE_FOR_nothing);
4465 
4466        op_mode1 = insn_data[icode].operand[1].mode;
4467        if (! (*insn_data[icode].operand[1].predicate) (reg, op_mode1)
4468            && op_mode1 != VOIDmode)
4469          reg = copy_to_mode_reg (op_mode1, reg);
4470 
4471       insn = GEN_FCN (icode) (mem, reg);
4472        emit_insn (insn);
4473        return;
4474      }
4475 
4476   /* If the rhs is a function call and its value is not an aggregate,
4477      call the function before we start to compute the lhs.
4478      This is needed for correct code for cases such as
4479      val = setjmp (buf) on machines where reference to val
4480      requires loading up part of an address in a separate insn.
4481 
4482      Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4483      since it might be a promoted variable where the zero- or sign- extension
4484      needs to be done.  Handling this in the normal way is safe because no
4485      computation is done before the call.  The same is true for SSA names.  */
4486   if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
4487       && COMPLETE_TYPE_P (TREE_TYPE (from))
4488       && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4489       && ! (((TREE_CODE (to) == VAR_DECL
4490 	      || TREE_CODE (to) == PARM_DECL
4491 	      || TREE_CODE (to) == RESULT_DECL)
4492 	     && REG_P (DECL_RTL (to)))
4493 	    || TREE_CODE (to) == SSA_NAME))
4494     {
4495       rtx value;
4496 
4497       push_temp_slots ();
4498       value = expand_normal (from);
4499       if (to_rtx == 0)
4500 	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4501 
4502       /* Handle calls that return values in multiple non-contiguous locations.
4503 	 The Irix 6 ABI has examples of this.  */
4504       if (GET_CODE (to_rtx) == PARALLEL)
4505 	emit_group_load (to_rtx, value, TREE_TYPE (from),
4506 			 int_size_in_bytes (TREE_TYPE (from)));
4507       else if (GET_MODE (to_rtx) == BLKmode)
4508 	emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4509       else
4510 	{
4511 	  if (POINTER_TYPE_P (TREE_TYPE (to)))
4512 	    value = convert_memory_address_addr_space
4513 		      (GET_MODE (to_rtx), value,
4514 		       TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
4515 
4516 	  emit_move_insn (to_rtx, value);
4517 	}
4518       preserve_temp_slots (to_rtx);
4519       free_temp_slots ();
4520       pop_temp_slots ();
4521       return;
4522     }
4523 
4524   /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
4525      Don't re-expand if it was expanded already (in COMPONENT_REF case).  */
4526 
4527   if (to_rtx == 0)
4528     to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4529 
4530   /* Don't move directly into a return register.  */
4531   if (TREE_CODE (to) == RESULT_DECL
4532       && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
4533     {
4534       rtx temp;
4535 
4536       push_temp_slots ();
4537       if (REG_P (to_rtx) && TYPE_MODE (TREE_TYPE (from)) == BLKmode)
4538 	temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
4539       else
4540 	temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
4541 
4542       if (GET_CODE (to_rtx) == PARALLEL)
4543 	emit_group_load (to_rtx, temp, TREE_TYPE (from),
4544 			 int_size_in_bytes (TREE_TYPE (from)));
4545       else if (temp)
4546 	emit_move_insn (to_rtx, temp);
4547 
4548       preserve_temp_slots (to_rtx);
4549       free_temp_slots ();
4550       pop_temp_slots ();
4551       return;
4552     }
4553 
4554   /* In case we are returning the contents of an object which overlaps
4555      the place the value is being stored, use a safe function when copying
4556      a value through a pointer into a structure value return block.  */
4557   if (TREE_CODE (to) == RESULT_DECL
4558       && TREE_CODE (from) == INDIRECT_REF
4559       && ADDR_SPACE_GENERIC_P
4560 	   (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
4561       && refs_may_alias_p (to, from)
4562       && cfun->returns_struct
4563       && !cfun->returns_pcc_struct)
4564     {
4565       rtx from_rtx, size;
4566 
4567       push_temp_slots ();
4568       size = expr_size (from);
4569       from_rtx = expand_normal (from);
4570 
4571       emit_library_call (memmove_libfunc, LCT_NORMAL,
4572 			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4573 			 XEXP (from_rtx, 0), Pmode,
4574 			 convert_to_mode (TYPE_MODE (sizetype),
4575 					  size, TYPE_UNSIGNED (sizetype)),
4576 			 TYPE_MODE (sizetype));
4577 
4578       preserve_temp_slots (to_rtx);
4579       free_temp_slots ();
4580       pop_temp_slots ();
4581       return;
4582     }
4583 
4584   /* Compute FROM and store the value in the rtx we got.  */
4585 
4586   push_temp_slots ();
4587   result = store_expr (from, to_rtx, 0, nontemporal);
4588   preserve_temp_slots (result);
4589   free_temp_slots ();
4590   pop_temp_slots ();
4591   return;
4592 }
4593 
4594 /* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
4595    succeeded, false otherwise.  */
4596 
4597 bool
4598 emit_storent_insn (rtx to, rtx from)
4599 {
4600   enum machine_mode mode = GET_MODE (to), imode;
4601   enum insn_code code = optab_handler (storent_optab, mode)->insn_code;
4602   rtx pattern;
4603 
4604   if (code == CODE_FOR_nothing)
4605     return false;
4606 
4607   imode = insn_data[code].operand[0].mode;
4608   if (!insn_data[code].operand[0].predicate (to, imode))
4609     return false;
4610 
4611   imode = insn_data[code].operand[1].mode;
4612   if (!insn_data[code].operand[1].predicate (from, imode))
4613     {
4614       from = copy_to_mode_reg (imode, from);
4615       if (!insn_data[code].operand[1].predicate (from, imode))
4616 	return false;
4617     }
4618 
4619   pattern = GEN_FCN (code) (to, from);
4620   if (pattern == NULL_RTX)
4621     return false;
4622 
4623   emit_insn (pattern);
4624   return true;
4625 }
4626 
4627 /* Generate code for computing expression EXP,
4628    and storing the value into TARGET.
4629 
4630    If the mode is BLKmode then we may return TARGET itself.
4631    It turns out that in BLKmode it doesn't cause a problem.
4632    because C has no operators that could combine two different
4633    assignments into the same BLKmode object with different values
4634    with no sequence point.  Will other languages need this to
4635    be more thorough?
4636 
4637    If CALL_PARAM_P is nonzero, this is a store into a call param on the
4638    stack, and block moves may need to be treated specially.
4639 
4640    If NONTEMPORAL is true, try using a nontemporal store instruction.  */
4641 
4642 rtx
4643 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
4644 {
4645   rtx temp;
4646   rtx alt_rtl = NULL_RTX;
4647   location_t loc = EXPR_LOCATION (exp);
4648 
4649   if (VOID_TYPE_P (TREE_TYPE (exp)))
4650     {
4651       /* C++ can generate ?: expressions with a throw expression in one
4652 	 branch and an rvalue in the other. Here, we resolve attempts to
4653 	 store the throw expression's nonexistent result.  */
4654       gcc_assert (!call_param_p);
4655       expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
4656       return NULL_RTX;
4657     }
4658   if (TREE_CODE (exp) == COMPOUND_EXPR)
4659     {
4660       /* Perform first part of compound expression, then assign from second
4661 	 part.  */
4662       expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4663 		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4664       return store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4665 			 nontemporal);
4666     }
4667   else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4668     {
4669       /* For conditional expression, get safe form of the target.  Then
4670 	 test the condition, doing the appropriate assignment on either
4671 	 side.  This avoids the creation of unnecessary temporaries.
4672 	 For non-BLKmode, it is more efficient not to do this.  */
4673 
4674       rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4675 
4676       do_pending_stack_adjust ();
4677       NO_DEFER_POP;
4678       jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
4679       store_expr (TREE_OPERAND (exp, 1), target, call_param_p,
4680 		  nontemporal);
4681       emit_jump_insn (gen_jump (lab2));
4682       emit_barrier ();
4683       emit_label (lab1);
4684       store_expr (TREE_OPERAND (exp, 2), target, call_param_p,
4685 		  nontemporal);
4686       emit_label (lab2);
4687       OK_DEFER_POP;
4688 
4689       return NULL_RTX;
4690     }
4691   else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4692     /* If this is a scalar in a register that is stored in a wider mode
4693        than the declared mode, compute the result into its declared mode
4694        and then convert to the wider mode.  Our value is the computed
4695        expression.  */
4696     {
4697       rtx inner_target = 0;
4698 
4699       /* We can do the conversion inside EXP, which will often result
4700 	 in some optimizations.  Do the conversion in two steps: first
4701 	 change the signedness, if needed, then the extend.  But don't
4702 	 do this if the type of EXP is a subtype of something else
4703 	 since then the conversion might involve more than just
4704 	 converting modes.  */
4705       if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
4706 	  && TREE_TYPE (TREE_TYPE (exp)) == 0
4707 	  && GET_MODE_PRECISION (GET_MODE (target))
4708 	     == TYPE_PRECISION (TREE_TYPE (exp)))
4709 	{
4710 	  if (TYPE_UNSIGNED (TREE_TYPE (exp))
4711 	      != SUBREG_PROMOTED_UNSIGNED_P (target))
4712 	    {
4713 	      /* Some types, e.g. Fortran's logical*4, won't have a signed
4714 		 version, so use the mode instead.  */
4715 	      tree ntype
4716 		= (signed_or_unsigned_type_for
4717 		   (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)));
4718 	      if (ntype == NULL)
4719 		ntype = lang_hooks.types.type_for_mode
4720 		  (TYPE_MODE (TREE_TYPE (exp)),
4721 		   SUBREG_PROMOTED_UNSIGNED_P (target));
4722 
4723 	      exp = fold_convert_loc (loc, ntype, exp);
4724 	    }
4725 
4726 	  exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
4727 				  (GET_MODE (SUBREG_REG (target)),
4728 				   SUBREG_PROMOTED_UNSIGNED_P (target)),
4729 				  exp);
4730 
4731 	  inner_target = SUBREG_REG (target);
4732 	}
4733 
4734       temp = expand_expr (exp, inner_target, VOIDmode,
4735 			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4736 
4737       /* If TEMP is a VOIDmode constant, use convert_modes to make
4738 	 sure that we properly convert it.  */
4739       if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4740 	{
4741 	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4742 				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4743 	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4744 			        GET_MODE (target), temp,
4745 			        SUBREG_PROMOTED_UNSIGNED_P (target));
4746 	}
4747 
4748       convert_move (SUBREG_REG (target), temp,
4749 		    SUBREG_PROMOTED_UNSIGNED_P (target));
4750 
4751       return NULL_RTX;
4752     }
4753   else if (TREE_CODE (exp) == STRING_CST
4754 	   && !nontemporal && !call_param_p
4755 	   && TREE_STRING_LENGTH (exp) > 0
4756 	   && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
4757     {
4758       /* Optimize initialization of an array with a STRING_CST.  */
4759       HOST_WIDE_INT exp_len, str_copy_len;
4760       rtx dest_mem;
4761 
4762       exp_len = int_expr_size (exp);
4763       if (exp_len <= 0)
4764 	goto normal_expr;
4765 
4766       str_copy_len = strlen (TREE_STRING_POINTER (exp));
4767       if (str_copy_len < TREE_STRING_LENGTH (exp) - 1)
4768 	goto normal_expr;
4769 
4770       str_copy_len = TREE_STRING_LENGTH (exp);
4771       if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
4772 	{
4773 	  str_copy_len += STORE_MAX_PIECES - 1;
4774 	  str_copy_len &= ~(STORE_MAX_PIECES - 1);
4775 	}
4776       str_copy_len = MIN (str_copy_len, exp_len);
4777       if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
4778 				CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4779 				MEM_ALIGN (target), false))
4780 	goto normal_expr;
4781 
4782       dest_mem = target;
4783 
4784       dest_mem = store_by_pieces (dest_mem,
4785 				  str_copy_len, builtin_strncpy_read_str,
4786 				  CONST_CAST(char *, TREE_STRING_POINTER (exp)),
4787 				  MEM_ALIGN (target), false,
4788 				  exp_len > str_copy_len ? 1 : 0);
4789       if (exp_len > str_copy_len)
4790 	clear_storage (adjust_address (dest_mem, BLKmode, 0),
4791 		       GEN_INT (exp_len - str_copy_len),
4792 		       BLOCK_OP_NORMAL);
4793       return NULL_RTX;
4794     }
4795   else
4796     {
4797       rtx tmp_target;
4798 
4799   normal_expr:
4800       /* If we want to use a nontemporal store, force the value to
4801 	 register first.  */
4802       tmp_target = nontemporal ? NULL_RTX : target;
4803       temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
4804 			       (call_param_p
4805 				? EXPAND_STACK_PARM : EXPAND_NORMAL),
4806 			       &alt_rtl);
4807     }
4808 
4809   /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4810      the same as that of TARGET, adjust the constant.  This is needed, for
4811      example, in case it is a CONST_DOUBLE and we want only a word-sized
4812      value.  */
4813   if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4814       && TREE_CODE (exp) != ERROR_MARK
4815       && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4816     temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4817 			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4818 
4819   /* If value was not generated in the target, store it there.
4820      Convert the value to TARGET's type first if necessary and emit the
4821      pending incrementations that have been queued when expanding EXP.
4822      Note that we cannot emit the whole queue blindly because this will
4823      effectively disable the POST_INC optimization later.
4824 
4825      If TEMP and TARGET compare equal according to rtx_equal_p, but
4826      one or both of them are volatile memory refs, we have to distinguish
4827      two cases:
4828      - expand_expr has used TARGET.  In this case, we must not generate
4829        another copy.  This can be detected by TARGET being equal according
4830        to == .
4831      - expand_expr has not used TARGET - that means that the source just
4832        happens to have the same RTX form.  Since temp will have been created
4833        by expand_expr, it will compare unequal according to == .
4834        We must generate a copy in this case, to reach the correct number
4835        of volatile memory references.  */
4836 
4837   if ((! rtx_equal_p (temp, target)
4838        || (temp != target && (side_effects_p (temp)
4839 			      || side_effects_p (target))))
4840       && TREE_CODE (exp) != ERROR_MARK
4841       /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4842 	 but TARGET is not valid memory reference, TEMP will differ
4843 	 from TARGET although it is really the same location.  */
4844       && !(alt_rtl
4845 	   && rtx_equal_p (alt_rtl, target)
4846 	   && !side_effects_p (alt_rtl)
4847 	   && !side_effects_p (target))
4848       /* If there's nothing to copy, don't bother.  Don't call
4849 	 expr_size unless necessary, because some front-ends (C++)
4850 	 expr_size-hook must not be given objects that are not
4851 	 supposed to be bit-copied or bit-initialized.  */
4852       && expr_size (exp) != const0_rtx)
4853     {
4854       if (GET_MODE (temp) != GET_MODE (target)
4855 	  && GET_MODE (temp) != VOIDmode)
4856 	{
4857 	  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4858 	  if (GET_MODE (target) == BLKmode
4859 		   || GET_MODE (temp) == BLKmode)
4860 	    emit_block_move (target, temp, expr_size (exp),
4861 			     (call_param_p
4862 			      ? BLOCK_OP_CALL_PARM
4863 			      : BLOCK_OP_NORMAL));
4864 	  else
4865 	    convert_move (target, temp, unsignedp);
4866 	}
4867 
4868       else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4869 	{
4870 	  /* Handle copying a string constant into an array.  The string
4871 	     constant may be shorter than the array.  So copy just the string's
4872 	     actual length, and clear the rest.  First get the size of the data
4873 	     type of the string, which is actually the size of the target.  */
4874 	  rtx size = expr_size (exp);
4875 
4876 	  if (CONST_INT_P (size)
4877 	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
4878 	    emit_block_move (target, temp, size,
4879 			     (call_param_p
4880 			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4881 	  else
4882 	    {
4883 	      enum machine_mode pointer_mode
4884 		= targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
4885 	      enum machine_mode address_mode
4886 		= targetm.addr_space.address_mode (MEM_ADDR_SPACE (target));
4887 
4888 	      /* Compute the size of the data to copy from the string.  */
4889 	      tree copy_size
4890 		= size_binop_loc (loc, MIN_EXPR,
4891 				  make_tree (sizetype, size),
4892 				  size_int (TREE_STRING_LENGTH (exp)));
4893 	      rtx copy_size_rtx
4894 		= expand_expr (copy_size, NULL_RTX, VOIDmode,
4895 			       (call_param_p
4896 				? EXPAND_STACK_PARM : EXPAND_NORMAL));
4897 	      rtx label = 0;
4898 
4899 	      /* Copy that much.  */
4900 	      copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
4901 					       TYPE_UNSIGNED (sizetype));
4902 	      emit_block_move (target, temp, copy_size_rtx,
4903 			       (call_param_p
4904 				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4905 
4906 	      /* Figure out how much is left in TARGET that we have to clear.
4907 		 Do all calculations in pointer_mode.  */
4908 	      if (CONST_INT_P (copy_size_rtx))
4909 		{
4910 		  size = plus_constant (size, -INTVAL (copy_size_rtx));
4911 		  target = adjust_address (target, BLKmode,
4912 					   INTVAL (copy_size_rtx));
4913 		}
4914 	      else
4915 		{
4916 		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4917 				       copy_size_rtx, NULL_RTX, 0,
4918 				       OPTAB_LIB_WIDEN);
4919 
4920 		  if (GET_MODE (copy_size_rtx) != address_mode)
4921 		    copy_size_rtx = convert_to_mode (address_mode,
4922 						     copy_size_rtx,
4923 						     TYPE_UNSIGNED (sizetype));
4924 
4925 		  target = offset_address (target, copy_size_rtx,
4926 					   highest_pow2_factor (copy_size));
4927 		  label = gen_label_rtx ();
4928 		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4929 					   GET_MODE (size), 0, label);
4930 		}
4931 
4932 	      if (size != const0_rtx)
4933 		clear_storage (target, size, BLOCK_OP_NORMAL);
4934 
4935 	      if (label)
4936 		emit_label (label);
4937 	    }
4938 	}
4939       /* Handle calls that return values in multiple non-contiguous locations.
4940 	 The Irix 6 ABI has examples of this.  */
4941       else if (GET_CODE (target) == PARALLEL)
4942 	emit_group_load (target, temp, TREE_TYPE (exp),
4943 			 int_size_in_bytes (TREE_TYPE (exp)));
4944       else if (GET_MODE (temp) == BLKmode)
4945 	emit_block_move (target, temp, expr_size (exp),
4946 			 (call_param_p
4947 			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4948       else if (nontemporal
4949 	       && emit_storent_insn (target, temp))
4950 	/* If we managed to emit a nontemporal store, there is nothing else to
4951 	   do.  */
4952 	;
4953       else
4954 	{
4955 	  temp = force_operand (temp, target);
4956 	  if (temp != target)
4957 	    emit_move_insn (target, temp);
4958 	}
4959     }
4960 
4961   return NULL_RTX;
4962 }
4963 
4964 /* Helper for categorize_ctor_elements.  Identical interface.  */
4965 
4966 static bool
4967 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
4968 			    HOST_WIDE_INT *p_elt_count,
4969 			    bool *p_must_clear)
4970 {
4971   unsigned HOST_WIDE_INT idx;
4972   HOST_WIDE_INT nz_elts, elt_count;
4973   tree value, purpose;
4974 
4975   /* Whether CTOR is a valid constant initializer, in accordance with what
4976      initializer_constant_valid_p does.  If inferred from the constructor
4977      elements, true until proven otherwise.  */
4978   bool const_from_elts_p = constructor_static_from_elts_p (ctor);
4979   bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
4980 
4981   nz_elts = 0;
4982   elt_count = 0;
4983 
4984   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
4985     {
4986       HOST_WIDE_INT mult;
4987 
4988       mult = 1;
4989       if (TREE_CODE (purpose) == RANGE_EXPR)
4990 	{
4991 	  tree lo_index = TREE_OPERAND (purpose, 0);
4992 	  tree hi_index = TREE_OPERAND (purpose, 1);
4993 
4994 	  if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4995 	    mult = (tree_low_cst (hi_index, 1)
4996 		    - tree_low_cst (lo_index, 1) + 1);
4997 	}
4998 
4999       switch (TREE_CODE (value))
5000 	{
5001 	case CONSTRUCTOR:
5002 	  {
5003 	    HOST_WIDE_INT nz = 0, ic = 0;
5004 
5005 	    bool const_elt_p
5006 	      = categorize_ctor_elements_1 (value, &nz, &ic, p_must_clear);
5007 
5008 	    nz_elts += mult * nz;
5009  	    elt_count += mult * ic;
5010 
5011 	    if (const_from_elts_p && const_p)
5012 	      const_p = const_elt_p;
5013 	  }
5014 	  break;
5015 
5016 	case INTEGER_CST:
5017 	case REAL_CST:
5018 	case FIXED_CST:
5019 	  if (!initializer_zerop (value))
5020 	    nz_elts += mult;
5021 	  elt_count += mult;
5022 	  break;
5023 
5024 	case STRING_CST:
5025 	  nz_elts += mult * TREE_STRING_LENGTH (value);
5026 	  elt_count += mult * TREE_STRING_LENGTH (value);
5027 	  break;
5028 
5029 	case COMPLEX_CST:
5030 	  if (!initializer_zerop (TREE_REALPART (value)))
5031 	    nz_elts += mult;
5032 	  if (!initializer_zerop (TREE_IMAGPART (value)))
5033 	    nz_elts += mult;
5034 	  elt_count += mult;
5035 	  break;
5036 
5037 	case VECTOR_CST:
5038 	  {
5039 	    tree v;
5040 	    for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
5041 	      {
5042 		if (!initializer_zerop (TREE_VALUE (v)))
5043 		  nz_elts += mult;
5044 		elt_count += mult;
5045 	      }
5046 	  }
5047 	  break;
5048 
5049 	default:
5050 	  nz_elts += mult;
5051 	  elt_count += mult;
5052 
5053 	  if (const_from_elts_p && const_p)
5054 	    const_p = initializer_constant_valid_p (value, TREE_TYPE (value))
5055 		      != NULL_TREE;
5056 	  break;
5057 	}
5058     }
5059 
5060   if (!*p_must_clear
5061       && (TREE_CODE (TREE_TYPE (ctor)) == UNION_TYPE
5062 	  || TREE_CODE (TREE_TYPE (ctor)) == QUAL_UNION_TYPE))
5063     {
5064       tree init_sub_type;
5065       bool clear_this = true;
5066 
5067       if (!VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (ctor)))
5068 	{
5069 	  /* We don't expect more than one element of the union to be
5070 	     initialized.  Not sure what we should do otherwise... */
5071           gcc_assert (VEC_length (constructor_elt, CONSTRUCTOR_ELTS (ctor))
5072 		      == 1);
5073 
5074           init_sub_type = TREE_TYPE (VEC_index (constructor_elt,
5075 						CONSTRUCTOR_ELTS (ctor),
5076 						0)->value);
5077 
5078 	  /* ??? We could look at each element of the union, and find the
5079 	     largest element.  Which would avoid comparing the size of the
5080 	     initialized element against any tail padding in the union.
5081 	     Doesn't seem worth the effort...  */
5082 	  if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (ctor)),
5083 				TYPE_SIZE (init_sub_type)) == 1)
5084 	    {
5085 	      /* And now we have to find out if the element itself is fully
5086 		 constructed.  E.g. for union { struct { int a, b; } s; } u
5087 		 = { .s = { .a = 1 } }.  */
5088 	      if (elt_count == count_type_elements (init_sub_type, false))
5089 		clear_this = false;
5090 	    }
5091 	}
5092 
5093       *p_must_clear = clear_this;
5094     }
5095 
5096   *p_nz_elts += nz_elts;
5097   *p_elt_count += elt_count;
5098 
5099   return const_p;
5100 }
5101 
5102 /* Examine CTOR to discover:
5103    * how many scalar fields are set to nonzero values,
5104      and place it in *P_NZ_ELTS;
5105    * how many scalar fields in total are in CTOR,
5106      and place it in *P_ELT_COUNT.
5107    * if a type is a union, and the initializer from the constructor
5108      is not the largest element in the union, then set *p_must_clear.
5109 
5110    Return whether or not CTOR is a valid static constant initializer, the same
5111    as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */
5112 
5113 bool
5114 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5115 			  HOST_WIDE_INT *p_elt_count,
5116 			  bool *p_must_clear)
5117 {
5118   *p_nz_elts = 0;
5119   *p_elt_count = 0;
5120   *p_must_clear = false;
5121 
5122   return
5123     categorize_ctor_elements_1 (ctor, p_nz_elts, p_elt_count, p_must_clear);
5124 }
5125 
5126 /* Count the number of scalars in TYPE.  Return -1 on overflow or
5127    variable-sized.  If ALLOW_FLEXARR is true, don't count flexible
5128    array member at the end of the structure.  */
5129 
5130 HOST_WIDE_INT
5131 count_type_elements (const_tree type, bool allow_flexarr)
5132 {
5133   const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
5134   switch (TREE_CODE (type))
5135     {
5136     case ARRAY_TYPE:
5137       {
5138 	tree telts = array_type_nelts (type);
5139 	if (telts && host_integerp (telts, 1))
5140 	  {
5141 	    HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
5142 	    HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type), false);
5143 	    if (n == 0)
5144 	      return 0;
5145 	    else if (max / n > m)
5146 	      return n * m;
5147 	  }
5148 	return -1;
5149       }
5150 
5151     case RECORD_TYPE:
5152       {
5153 	HOST_WIDE_INT n = 0, t;
5154 	tree f;
5155 
5156 	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
5157 	  if (TREE_CODE (f) == FIELD_DECL)
5158 	    {
5159 	      t = count_type_elements (TREE_TYPE (f), false);
5160 	      if (t < 0)
5161 		{
5162 		  /* Check for structures with flexible array member.  */
5163 		  tree tf = TREE_TYPE (f);
5164 		  if (allow_flexarr
5165 		      && TREE_CHAIN (f) == NULL
5166 		      && TREE_CODE (tf) == ARRAY_TYPE
5167 		      && TYPE_DOMAIN (tf)
5168 		      && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5169 		      && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5170 		      && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5171 		      && int_size_in_bytes (type) >= 0)
5172 		    break;
5173 
5174 		  return -1;
5175 		}
5176 	      n += t;
5177 	    }
5178 
5179 	return n;
5180       }
5181 
5182     case UNION_TYPE:
5183     case QUAL_UNION_TYPE:
5184       return -1;
5185 
5186     case COMPLEX_TYPE:
5187       return 2;
5188 
5189     case VECTOR_TYPE:
5190       return TYPE_VECTOR_SUBPARTS (type);
5191 
5192     case INTEGER_TYPE:
5193     case REAL_TYPE:
5194     case FIXED_POINT_TYPE:
5195     case ENUMERAL_TYPE:
5196     case BOOLEAN_TYPE:
5197     case POINTER_TYPE:
5198     case OFFSET_TYPE:
5199     case REFERENCE_TYPE:
5200       return 1;
5201 
5202     case ERROR_MARK:
5203       return 0;
5204 
5205     case VOID_TYPE:
5206     case METHOD_TYPE:
5207     case FUNCTION_TYPE:
5208     case LANG_TYPE:
5209     default:
5210       gcc_unreachable ();
5211     }
5212 }
5213 
5214 /* Return 1 if EXP contains mostly (3/4)  zeros.  */
5215 
5216 static int
5217 mostly_zeros_p (const_tree exp)
5218 {
5219   if (TREE_CODE (exp) == CONSTRUCTOR)
5220 
5221     {
5222       HOST_WIDE_INT nz_elts, count, elts;
5223       bool must_clear;
5224 
5225       categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5226       if (must_clear)
5227 	return 1;
5228 
5229       elts = count_type_elements (TREE_TYPE (exp), false);
5230 
5231       return nz_elts < elts / 4;
5232     }
5233 
5234   return initializer_zerop (exp);
5235 }
5236 
5237 /* Return 1 if EXP contains all zeros.  */
5238 
5239 static int
5240 all_zeros_p (const_tree exp)
5241 {
5242   if (TREE_CODE (exp) == CONSTRUCTOR)
5243 
5244     {
5245       HOST_WIDE_INT nz_elts, count;
5246       bool must_clear;
5247 
5248       categorize_ctor_elements (exp, &nz_elts, &count, &must_clear);
5249       return nz_elts == 0;
5250     }
5251 
5252   return initializer_zerop (exp);
5253 }
5254 
5255 /* Helper function for store_constructor.
5256    TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5257    TYPE is the type of the CONSTRUCTOR, not the element type.
5258    CLEARED is as for store_constructor.
5259    ALIAS_SET is the alias set to use for any stores.
5260 
5261    This provides a recursive shortcut back to store_constructor when it isn't
5262    necessary to go through store_field.  This is so that we can pass through
5263    the cleared field to let store_constructor know that we may not have to
5264    clear a substructure if the outer structure has already been cleared.  */
5265 
5266 static void
5267 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5268 			 HOST_WIDE_INT bitpos, enum machine_mode mode,
5269 			 tree exp, tree type, int cleared,
5270 			 alias_set_type alias_set)
5271 {
5272   if (TREE_CODE (exp) == CONSTRUCTOR
5273       /* We can only call store_constructor recursively if the size and
5274 	 bit position are on a byte boundary.  */
5275       && bitpos % BITS_PER_UNIT == 0
5276       && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5277       /* If we have a nonzero bitpos for a register target, then we just
5278 	 let store_field do the bitfield handling.  This is unlikely to
5279 	 generate unnecessary clear instructions anyways.  */
5280       && (bitpos == 0 || MEM_P (target)))
5281     {
5282       if (MEM_P (target))
5283 	target
5284 	  = adjust_address (target,
5285 			    GET_MODE (target) == BLKmode
5286 			    || 0 != (bitpos
5287 				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
5288 			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5289 
5290 
5291       /* Update the alias set, if required.  */
5292       if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5293 	  && MEM_ALIAS_SET (target) != 0)
5294 	{
5295 	  target = copy_rtx (target);
5296 	  set_mem_alias_set (target, alias_set);
5297 	}
5298 
5299       store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5300     }
5301   else
5302     store_field (target, bitsize, bitpos, mode, exp, type, alias_set, false);
5303 }
5304 
5305 /* Store the value of constructor EXP into the rtx TARGET.
5306    TARGET is either a REG or a MEM; we know it cannot conflict, since
5307    safe_from_p has been called.
5308    CLEARED is true if TARGET is known to have been zero'd.
5309    SIZE is the number of bytes of TARGET we are allowed to modify: this
5310    may not be the same as the size of EXP if we are assigning to a field
5311    which has been packed to exclude padding bits.  */
5312 
5313 static void
5314 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5315 {
5316   tree type = TREE_TYPE (exp);
5317 #ifdef WORD_REGISTER_OPERATIONS
5318   HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5319 #endif
5320 
5321   switch (TREE_CODE (type))
5322     {
5323     case RECORD_TYPE:
5324     case UNION_TYPE:
5325     case QUAL_UNION_TYPE:
5326       {
5327 	unsigned HOST_WIDE_INT idx;
5328 	tree field, value;
5329 
5330 	/* If size is zero or the target is already cleared, do nothing.  */
5331 	if (size == 0 || cleared)
5332 	  cleared = 1;
5333 	/* We either clear the aggregate or indicate the value is dead.  */
5334 	else if ((TREE_CODE (type) == UNION_TYPE
5335 		  || TREE_CODE (type) == QUAL_UNION_TYPE)
5336 		 && ! CONSTRUCTOR_ELTS (exp))
5337 	  /* If the constructor is empty, clear the union.  */
5338 	  {
5339 	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5340 	    cleared = 1;
5341 	  }
5342 
5343 	/* If we are building a static constructor into a register,
5344 	   set the initial value as zero so we can fold the value into
5345 	   a constant.  But if more than one register is involved,
5346 	   this probably loses.  */
5347 	else if (REG_P (target) && TREE_STATIC (exp)
5348 		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
5349 	  {
5350 	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5351 	    cleared = 1;
5352 	  }
5353 
5354         /* If the constructor has fewer fields than the structure or
5355 	   if we are initializing the structure to mostly zeros, clear
5356 	   the whole structure first.  Don't do this if TARGET is a
5357 	   register whose mode size isn't equal to SIZE since
5358 	   clear_storage can't handle this case.  */
5359 	else if (size > 0
5360 		 && (((int)VEC_length (constructor_elt, CONSTRUCTOR_ELTS (exp))
5361 		      != fields_length (type))
5362 		     || mostly_zeros_p (exp))
5363 		 && (!REG_P (target)
5364 		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
5365 			 == size)))
5366 	  {
5367 	    clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5368 	    cleared = 1;
5369 	  }
5370 
5371 	if (REG_P (target) && !cleared)
5372 	  emit_clobber (target);
5373 
5374 	/* Store each element of the constructor into the
5375 	   corresponding field of TARGET.  */
5376 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
5377 	  {
5378 	    enum machine_mode mode;
5379 	    HOST_WIDE_INT bitsize;
5380 	    HOST_WIDE_INT bitpos = 0;
5381 	    tree offset;
5382 	    rtx to_rtx = target;
5383 
5384 	    /* Just ignore missing fields.  We cleared the whole
5385 	       structure, above, if any fields are missing.  */
5386 	    if (field == 0)
5387 	      continue;
5388 
5389 	    if (cleared && initializer_zerop (value))
5390 	      continue;
5391 
5392 	    if (host_integerp (DECL_SIZE (field), 1))
5393 	      bitsize = tree_low_cst (DECL_SIZE (field), 1);
5394 	    else
5395 	      bitsize = -1;
5396 
5397 	    mode = DECL_MODE (field);
5398 	    if (DECL_BIT_FIELD (field))
5399 	      mode = VOIDmode;
5400 
5401 	    offset = DECL_FIELD_OFFSET (field);
5402 	    if (host_integerp (offset, 0)
5403 		&& host_integerp (bit_position (field), 0))
5404 	      {
5405 		bitpos = int_bit_position (field);
5406 		offset = 0;
5407 	      }
5408 	    else
5409 	      bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
5410 
5411 	    if (offset)
5412 	      {
5413 	        enum machine_mode address_mode;
5414 		rtx offset_rtx;
5415 
5416 		offset
5417 		  = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
5418 						    make_tree (TREE_TYPE (exp),
5419 							       target));
5420 
5421 		offset_rtx = expand_normal (offset);
5422 		gcc_assert (MEM_P (to_rtx));
5423 
5424 		address_mode
5425 		  = targetm.addr_space.address_mode (MEM_ADDR_SPACE (to_rtx));
5426 		if (GET_MODE (offset_rtx) != address_mode)
5427 		  offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
5428 
5429 		to_rtx = offset_address (to_rtx, offset_rtx,
5430 					 highest_pow2_factor (offset));
5431 	      }
5432 
5433 #ifdef WORD_REGISTER_OPERATIONS
5434 	    /* If this initializes a field that is smaller than a
5435 	       word, at the start of a word, try to widen it to a full
5436 	       word.  This special case allows us to output C++ member
5437 	       function initializations in a form that the optimizers
5438 	       can understand.  */
5439 	    if (REG_P (target)
5440 		&& bitsize < BITS_PER_WORD
5441 		&& bitpos % BITS_PER_WORD == 0
5442 		&& GET_MODE_CLASS (mode) == MODE_INT
5443 		&& TREE_CODE (value) == INTEGER_CST
5444 		&& exp_size >= 0
5445 		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
5446 	      {
5447 		tree type = TREE_TYPE (value);
5448 
5449 		if (TYPE_PRECISION (type) < BITS_PER_WORD)
5450 		  {
5451 		    type = lang_hooks.types.type_for_size
5452 		      (BITS_PER_WORD, TYPE_UNSIGNED (type));
5453 		    value = fold_convert (type, value);
5454 		  }
5455 
5456 		if (BYTES_BIG_ENDIAN)
5457 		  value
5458 		   = fold_build2 (LSHIFT_EXPR, type, value,
5459 				   build_int_cst (type,
5460 						  BITS_PER_WORD - bitsize));
5461 		bitsize = BITS_PER_WORD;
5462 		mode = word_mode;
5463 	      }
5464 #endif
5465 
5466 	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5467 		&& DECL_NONADDRESSABLE_P (field))
5468 	      {
5469 		to_rtx = copy_rtx (to_rtx);
5470 		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5471 	      }
5472 
5473 	    store_constructor_field (to_rtx, bitsize, bitpos, mode,
5474 				     value, type, cleared,
5475 				     get_alias_set (TREE_TYPE (field)));
5476 	  }
5477 	break;
5478       }
5479     case ARRAY_TYPE:
5480       {
5481 	tree value, index;
5482 	unsigned HOST_WIDE_INT i;
5483 	int need_to_clear;
5484 	tree domain;
5485 	tree elttype = TREE_TYPE (type);
5486 	int const_bounds_p;
5487 	HOST_WIDE_INT minelt = 0;
5488 	HOST_WIDE_INT maxelt = 0;
5489 
5490 	domain = TYPE_DOMAIN (type);
5491 	const_bounds_p = (TYPE_MIN_VALUE (domain)
5492 			  && TYPE_MAX_VALUE (domain)
5493 			  && host_integerp (TYPE_MIN_VALUE (domain), 0)
5494 			  && host_integerp (TYPE_MAX_VALUE (domain), 0));
5495 
5496 	/* If we have constant bounds for the range of the type, get them.  */
5497 	if (const_bounds_p)
5498 	  {
5499 	    minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5500 	    maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5501 	  }
5502 
5503 	/* If the constructor has fewer elements than the array, clear
5504            the whole array first.  Similarly if this is static
5505            constructor of a non-BLKmode object.  */
5506 	if (cleared)
5507 	  need_to_clear = 0;
5508 	else if (REG_P (target) && TREE_STATIC (exp))
5509 	  need_to_clear = 1;
5510 	else
5511 	  {
5512 	    unsigned HOST_WIDE_INT idx;
5513 	    tree index, value;
5514 	    HOST_WIDE_INT count = 0, zero_count = 0;
5515 	    need_to_clear = ! const_bounds_p;
5516 
5517 	    /* This loop is a more accurate version of the loop in
5518 	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
5519 	       is also needed to check for missing elements.  */
5520 	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
5521 	      {
5522 		HOST_WIDE_INT this_node_count;
5523 
5524 		if (need_to_clear)
5525 		  break;
5526 
5527 		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5528 		  {
5529 		    tree lo_index = TREE_OPERAND (index, 0);
5530 		    tree hi_index = TREE_OPERAND (index, 1);
5531 
5532 		    if (! host_integerp (lo_index, 1)
5533 			|| ! host_integerp (hi_index, 1))
5534 		      {
5535 			need_to_clear = 1;
5536 			break;
5537 		      }
5538 
5539 		    this_node_count = (tree_low_cst (hi_index, 1)
5540 				       - tree_low_cst (lo_index, 1) + 1);
5541 		  }
5542 		else
5543 		  this_node_count = 1;
5544 
5545 		count += this_node_count;
5546 		if (mostly_zeros_p (value))
5547 		  zero_count += this_node_count;
5548 	      }
5549 
5550 	    /* Clear the entire array first if there are any missing
5551 	       elements, or if the incidence of zero elements is >=
5552 	       75%.  */
5553 	    if (! need_to_clear
5554 		&& (count < maxelt - minelt + 1
5555 		    || 4 * zero_count >= 3 * count))
5556 	      need_to_clear = 1;
5557 	  }
5558 
5559 	if (need_to_clear && size > 0)
5560 	  {
5561 	    if (REG_P (target))
5562 	      emit_move_insn (target,  CONST0_RTX (GET_MODE (target)));
5563 	    else
5564 	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5565 	    cleared = 1;
5566 	  }
5567 
5568 	if (!cleared && REG_P (target))
5569 	  /* Inform later passes that the old value is dead.  */
5570 	  emit_clobber (target);
5571 
5572 	/* Store each element of the constructor into the
5573 	   corresponding element of TARGET, determined by counting the
5574 	   elements.  */
5575 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
5576 	  {
5577 	    enum machine_mode mode;
5578 	    HOST_WIDE_INT bitsize;
5579 	    HOST_WIDE_INT bitpos;
5580 	    rtx xtarget = target;
5581 
5582 	    if (cleared && initializer_zerop (value))
5583 	      continue;
5584 
5585 	    mode = TYPE_MODE (elttype);
5586 	    if (mode == BLKmode)
5587 	      bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5588 			 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5589 			 : -1);
5590 	    else
5591 	      bitsize = GET_MODE_BITSIZE (mode);
5592 
5593 	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5594 	      {
5595 		tree lo_index = TREE_OPERAND (index, 0);
5596 		tree hi_index = TREE_OPERAND (index, 1);
5597 		rtx index_r, pos_rtx;
5598 		HOST_WIDE_INT lo, hi, count;
5599 		tree position;
5600 
5601 		/* If the range is constant and "small", unroll the loop.  */
5602 		if (const_bounds_p
5603 		    && host_integerp (lo_index, 0)
5604 		    && host_integerp (hi_index, 0)
5605 		    && (lo = tree_low_cst (lo_index, 0),
5606 			hi = tree_low_cst (hi_index, 0),
5607 			count = hi - lo + 1,
5608 			(!MEM_P (target)
5609 			 || count <= 2
5610 			 || (host_integerp (TYPE_SIZE (elttype), 1)
5611 			     && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5612 				 <= 40 * 8)))))
5613 		  {
5614 		    lo -= minelt;  hi -= minelt;
5615 		    for (; lo <= hi; lo++)
5616 		      {
5617 			bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5618 
5619 			if (MEM_P (target)
5620 			    && !MEM_KEEP_ALIAS_SET_P (target)
5621 			    && TREE_CODE (type) == ARRAY_TYPE
5622 			    && TYPE_NONALIASED_COMPONENT (type))
5623 			  {
5624 			    target = copy_rtx (target);
5625 			    MEM_KEEP_ALIAS_SET_P (target) = 1;
5626 			  }
5627 
5628 			store_constructor_field
5629 			  (target, bitsize, bitpos, mode, value, type, cleared,
5630 			   get_alias_set (elttype));
5631 		      }
5632 		  }
5633 		else
5634 		  {
5635 		    rtx loop_start = gen_label_rtx ();
5636 		    rtx loop_end = gen_label_rtx ();
5637 		    tree exit_cond;
5638 
5639 		    expand_normal (hi_index);
5640 
5641 		    index = build_decl (EXPR_LOCATION (exp),
5642 					VAR_DECL, NULL_TREE, domain);
5643 		    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
5644 		    SET_DECL_RTL (index, index_r);
5645 		    store_expr (lo_index, index_r, 0, false);
5646 
5647 		    /* Build the head of the loop.  */
5648 		    do_pending_stack_adjust ();
5649 		    emit_label (loop_start);
5650 
5651 		    /* Assign value to element index.  */
5652 		    position =
5653 		      fold_convert (ssizetype,
5654 				    fold_build2 (MINUS_EXPR,
5655 						 TREE_TYPE (index),
5656 						 index,
5657 						 TYPE_MIN_VALUE (domain)));
5658 
5659 		    position =
5660 			size_binop (MULT_EXPR, position,
5661 				    fold_convert (ssizetype,
5662 						  TYPE_SIZE_UNIT (elttype)));
5663 
5664 		    pos_rtx = expand_normal (position);
5665 		    xtarget = offset_address (target, pos_rtx,
5666 					      highest_pow2_factor (position));
5667 		    xtarget = adjust_address (xtarget, mode, 0);
5668 		    if (TREE_CODE (value) == CONSTRUCTOR)
5669 		      store_constructor (value, xtarget, cleared,
5670 					 bitsize / BITS_PER_UNIT);
5671 		    else
5672 		      store_expr (value, xtarget, 0, false);
5673 
5674 		    /* Generate a conditional jump to exit the loop.  */
5675 		    exit_cond = build2 (LT_EXPR, integer_type_node,
5676 					index, hi_index);
5677 		    jumpif (exit_cond, loop_end, -1);
5678 
5679 		    /* Update the loop counter, and jump to the head of
5680 		       the loop.  */
5681 		    expand_assignment (index,
5682 				       build2 (PLUS_EXPR, TREE_TYPE (index),
5683 					       index, integer_one_node),
5684 				       false);
5685 
5686 		    emit_jump (loop_start);
5687 
5688 		    /* Build the end of the loop.  */
5689 		    emit_label (loop_end);
5690 		  }
5691 	      }
5692 	    else if ((index != 0 && ! host_integerp (index, 0))
5693 		     || ! host_integerp (TYPE_SIZE (elttype), 1))
5694 	      {
5695 		tree position;
5696 
5697 		if (index == 0)
5698 		  index = ssize_int (1);
5699 
5700 		if (minelt)
5701 		  index = fold_convert (ssizetype,
5702 					fold_build2 (MINUS_EXPR,
5703 						     TREE_TYPE (index),
5704 						     index,
5705 						     TYPE_MIN_VALUE (domain)));
5706 
5707 		position =
5708 		  size_binop (MULT_EXPR, index,
5709 			      fold_convert (ssizetype,
5710 					    TYPE_SIZE_UNIT (elttype)));
5711 		xtarget = offset_address (target,
5712 					  expand_normal (position),
5713 					  highest_pow2_factor (position));
5714 		xtarget = adjust_address (xtarget, mode, 0);
5715 		store_expr (value, xtarget, 0, false);
5716 	      }
5717 	    else
5718 	      {
5719 		if (index != 0)
5720 		  bitpos = ((tree_low_cst (index, 0) - minelt)
5721 			    * tree_low_cst (TYPE_SIZE (elttype), 1));
5722 		else
5723 		  bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5724 
5725 		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5726 		    && TREE_CODE (type) == ARRAY_TYPE
5727 		    && TYPE_NONALIASED_COMPONENT (type))
5728 		  {
5729 		    target = copy_rtx (target);
5730 		    MEM_KEEP_ALIAS_SET_P (target) = 1;
5731 		  }
5732 		store_constructor_field (target, bitsize, bitpos, mode, value,
5733 					 type, cleared, get_alias_set (elttype));
5734 	      }
5735 	  }
5736 	break;
5737       }
5738 
5739     case VECTOR_TYPE:
5740       {
5741 	unsigned HOST_WIDE_INT idx;
5742 	constructor_elt *ce;
5743 	int i;
5744 	int need_to_clear;
5745 	int icode = 0;
5746 	tree elttype = TREE_TYPE (type);
5747 	int elt_size = tree_low_cst (TYPE_SIZE (elttype), 1);
5748 	enum machine_mode eltmode = TYPE_MODE (elttype);
5749 	HOST_WIDE_INT bitsize;
5750 	HOST_WIDE_INT bitpos;
5751 	rtvec vector = NULL;
5752 	unsigned n_elts;
5753 	alias_set_type alias;
5754 
5755 	gcc_assert (eltmode != BLKmode);
5756 
5757 	n_elts = TYPE_VECTOR_SUBPARTS (type);
5758 	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
5759 	  {
5760 	    enum machine_mode mode = GET_MODE (target);
5761 
5762 	    icode = (int) optab_handler (vec_init_optab, mode)->insn_code;
5763 	    if (icode != CODE_FOR_nothing)
5764 	      {
5765 		unsigned int i;
5766 
5767 		vector = rtvec_alloc (n_elts);
5768 		for (i = 0; i < n_elts; i++)
5769 		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
5770 	      }
5771 	  }
5772 
5773 	/* If the constructor has fewer elements than the vector,
5774 	   clear the whole array first.  Similarly if this is static
5775 	   constructor of a non-BLKmode object.  */
5776 	if (cleared)
5777 	  need_to_clear = 0;
5778 	else if (REG_P (target) && TREE_STATIC (exp))
5779 	  need_to_clear = 1;
5780 	else
5781 	  {
5782 	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
5783 	    tree value;
5784 
5785 	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
5786 	      {
5787 		int n_elts_here = tree_low_cst
5788 		  (int_const_binop (TRUNC_DIV_EXPR,
5789 				    TYPE_SIZE (TREE_TYPE (value)),
5790 				    TYPE_SIZE (elttype), 0), 1);
5791 
5792 		count += n_elts_here;
5793 		if (mostly_zeros_p (value))
5794 		  zero_count += n_elts_here;
5795 	      }
5796 
5797 	    /* Clear the entire vector first if there are any missing elements,
5798 	       or if the incidence of zero elements is >= 75%.  */
5799 	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
5800 	  }
5801 
5802 	if (need_to_clear && size > 0 && !vector)
5803 	  {
5804 	    if (REG_P (target))
5805 	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5806 	    else
5807 	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
5808 	    cleared = 1;
5809 	  }
5810 
5811 	/* Inform later passes that the old value is dead.  */
5812 	if (!cleared && !vector && REG_P (target))
5813 	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5814 
5815         if (MEM_P (target))
5816 	  alias = MEM_ALIAS_SET (target);
5817 	else
5818 	  alias = get_alias_set (elttype);
5819 
5820         /* Store each element of the constructor into the corresponding
5821 	   element of TARGET, determined by counting the elements.  */
5822 	for (idx = 0, i = 0;
5823 	     VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
5824 	     idx++, i += bitsize / elt_size)
5825 	  {
5826 	    HOST_WIDE_INT eltpos;
5827 	    tree value = ce->value;
5828 
5829 	    bitsize = tree_low_cst (TYPE_SIZE (TREE_TYPE (value)), 1);
5830 	    if (cleared && initializer_zerop (value))
5831 	      continue;
5832 
5833 	    if (ce->index)
5834 	      eltpos = tree_low_cst (ce->index, 1);
5835 	    else
5836 	      eltpos = i;
5837 
5838 	    if (vector)
5839 	      {
5840 	        /* Vector CONSTRUCTORs should only be built from smaller
5841 		   vectors in the case of BLKmode vectors.  */
5842 		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
5843 		RTVEC_ELT (vector, eltpos)
5844 		  = expand_normal (value);
5845 	      }
5846 	    else
5847 	      {
5848 		enum machine_mode value_mode =
5849 		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
5850 		  ? TYPE_MODE (TREE_TYPE (value))
5851 		  : eltmode;
5852 		bitpos = eltpos * elt_size;
5853 		store_constructor_field (target, bitsize, bitpos,
5854 					 value_mode, value, type,
5855 					 cleared, alias);
5856 	      }
5857 	  }
5858 
5859 	if (vector)
5860 	  emit_insn (GEN_FCN (icode)
5861 		     (target,
5862 		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
5863 	break;
5864       }
5865 
5866     default:
5867       gcc_unreachable ();
5868     }
5869 }
5870 
5871 /* Store the value of EXP (an expression tree)
5872    into a subfield of TARGET which has mode MODE and occupies
5873    BITSIZE bits, starting BITPOS bits from the start of TARGET.
5874    If MODE is VOIDmode, it means that we are storing into a bit-field.
5875 
5876    Always return const0_rtx unless we have something particular to
5877    return.
5878 
5879    TYPE is the type of the underlying object,
5880 
5881    ALIAS_SET is the alias set for the destination.  This value will
5882    (in general) be different from that for TARGET, since TARGET is a
5883    reference to the containing structure.
5884 
5885    If NONTEMPORAL is true, try generating a nontemporal store.  */
5886 
5887 static rtx
5888 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5889 	     enum machine_mode mode, tree exp, tree type,
5890 	     alias_set_type alias_set, bool nontemporal)
5891 {
5892   if (TREE_CODE (exp) == ERROR_MARK)
5893     return const0_rtx;
5894 
5895   /* If we have nothing to store, do nothing unless the expression has
5896      side-effects.  */
5897   if (bitsize == 0)
5898     return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5899 
5900   /* If we are storing into an unaligned field of an aligned union that is
5901      in a register, we may have the mode of TARGET being an integer mode but
5902      MODE == BLKmode.  In that case, get an aligned object whose size and
5903      alignment are the same as TARGET and store TARGET into it (we can avoid
5904      the store if the field being stored is the entire width of TARGET).  Then
5905      call ourselves recursively to store the field into a BLKmode version of
5906      that object.  Finally, load from the object into TARGET.  This is not
5907      very efficient in general, but should only be slightly more expensive
5908      than the otherwise-required unaligned accesses.  Perhaps this can be
5909      cleaned up later.  It's tempting to make OBJECT readonly, but it's set
5910      twice, once with emit_move_insn and once via store_field.  */
5911 
5912   if (mode == BLKmode
5913       && (REG_P (target) || GET_CODE (target) == SUBREG))
5914     {
5915       rtx object = assign_temp (type, 0, 1, 1);
5916       rtx blk_object = adjust_address (object, BLKmode, 0);
5917 
5918       if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5919 	emit_move_insn (object, target);
5920 
5921       store_field (blk_object, bitsize, bitpos, mode, exp, type, alias_set,
5922 		   nontemporal);
5923 
5924       emit_move_insn (target, object);
5925 
5926       /* We want to return the BLKmode version of the data.  */
5927       return blk_object;
5928     }
5929 
5930   if (GET_CODE (target) == CONCAT)
5931     {
5932       /* We're storing into a struct containing a single __complex.  */
5933 
5934       gcc_assert (!bitpos);
5935       return store_expr (exp, target, 0, nontemporal);
5936     }
5937 
5938   /* If the structure is in a register or if the component
5939      is a bit field, we cannot use addressing to access it.
5940      Use bit-field techniques or SUBREG to store in it.  */
5941 
5942   if (mode == VOIDmode
5943       || (mode != BLKmode && ! direct_store[(int) mode]
5944 	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5945 	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5946       || REG_P (target)
5947       || GET_CODE (target) == SUBREG
5948       /* If the field isn't aligned enough to store as an ordinary memref,
5949 	 store it as a bit field.  */
5950       || (mode != BLKmode
5951 	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5952 		|| bitpos % GET_MODE_ALIGNMENT (mode))
5953 	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5954 	      || (bitpos % BITS_PER_UNIT != 0)))
5955       /* If the RHS and field are a constant size and the size of the
5956 	 RHS isn't the same size as the bitfield, we must use bitfield
5957 	 operations.  */
5958       || (bitsize >= 0
5959 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5960 	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5961     {
5962       rtx temp;
5963       gimple nop_def;
5964 
5965       /* If EXP is a NOP_EXPR of precision less than its mode, then that
5966 	 implies a mask operation.  If the precision is the same size as
5967 	 the field we're storing into, that mask is redundant.  This is
5968 	 particularly common with bit field assignments generated by the
5969 	 C front end.  */
5970       nop_def = get_def_for_expr (exp, NOP_EXPR);
5971       if (nop_def)
5972 	{
5973 	  tree type = TREE_TYPE (exp);
5974 	  if (INTEGRAL_TYPE_P (type)
5975 	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
5976 	      && bitsize == TYPE_PRECISION (type))
5977 	    {
5978 	      tree op = gimple_assign_rhs1 (nop_def);
5979 	      type = TREE_TYPE (op);
5980 	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
5981 		exp = op;
5982 	    }
5983 	}
5984 
5985       temp = expand_normal (exp);
5986 
5987       /* If BITSIZE is narrower than the size of the type of EXP
5988 	 we will be narrowing TEMP.  Normally, what's wanted are the
5989 	 low-order bits.  However, if EXP's type is a record and this is
5990 	 big-endian machine, we want the upper BITSIZE bits.  */
5991       if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5992 	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5993 	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5994 	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5995 			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5996 				       - bitsize),
5997 			     NULL_RTX, 1);
5998 
5999       /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
6000 	 MODE.  */
6001       if (mode != VOIDmode && mode != BLKmode
6002 	  && mode != TYPE_MODE (TREE_TYPE (exp)))
6003 	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6004 
6005       /* If the modes of TEMP and TARGET are both BLKmode, both
6006 	 must be in memory and BITPOS must be aligned on a byte
6007 	 boundary.  If so, we simply do a block copy.  Likewise
6008 	 for a BLKmode-like TARGET.  */
6009       if (GET_MODE (temp) == BLKmode
6010 	  && (GET_MODE (target) == BLKmode
6011 	      || (MEM_P (target)
6012 		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6013 		  && (bitpos % BITS_PER_UNIT) == 0
6014 		  && (bitsize % BITS_PER_UNIT) == 0)))
6015 	{
6016 	  gcc_assert (MEM_P (target) && MEM_P (temp)
6017 		      && (bitpos % BITS_PER_UNIT) == 0);
6018 
6019 	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6020 	  emit_block_move (target, temp,
6021 			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6022 				    / BITS_PER_UNIT),
6023 			   BLOCK_OP_NORMAL);
6024 
6025 	  return const0_rtx;
6026 	}
6027 
6028       /* Store the value in the bitfield.  */
6029       store_bit_field (target, bitsize, bitpos, mode, temp);
6030 
6031       return const0_rtx;
6032     }
6033   else
6034     {
6035       /* Now build a reference to just the desired component.  */
6036       rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6037 
6038       if (to_rtx == target)
6039 	to_rtx = copy_rtx (to_rtx);
6040 
6041       MEM_SET_IN_STRUCT_P (to_rtx, 1);
6042       if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6043 	set_mem_alias_set (to_rtx, alias_set);
6044 
6045       return store_expr (exp, to_rtx, 0, nontemporal);
6046     }
6047 }
6048 
6049 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6050    an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6051    codes and find the ultimate containing object, which we return.
6052 
6053    We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6054    bit position, and *PUNSIGNEDP to the signedness of the field.
6055    If the position of the field is variable, we store a tree
6056    giving the variable offset (in units) in *POFFSET.
6057    This offset is in addition to the bit position.
6058    If the position is not variable, we store 0 in *POFFSET.
6059 
6060    If any of the extraction expressions is volatile,
6061    we store 1 in *PVOLATILEP.  Otherwise we don't change that.
6062 
6063    If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6064    Otherwise, it is a mode that can be used to access the field.
6065 
6066    If the field describes a variable-sized object, *PMODE is set to
6067    BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
6068    this case, but the address of the object can be found.
6069 
6070    If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6071    look through nodes that serve as markers of a greater alignment than
6072    the one that can be deduced from the expression.  These nodes make it
6073    possible for front-ends to prevent temporaries from being created by
6074    the middle-end on alignment considerations.  For that purpose, the
6075    normal operating mode at high-level is to always pass FALSE so that
6076    the ultimate containing object is really returned; moreover, the
6077    associated predicate handled_component_p will always return TRUE
6078    on these nodes, thus indicating that they are essentially handled
6079    by get_inner_reference.  TRUE should only be passed when the caller
6080    is scanning the expression in order to build another representation
6081    and specifically knows how to handle these nodes; as such, this is
6082    the normal operating mode in the RTL expanders.  */
6083 
6084 tree
6085 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6086 		     HOST_WIDE_INT *pbitpos, tree *poffset,
6087 		     enum machine_mode *pmode, int *punsignedp,
6088 		     int *pvolatilep, bool keep_aligning)
6089 {
6090   tree size_tree = 0;
6091   enum machine_mode mode = VOIDmode;
6092   bool blkmode_bitfield = false;
6093   tree offset = size_zero_node;
6094   tree bit_offset = bitsize_zero_node;
6095 
6096   /* First get the mode, signedness, and size.  We do this from just the
6097      outermost expression.  */
6098   *pbitsize = -1;
6099   if (TREE_CODE (exp) == COMPONENT_REF)
6100     {
6101       tree field = TREE_OPERAND (exp, 1);
6102       size_tree = DECL_SIZE (field);
6103       if (!DECL_BIT_FIELD (field))
6104 	mode = DECL_MODE (field);
6105       else if (DECL_MODE (field) == BLKmode)
6106 	blkmode_bitfield = true;
6107 
6108       *punsignedp = DECL_UNSIGNED (field);
6109     }
6110   else if (TREE_CODE (exp) == BIT_FIELD_REF)
6111     {
6112       size_tree = TREE_OPERAND (exp, 1);
6113       *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6114 		     || TYPE_UNSIGNED (TREE_TYPE (exp)));
6115 
6116       /* For vector types, with the correct size of access, use the mode of
6117 	 inner type.  */
6118       if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6119 	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6120 	  && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6121         mode = TYPE_MODE (TREE_TYPE (exp));
6122     }
6123   else
6124     {
6125       mode = TYPE_MODE (TREE_TYPE (exp));
6126       *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6127 
6128       if (mode == BLKmode)
6129 	size_tree = TYPE_SIZE (TREE_TYPE (exp));
6130       else
6131 	*pbitsize = GET_MODE_BITSIZE (mode);
6132     }
6133 
6134   if (size_tree != 0)
6135     {
6136       if (! host_integerp (size_tree, 1))
6137 	mode = BLKmode, *pbitsize = -1;
6138       else
6139 	*pbitsize = tree_low_cst (size_tree, 1);
6140     }
6141 
6142   /* Compute cumulative bit-offset for nested component-refs and array-refs,
6143      and find the ultimate containing object.  */
6144   while (1)
6145     {
6146       switch (TREE_CODE (exp))
6147 	{
6148 	case BIT_FIELD_REF:
6149 	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
6150 				   TREE_OPERAND (exp, 2));
6151 	  break;
6152 
6153 	case COMPONENT_REF:
6154 	  {
6155 	    tree field = TREE_OPERAND (exp, 1);
6156 	    tree this_offset = component_ref_field_offset (exp);
6157 
6158 	    /* If this field hasn't been filled in yet, don't go past it.
6159 	       This should only happen when folding expressions made during
6160 	       type construction.  */
6161 	    if (this_offset == 0)
6162 	      break;
6163 
6164 	    offset = size_binop (PLUS_EXPR, offset, this_offset);
6165 	    bit_offset = size_binop (PLUS_EXPR, bit_offset,
6166 				     DECL_FIELD_BIT_OFFSET (field));
6167 
6168 	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
6169 	  }
6170 	  break;
6171 
6172 	case ARRAY_REF:
6173 	case ARRAY_RANGE_REF:
6174 	  {
6175 	    tree index = TREE_OPERAND (exp, 1);
6176 	    tree low_bound = array_ref_low_bound (exp);
6177 	    tree unit_size = array_ref_element_size (exp);
6178 
6179 	    /* We assume all arrays have sizes that are a multiple of a byte.
6180 	       First subtract the lower bound, if any, in the type of the
6181 	       index, then convert to sizetype and multiply by the size of
6182 	       the array element.  */
6183 	    if (! integer_zerop (low_bound))
6184 	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6185 				   index, low_bound);
6186 
6187 	    offset = size_binop (PLUS_EXPR, offset,
6188 			         size_binop (MULT_EXPR,
6189 					     fold_convert (sizetype, index),
6190 					     unit_size));
6191 	  }
6192 	  break;
6193 
6194 	case REALPART_EXPR:
6195 	  break;
6196 
6197 	case IMAGPART_EXPR:
6198 	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
6199 				   bitsize_int (*pbitsize));
6200 	  break;
6201 
6202 	case VIEW_CONVERT_EXPR:
6203 	  if (keep_aligning && STRICT_ALIGNMENT
6204 	      && (TYPE_ALIGN (TREE_TYPE (exp))
6205 	       > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6206 	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6207 		  < BIGGEST_ALIGNMENT)
6208 	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6209 		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6210 	    goto done;
6211 	  break;
6212 
6213 	default:
6214 	  goto done;
6215 	}
6216 
6217       /* If any reference in the chain is volatile, the effect is volatile.  */
6218       if (TREE_THIS_VOLATILE (exp))
6219 	*pvolatilep = 1;
6220 
6221       exp = TREE_OPERAND (exp, 0);
6222     }
6223  done:
6224 
6225   /* If OFFSET is constant, see if we can return the whole thing as a
6226      constant bit position.  Make sure to handle overflow during
6227      this conversion.  */
6228   if (host_integerp (offset, 0))
6229     {
6230       double_int tem = double_int_mul (tree_to_double_int (offset),
6231 				       uhwi_to_double_int (BITS_PER_UNIT));
6232       tem = double_int_add (tem, tree_to_double_int (bit_offset));
6233       if (double_int_fits_in_shwi_p (tem))
6234 	{
6235 	  *pbitpos = double_int_to_shwi (tem);
6236 	  *poffset = offset = NULL_TREE;
6237 	}
6238     }
6239 
6240   /* Otherwise, split it up.  */
6241   if (offset)
6242     {
6243       *pbitpos = tree_low_cst (bit_offset, 0);
6244       *poffset = offset;
6245     }
6246 
6247   /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
6248   if (mode == VOIDmode
6249       && blkmode_bitfield
6250       && (*pbitpos % BITS_PER_UNIT) == 0
6251       && (*pbitsize % BITS_PER_UNIT) == 0)
6252     *pmode = BLKmode;
6253   else
6254     *pmode = mode;
6255 
6256   return exp;
6257 }
6258 
6259 /* Given an expression EXP that may be a COMPONENT_REF, an ARRAY_REF or an
6260    ARRAY_RANGE_REF, look for whether EXP or any nested component-refs within
6261    EXP is marked as PACKED.  */
6262 
6263 bool
6264 contains_packed_reference (const_tree exp)
6265 {
6266   bool packed_p = false;
6267 
6268   while (1)
6269     {
6270       switch (TREE_CODE (exp))
6271 	{
6272 	case COMPONENT_REF:
6273 	  {
6274 	    tree field = TREE_OPERAND (exp, 1);
6275 	    packed_p = DECL_PACKED (field)
6276 		       || TYPE_PACKED (TREE_TYPE (field))
6277 		       || TYPE_PACKED (TREE_TYPE (exp));
6278 	    if (packed_p)
6279 	      goto done;
6280 	  }
6281 	  break;
6282 
6283 	case BIT_FIELD_REF:
6284 	case ARRAY_REF:
6285 	case ARRAY_RANGE_REF:
6286 	case REALPART_EXPR:
6287 	case IMAGPART_EXPR:
6288 	case VIEW_CONVERT_EXPR:
6289 	  break;
6290 
6291 	default:
6292 	  goto done;
6293 	}
6294       exp = TREE_OPERAND (exp, 0);
6295     }
6296  done:
6297   return packed_p;
6298 }
6299 
6300 /* Return a tree of sizetype representing the size, in bytes, of the element
6301    of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
6302 
6303 tree
6304 array_ref_element_size (tree exp)
6305 {
6306   tree aligned_size = TREE_OPERAND (exp, 3);
6307   tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6308   location_t loc = EXPR_LOCATION (exp);
6309 
6310   /* If a size was specified in the ARRAY_REF, it's the size measured
6311      in alignment units of the element type.  So multiply by that value.  */
6312   if (aligned_size)
6313     {
6314       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6315 	 sizetype from another type of the same width and signedness.  */
6316       if (TREE_TYPE (aligned_size) != sizetype)
6317 	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6318       return size_binop_loc (loc, MULT_EXPR, aligned_size,
6319 			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
6320     }
6321 
6322   /* Otherwise, take the size from that of the element type.  Substitute
6323      any PLACEHOLDER_EXPR that we have.  */
6324   else
6325     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6326 }
6327 
6328 /* Return a tree representing the lower bound of the array mentioned in
6329    EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
6330 
6331 tree
6332 array_ref_low_bound (tree exp)
6333 {
6334   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6335 
6336   /* If a lower bound is specified in EXP, use it.  */
6337   if (TREE_OPERAND (exp, 2))
6338     return TREE_OPERAND (exp, 2);
6339 
6340   /* Otherwise, if there is a domain type and it has a lower bound, use it,
6341      substituting for a PLACEHOLDER_EXPR as needed.  */
6342   if (domain_type && TYPE_MIN_VALUE (domain_type))
6343     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
6344 
6345   /* Otherwise, return a zero of the appropriate type.  */
6346   return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
6347 }
6348 
6349 /* Return a tree representing the upper bound of the array mentioned in
6350    EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
6351 
6352 tree
6353 array_ref_up_bound (tree exp)
6354 {
6355   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
6356 
6357   /* If there is a domain type and it has an upper bound, use it, substituting
6358      for a PLACEHOLDER_EXPR as needed.  */
6359   if (domain_type && TYPE_MAX_VALUE (domain_type))
6360     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
6361 
6362   /* Otherwise fail.  */
6363   return NULL_TREE;
6364 }
6365 
6366 /* Return a tree representing the offset, in bytes, of the field referenced
6367    by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */
6368 
6369 tree
6370 component_ref_field_offset (tree exp)
6371 {
6372   tree aligned_offset = TREE_OPERAND (exp, 2);
6373   tree field = TREE_OPERAND (exp, 1);
6374   location_t loc = EXPR_LOCATION (exp);
6375 
6376   /* If an offset was specified in the COMPONENT_REF, it's the offset measured
6377      in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
6378      value.  */
6379   if (aligned_offset)
6380     {
6381       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6382 	 sizetype from another type of the same width and signedness.  */
6383       if (TREE_TYPE (aligned_offset) != sizetype)
6384 	aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
6385       return size_binop_loc (loc, MULT_EXPR, aligned_offset,
6386 			     size_int (DECL_OFFSET_ALIGN (field)
6387 				       / BITS_PER_UNIT));
6388     }
6389 
6390   /* Otherwise, take the offset from that of the field.  Substitute
6391      any PLACEHOLDER_EXPR that we have.  */
6392   else
6393     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
6394 }
6395 
6396 /* Alignment in bits the TARGET of an assignment may be assumed to have.  */
6397 
6398 static unsigned HOST_WIDE_INT
6399 target_align (const_tree target)
6400 {
6401   /* We might have a chain of nested references with intermediate misaligning
6402      bitfields components, so need to recurse to find out.  */
6403 
6404   unsigned HOST_WIDE_INT this_align, outer_align;
6405 
6406   switch (TREE_CODE (target))
6407     {
6408     case BIT_FIELD_REF:
6409       return 1;
6410 
6411     case COMPONENT_REF:
6412       this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
6413       outer_align = target_align (TREE_OPERAND (target, 0));
6414       return MIN (this_align, outer_align);
6415 
6416     case ARRAY_REF:
6417     case ARRAY_RANGE_REF:
6418       this_align = TYPE_ALIGN (TREE_TYPE (target));
6419       outer_align = target_align (TREE_OPERAND (target, 0));
6420       return MIN (this_align, outer_align);
6421 
6422     CASE_CONVERT:
6423     case NON_LVALUE_EXPR:
6424     case VIEW_CONVERT_EXPR:
6425       this_align = TYPE_ALIGN (TREE_TYPE (target));
6426       outer_align = target_align (TREE_OPERAND (target, 0));
6427       return MAX (this_align, outer_align);
6428 
6429     default:
6430       return TYPE_ALIGN (TREE_TYPE (target));
6431     }
6432 }
6433 
6434 
6435 /* Given an rtx VALUE that may contain additions and multiplications, return
6436    an equivalent value that just refers to a register, memory, or constant.
6437    This is done by generating instructions to perform the arithmetic and
6438    returning a pseudo-register containing the value.
6439 
6440    The returned value may be a REG, SUBREG, MEM or constant.  */
6441 
6442 rtx
6443 force_operand (rtx value, rtx target)
6444 {
6445   rtx op1, op2;
6446   /* Use subtarget as the target for operand 0 of a binary operation.  */
6447   rtx subtarget = get_subtarget (target);
6448   enum rtx_code code = GET_CODE (value);
6449 
6450   /* Check for subreg applied to an expression produced by loop optimizer.  */
6451   if (code == SUBREG
6452       && !REG_P (SUBREG_REG (value))
6453       && !MEM_P (SUBREG_REG (value)))
6454     {
6455       value
6456 	= simplify_gen_subreg (GET_MODE (value),
6457 			       force_reg (GET_MODE (SUBREG_REG (value)),
6458 					  force_operand (SUBREG_REG (value),
6459 							 NULL_RTX)),
6460 			       GET_MODE (SUBREG_REG (value)),
6461 			       SUBREG_BYTE (value));
6462       code = GET_CODE (value);
6463     }
6464 
6465   /* Check for a PIC address load.  */
6466   if ((code == PLUS || code == MINUS)
6467       && XEXP (value, 0) == pic_offset_table_rtx
6468       && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
6469 	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
6470 	  || GET_CODE (XEXP (value, 1)) == CONST))
6471     {
6472       if (!subtarget)
6473 	subtarget = gen_reg_rtx (GET_MODE (value));
6474       emit_move_insn (subtarget, value);
6475       return subtarget;
6476     }
6477 
6478   if (ARITHMETIC_P (value))
6479     {
6480       op2 = XEXP (value, 1);
6481       if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
6482 	subtarget = 0;
6483       if (code == MINUS && CONST_INT_P (op2))
6484 	{
6485 	  code = PLUS;
6486 	  op2 = negate_rtx (GET_MODE (value), op2);
6487 	}
6488 
6489       /* Check for an addition with OP2 a constant integer and our first
6490          operand a PLUS of a virtual register and something else.  In that
6491          case, we want to emit the sum of the virtual register and the
6492          constant first and then add the other value.  This allows virtual
6493          register instantiation to simply modify the constant rather than
6494          creating another one around this addition.  */
6495       if (code == PLUS && CONST_INT_P (op2)
6496 	  && GET_CODE (XEXP (value, 0)) == PLUS
6497 	  && REG_P (XEXP (XEXP (value, 0), 0))
6498 	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
6499 	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
6500 	{
6501 	  rtx temp = expand_simple_binop (GET_MODE (value), code,
6502 					  XEXP (XEXP (value, 0), 0), op2,
6503 					  subtarget, 0, OPTAB_LIB_WIDEN);
6504 	  return expand_simple_binop (GET_MODE (value), code, temp,
6505 				      force_operand (XEXP (XEXP (value,
6506 								 0), 1), 0),
6507 				      target, 0, OPTAB_LIB_WIDEN);
6508 	}
6509 
6510       op1 = force_operand (XEXP (value, 0), subtarget);
6511       op2 = force_operand (op2, NULL_RTX);
6512       switch (code)
6513 	{
6514 	case MULT:
6515 	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
6516 	case DIV:
6517 	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
6518 	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
6519 					target, 1, OPTAB_LIB_WIDEN);
6520 	  else
6521 	    return expand_divmod (0,
6522 				  FLOAT_MODE_P (GET_MODE (value))
6523 				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
6524 				  GET_MODE (value), op1, op2, target, 0);
6525 	case MOD:
6526 	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6527 				target, 0);
6528 	case UDIV:
6529 	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
6530 				target, 1);
6531 	case UMOD:
6532 	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
6533 				target, 1);
6534 	case ASHIFTRT:
6535 	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
6536 				      target, 0, OPTAB_LIB_WIDEN);
6537 	default:
6538 	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
6539 				      target, 1, OPTAB_LIB_WIDEN);
6540 	}
6541     }
6542   if (UNARY_P (value))
6543     {
6544       if (!target)
6545 	target = gen_reg_rtx (GET_MODE (value));
6546       op1 = force_operand (XEXP (value, 0), NULL_RTX);
6547       switch (code)
6548 	{
6549 	case ZERO_EXTEND:
6550 	case SIGN_EXTEND:
6551 	case TRUNCATE:
6552 	case FLOAT_EXTEND:
6553 	case FLOAT_TRUNCATE:
6554 	  convert_move (target, op1, code == ZERO_EXTEND);
6555 	  return target;
6556 
6557 	case FIX:
6558 	case UNSIGNED_FIX:
6559 	  expand_fix (target, op1, code == UNSIGNED_FIX);
6560 	  return target;
6561 
6562 	case FLOAT:
6563 	case UNSIGNED_FLOAT:
6564 	  expand_float (target, op1, code == UNSIGNED_FLOAT);
6565 	  return target;
6566 
6567 	default:
6568 	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
6569 	}
6570     }
6571 
6572 #ifdef INSN_SCHEDULING
6573   /* On machines that have insn scheduling, we want all memory reference to be
6574      explicit, so we need to deal with such paradoxical SUBREGs.  */
6575   if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
6576       && (GET_MODE_SIZE (GET_MODE (value))
6577 	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6578     value
6579       = simplify_gen_subreg (GET_MODE (value),
6580 			     force_reg (GET_MODE (SUBREG_REG (value)),
6581 					force_operand (SUBREG_REG (value),
6582 						       NULL_RTX)),
6583 			     GET_MODE (SUBREG_REG (value)),
6584 			     SUBREG_BYTE (value));
6585 #endif
6586 
6587   return value;
6588 }
6589 
6590 /* Subroutine of expand_expr: return nonzero iff there is no way that
6591    EXP can reference X, which is being modified.  TOP_P is nonzero if this
6592    call is going to be used to determine whether we need a temporary
6593    for EXP, as opposed to a recursive call to this function.
6594 
6595    It is always safe for this routine to return zero since it merely
6596    searches for optimization opportunities.  */
6597 
6598 int
6599 safe_from_p (const_rtx x, tree exp, int top_p)
6600 {
6601   rtx exp_rtl = 0;
6602   int i, nops;
6603 
6604   if (x == 0
6605       /* If EXP has varying size, we MUST use a target since we currently
6606 	 have no way of allocating temporaries of variable size
6607 	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6608 	 So we assume here that something at a higher level has prevented a
6609 	 clash.  This is somewhat bogus, but the best we can do.  Only
6610 	 do this when X is BLKmode and when we are at the top level.  */
6611       || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6612 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
6613 	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6614 	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6615 	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6616 	      != INTEGER_CST)
6617 	  && GET_MODE (x) == BLKmode)
6618       /* If X is in the outgoing argument area, it is always safe.  */
6619       || (MEM_P (x)
6620 	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
6621 	      || (GET_CODE (XEXP (x, 0)) == PLUS
6622 		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
6623     return 1;
6624 
6625   /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6626      find the underlying pseudo.  */
6627   if (GET_CODE (x) == SUBREG)
6628     {
6629       x = SUBREG_REG (x);
6630       if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6631 	return 0;
6632     }
6633 
6634   /* Now look at our tree code and possibly recurse.  */
6635   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6636     {
6637     case tcc_declaration:
6638       exp_rtl = DECL_RTL_IF_SET (exp);
6639       break;
6640 
6641     case tcc_constant:
6642       return 1;
6643 
6644     case tcc_exceptional:
6645       if (TREE_CODE (exp) == TREE_LIST)
6646 	{
6647 	  while (1)
6648 	    {
6649 	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6650 		return 0;
6651 	      exp = TREE_CHAIN (exp);
6652 	      if (!exp)
6653 		return 1;
6654 	      if (TREE_CODE (exp) != TREE_LIST)
6655 		return safe_from_p (x, exp, 0);
6656 	    }
6657 	}
6658       else if (TREE_CODE (exp) == CONSTRUCTOR)
6659 	{
6660 	  constructor_elt *ce;
6661 	  unsigned HOST_WIDE_INT idx;
6662 
6663 	  for (idx = 0;
6664 	       VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (exp), idx, ce);
6665 	       idx++)
6666 	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
6667 		|| !safe_from_p (x, ce->value, 0))
6668 	      return 0;
6669 	  return 1;
6670 	}
6671       else if (TREE_CODE (exp) == ERROR_MARK)
6672 	return 1;	/* An already-visited SAVE_EXPR? */
6673       else
6674 	return 0;
6675 
6676     case tcc_statement:
6677       /* The only case we look at here is the DECL_INITIAL inside a
6678 	 DECL_EXPR.  */
6679       return (TREE_CODE (exp) != DECL_EXPR
6680 	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6681 	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6682 	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6683 
6684     case tcc_binary:
6685     case tcc_comparison:
6686       if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6687 	return 0;
6688       /* Fall through.  */
6689 
6690     case tcc_unary:
6691       return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6692 
6693     case tcc_expression:
6694     case tcc_reference:
6695     case tcc_vl_exp:
6696       /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
6697 	 the expression.  If it is set, we conflict iff we are that rtx or
6698 	 both are in memory.  Otherwise, we check all operands of the
6699 	 expression recursively.  */
6700 
6701       switch (TREE_CODE (exp))
6702 	{
6703 	case ADDR_EXPR:
6704 	  /* If the operand is static or we are static, we can't conflict.
6705 	     Likewise if we don't conflict with the operand at all.  */
6706 	  if (staticp (TREE_OPERAND (exp, 0))
6707 	      || TREE_STATIC (exp)
6708 	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6709 	    return 1;
6710 
6711 	  /* Otherwise, the only way this can conflict is if we are taking
6712 	     the address of a DECL a that address if part of X, which is
6713 	     very rare.  */
6714 	  exp = TREE_OPERAND (exp, 0);
6715 	  if (DECL_P (exp))
6716 	    {
6717 	      if (!DECL_RTL_SET_P (exp)
6718 		  || !MEM_P (DECL_RTL (exp)))
6719 		return 0;
6720 	      else
6721 		exp_rtl = XEXP (DECL_RTL (exp), 0);
6722 	    }
6723 	  break;
6724 
6725 	case MISALIGNED_INDIRECT_REF:
6726 	case ALIGN_INDIRECT_REF:
6727 	case INDIRECT_REF:
6728 	  if (MEM_P (x)
6729 	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6730 					get_alias_set (exp)))
6731 	    return 0;
6732 	  break;
6733 
6734 	case CALL_EXPR:
6735 	  /* Assume that the call will clobber all hard registers and
6736 	     all of memory.  */
6737 	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6738 	      || MEM_P (x))
6739 	    return 0;
6740 	  break;
6741 
6742 	case WITH_CLEANUP_EXPR:
6743 	case CLEANUP_POINT_EXPR:
6744 	  /* Lowered by gimplify.c.  */
6745 	  gcc_unreachable ();
6746 
6747 	case SAVE_EXPR:
6748 	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6749 
6750 	default:
6751 	  break;
6752 	}
6753 
6754       /* If we have an rtx, we do not need to scan our operands.  */
6755       if (exp_rtl)
6756 	break;
6757 
6758       nops = TREE_OPERAND_LENGTH (exp);
6759       for (i = 0; i < nops; i++)
6760 	if (TREE_OPERAND (exp, i) != 0
6761 	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6762 	  return 0;
6763 
6764       break;
6765 
6766     case tcc_type:
6767       /* Should never get a type here.  */
6768       gcc_unreachable ();
6769     }
6770 
6771   /* If we have an rtl, find any enclosed object.  Then see if we conflict
6772      with it.  */
6773   if (exp_rtl)
6774     {
6775       if (GET_CODE (exp_rtl) == SUBREG)
6776 	{
6777 	  exp_rtl = SUBREG_REG (exp_rtl);
6778 	  if (REG_P (exp_rtl)
6779 	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6780 	    return 0;
6781 	}
6782 
6783       /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
6784 	 are memory and they conflict.  */
6785       return ! (rtx_equal_p (x, exp_rtl)
6786 		|| (MEM_P (x) && MEM_P (exp_rtl)
6787 		    && true_dependence (exp_rtl, VOIDmode, x,
6788 					rtx_addr_varies_p)));
6789     }
6790 
6791   /* If we reach here, it is safe.  */
6792   return 1;
6793 }
6794 
6795 
6796 /* Return the highest power of two that EXP is known to be a multiple of.
6797    This is used in updating alignment of MEMs in array references.  */
6798 
6799 unsigned HOST_WIDE_INT
6800 highest_pow2_factor (const_tree exp)
6801 {
6802   unsigned HOST_WIDE_INT c0, c1;
6803 
6804   switch (TREE_CODE (exp))
6805     {
6806     case INTEGER_CST:
6807       /* We can find the lowest bit that's a one.  If the low
6808 	 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6809 	 We need to handle this case since we can find it in a COND_EXPR,
6810 	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
6811 	 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6812 	 later ICE.  */
6813       if (TREE_OVERFLOW (exp))
6814 	return BIGGEST_ALIGNMENT;
6815       else
6816 	{
6817 	  /* Note: tree_low_cst is intentionally not used here,
6818 	     we don't care about the upper bits.  */
6819 	  c0 = TREE_INT_CST_LOW (exp);
6820 	  c0 &= -c0;
6821 	  return c0 ? c0 : BIGGEST_ALIGNMENT;
6822 	}
6823       break;
6824 
6825     case PLUS_EXPR:  case MINUS_EXPR:  case MIN_EXPR:  case MAX_EXPR:
6826       c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6827       c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6828       return MIN (c0, c1);
6829 
6830     case MULT_EXPR:
6831       c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6832       c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6833       return c0 * c1;
6834 
6835     case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
6836     case CEIL_DIV_EXPR:
6837       if (integer_pow2p (TREE_OPERAND (exp, 1))
6838 	  && host_integerp (TREE_OPERAND (exp, 1), 1))
6839 	{
6840 	  c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6841 	  c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6842 	  return MAX (1, c0 / c1);
6843 	}
6844       break;
6845 
6846     case BIT_AND_EXPR:
6847       /* The highest power of two of a bit-and expression is the maximum of
6848 	 that of its operands.  We typically get here for a complex LHS and
6849 	 a constant negative power of two on the RHS to force an explicit
6850 	 alignment, so don't bother looking at the LHS.  */
6851       return highest_pow2_factor (TREE_OPERAND (exp, 1));
6852 
6853     CASE_CONVERT:
6854     case SAVE_EXPR:
6855       return highest_pow2_factor (TREE_OPERAND (exp, 0));
6856 
6857     case COMPOUND_EXPR:
6858       return highest_pow2_factor (TREE_OPERAND (exp, 1));
6859 
6860     case COND_EXPR:
6861       c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6862       c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6863       return MIN (c0, c1);
6864 
6865     default:
6866       break;
6867     }
6868 
6869   return 1;
6870 }
6871 
6872 /* Similar, except that the alignment requirements of TARGET are
6873    taken into account.  Assume it is at least as aligned as its
6874    type, unless it is a COMPONENT_REF in which case the layout of
6875    the structure gives the alignment.  */
6876 
6877 static unsigned HOST_WIDE_INT
6878 highest_pow2_factor_for_target (const_tree target, const_tree exp)
6879 {
6880   unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
6881   unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
6882 
6883   return MAX (factor, talign);
6884 }
6885 
6886 /* Return &VAR expression for emulated thread local VAR.  */
6887 
6888 static tree
6889 emutls_var_address (tree var)
6890 {
6891   tree emuvar = emutls_decl (var);
6892   tree fn = built_in_decls [BUILT_IN_EMUTLS_GET_ADDRESS];
6893   tree arg = build_fold_addr_expr_with_type (emuvar, ptr_type_node);
6894   tree arglist = build_tree_list (NULL_TREE, arg);
6895   tree call = build_function_call_expr (UNKNOWN_LOCATION, fn, arglist);
6896   return fold_convert (build_pointer_type (TREE_TYPE (var)), call);
6897 }
6898 
6899 
6900 /* Subroutine of expand_expr.  Expand the two operands of a binary
6901    expression EXP0 and EXP1 placing the results in OP0 and OP1.
6902    The value may be stored in TARGET if TARGET is nonzero.  The
6903    MODIFIER argument is as documented by expand_expr.  */
6904 
6905 static void
6906 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6907 		 enum expand_modifier modifier)
6908 {
6909   if (! safe_from_p (target, exp1, 1))
6910     target = 0;
6911   if (operand_equal_p (exp0, exp1, 0))
6912     {
6913       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6914       *op1 = copy_rtx (*op0);
6915     }
6916   else
6917     {
6918       /* If we need to preserve evaluation order, copy exp0 into its own
6919 	 temporary variable so that it can't be clobbered by exp1.  */
6920       if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6921 	exp0 = save_expr (exp0);
6922       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6923       *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6924     }
6925 }
6926 
6927 
6928 /* Return a MEM that contains constant EXP.  DEFER is as for
6929    output_constant_def and MODIFIER is as for expand_expr.  */
6930 
6931 static rtx
6932 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
6933 {
6934   rtx mem;
6935 
6936   mem = output_constant_def (exp, defer);
6937   if (modifier != EXPAND_INITIALIZER)
6938     mem = use_anchored_address (mem);
6939   return mem;
6940 }
6941 
6942 /* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
6943    The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
6944 
6945 static rtx
6946 expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
6947 		         enum expand_modifier modifier, addr_space_t as)
6948 {
6949   rtx result, subtarget;
6950   tree inner, offset;
6951   HOST_WIDE_INT bitsize, bitpos;
6952   int volatilep, unsignedp;
6953   enum machine_mode mode1;
6954 
6955   /* If we are taking the address of a constant and are at the top level,
6956      we have to use output_constant_def since we can't call force_const_mem
6957      at top level.  */
6958   /* ??? This should be considered a front-end bug.  We should not be
6959      generating ADDR_EXPR of something that isn't an LVALUE.  The only
6960      exception here is STRING_CST.  */
6961   if (CONSTANT_CLASS_P (exp))
6962     return XEXP (expand_expr_constant (exp, 0, modifier), 0);
6963 
6964   /* Everything must be something allowed by is_gimple_addressable.  */
6965   switch (TREE_CODE (exp))
6966     {
6967     case INDIRECT_REF:
6968       /* This case will happen via recursion for &a->b.  */
6969       return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6970 
6971     case CONST_DECL:
6972       /* Expand the initializer like constants above.  */
6973       return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);
6974 
6975     case REALPART_EXPR:
6976       /* The real part of the complex number is always first, therefore
6977 	 the address is the same as the address of the parent object.  */
6978       offset = 0;
6979       bitpos = 0;
6980       inner = TREE_OPERAND (exp, 0);
6981       break;
6982 
6983     case IMAGPART_EXPR:
6984       /* The imaginary part of the complex number is always second.
6985 	 The expression is therefore always offset by the size of the
6986 	 scalar type.  */
6987       offset = 0;
6988       bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
6989       inner = TREE_OPERAND (exp, 0);
6990       break;
6991 
6992     case VAR_DECL:
6993       /* TLS emulation hook - replace __thread VAR's &VAR with
6994 	 __emutls_get_address (&_emutls.VAR).  */
6995       if (! targetm.have_tls
6996 	  && TREE_CODE (exp) == VAR_DECL
6997 	  && DECL_THREAD_LOCAL_P (exp))
6998 	{
6999 	  exp = emutls_var_address (exp);
7000 	  return expand_expr (exp, target, tmode, modifier);
7001 	}
7002       /* Fall through.  */
7003 
7004     default:
7005       /* If the object is a DECL, then expand it for its rtl.  Don't bypass
7006 	 expand_expr, as that can have various side effects; LABEL_DECLs for
7007 	 example, may not have their DECL_RTL set yet.  Expand the rtl of
7008 	 CONSTRUCTORs too, which should yield a memory reference for the
7009 	 constructor's contents.  Assume language specific tree nodes can
7010 	 be expanded in some interesting way.  */
7011       gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7012       if (DECL_P (exp)
7013 	  || TREE_CODE (exp) == CONSTRUCTOR
7014 	  || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7015 	{
7016 	  result = expand_expr (exp, target, tmode,
7017 				modifier == EXPAND_INITIALIZER
7018 				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7019 
7020 	  if (TREE_ADDRESSABLE (exp)
7021 	      && ! MEM_P (result)
7022 	      && ! targetm.calls.allocate_stack_slots_for_args ())
7023 	    {
7024 	      error ("local frame unavailable (naked function?)");
7025 	      return result;
7026 	    }
7027 
7028 	  /* If the DECL isn't in memory, then the DECL wasn't properly
7029 	     marked TREE_ADDRESSABLE, which will be either a front-end
7030 	     or a tree optimizer bug.  */
7031 	  gcc_assert (MEM_P (result));
7032 	  result = XEXP (result, 0);
7033 
7034 	  /* ??? Is this needed anymore?  */
7035 	  if (DECL_P (exp) && !TREE_USED (exp) == 0)
7036 	    {
7037 	      assemble_external (exp);
7038 	      TREE_USED (exp) = 1;
7039 	    }
7040 
7041 	  if (modifier != EXPAND_INITIALIZER
7042 	      && modifier != EXPAND_CONST_ADDRESS)
7043 	    result = force_operand (result, target);
7044 	  return result;
7045 	}
7046 
7047       /* Pass FALSE as the last argument to get_inner_reference although
7048 	 we are expanding to RTL.  The rationale is that we know how to
7049 	 handle "aligning nodes" here: we can just bypass them because
7050 	 they won't change the final object whose address will be returned
7051 	 (they actually exist only for that purpose).  */
7052       inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7053 				   &mode1, &unsignedp, &volatilep, false);
7054       break;
7055     }
7056 
7057   /* We must have made progress.  */
7058   gcc_assert (inner != exp);
7059 
7060   subtarget = offset || bitpos ? NULL_RTX : target;
7061   /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7062      inner alignment, force the inner to be sufficiently aligned.  */
7063   if (CONSTANT_CLASS_P (inner)
7064       && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7065     {
7066       inner = copy_node (inner);
7067       TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7068       TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7069       TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7070     }
7071   result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7072 
7073   if (offset)
7074     {
7075       rtx tmp;
7076 
7077       if (modifier != EXPAND_NORMAL)
7078 	result = force_operand (result, NULL);
7079       tmp = expand_expr (offset, NULL_RTX, tmode,
7080 			 modifier == EXPAND_INITIALIZER
7081 			  ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7082 
7083       result = convert_memory_address_addr_space (tmode, result, as);
7084       tmp = convert_memory_address_addr_space (tmode, tmp, as);
7085 
7086       if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7087 	result = gen_rtx_PLUS (tmode, result, tmp);
7088       else
7089 	{
7090 	  subtarget = bitpos ? NULL_RTX : target;
7091 	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7092 					1, OPTAB_LIB_WIDEN);
7093 	}
7094     }
7095 
7096   if (bitpos)
7097     {
7098       /* Someone beforehand should have rejected taking the address
7099 	 of such an object.  */
7100       gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7101 
7102       result = plus_constant (result, bitpos / BITS_PER_UNIT);
7103       if (modifier < EXPAND_SUM)
7104 	result = force_operand (result, target);
7105     }
7106 
7107   return result;
7108 }
7109 
7110 /* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
7111    The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
7112 
7113 static rtx
7114 expand_expr_addr_expr (tree exp, rtx target, enum machine_mode tmode,
7115 		       enum expand_modifier modifier)
7116 {
7117   addr_space_t as = ADDR_SPACE_GENERIC;
7118   enum machine_mode address_mode = Pmode;
7119   enum machine_mode pointer_mode = ptr_mode;
7120   enum machine_mode rmode;
7121   rtx result;
7122 
7123   /* Target mode of VOIDmode says "whatever's natural".  */
7124   if (tmode == VOIDmode)
7125     tmode = TYPE_MODE (TREE_TYPE (exp));
7126 
7127   if (POINTER_TYPE_P (TREE_TYPE (exp)))
7128     {
7129       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7130       address_mode = targetm.addr_space.address_mode (as);
7131       pointer_mode = targetm.addr_space.pointer_mode (as);
7132     }
7133 
7134   /* We can get called with some Weird Things if the user does silliness
7135      like "(short) &a".  In that case, convert_memory_address won't do
7136      the right thing, so ignore the given target mode.  */
7137   if (tmode != address_mode && tmode != pointer_mode)
7138     tmode = address_mode;
7139 
7140   result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7141 				    tmode, modifier, as);
7142 
7143   /* Despite expand_expr claims concerning ignoring TMODE when not
7144      strictly convenient, stuff breaks if we don't honor it.  Note
7145      that combined with the above, we only do this for pointer modes.  */
7146   rmode = GET_MODE (result);
7147   if (rmode == VOIDmode)
7148     rmode = tmode;
7149   if (rmode != tmode)
7150     result = convert_memory_address_addr_space (tmode, result, as);
7151 
7152   return result;
7153 }
7154 
7155 /* Generate code for computing CONSTRUCTOR EXP.
7156    An rtx for the computed value is returned.  If AVOID_TEMP_MEM
7157    is TRUE, instead of creating a temporary variable in memory
7158    NULL is returned and the caller needs to handle it differently.  */
7159 
7160 static rtx
7161 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7162 		    bool avoid_temp_mem)
7163 {
7164   tree type = TREE_TYPE (exp);
7165   enum machine_mode mode = TYPE_MODE (type);
7166 
7167   /* Try to avoid creating a temporary at all.  This is possible
7168      if all of the initializer is zero.
7169      FIXME: try to handle all [0..255] initializers we can handle
7170      with memset.  */
7171   if (TREE_STATIC (exp)
7172       && !TREE_ADDRESSABLE (exp)
7173       && target != 0 && mode == BLKmode
7174       && all_zeros_p (exp))
7175     {
7176       clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7177       return target;
7178     }
7179 
7180   /* All elts simple constants => refer to a constant in memory.  But
7181      if this is a non-BLKmode mode, let it store a field at a time
7182      since that should make a CONST_INT or CONST_DOUBLE when we
7183      fold.  Likewise, if we have a target we can use, it is best to
7184      store directly into the target unless the type is large enough
7185      that memcpy will be used.  If we are making an initializer and
7186      all operands are constant, put it in memory as well.
7187 
7188      FIXME: Avoid trying to fill vector constructors piece-meal.
7189      Output them with output_constant_def below unless we're sure
7190      they're zeros.  This should go away when vector initializers
7191      are treated like VECTOR_CST instead of arrays.  */
7192   if ((TREE_STATIC (exp)
7193        && ((mode == BLKmode
7194 	    && ! (target != 0 && safe_from_p (target, exp, 1)))
7195 		  || TREE_ADDRESSABLE (exp)
7196 		  || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7197 		      && (! MOVE_BY_PIECES_P
7198 				     (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7199 				      TYPE_ALIGN (type)))
7200 		      && ! mostly_zeros_p (exp))))
7201       || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7202 	  && TREE_CONSTANT (exp)))
7203     {
7204       rtx constructor;
7205 
7206       if (avoid_temp_mem)
7207 	return NULL_RTX;
7208 
7209       constructor = expand_expr_constant (exp, 1, modifier);
7210 
7211       if (modifier != EXPAND_CONST_ADDRESS
7212 	  && modifier != EXPAND_INITIALIZER
7213 	  && modifier != EXPAND_SUM)
7214 	constructor = validize_mem (constructor);
7215 
7216       return constructor;
7217     }
7218 
7219   /* Handle calls that pass values in multiple non-contiguous
7220      locations.  The Irix 6 ABI has examples of this.  */
7221   if (target == 0 || ! safe_from_p (target, exp, 1)
7222       || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7223     {
7224       if (avoid_temp_mem)
7225 	return NULL_RTX;
7226 
7227       target
7228 	= assign_temp (build_qualified_type (type, (TYPE_QUALS (type)
7229 						    | (TREE_READONLY (exp)
7230 						       * TYPE_QUAL_CONST))),
7231 		       0, TREE_ADDRESSABLE (exp), 1);
7232     }
7233 
7234   store_constructor (exp, target, 0, int_expr_size (exp));
7235   return target;
7236 }
7237 
7238 
7239 /* expand_expr: generate code for computing expression EXP.
7240    An rtx for the computed value is returned.  The value is never null.
7241    In the case of a void EXP, const0_rtx is returned.
7242 
7243    The value may be stored in TARGET if TARGET is nonzero.
7244    TARGET is just a suggestion; callers must assume that
7245    the rtx returned may not be the same as TARGET.
7246 
7247    If TARGET is CONST0_RTX, it means that the value will be ignored.
7248 
7249    If TMODE is not VOIDmode, it suggests generating the
7250    result in mode TMODE.  But this is done only when convenient.
7251    Otherwise, TMODE is ignored and the value generated in its natural mode.
7252    TMODE is just a suggestion; callers must assume that
7253    the rtx returned may not have mode TMODE.
7254 
7255    Note that TARGET may have neither TMODE nor MODE.  In that case, it
7256    probably will not be used.
7257 
7258    If MODIFIER is EXPAND_SUM then when EXP is an addition
7259    we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7260    or a nest of (PLUS ...) and (MINUS ...) where the terms are
7261    products as above, or REG or MEM, or constant.
7262    Ordinarily in such cases we would output mul or add instructions
7263    and then return a pseudo reg containing the sum.
7264 
7265    EXPAND_INITIALIZER is much like EXPAND_SUM except that
7266    it also marks a label as absolutely required (it can't be dead).
7267    It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7268    This is used for outputting expressions used in initializers.
7269 
7270    EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7271    with a constant address even if that address is not normally legitimate.
7272    EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7273 
7274    EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7275    a call parameter.  Such targets require special care as we haven't yet
7276    marked TARGET so that it's safe from being trashed by libcalls.  We
7277    don't want to use TARGET for anything but the final result;
7278    Intermediate values must go elsewhere.   Additionally, calls to
7279    emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7280 
7281    If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7282    address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7283    DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
7284    COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7285    recursively.  */
7286 
7287 rtx
7288 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
7289 		  enum expand_modifier modifier, rtx *alt_rtl)
7290 {
7291   rtx ret;
7292 
7293   /* Handle ERROR_MARK before anybody tries to access its type.  */
7294   if (TREE_CODE (exp) == ERROR_MARK
7295       || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
7296     {
7297       ret = CONST0_RTX (tmode);
7298       return ret ? ret : const0_rtx;
7299     }
7300 
7301   /* If this is an expression of some kind and it has an associated line
7302      number, then emit the line number before expanding the expression.
7303 
7304      We need to save and restore the file and line information so that
7305      errors discovered during expansion are emitted with the right
7306      information.  It would be better of the diagnostic routines
7307      used the file/line information embedded in the tree nodes rather
7308      than globals.  */
7309   if (cfun && EXPR_HAS_LOCATION (exp))
7310     {
7311       location_t saved_location = input_location;
7312       location_t saved_curr_loc = get_curr_insn_source_location ();
7313       tree saved_block = get_curr_insn_block ();
7314       input_location = EXPR_LOCATION (exp);
7315       set_curr_insn_source_location (input_location);
7316 
7317       /* Record where the insns produced belong.  */
7318       set_curr_insn_block (TREE_BLOCK (exp));
7319 
7320       ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7321 
7322       input_location = saved_location;
7323       set_curr_insn_block (saved_block);
7324       set_curr_insn_source_location (saved_curr_loc);
7325     }
7326   else
7327     {
7328       ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
7329     }
7330 
7331   return ret;
7332 }
7333 
7334 rtx
7335 expand_expr_real_2 (sepops ops, rtx target, enum machine_mode tmode,
7336 		    enum expand_modifier modifier)
7337 {
7338   rtx op0, op1, op2, temp;
7339   tree type;
7340   int unsignedp;
7341   enum machine_mode mode;
7342   enum tree_code code = ops->code;
7343   optab this_optab;
7344   rtx subtarget, original_target;
7345   int ignore;
7346   tree subexp0, subexp1;
7347   bool reduce_bit_field;
7348   gimple subexp0_def, subexp1_def;
7349   tree top0, top1;
7350   location_t loc = ops->location;
7351   tree treeop0, treeop1;
7352 #define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			  \
7353 				 ? reduce_to_bit_field_precision ((expr), \
7354 								  target, \
7355 								  type)	  \
7356 				 : (expr))
7357 
7358   type = ops->type;
7359   mode = TYPE_MODE (type);
7360   unsignedp = TYPE_UNSIGNED (type);
7361 
7362   treeop0 = ops->op0;
7363   treeop1 = ops->op1;
7364 
7365   /* We should be called only on simple (binary or unary) expressions,
7366      exactly those that are valid in gimple expressions that aren't
7367      GIMPLE_SINGLE_RHS (or invalid).  */
7368   gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
7369 	      || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS);
7370 
7371   ignore = (target == const0_rtx
7372 	    || ((CONVERT_EXPR_CODE_P (code)
7373 		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
7374 		&& TREE_CODE (type) == VOID_TYPE));
7375 
7376   /* We should be called only if we need the result.  */
7377   gcc_assert (!ignore);
7378 
7379   /* An operation in what may be a bit-field type needs the
7380      result to be reduced to the precision of the bit-field type,
7381      which is narrower than that of the type's mode.  */
7382   reduce_bit_field = (TREE_CODE (type) == INTEGER_TYPE
7383 		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
7384 
7385   if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
7386     target = 0;
7387 
7388   /* Use subtarget as the target for operand 0 of a binary operation.  */
7389   subtarget = get_subtarget (target);
7390   original_target = target;
7391 
7392   switch (code)
7393     {
7394     case NON_LVALUE_EXPR:
7395     case PAREN_EXPR:
7396     CASE_CONVERT:
7397       if (treeop0 == error_mark_node)
7398 	return const0_rtx;
7399 
7400       if (TREE_CODE (type) == UNION_TYPE)
7401 	{
7402 	  tree valtype = TREE_TYPE (treeop0);
7403 
7404 	  /* If both input and output are BLKmode, this conversion isn't doing
7405 	     anything except possibly changing memory attribute.  */
7406 	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7407 	    {
7408 	      rtx result = expand_expr (treeop0, target, tmode,
7409 					modifier);
7410 
7411 	      result = copy_rtx (result);
7412 	      set_mem_attributes (result, type, 0);
7413 	      return result;
7414 	    }
7415 
7416 	  if (target == 0)
7417 	    {
7418 	      if (TYPE_MODE (type) != BLKmode)
7419 		target = gen_reg_rtx (TYPE_MODE (type));
7420 	      else
7421 		target = assign_temp (type, 0, 1, 1);
7422 	    }
7423 
7424 	  if (MEM_P (target))
7425 	    /* Store data into beginning of memory target.  */
7426 	    store_expr (treeop0,
7427 			adjust_address (target, TYPE_MODE (valtype), 0),
7428 			modifier == EXPAND_STACK_PARM,
7429 			false);
7430 
7431 	  else
7432 	    {
7433 	      gcc_assert (REG_P (target));
7434 
7435 	      /* Store this field into a union of the proper type.  */
7436 	      store_field (target,
7437 			   MIN ((int_size_in_bytes (TREE_TYPE
7438 						    (treeop0))
7439 				 * BITS_PER_UNIT),
7440 				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7441 			   0, TYPE_MODE (valtype), treeop0,
7442 			   type, 0, false);
7443 	    }
7444 
7445 	  /* Return the entire union.  */
7446 	  return target;
7447 	}
7448 
7449       if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
7450 	{
7451 	  op0 = expand_expr (treeop0, target, VOIDmode,
7452 			     modifier);
7453 
7454 	  /* If the signedness of the conversion differs and OP0 is
7455 	     a promoted SUBREG, clear that indication since we now
7456 	     have to do the proper extension.  */
7457 	  if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
7458 	      && GET_CODE (op0) == SUBREG)
7459 	    SUBREG_PROMOTED_VAR_P (op0) = 0;
7460 
7461 	  return REDUCE_BIT_FIELD (op0);
7462 	}
7463 
7464       op0 = expand_expr (treeop0, NULL_RTX, mode,
7465 			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
7466       if (GET_MODE (op0) == mode)
7467 	;
7468 
7469       /* If OP0 is a constant, just convert it into the proper mode.  */
7470       else if (CONSTANT_P (op0))
7471 	{
7472 	  tree inner_type = TREE_TYPE (treeop0);
7473 	  enum machine_mode inner_mode = TYPE_MODE (inner_type);
7474 
7475 	  if (modifier == EXPAND_INITIALIZER)
7476 	    op0 = simplify_gen_subreg (mode, op0, inner_mode,
7477 				       subreg_lowpart_offset (mode,
7478 							      inner_mode));
7479 	  else
7480 	    op0=  convert_modes (mode, inner_mode, op0,
7481 				 TYPE_UNSIGNED (inner_type));
7482 	}
7483 
7484       else if (modifier == EXPAND_INITIALIZER)
7485 	op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7486 
7487       else if (target == 0)
7488 	op0 = convert_to_mode (mode, op0,
7489 			       TYPE_UNSIGNED (TREE_TYPE
7490 					      (treeop0)));
7491       else
7492 	{
7493 	  convert_move (target, op0,
7494 			TYPE_UNSIGNED (TREE_TYPE (treeop0)));
7495 	  op0 = target;
7496 	}
7497 
7498       return REDUCE_BIT_FIELD (op0);
7499 
7500     case ADDR_SPACE_CONVERT_EXPR:
7501       {
7502 	tree treeop0_type = TREE_TYPE (treeop0);
7503 	addr_space_t as_to;
7504 	addr_space_t as_from;
7505 
7506 	gcc_assert (POINTER_TYPE_P (type));
7507 	gcc_assert (POINTER_TYPE_P (treeop0_type));
7508 
7509 	as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
7510 	as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
7511 
7512         /* Conversions between pointers to the same address space should
7513 	   have been implemented via CONVERT_EXPR / NOP_EXPR.  */
7514 	gcc_assert (as_to != as_from);
7515 
7516         /* Ask target code to handle conversion between pointers
7517 	   to overlapping address spaces.  */
7518 	if (targetm.addr_space.subset_p (as_to, as_from)
7519 	    || targetm.addr_space.subset_p (as_from, as_to))
7520 	  {
7521 	    op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
7522 	    op0 = targetm.addr_space.convert (op0, treeop0_type, type);
7523 	    gcc_assert (op0);
7524 	    return op0;
7525 	  }
7526 
7527 	/* For disjoint address spaces, converting anything but
7528 	   a null pointer invokes undefined behaviour.  We simply
7529 	   always return a null pointer here.  */
7530 	return CONST0_RTX (mode);
7531       }
7532 
7533     case POINTER_PLUS_EXPR:
7534       /* Even though the sizetype mode and the pointer's mode can be different
7535          expand is able to handle this correctly and get the correct result out
7536          of the PLUS_EXPR code.  */
7537       /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
7538          if sizetype precision is smaller than pointer precision.  */
7539       if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
7540 	treeop1 = fold_convert_loc (loc, type,
7541 				    fold_convert_loc (loc, ssizetype,
7542 						      treeop1));
7543     case PLUS_EXPR:
7544 
7545       /* Check if this is a case for multiplication and addition.  */
7546       if ((TREE_CODE (type) == INTEGER_TYPE
7547 	   || TREE_CODE (type) == FIXED_POINT_TYPE)
7548 	  && (subexp0_def = get_def_for_expr (treeop0,
7549 					      MULT_EXPR)))
7550 	{
7551 	  tree subsubexp0, subsubexp1;
7552 	  gimple subsubexp0_def, subsubexp1_def;
7553 	  enum tree_code this_code;
7554 
7555 	  this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
7556 						       : FIXED_CONVERT_EXPR;
7557 	  subsubexp0 = gimple_assign_rhs1 (subexp0_def);
7558 	  subsubexp0_def = get_def_for_expr (subsubexp0, this_code);
7559 	  subsubexp1 = gimple_assign_rhs2 (subexp0_def);
7560 	  subsubexp1_def = get_def_for_expr (subsubexp1, this_code);
7561 	  if (subsubexp0_def && subsubexp1_def
7562 	      && (top0 = gimple_assign_rhs1 (subsubexp0_def))
7563 	      && (top1 = gimple_assign_rhs1 (subsubexp1_def))
7564 	      && (TYPE_PRECISION (TREE_TYPE (top0))
7565 		  < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
7566 	      && (TYPE_PRECISION (TREE_TYPE (top0))
7567 		  == TYPE_PRECISION (TREE_TYPE (top1)))
7568 	      && (TYPE_UNSIGNED (TREE_TYPE (top0))
7569 		  == TYPE_UNSIGNED (TREE_TYPE (top1))))
7570 	    {
7571 	      tree op0type = TREE_TYPE (top0);
7572 	      enum machine_mode innermode = TYPE_MODE (op0type);
7573 	      bool zextend_p = TYPE_UNSIGNED (op0type);
7574 	      bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
7575 	      if (sat_p == 0)
7576 		this_optab = zextend_p ? umadd_widen_optab : smadd_widen_optab;
7577 	      else
7578 		this_optab = zextend_p ? usmadd_widen_optab
7579 				       : ssmadd_widen_optab;
7580 	      if (mode == GET_MODE_2XWIDER_MODE (innermode)
7581 		  && (optab_handler (this_optab, mode)->insn_code
7582 		      != CODE_FOR_nothing))
7583 		{
7584 		  expand_operands (top0, top1, NULL_RTX, &op0, &op1,
7585 				   EXPAND_NORMAL);
7586 		  op2 = expand_expr (treeop1, subtarget,
7587 				     VOIDmode, EXPAND_NORMAL);
7588 		  temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
7589 					    target, unsignedp);
7590 		  gcc_assert (temp);
7591 		  return REDUCE_BIT_FIELD (temp);
7592 		}
7593 	    }
7594 	}
7595 
7596       /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7597 	 something else, make sure we add the register to the constant and
7598 	 then to the other thing.  This case can occur during strength
7599 	 reduction and doing it this way will produce better code if the
7600 	 frame pointer or argument pointer is eliminated.
7601 
7602 	 fold-const.c will ensure that the constant is always in the inner
7603 	 PLUS_EXPR, so the only case we need to do anything about is if
7604 	 sp, ap, or fp is our second argument, in which case we must swap
7605 	 the innermost first argument and our second argument.  */
7606 
7607       if (TREE_CODE (treeop0) == PLUS_EXPR
7608 	  && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
7609 	  && TREE_CODE (treeop1) == VAR_DECL
7610 	  && (DECL_RTL (treeop1) == frame_pointer_rtx
7611 	      || DECL_RTL (treeop1) == stack_pointer_rtx
7612 	      || DECL_RTL (treeop1) == arg_pointer_rtx))
7613 	{
7614 	  tree t = treeop1;
7615 
7616 	  treeop1 = TREE_OPERAND (treeop0, 0);
7617 	  TREE_OPERAND (treeop0, 0) = t;
7618 	}
7619 
7620       /* If the result is to be ptr_mode and we are adding an integer to
7621 	 something, we might be forming a constant.  So try to use
7622 	 plus_constant.  If it produces a sum and we can't accept it,
7623 	 use force_operand.  This allows P = &ARR[const] to generate
7624 	 efficient code on machines where a SYMBOL_REF is not a valid
7625 	 address.
7626 
7627 	 If this is an EXPAND_SUM call, always return the sum.  */
7628       if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7629 	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7630 	{
7631 	  if (modifier == EXPAND_STACK_PARM)
7632 	    target = 0;
7633 	  if (TREE_CODE (treeop0) == INTEGER_CST
7634 	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7635 	      && TREE_CONSTANT (treeop1))
7636 	    {
7637 	      rtx constant_part;
7638 
7639 	      op1 = expand_expr (treeop1, subtarget, VOIDmode,
7640 				 EXPAND_SUM);
7641 	      /* Use immed_double_const to ensure that the constant is
7642 		 truncated according to the mode of OP1, then sign extended
7643 		 to a HOST_WIDE_INT.  Using the constant directly can result
7644 		 in non-canonical RTL in a 64x32 cross compile.  */
7645 	      constant_part
7646 		= immed_double_const (TREE_INT_CST_LOW (treeop0),
7647 				      (HOST_WIDE_INT) 0,
7648 				      TYPE_MODE (TREE_TYPE (treeop1)));
7649 	      op1 = plus_constant (op1, INTVAL (constant_part));
7650 	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7651 		op1 = force_operand (op1, target);
7652 	      return REDUCE_BIT_FIELD (op1);
7653 	    }
7654 
7655 	  else if (TREE_CODE (treeop1) == INTEGER_CST
7656 		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7657 		   && TREE_CONSTANT (treeop0))
7658 	    {
7659 	      rtx constant_part;
7660 
7661 	      op0 = expand_expr (treeop0, subtarget, VOIDmode,
7662 				 (modifier == EXPAND_INITIALIZER
7663 				 ? EXPAND_INITIALIZER : EXPAND_SUM));
7664 	      if (! CONSTANT_P (op0))
7665 		{
7666 		  op1 = expand_expr (treeop1, NULL_RTX,
7667 				     VOIDmode, modifier);
7668 		  /* Return a PLUS if modifier says it's OK.  */
7669 		  if (modifier == EXPAND_SUM
7670 		      || modifier == EXPAND_INITIALIZER)
7671 		    return simplify_gen_binary (PLUS, mode, op0, op1);
7672 		  goto binop2;
7673 		}
7674 	      /* Use immed_double_const to ensure that the constant is
7675 		 truncated according to the mode of OP1, then sign extended
7676 		 to a HOST_WIDE_INT.  Using the constant directly can result
7677 		 in non-canonical RTL in a 64x32 cross compile.  */
7678 	      constant_part
7679 		= immed_double_const (TREE_INT_CST_LOW (treeop1),
7680 				      (HOST_WIDE_INT) 0,
7681 				      TYPE_MODE (TREE_TYPE (treeop0)));
7682 	      op0 = plus_constant (op0, INTVAL (constant_part));
7683 	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7684 		op0 = force_operand (op0, target);
7685 	      return REDUCE_BIT_FIELD (op0);
7686 	    }
7687 	}
7688 
7689       /* No sense saving up arithmetic to be done
7690 	 if it's all in the wrong mode to form part of an address.
7691 	 And force_operand won't know whether to sign-extend or
7692 	 zero-extend.  */
7693       if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7694 	  || mode != ptr_mode)
7695 	{
7696 	  expand_operands (treeop0, treeop1,
7697 			   subtarget, &op0, &op1, EXPAND_NORMAL);
7698 	  if (op0 == const0_rtx)
7699 	    return op1;
7700 	  if (op1 == const0_rtx)
7701 	    return op0;
7702 	  goto binop2;
7703 	}
7704 
7705       expand_operands (treeop0, treeop1,
7706 		       subtarget, &op0, &op1, modifier);
7707       return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7708 
7709     case MINUS_EXPR:
7710       /* Check if this is a case for multiplication and subtraction.  */
7711       if ((TREE_CODE (type) == INTEGER_TYPE
7712 	   || TREE_CODE (type) == FIXED_POINT_TYPE)
7713 	  && (subexp1_def = get_def_for_expr (treeop1,
7714 					      MULT_EXPR)))
7715 	{
7716 	  tree subsubexp0, subsubexp1;
7717 	  gimple subsubexp0_def, subsubexp1_def;
7718 	  enum tree_code this_code;
7719 
7720 	  this_code = TREE_CODE (type) == INTEGER_TYPE ? NOP_EXPR
7721 						       : FIXED_CONVERT_EXPR;
7722 	  subsubexp0 = gimple_assign_rhs1 (subexp1_def);
7723 	  subsubexp0_def = get_def_for_expr (subsubexp0, this_code);
7724 	  subsubexp1 = gimple_assign_rhs2 (subexp1_def);
7725 	  subsubexp1_def = get_def_for_expr (subsubexp1, this_code);
7726 	  if (subsubexp0_def && subsubexp1_def
7727 	      && (top0 = gimple_assign_rhs1 (subsubexp0_def))
7728 	      && (top1 = gimple_assign_rhs1 (subsubexp1_def))
7729 	      && (TYPE_PRECISION (TREE_TYPE (top0))
7730 		  < TYPE_PRECISION (TREE_TYPE (subsubexp0)))
7731 	      && (TYPE_PRECISION (TREE_TYPE (top0))
7732 		  == TYPE_PRECISION (TREE_TYPE (top1)))
7733 	      && (TYPE_UNSIGNED (TREE_TYPE (top0))
7734 		  == TYPE_UNSIGNED (TREE_TYPE (top1))))
7735 	    {
7736 	      tree op0type = TREE_TYPE (top0);
7737 	      enum machine_mode innermode = TYPE_MODE (op0type);
7738 	      bool zextend_p = TYPE_UNSIGNED (op0type);
7739 	      bool sat_p = TYPE_SATURATING (TREE_TYPE (subsubexp0));
7740 	      if (sat_p == 0)
7741 		this_optab = zextend_p ? umsub_widen_optab : smsub_widen_optab;
7742 	      else
7743 		this_optab = zextend_p ? usmsub_widen_optab
7744 				       : ssmsub_widen_optab;
7745 	      if (mode == GET_MODE_2XWIDER_MODE (innermode)
7746 		  && (optab_handler (this_optab, mode)->insn_code
7747 		      != CODE_FOR_nothing))
7748 		{
7749 		  expand_operands (top0, top1, NULL_RTX, &op0, &op1,
7750 				   EXPAND_NORMAL);
7751 		  op2 = expand_expr (treeop0, subtarget,
7752 				     VOIDmode, EXPAND_NORMAL);
7753 		  temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
7754 					    target, unsignedp);
7755 		  gcc_assert (temp);
7756 		  return REDUCE_BIT_FIELD (temp);
7757 		}
7758 	    }
7759 	}
7760 
7761       /* For initializers, we are allowed to return a MINUS of two
7762 	 symbolic constants.  Here we handle all cases when both operands
7763 	 are constant.  */
7764       /* Handle difference of two symbolic constants,
7765 	 for the sake of an initializer.  */
7766       if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7767 	  && really_constant_p (treeop0)
7768 	  && really_constant_p (treeop1))
7769 	{
7770 	  expand_operands (treeop0, treeop1,
7771 			   NULL_RTX, &op0, &op1, modifier);
7772 
7773 	  /* If the last operand is a CONST_INT, use plus_constant of
7774 	     the negated constant.  Else make the MINUS.  */
7775 	  if (CONST_INT_P (op1))
7776 	    return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7777 	  else
7778 	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7779 	}
7780 
7781       /* No sense saving up arithmetic to be done
7782 	 if it's all in the wrong mode to form part of an address.
7783 	 And force_operand won't know whether to sign-extend or
7784 	 zero-extend.  */
7785       if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7786 	  || mode != ptr_mode)
7787 	goto binop;
7788 
7789       expand_operands (treeop0, treeop1,
7790 		       subtarget, &op0, &op1, modifier);
7791 
7792       /* Convert A - const to A + (-const).  */
7793       if (CONST_INT_P (op1))
7794 	{
7795 	  op1 = negate_rtx (mode, op1);
7796 	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7797 	}
7798 
7799       goto binop2;
7800 
7801     case MULT_EXPR:
7802       /* If this is a fixed-point operation, then we cannot use the code
7803 	 below because "expand_mult" doesn't support sat/no-sat fixed-point
7804          multiplications.   */
7805       if (ALL_FIXED_POINT_MODE_P (mode))
7806 	goto binop;
7807 
7808       /* If first operand is constant, swap them.
7809 	 Thus the following special case checks need only
7810 	 check the second operand.  */
7811       if (TREE_CODE (treeop0) == INTEGER_CST)
7812 	{
7813 	  tree t1 = treeop0;
7814 	  treeop0 = treeop1;
7815 	  treeop1 = t1;
7816 	}
7817 
7818       /* Attempt to return something suitable for generating an
7819 	 indexed address, for machines that support that.  */
7820 
7821       if (modifier == EXPAND_SUM && mode == ptr_mode
7822 	  && host_integerp (treeop1, 0))
7823 	{
7824 	  tree exp1 = treeop1;
7825 
7826 	  op0 = expand_expr (treeop0, subtarget, VOIDmode,
7827 			     EXPAND_SUM);
7828 
7829 	  if (!REG_P (op0))
7830 	    op0 = force_operand (op0, NULL_RTX);
7831 	  if (!REG_P (op0))
7832 	    op0 = copy_to_mode_reg (mode, op0);
7833 
7834 	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7835 			       gen_int_mode (tree_low_cst (exp1, 0),
7836 					     TYPE_MODE (TREE_TYPE (exp1)))));
7837 	}
7838 
7839       if (modifier == EXPAND_STACK_PARM)
7840 	target = 0;
7841 
7842       /* Check for multiplying things that have been extended
7843 	 from a narrower type.  If this machine supports multiplying
7844 	 in that narrower type with a result in the desired type,
7845 	 do it that way, and avoid the explicit type-conversion.  */
7846 
7847       subexp0 = treeop0;
7848       subexp1 = treeop1;
7849       subexp0_def = get_def_for_expr (subexp0, NOP_EXPR);
7850       subexp1_def = get_def_for_expr (subexp1, NOP_EXPR);
7851       top0 = top1 = NULL_TREE;
7852 
7853       /* First, check if we have a multiplication of one signed and one
7854 	 unsigned operand.  */
7855       if (subexp0_def
7856 	  && (top0 = gimple_assign_rhs1 (subexp0_def))
7857 	  && subexp1_def
7858 	  && (top1 = gimple_assign_rhs1 (subexp1_def))
7859 	  && TREE_CODE (type) == INTEGER_TYPE
7860 	  && (TYPE_PRECISION (TREE_TYPE (top0))
7861 	      < TYPE_PRECISION (TREE_TYPE (subexp0)))
7862 	  && (TYPE_PRECISION (TREE_TYPE (top0))
7863 	      == TYPE_PRECISION (TREE_TYPE (top1)))
7864 	  && (TYPE_UNSIGNED (TREE_TYPE (top0))
7865 	      != TYPE_UNSIGNED (TREE_TYPE (top1))))
7866 	{
7867 	  enum machine_mode innermode
7868 	    = TYPE_MODE (TREE_TYPE (top0));
7869 	  this_optab = usmul_widen_optab;
7870 	  if (mode == GET_MODE_WIDER_MODE (innermode))
7871 	    {
7872 	      if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
7873 		{
7874 		  if (TYPE_UNSIGNED (TREE_TYPE (top0)))
7875 		    expand_operands (top0, top1, NULL_RTX, &op0, &op1,
7876 				     EXPAND_NORMAL);
7877 		  else
7878 		    expand_operands (top0, top1, NULL_RTX, &op1, &op0,
7879 				     EXPAND_NORMAL);
7880 
7881 		  goto binop3;
7882 		}
7883 	    }
7884 	}
7885       /* Check for a multiplication with matching signedness.  If
7886 	 valid, TOP0 and TOP1 were set in the previous if
7887 	 condition.  */
7888       else if (top0
7889 	  && TREE_CODE (type) == INTEGER_TYPE
7890 	  && (TYPE_PRECISION (TREE_TYPE (top0))
7891 	      < TYPE_PRECISION (TREE_TYPE (subexp0)))
7892 	  && ((TREE_CODE (subexp1) == INTEGER_CST
7893 	       && int_fits_type_p (subexp1, TREE_TYPE (top0))
7894 	       /* Don't use a widening multiply if a shift will do.  */
7895 	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (subexp1)))
7896 		    > HOST_BITS_PER_WIDE_INT)
7897 		   || exact_log2 (TREE_INT_CST_LOW (subexp1)) < 0))
7898 	      ||
7899 	      (top1
7900 	       && (TYPE_PRECISION (TREE_TYPE (top1))
7901 		   == TYPE_PRECISION (TREE_TYPE (top0))
7902 	       /* If both operands are extended, they must either both
7903 		  be zero-extended or both be sign-extended.  */
7904 	       && (TYPE_UNSIGNED (TREE_TYPE (top1))
7905 		   == TYPE_UNSIGNED (TREE_TYPE (top0)))))))
7906 	{
7907 	  tree op0type = TREE_TYPE (top0);
7908 	  enum machine_mode innermode = TYPE_MODE (op0type);
7909 	  bool zextend_p = TYPE_UNSIGNED (op0type);
7910 	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7911 	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7912 
7913 	  if (mode == GET_MODE_2XWIDER_MODE (innermode))
7914 	    {
7915 	      if (optab_handler (this_optab, mode)->insn_code != CODE_FOR_nothing)
7916 		{
7917 		  if (TREE_CODE (subexp1) == INTEGER_CST)
7918 		    expand_operands (top0, subexp1, NULL_RTX, &op0, &op1,
7919 				     EXPAND_NORMAL);
7920 		  else
7921 		    expand_operands (top0, top1, NULL_RTX, &op0, &op1,
7922 				     EXPAND_NORMAL);
7923 		  goto binop3;
7924 		}
7925 	      else if (optab_handler (other_optab, mode)->insn_code != CODE_FOR_nothing
7926 		       && innermode == word_mode)
7927 		{
7928 		  rtx htem, hipart;
7929 		  op0 = expand_normal (top0);
7930 		  if (TREE_CODE (subexp1) == INTEGER_CST)
7931 		    op1 = convert_modes (innermode, mode,
7932 					 expand_normal (subexp1), unsignedp);
7933 		  else
7934 		    op1 = expand_normal (top1);
7935 		  temp = expand_binop (mode, other_optab, op0, op1, target,
7936 				       unsignedp, OPTAB_LIB_WIDEN);
7937 		  hipart = gen_highpart (innermode, temp);
7938 		  htem = expand_mult_highpart_adjust (innermode, hipart,
7939 						      op0, op1, hipart,
7940 						      zextend_p);
7941 		  if (htem != hipart)
7942 		    emit_move_insn (hipart, htem);
7943 		  return REDUCE_BIT_FIELD (temp);
7944 		}
7945 	    }
7946 	}
7947       expand_operands (subexp0, subexp1, subtarget, &op0, &op1, EXPAND_NORMAL);
7948       return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
7949 
7950     case TRUNC_DIV_EXPR:
7951     case FLOOR_DIV_EXPR:
7952     case CEIL_DIV_EXPR:
7953     case ROUND_DIV_EXPR:
7954     case EXACT_DIV_EXPR:
7955       /* If this is a fixed-point operation, then we cannot use the code
7956 	 below because "expand_divmod" doesn't support sat/no-sat fixed-point
7957          divisions.   */
7958       if (ALL_FIXED_POINT_MODE_P (mode))
7959 	goto binop;
7960 
7961       if (modifier == EXPAND_STACK_PARM)
7962 	target = 0;
7963       /* Possible optimization: compute the dividend with EXPAND_SUM
7964 	 then if the divisor is constant can optimize the case
7965 	 where some terms of the dividend have coeffs divisible by it.  */
7966       expand_operands (treeop0, treeop1,
7967 		       subtarget, &op0, &op1, EXPAND_NORMAL);
7968       return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7969 
7970     case RDIV_EXPR:
7971       goto binop;
7972 
7973     case TRUNC_MOD_EXPR:
7974     case FLOOR_MOD_EXPR:
7975     case CEIL_MOD_EXPR:
7976     case ROUND_MOD_EXPR:
7977       if (modifier == EXPAND_STACK_PARM)
7978 	target = 0;
7979       expand_operands (treeop0, treeop1,
7980 		       subtarget, &op0, &op1, EXPAND_NORMAL);
7981       return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7982 
7983     case FIXED_CONVERT_EXPR:
7984       op0 = expand_normal (treeop0);
7985       if (target == 0 || modifier == EXPAND_STACK_PARM)
7986 	target = gen_reg_rtx (mode);
7987 
7988       if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
7989 	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
7990           || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
7991 	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
7992       else
7993 	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
7994       return target;
7995 
7996     case FIX_TRUNC_EXPR:
7997       op0 = expand_normal (treeop0);
7998       if (target == 0 || modifier == EXPAND_STACK_PARM)
7999 	target = gen_reg_rtx (mode);
8000       expand_fix (target, op0, unsignedp);
8001       return target;
8002 
8003     case FLOAT_EXPR:
8004       op0 = expand_normal (treeop0);
8005       if (target == 0 || modifier == EXPAND_STACK_PARM)
8006 	target = gen_reg_rtx (mode);
8007       /* expand_float can't figure out what to do if FROM has VOIDmode.
8008 	 So give it the correct mode.  With -O, cse will optimize this.  */
8009       if (GET_MODE (op0) == VOIDmode)
8010 	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8011 				op0);
8012       expand_float (target, op0,
8013 		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8014       return target;
8015 
8016     case NEGATE_EXPR:
8017       op0 = expand_expr (treeop0, subtarget,
8018 			 VOIDmode, EXPAND_NORMAL);
8019       if (modifier == EXPAND_STACK_PARM)
8020 	target = 0;
8021       temp = expand_unop (mode,
8022       			  optab_for_tree_code (NEGATE_EXPR, type,
8023 					       optab_default),
8024 			  op0, target, 0);
8025       gcc_assert (temp);
8026       return REDUCE_BIT_FIELD (temp);
8027 
8028     case ABS_EXPR:
8029       op0 = expand_expr (treeop0, subtarget,
8030 			 VOIDmode, EXPAND_NORMAL);
8031       if (modifier == EXPAND_STACK_PARM)
8032 	target = 0;
8033 
8034       /* ABS_EXPR is not valid for complex arguments.  */
8035       gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8036 		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8037 
8038       /* Unsigned abs is simply the operand.  Testing here means we don't
8039 	 risk generating incorrect code below.  */
8040       if (TYPE_UNSIGNED (type))
8041 	return op0;
8042 
8043       return expand_abs (mode, op0, target, unsignedp,
8044 			 safe_from_p (target, treeop0, 1));
8045 
8046     case MAX_EXPR:
8047     case MIN_EXPR:
8048       target = original_target;
8049       if (target == 0
8050 	  || modifier == EXPAND_STACK_PARM
8051 	  || (MEM_P (target) && MEM_VOLATILE_P (target))
8052 	  || GET_MODE (target) != mode
8053 	  || (REG_P (target)
8054 	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
8055 	target = gen_reg_rtx (mode);
8056       expand_operands (treeop0, treeop1,
8057 		       target, &op0, &op1, EXPAND_NORMAL);
8058 
8059       /* First try to do it with a special MIN or MAX instruction.
8060 	 If that does not win, use a conditional jump to select the proper
8061 	 value.  */
8062       this_optab = optab_for_tree_code (code, type, optab_default);
8063       temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8064 			   OPTAB_WIDEN);
8065       if (temp != 0)
8066 	return temp;
8067 
8068       /* At this point, a MEM target is no longer useful; we will get better
8069 	 code without it.  */
8070 
8071       if (! REG_P (target))
8072 	target = gen_reg_rtx (mode);
8073 
8074       /* If op1 was placed in target, swap op0 and op1.  */
8075       if (target != op0 && target == op1)
8076 	{
8077 	  temp = op0;
8078 	  op0 = op1;
8079 	  op1 = temp;
8080 	}
8081 
8082       /* We generate better code and avoid problems with op1 mentioning
8083 	 target by forcing op1 into a pseudo if it isn't a constant.  */
8084       if (! CONSTANT_P (op1))
8085 	op1 = force_reg (mode, op1);
8086 
8087       {
8088 	enum rtx_code comparison_code;
8089 	rtx cmpop1 = op1;
8090 
8091 	if (code == MAX_EXPR)
8092 	  comparison_code = unsignedp ? GEU : GE;
8093 	else
8094 	  comparison_code = unsignedp ? LEU : LE;
8095 
8096 	/* Canonicalize to comparisons against 0.  */
8097 	if (op1 == const1_rtx)
8098 	  {
8099 	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8100 	       or (a != 0 ? a : 1) for unsigned.
8101 	       For MIN we are safe converting (a <= 1 ? a : 1)
8102 	       into (a <= 0 ? a : 1)  */
8103 	    cmpop1 = const0_rtx;
8104 	    if (code == MAX_EXPR)
8105 	      comparison_code = unsignedp ? NE : GT;
8106 	  }
8107 	if (op1 == constm1_rtx && !unsignedp)
8108 	  {
8109 	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8110 	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8111 	    cmpop1 = const0_rtx;
8112 	    if (code == MIN_EXPR)
8113 	      comparison_code = LT;
8114 	  }
8115 #ifdef HAVE_conditional_move
8116 	/* Use a conditional move if possible.  */
8117 	if (can_conditionally_move_p (mode))
8118 	  {
8119 	    rtx insn;
8120 
8121 	    /* ??? Same problem as in expmed.c: emit_conditional_move
8122 	       forces a stack adjustment via compare_from_rtx, and we
8123 	       lose the stack adjustment if the sequence we are about
8124 	       to create is discarded.  */
8125 	    do_pending_stack_adjust ();
8126 
8127 	    start_sequence ();
8128 
8129 	    /* Try to emit the conditional move.  */
8130 	    insn = emit_conditional_move (target, comparison_code,
8131 					  op0, cmpop1, mode,
8132 					  op0, op1, mode,
8133 					  unsignedp);
8134 
8135 	    /* If we could do the conditional move, emit the sequence,
8136 	       and return.  */
8137 	    if (insn)
8138 	      {
8139 		rtx seq = get_insns ();
8140 		end_sequence ();
8141 		emit_insn (seq);
8142 		return target;
8143 	      }
8144 
8145 	    /* Otherwise discard the sequence and fall back to code with
8146 	       branches.  */
8147 	    end_sequence ();
8148 	  }
8149 #endif
8150 	if (target != op0)
8151 	  emit_move_insn (target, op0);
8152 
8153 	temp = gen_label_rtx ();
8154 	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8155 				 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8156 				 -1);
8157       }
8158       emit_move_insn (target, op1);
8159       emit_label (temp);
8160       return target;
8161 
8162     case BIT_NOT_EXPR:
8163       op0 = expand_expr (treeop0, subtarget,
8164 			 VOIDmode, EXPAND_NORMAL);
8165       if (modifier == EXPAND_STACK_PARM)
8166 	target = 0;
8167       temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8168       gcc_assert (temp);
8169       return temp;
8170 
8171       /* ??? Can optimize bitwise operations with one arg constant.
8172 	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8173 	 and (a bitwise1 b) bitwise2 b (etc)
8174 	 but that is probably not worth while.  */
8175 
8176       /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
8177 	 boolean values when we want in all cases to compute both of them.  In
8178 	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8179 	 as actual zero-or-1 values and then bitwise anding.  In cases where
8180 	 there cannot be any side effects, better code would be made by
8181 	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8182 	 how to recognize those cases.  */
8183 
8184     case TRUTH_AND_EXPR:
8185       code = BIT_AND_EXPR;
8186     case BIT_AND_EXPR:
8187       goto binop;
8188 
8189     case TRUTH_OR_EXPR:
8190       code = BIT_IOR_EXPR;
8191     case BIT_IOR_EXPR:
8192       goto binop;
8193 
8194     case TRUTH_XOR_EXPR:
8195       code = BIT_XOR_EXPR;
8196     case BIT_XOR_EXPR:
8197       goto binop;
8198 
8199     case LROTATE_EXPR:
8200     case RROTATE_EXPR:
8201       gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8202 		  || (GET_MODE_PRECISION (TYPE_MODE (type))
8203 		      == TYPE_PRECISION (type)));
8204       /* fall through */
8205 
8206     case LSHIFT_EXPR:
8207     case RSHIFT_EXPR:
8208       /* If this is a fixed-point operation, then we cannot use the code
8209 	 below because "expand_shift" doesn't support sat/no-sat fixed-point
8210          shifts.   */
8211       if (ALL_FIXED_POINT_MODE_P (mode))
8212 	goto binop;
8213 
8214       if (! safe_from_p (subtarget, treeop1, 1))
8215 	subtarget = 0;
8216       if (modifier == EXPAND_STACK_PARM)
8217 	target = 0;
8218       op0 = expand_expr (treeop0, subtarget,
8219 			 VOIDmode, EXPAND_NORMAL);
8220       temp = expand_shift (code, mode, op0, treeop1, target,
8221 			   unsignedp);
8222       if (code == LSHIFT_EXPR)
8223 	temp = REDUCE_BIT_FIELD (temp);
8224       return temp;
8225 
8226       /* Could determine the answer when only additive constants differ.  Also,
8227 	 the addition of one can be handled by changing the condition.  */
8228     case LT_EXPR:
8229     case LE_EXPR:
8230     case GT_EXPR:
8231     case GE_EXPR:
8232     case EQ_EXPR:
8233     case NE_EXPR:
8234     case UNORDERED_EXPR:
8235     case ORDERED_EXPR:
8236     case UNLT_EXPR:
8237     case UNLE_EXPR:
8238     case UNGT_EXPR:
8239     case UNGE_EXPR:
8240     case UNEQ_EXPR:
8241     case LTGT_EXPR:
8242       temp = do_store_flag (ops,
8243 			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8244 			    tmode != VOIDmode ? tmode : mode);
8245       if (temp)
8246 	return temp;
8247 
8248       /* Use a compare and a jump for BLKmode comparisons, or for function
8249 	 type comparisons is HAVE_canonicalize_funcptr_for_compare.  */
8250 
8251       if ((target == 0
8252 	   || modifier == EXPAND_STACK_PARM
8253 	   || ! safe_from_p (target, treeop0, 1)
8254 	   || ! safe_from_p (target, treeop1, 1)
8255 	   /* Make sure we don't have a hard reg (such as function's return
8256 	      value) live across basic blocks, if not optimizing.  */
8257 	   || (!optimize && REG_P (target)
8258 	       && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8259 	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8260 
8261       emit_move_insn (target, const0_rtx);
8262 
8263       op1 = gen_label_rtx ();
8264       jumpifnot_1 (code, treeop0, treeop1, op1, -1);
8265 
8266       if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
8267 	emit_move_insn (target, constm1_rtx);
8268       else
8269 	emit_move_insn (target, const1_rtx);
8270 
8271       emit_label (op1);
8272       return target;
8273 
8274     case TRUTH_NOT_EXPR:
8275       if (modifier == EXPAND_STACK_PARM)
8276 	target = 0;
8277       op0 = expand_expr (treeop0, target,
8278 			 VOIDmode, EXPAND_NORMAL);
8279       /* The parser is careful to generate TRUTH_NOT_EXPR
8280 	 only with operands that are always zero or one.  */
8281       temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8282 			   target, 1, OPTAB_LIB_WIDEN);
8283       gcc_assert (temp);
8284       return temp;
8285 
8286     case COMPLEX_EXPR:
8287       /* Get the rtx code of the operands.  */
8288       op0 = expand_normal (treeop0);
8289       op1 = expand_normal (treeop1);
8290 
8291       if (!target)
8292 	target = gen_reg_rtx (TYPE_MODE (type));
8293 
8294       /* Move the real (op0) and imaginary (op1) parts to their location.  */
8295       write_complex_part (target, op0, false);
8296       write_complex_part (target, op1, true);
8297 
8298       return target;
8299 
8300     case WIDEN_SUM_EXPR:
8301       {
8302         tree oprnd0 = treeop0;
8303         tree oprnd1 = treeop1;
8304 
8305         expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8306         target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
8307                                             target, unsignedp);
8308         return target;
8309       }
8310 
8311     case REDUC_MAX_EXPR:
8312     case REDUC_MIN_EXPR:
8313     case REDUC_PLUS_EXPR:
8314       {
8315         op0 = expand_normal (treeop0);
8316         this_optab = optab_for_tree_code (code, type, optab_default);
8317         temp = expand_unop (mode, this_optab, op0, target, unsignedp);
8318         gcc_assert (temp);
8319         return temp;
8320       }
8321 
8322     case VEC_EXTRACT_EVEN_EXPR:
8323     case VEC_EXTRACT_ODD_EXPR:
8324       {
8325         expand_operands (treeop0,  treeop1,
8326                          NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8327         this_optab = optab_for_tree_code (code, type, optab_default);
8328         temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8329                              OPTAB_WIDEN);
8330         gcc_assert (temp);
8331         return temp;
8332       }
8333 
8334     case VEC_INTERLEAVE_HIGH_EXPR:
8335     case VEC_INTERLEAVE_LOW_EXPR:
8336       {
8337         expand_operands (treeop0,  treeop1,
8338                          NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8339         this_optab = optab_for_tree_code (code, type, optab_default);
8340         temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8341                              OPTAB_WIDEN);
8342         gcc_assert (temp);
8343         return temp;
8344       }
8345 
8346     case VEC_LSHIFT_EXPR:
8347     case VEC_RSHIFT_EXPR:
8348       {
8349 	target = expand_vec_shift_expr (ops, target);
8350 	return target;
8351       }
8352 
8353     case VEC_UNPACK_HI_EXPR:
8354     case VEC_UNPACK_LO_EXPR:
8355       {
8356 	op0 = expand_normal (treeop0);
8357 	this_optab = optab_for_tree_code (code, type, optab_default);
8358 	temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
8359 					  target, unsignedp);
8360 	gcc_assert (temp);
8361 	return temp;
8362       }
8363 
8364     case VEC_UNPACK_FLOAT_HI_EXPR:
8365     case VEC_UNPACK_FLOAT_LO_EXPR:
8366       {
8367 	op0 = expand_normal (treeop0);
8368 	/* The signedness is determined from input operand.  */
8369 	this_optab = optab_for_tree_code (code,
8370 					  TREE_TYPE (treeop0),
8371 					  optab_default);
8372 	temp = expand_widen_pattern_expr
8373 	  (ops, op0, NULL_RTX, NULL_RTX,
8374 	   target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8375 
8376 	gcc_assert (temp);
8377 	return temp;
8378       }
8379 
8380     case VEC_WIDEN_MULT_HI_EXPR:
8381     case VEC_WIDEN_MULT_LO_EXPR:
8382       {
8383 	tree oprnd0 = treeop0;
8384 	tree oprnd1 = treeop1;
8385 
8386 	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8387 	target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
8388 					    target, unsignedp);
8389 	gcc_assert (target);
8390 	return target;
8391       }
8392 
8393     case VEC_PACK_TRUNC_EXPR:
8394     case VEC_PACK_SAT_EXPR:
8395     case VEC_PACK_FIX_TRUNC_EXPR:
8396       mode = TYPE_MODE (TREE_TYPE (treeop0));
8397       goto binop;
8398 
8399     default:
8400       gcc_unreachable ();
8401     }
8402 
8403   /* Here to do an ordinary binary operator.  */
8404  binop:
8405   expand_operands (treeop0, treeop1,
8406 		   subtarget, &op0, &op1, EXPAND_NORMAL);
8407  binop2:
8408   this_optab = optab_for_tree_code (code, type, optab_default);
8409  binop3:
8410   if (modifier == EXPAND_STACK_PARM)
8411     target = 0;
8412   temp = expand_binop (mode, this_optab, op0, op1, target,
8413 		       unsignedp, OPTAB_LIB_WIDEN);
8414   gcc_assert (temp);
8415   return REDUCE_BIT_FIELD (temp);
8416 }
8417 #undef REDUCE_BIT_FIELD
8418 
8419 rtx
8420 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
8421 		    enum expand_modifier modifier, rtx *alt_rtl)
8422 {
8423   rtx op0, op1, temp, decl_rtl;
8424   tree type;
8425   int unsignedp;
8426   enum machine_mode mode;
8427   enum tree_code code = TREE_CODE (exp);
8428   optab this_optab;
8429   rtx subtarget, original_target;
8430   int ignore;
8431   tree context;
8432   bool reduce_bit_field;
8433   location_t loc = EXPR_LOCATION (exp);
8434   struct separate_ops ops;
8435   tree treeop0, treeop1, treeop2;
8436 
8437   type = TREE_TYPE (exp);
8438   mode = TYPE_MODE (type);
8439   unsignedp = TYPE_UNSIGNED (type);
8440 
8441   treeop0 = treeop1 = treeop2 = NULL_TREE;
8442   if (!VL_EXP_CLASS_P (exp))
8443     switch (TREE_CODE_LENGTH (code))
8444       {
8445 	default:
8446 	case 3: treeop2 = TREE_OPERAND (exp, 2);
8447 	case 2: treeop1 = TREE_OPERAND (exp, 1);
8448 	case 1: treeop0 = TREE_OPERAND (exp, 0);
8449 	case 0: break;
8450       }
8451   ops.code = code;
8452   ops.type = type;
8453   ops.op0 = treeop0;
8454   ops.op1 = treeop1;
8455   ops.op2 = treeop2;
8456   ops.location = loc;
8457 
8458   ignore = (target == const0_rtx
8459 	    || ((CONVERT_EXPR_CODE_P (code)
8460 		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8461 		&& TREE_CODE (type) == VOID_TYPE));
8462 
8463   /* An operation in what may be a bit-field type needs the
8464      result to be reduced to the precision of the bit-field type,
8465      which is narrower than that of the type's mode.  */
8466   reduce_bit_field = (!ignore
8467 		      && TREE_CODE (type) == INTEGER_TYPE
8468 		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8469 
8470   /* If we are going to ignore this result, we need only do something
8471      if there is a side-effect somewhere in the expression.  If there
8472      is, short-circuit the most common cases here.  Note that we must
8473      not call expand_expr with anything but const0_rtx in case this
8474      is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
8475 
8476   if (ignore)
8477     {
8478       if (! TREE_SIDE_EFFECTS (exp))
8479 	return const0_rtx;
8480 
8481       /* Ensure we reference a volatile object even if value is ignored, but
8482 	 don't do this if all we are doing is taking its address.  */
8483       if (TREE_THIS_VOLATILE (exp)
8484 	  && TREE_CODE (exp) != FUNCTION_DECL
8485 	  && mode != VOIDmode && mode != BLKmode
8486 	  && modifier != EXPAND_CONST_ADDRESS)
8487 	{
8488 	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
8489 	  if (MEM_P (temp))
8490 	    temp = copy_to_reg (temp);
8491 	  return const0_rtx;
8492 	}
8493 
8494       if (TREE_CODE_CLASS (code) == tcc_unary
8495 	  || code == COMPONENT_REF || code == INDIRECT_REF)
8496 	return expand_expr (treeop0, const0_rtx, VOIDmode,
8497 			    modifier);
8498 
8499       else if (TREE_CODE_CLASS (code) == tcc_binary
8500 	       || TREE_CODE_CLASS (code) == tcc_comparison
8501 	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
8502 	{
8503 	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8504 	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8505 	  return const0_rtx;
8506 	}
8507       else if (code == BIT_FIELD_REF)
8508 	{
8509 	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
8510 	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
8511 	  expand_expr (treeop2, const0_rtx, VOIDmode, modifier);
8512 	  return const0_rtx;
8513 	}
8514 
8515       target = 0;
8516     }
8517 
8518   if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8519     target = 0;
8520 
8521   /* Use subtarget as the target for operand 0 of a binary operation.  */
8522   subtarget = get_subtarget (target);
8523   original_target = target;
8524 
8525   switch (code)
8526     {
8527     case LABEL_DECL:
8528       {
8529 	tree function = decl_function_context (exp);
8530 
8531 	temp = label_rtx (exp);
8532 	temp = gen_rtx_LABEL_REF (Pmode, temp);
8533 
8534 	if (function != current_function_decl
8535 	    && function != 0)
8536 	  LABEL_REF_NONLOCAL_P (temp) = 1;
8537 
8538 	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
8539 	return temp;
8540       }
8541 
8542     case SSA_NAME:
8543       /* ??? ivopts calls expander, without any preparation from
8544          out-of-ssa.  So fake instructions as if this was an access to the
8545 	 base variable.  This unnecessarily allocates a pseudo, see how we can
8546 	 reuse it, if partition base vars have it set already.  */
8547       if (!currently_expanding_to_rtl)
8548 	return expand_expr_real_1 (SSA_NAME_VAR (exp), target, tmode, modifier, NULL);
8549       {
8550 	gimple g = get_gimple_for_ssa_name (exp);
8551 	if (g)
8552 	  return expand_expr_real (gimple_assign_rhs_to_tree (g), target,
8553 				   tmode, modifier, NULL);
8554       }
8555       decl_rtl = get_rtx_for_ssa_name (exp);
8556       exp = SSA_NAME_VAR (exp);
8557       goto expand_decl_rtl;
8558 
8559     case PARM_DECL:
8560     case VAR_DECL:
8561       /* If a static var's type was incomplete when the decl was written,
8562 	 but the type is complete now, lay out the decl now.  */
8563       if (DECL_SIZE (exp) == 0
8564 	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
8565 	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
8566 	layout_decl (exp, 0);
8567 
8568       /* TLS emulation hook - replace __thread vars with
8569 	 *__emutls_get_address (&_emutls.var).  */
8570       if (! targetm.have_tls
8571 	  && TREE_CODE (exp) == VAR_DECL
8572 	  && DECL_THREAD_LOCAL_P (exp))
8573 	{
8574 	  exp = build_fold_indirect_ref_loc (loc, emutls_var_address (exp));
8575 	  return expand_expr_real_1 (exp, target, tmode, modifier, NULL);
8576 	}
8577 
8578       /* ... fall through ...  */
8579 
8580     case FUNCTION_DECL:
8581     case RESULT_DECL:
8582       decl_rtl = DECL_RTL (exp);
8583     expand_decl_rtl:
8584       gcc_assert (decl_rtl);
8585       decl_rtl = copy_rtx (decl_rtl);
8586 
8587       /* Ensure variable marked as used even if it doesn't go through
8588 	 a parser.  If it hasn't be used yet, write out an external
8589 	 definition.  */
8590       if (! TREE_USED (exp))
8591 	{
8592 	  assemble_external (exp);
8593 	  TREE_USED (exp) = 1;
8594 	}
8595 
8596       /* Show we haven't gotten RTL for this yet.  */
8597       temp = 0;
8598 
8599       /* Variables inherited from containing functions should have
8600 	 been lowered by this point.  */
8601       context = decl_function_context (exp);
8602       gcc_assert (!context
8603 		  || context == current_function_decl
8604 		  || TREE_STATIC (exp)
8605 		  /* ??? C++ creates functions that are not TREE_STATIC.  */
8606 		  || TREE_CODE (exp) == FUNCTION_DECL);
8607 
8608       /* This is the case of an array whose size is to be determined
8609 	 from its initializer, while the initializer is still being parsed.
8610 	 See expand_decl.  */
8611 
8612       if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
8613 	temp = validize_mem (decl_rtl);
8614 
8615       /* If DECL_RTL is memory, we are in the normal case and the
8616 	 address is not valid, get the address into a register.  */
8617 
8618       else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
8619 	{
8620 	  if (alt_rtl)
8621 	    *alt_rtl = decl_rtl;
8622 	  decl_rtl = use_anchored_address (decl_rtl);
8623 	  if (modifier != EXPAND_CONST_ADDRESS
8624 	      && modifier != EXPAND_SUM
8625 	      && !memory_address_addr_space_p (DECL_MODE (exp),
8626 					       XEXP (decl_rtl, 0),
8627 					       MEM_ADDR_SPACE (decl_rtl)))
8628 	    temp = replace_equiv_address (decl_rtl,
8629 					  copy_rtx (XEXP (decl_rtl, 0)));
8630 	}
8631 
8632       /* If we got something, return it.  But first, set the alignment
8633 	 if the address is a register.  */
8634       if (temp != 0)
8635 	{
8636 	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
8637 	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
8638 
8639 	  return temp;
8640 	}
8641 
8642       /* If the mode of DECL_RTL does not match that of the decl,
8643 	 there are two cases: we are dealing with a BLKmode value
8644 	 that is returned in a register, or we are dealing with
8645 	 a promoted value.  In the latter case, return a SUBREG
8646 	 of the wanted mode, but mark it so that we know that it
8647 	 was already extended.  */
8648 
8649       if (REG_P (decl_rtl)
8650 	  && DECL_MODE (exp) != BLKmode
8651 	  && GET_MODE (decl_rtl) != DECL_MODE (exp))
8652 	{
8653 	  enum machine_mode pmode;
8654 
8655 	  /* Get the signedness used for this variable.  Ensure we get the
8656 	     same mode we got when the variable was declared.  */
8657 	  pmode = promote_decl_mode (exp, &unsignedp);
8658 	  gcc_assert (GET_MODE (decl_rtl) == pmode);
8659 
8660 	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
8661 	  SUBREG_PROMOTED_VAR_P (temp) = 1;
8662 	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
8663 	  return temp;
8664 	}
8665 
8666       return decl_rtl;
8667 
8668     case INTEGER_CST:
8669       temp = immed_double_const (TREE_INT_CST_LOW (exp),
8670 				 TREE_INT_CST_HIGH (exp), mode);
8671 
8672       return temp;
8673 
8674     case VECTOR_CST:
8675       {
8676 	tree tmp = NULL_TREE;
8677 	if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
8678 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
8679 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
8680 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
8681 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
8682 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
8683 	  return const_vector_from_tree (exp);
8684 	if (GET_MODE_CLASS (mode) == MODE_INT)
8685 	  {
8686 	    tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
8687 	    if (type_for_mode)
8688 	      tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
8689 	  }
8690 	if (!tmp)
8691 	  tmp = build_constructor_from_list (type,
8692 					     TREE_VECTOR_CST_ELTS (exp));
8693 	return expand_expr (tmp, ignore ? const0_rtx : target,
8694 			    tmode, modifier);
8695       }
8696 
8697     case CONST_DECL:
8698       return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
8699 
8700     case REAL_CST:
8701       /* If optimized, generate immediate CONST_DOUBLE
8702 	 which will be turned into memory by reload if necessary.
8703 
8704 	 We used to force a register so that loop.c could see it.  But
8705 	 this does not allow gen_* patterns to perform optimizations with
8706 	 the constants.  It also produces two insns in cases like "x = 1.0;".
8707 	 On most machines, floating-point constants are not permitted in
8708 	 many insns, so we'd end up copying it to a register in any case.
8709 
8710 	 Now, we do the copying in expand_binop, if appropriate.  */
8711       return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
8712 					   TYPE_MODE (TREE_TYPE (exp)));
8713 
8714     case FIXED_CST:
8715       return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
8716 					   TYPE_MODE (TREE_TYPE (exp)));
8717 
8718     case COMPLEX_CST:
8719       /* Handle evaluating a complex constant in a CONCAT target.  */
8720       if (original_target && GET_CODE (original_target) == CONCAT)
8721 	{
8722 	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8723 	  rtx rtarg, itarg;
8724 
8725 	  rtarg = XEXP (original_target, 0);
8726 	  itarg = XEXP (original_target, 1);
8727 
8728 	  /* Move the real and imaginary parts separately.  */
8729 	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
8730 	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
8731 
8732 	  if (op0 != rtarg)
8733 	    emit_move_insn (rtarg, op0);
8734 	  if (op1 != itarg)
8735 	    emit_move_insn (itarg, op1);
8736 
8737 	  return original_target;
8738 	}
8739 
8740       /* ... fall through ...  */
8741 
8742     case STRING_CST:
8743       temp = expand_expr_constant (exp, 1, modifier);
8744 
8745       /* temp contains a constant address.
8746 	 On RISC machines where a constant address isn't valid,
8747 	 make some insns to get that address into a register.  */
8748       if (modifier != EXPAND_CONST_ADDRESS
8749 	  && modifier != EXPAND_INITIALIZER
8750 	  && modifier != EXPAND_SUM
8751 	  && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
8752 					    MEM_ADDR_SPACE (temp)))
8753 	return replace_equiv_address (temp,
8754 				      copy_rtx (XEXP (temp, 0)));
8755       return temp;
8756 
8757     case SAVE_EXPR:
8758       {
8759 	tree val = treeop0;
8760 	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
8761 
8762 	if (!SAVE_EXPR_RESOLVED_P (exp))
8763 	  {
8764 	    /* We can indeed still hit this case, typically via builtin
8765 	       expanders calling save_expr immediately before expanding
8766 	       something.  Assume this means that we only have to deal
8767 	       with non-BLKmode values.  */
8768 	    gcc_assert (GET_MODE (ret) != BLKmode);
8769 
8770 	    val = build_decl (EXPR_LOCATION (exp),
8771 			      VAR_DECL, NULL, TREE_TYPE (exp));
8772 	    DECL_ARTIFICIAL (val) = 1;
8773 	    DECL_IGNORED_P (val) = 1;
8774 	    treeop0 = val;
8775 	    TREE_OPERAND (exp, 0) = treeop0;
8776 	    SAVE_EXPR_RESOLVED_P (exp) = 1;
8777 
8778 	    if (!CONSTANT_P (ret))
8779 	      ret = copy_to_reg (ret);
8780 	    SET_DECL_RTL (val, ret);
8781 	  }
8782 
8783         return ret;
8784       }
8785 
8786 
8787     case CONSTRUCTOR:
8788       /* If we don't need the result, just ensure we evaluate any
8789 	 subexpressions.  */
8790       if (ignore)
8791 	{
8792 	  unsigned HOST_WIDE_INT idx;
8793 	  tree value;
8794 
8795 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
8796 	    expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
8797 
8798 	  return const0_rtx;
8799 	}
8800 
8801       return expand_constructor (exp, target, modifier, false);
8802 
8803     case MISALIGNED_INDIRECT_REF:
8804     case ALIGN_INDIRECT_REF:
8805     case INDIRECT_REF:
8806       {
8807 	tree exp1 = treeop0;
8808 	addr_space_t as = ADDR_SPACE_GENERIC;
8809 	enum machine_mode address_mode = Pmode;
8810 
8811 	if (modifier != EXPAND_WRITE)
8812 	  {
8813 	    tree t;
8814 
8815 	    t = fold_read_from_constant_string (exp);
8816 	    if (t)
8817 	      return expand_expr (t, target, tmode, modifier);
8818 	  }
8819 
8820 	if (POINTER_TYPE_P (TREE_TYPE (exp1)))
8821 	  {
8822 	    as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp1)));
8823 	    address_mode = targetm.addr_space.address_mode (as);
8824 	  }
8825 
8826 	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
8827 	op0 = memory_address_addr_space (mode, op0, as);
8828 
8829 	if (code == ALIGN_INDIRECT_REF)
8830 	  {
8831 	    int align = TYPE_ALIGN_UNIT (type);
8832 	    op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align));
8833 	    op0 = memory_address_addr_space (mode, op0, as);
8834 	  }
8835 
8836 	temp = gen_rtx_MEM (mode, op0);
8837 
8838 	set_mem_attributes (temp, exp, 0);
8839 	set_mem_addr_space (temp, as);
8840 
8841 	/* Resolve the misalignment now, so that we don't have to remember
8842 	   to resolve it later.  Of course, this only works for reads.  */
8843 	if (code == MISALIGNED_INDIRECT_REF)
8844 	  {
8845 	    int icode;
8846 	    rtx reg, insn;
8847 
8848 	    gcc_assert (modifier == EXPAND_NORMAL
8849 			|| modifier == EXPAND_STACK_PARM);
8850 
8851 	    /* The vectorizer should have already checked the mode.  */
8852 	    icode = optab_handler (movmisalign_optab, mode)->insn_code;
8853 	    gcc_assert (icode != CODE_FOR_nothing);
8854 
8855 	    /* We've already validated the memory, and we're creating a
8856 	       new pseudo destination.  The predicates really can't fail.  */
8857 	    reg = gen_reg_rtx (mode);
8858 
8859 	    /* Nor can the insn generator.  */
8860 	    insn = GEN_FCN (icode) (reg, temp);
8861 	    emit_insn (insn);
8862 
8863 	    return reg;
8864 	  }
8865 
8866 	return temp;
8867       }
8868 
8869     case TARGET_MEM_REF:
8870       {
8871 	addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
8872 	struct mem_address addr;
8873 
8874 	get_address_description (exp, &addr);
8875 	op0 = addr_for_mem_ref (&addr, as, true);
8876 	op0 = memory_address_addr_space (mode, op0, as);
8877 	temp = gen_rtx_MEM (mode, op0);
8878 	set_mem_attributes (temp, TMR_ORIGINAL (exp), 0);
8879 	set_mem_addr_space (temp, as);
8880       }
8881       return temp;
8882 
8883     case ARRAY_REF:
8884 
8885       {
8886 	tree array = treeop0;
8887 	tree index = treeop1;
8888 
8889 	/* Fold an expression like: "foo"[2].
8890 	   This is not done in fold so it won't happen inside &.
8891 	   Don't fold if this is for wide characters since it's too
8892 	   difficult to do correctly and this is a very rare case.  */
8893 
8894 	if (modifier != EXPAND_CONST_ADDRESS
8895 	    && modifier != EXPAND_INITIALIZER
8896 	    && modifier != EXPAND_MEMORY)
8897 	  {
8898 	    tree t = fold_read_from_constant_string (exp);
8899 
8900 	    if (t)
8901 	      return expand_expr (t, target, tmode, modifier);
8902 	  }
8903 
8904 	/* If this is a constant index into a constant array,
8905 	   just get the value from the array.  Handle both the cases when
8906 	   we have an explicit constructor and when our operand is a variable
8907 	   that was declared const.  */
8908 
8909 	if (modifier != EXPAND_CONST_ADDRESS
8910 	    && modifier != EXPAND_INITIALIZER
8911 	    && modifier != EXPAND_MEMORY
8912 	    && TREE_CODE (array) == CONSTRUCTOR
8913 	    && ! TREE_SIDE_EFFECTS (array)
8914 	    && TREE_CODE (index) == INTEGER_CST)
8915 	  {
8916 	    unsigned HOST_WIDE_INT ix;
8917 	    tree field, value;
8918 
8919 	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
8920 				      field, value)
8921 	      if (tree_int_cst_equal (field, index))
8922 		{
8923 		  if (!TREE_SIDE_EFFECTS (value))
8924 		    return expand_expr (fold (value), target, tmode, modifier);
8925 		  break;
8926 		}
8927 	  }
8928 
8929 	else if (optimize >= 1
8930 		 && modifier != EXPAND_CONST_ADDRESS
8931 		 && modifier != EXPAND_INITIALIZER
8932 		 && modifier != EXPAND_MEMORY
8933 		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8934 		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8935 		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
8936 		 && targetm.binds_local_p (array))
8937 	  {
8938 	    if (TREE_CODE (index) == INTEGER_CST)
8939 	      {
8940 		tree init = DECL_INITIAL (array);
8941 
8942 		if (TREE_CODE (init) == CONSTRUCTOR)
8943 		  {
8944 		    unsigned HOST_WIDE_INT ix;
8945 		    tree field, value;
8946 
8947 		    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
8948 					      field, value)
8949 		      if (tree_int_cst_equal (field, index))
8950 			{
8951 			  if (TREE_SIDE_EFFECTS (value))
8952 			    break;
8953 
8954 			  if (TREE_CODE (value) == CONSTRUCTOR)
8955 			    {
8956 			      /* If VALUE is a CONSTRUCTOR, this
8957 				 optimization is only useful if
8958 				 this doesn't store the CONSTRUCTOR
8959 				 into memory.  If it does, it is more
8960 				 efficient to just load the data from
8961 				 the array directly.  */
8962 			      rtx ret = expand_constructor (value, target,
8963 							    modifier, true);
8964 			      if (ret == NULL_RTX)
8965 				break;
8966 			    }
8967 
8968 			  return expand_expr (fold (value), target, tmode,
8969 					      modifier);
8970 			}
8971 		  }
8972 		else if(TREE_CODE (init) == STRING_CST)
8973 		  {
8974 		    tree index1 = index;
8975 		    tree low_bound = array_ref_low_bound (exp);
8976 		    index1 = fold_convert_loc (loc, sizetype,
8977 					       treeop1);
8978 
8979 		    /* Optimize the special-case of a zero lower bound.
8980 
8981 		       We convert the low_bound to sizetype to avoid some problems
8982 		       with constant folding.  (E.g. suppose the lower bound is 1,
8983 		       and its mode is QI.  Without the conversion,l (ARRAY
8984 		       +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8985 		       +INDEX), which becomes (ARRAY+255+INDEX).  Opps!)  */
8986 
8987 		    if (! integer_zerop (low_bound))
8988 		      index1 = size_diffop_loc (loc, index1,
8989 					    fold_convert_loc (loc, sizetype,
8990 							      low_bound));
8991 
8992 		    if (0 > compare_tree_int (index1,
8993 					      TREE_STRING_LENGTH (init)))
8994 		      {
8995 			tree type = TREE_TYPE (TREE_TYPE (init));
8996 			enum machine_mode mode = TYPE_MODE (type);
8997 
8998 			if (GET_MODE_CLASS (mode) == MODE_INT
8999 			    && GET_MODE_SIZE (mode) == 1)
9000 			  return gen_int_mode (TREE_STRING_POINTER (init)
9001 					       [TREE_INT_CST_LOW (index1)],
9002 					       mode);
9003 		      }
9004 		  }
9005 	      }
9006 	  }
9007       }
9008       goto normal_inner_ref;
9009 
9010     case COMPONENT_REF:
9011       /* If the operand is a CONSTRUCTOR, we can just extract the
9012 	 appropriate field if it is present.  */
9013       if (TREE_CODE (treeop0) == CONSTRUCTOR)
9014 	{
9015 	  unsigned HOST_WIDE_INT idx;
9016 	  tree field, value;
9017 
9018 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
9019 				    idx, field, value)
9020 	    if (field == treeop1
9021 		/* We can normally use the value of the field in the
9022 		   CONSTRUCTOR.  However, if this is a bitfield in
9023 		   an integral mode that we can fit in a HOST_WIDE_INT,
9024 		   we must mask only the number of bits in the bitfield,
9025 		   since this is done implicitly by the constructor.  If
9026 		   the bitfield does not meet either of those conditions,
9027 		   we can't do this optimization.  */
9028 		&& (! DECL_BIT_FIELD (field)
9029 		    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
9030 			&& (GET_MODE_BITSIZE (DECL_MODE (field))
9031 			    <= HOST_BITS_PER_WIDE_INT))))
9032 	      {
9033 		if (DECL_BIT_FIELD (field)
9034 		    && modifier == EXPAND_STACK_PARM)
9035 		  target = 0;
9036 		op0 = expand_expr (value, target, tmode, modifier);
9037 		if (DECL_BIT_FIELD (field))
9038 		  {
9039 		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
9040 		    enum machine_mode imode = TYPE_MODE (TREE_TYPE (field));
9041 
9042 		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
9043 		      {
9044 			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
9045 			op0 = expand_and (imode, op0, op1, target);
9046 		      }
9047 		    else
9048 		      {
9049 			tree count
9050 			  = build_int_cst (NULL_TREE,
9051 					   GET_MODE_BITSIZE (imode) - bitsize);
9052 
9053 			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
9054 					    target, 0);
9055 			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
9056 					    target, 0);
9057 		      }
9058 		  }
9059 
9060 		return op0;
9061 	      }
9062 	}
9063       goto normal_inner_ref;
9064 
9065     case BIT_FIELD_REF:
9066     case ARRAY_RANGE_REF:
9067     normal_inner_ref:
9068       {
9069 	enum machine_mode mode1, mode2;
9070 	HOST_WIDE_INT bitsize, bitpos;
9071 	tree offset;
9072 	int volatilep = 0, must_force_mem;
9073 	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
9074 					&mode1, &unsignedp, &volatilep, true);
9075 	rtx orig_op0, memloc;
9076 
9077 	/* If we got back the original object, something is wrong.  Perhaps
9078 	   we are evaluating an expression too early.  In any event, don't
9079 	   infinitely recurse.  */
9080 	gcc_assert (tem != exp);
9081 
9082 	/* If TEM's type is a union of variable size, pass TARGET to the inner
9083 	   computation, since it will need a temporary and TARGET is known
9084 	   to have to do.  This occurs in unchecked conversion in Ada.  */
9085 	orig_op0 = op0
9086 	  = expand_expr (tem,
9087 			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9088 			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9089 			      != INTEGER_CST)
9090 			  && modifier != EXPAND_STACK_PARM
9091 			  ? target : NULL_RTX),
9092 			 VOIDmode,
9093 			 (modifier == EXPAND_INITIALIZER
9094 			  || modifier == EXPAND_CONST_ADDRESS
9095 			  || modifier == EXPAND_STACK_PARM)
9096 			 ? modifier : EXPAND_NORMAL);
9097 
9098 	mode2
9099 	  = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
9100 
9101 	/* If we have either an offset, a BLKmode result, or a reference
9102 	   outside the underlying object, we must force it to memory.
9103 	   Such a case can occur in Ada if we have unchecked conversion
9104 	   of an expression from a scalar type to an aggregate type or
9105 	   for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
9106 	   passed a partially uninitialized object or a view-conversion
9107 	   to a larger size.  */
9108 	must_force_mem = (offset
9109 			  || mode1 == BLKmode
9110 			  || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
9111 
9112 	/* Handle CONCAT first.  */
9113 	if (GET_CODE (op0) == CONCAT && !must_force_mem)
9114 	  {
9115 	    if (bitpos == 0
9116 		&& bitsize == GET_MODE_BITSIZE (GET_MODE (op0)))
9117 	      return op0;
9118 	    if (bitpos == 0
9119 		&& bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9120 		&& bitsize)
9121 	      {
9122 		op0 = XEXP (op0, 0);
9123 		mode2 = GET_MODE (op0);
9124 	      }
9125 	    else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
9126 		     && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
9127 		     && bitpos
9128 		     && bitsize)
9129 	      {
9130 		op0 = XEXP (op0, 1);
9131 		bitpos = 0;
9132 		mode2 = GET_MODE (op0);
9133 	      }
9134 	    else
9135 	      /* Otherwise force into memory.  */
9136 	      must_force_mem = 1;
9137 	  }
9138 
9139 	/* If this is a constant, put it in a register if it is a legitimate
9140 	   constant and we don't need a memory reference.  */
9141 	if (CONSTANT_P (op0)
9142 	    && mode2 != BLKmode
9143 	    && LEGITIMATE_CONSTANT_P (op0)
9144 	    && !must_force_mem)
9145 	  op0 = force_reg (mode2, op0);
9146 
9147 	/* Otherwise, if this is a constant, try to force it to the constant
9148 	   pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
9149 	   is a legitimate constant.  */
9150 	else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
9151 	  op0 = validize_mem (memloc);
9152 
9153 	/* Otherwise, if this is a constant or the object is not in memory
9154 	   and need be, put it there.  */
9155 	else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
9156 	  {
9157 	    tree nt = build_qualified_type (TREE_TYPE (tem),
9158 					    (TYPE_QUALS (TREE_TYPE (tem))
9159 					     | TYPE_QUAL_CONST));
9160 	    memloc = assign_temp (nt, 1, 1, 1);
9161 	    emit_move_insn (memloc, op0);
9162 	    op0 = memloc;
9163 	  }
9164 
9165 	if (offset)
9166 	  {
9167 	    enum machine_mode address_mode;
9168 	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
9169 					  EXPAND_SUM);
9170 
9171 	    gcc_assert (MEM_P (op0));
9172 
9173 	    address_mode
9174 	      = targetm.addr_space.address_mode (MEM_ADDR_SPACE (op0));
9175 	    if (GET_MODE (offset_rtx) != address_mode)
9176 	      offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
9177 
9178 	    if (GET_MODE (op0) == BLKmode
9179 		/* A constant address in OP0 can have VOIDmode, we must
9180 		   not try to call force_reg in that case.  */
9181 		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
9182 		&& bitsize != 0
9183 		&& (bitpos % bitsize) == 0
9184 		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
9185 		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
9186 	      {
9187 		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9188 		bitpos = 0;
9189 	      }
9190 
9191 	    op0 = offset_address (op0, offset_rtx,
9192 				  highest_pow2_factor (offset));
9193 	  }
9194 
9195 	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
9196 	   record its alignment as BIGGEST_ALIGNMENT.  */
9197 	if (MEM_P (op0) && bitpos == 0 && offset != 0
9198 	    && is_aligning_offset (offset, tem))
9199 	  set_mem_align (op0, BIGGEST_ALIGNMENT);
9200 
9201 	/* Don't forget about volatility even if this is a bitfield.  */
9202 	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
9203 	  {
9204 	    if (op0 == orig_op0)
9205 	      op0 = copy_rtx (op0);
9206 
9207 	    MEM_VOLATILE_P (op0) = 1;
9208 	  }
9209 
9210 	/* In cases where an aligned union has an unaligned object
9211 	   as a field, we might be extracting a BLKmode value from
9212 	   an integer-mode (e.g., SImode) object.  Handle this case
9213 	   by doing the extract into an object as wide as the field
9214 	   (which we know to be the width of a basic mode), then
9215 	   storing into memory, and changing the mode to BLKmode.  */
9216 	if (mode1 == VOIDmode
9217 	    || REG_P (op0) || GET_CODE (op0) == SUBREG
9218 	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
9219 		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
9220 		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
9221 		&& modifier != EXPAND_CONST_ADDRESS
9222 		&& modifier != EXPAND_INITIALIZER)
9223 	    /* If the field isn't aligned enough to fetch as a memref,
9224 	       fetch it as a bit field.  */
9225 	    || (mode1 != BLKmode
9226 		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
9227 		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
9228 		      || (MEM_P (op0)
9229 			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
9230 			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
9231 		     && ((modifier == EXPAND_CONST_ADDRESS
9232 			  || modifier == EXPAND_INITIALIZER)
9233 			 ? STRICT_ALIGNMENT
9234 			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
9235 		    || (bitpos % BITS_PER_UNIT != 0)))
9236 	    /* If the type and the field are a constant size and the
9237 	       size of the type isn't the same size as the bitfield,
9238 	       we must use bitfield operations.  */
9239 	    || (bitsize >= 0
9240 		&& TYPE_SIZE (TREE_TYPE (exp))
9241 		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9242 		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
9243 					  bitsize)))
9244 	  {
9245 	    enum machine_mode ext_mode = mode;
9246 
9247 	    if (ext_mode == BLKmode
9248 		&& ! (target != 0 && MEM_P (op0)
9249 		      && MEM_P (target)
9250 		      && bitpos % BITS_PER_UNIT == 0))
9251 	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
9252 
9253 	    if (ext_mode == BLKmode)
9254 	      {
9255 		if (target == 0)
9256 		  target = assign_temp (type, 0, 1, 1);
9257 
9258 		if (bitsize == 0)
9259 		  return target;
9260 
9261 		/* In this case, BITPOS must start at a byte boundary and
9262 		   TARGET, if specified, must be a MEM.  */
9263 		gcc_assert (MEM_P (op0)
9264 			    && (!target || MEM_P (target))
9265 			    && !(bitpos % BITS_PER_UNIT));
9266 
9267 		emit_block_move (target,
9268 				 adjust_address (op0, VOIDmode,
9269 						 bitpos / BITS_PER_UNIT),
9270 				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
9271 					  / BITS_PER_UNIT),
9272 				 (modifier == EXPAND_STACK_PARM
9273 				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9274 
9275 		return target;
9276 	      }
9277 
9278 	    op0 = validize_mem (op0);
9279 
9280 	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
9281 	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9282 
9283 	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
9284 				     (modifier == EXPAND_STACK_PARM
9285 				      ? NULL_RTX : target),
9286 				     ext_mode, ext_mode);
9287 
9288 	    /* If the result is a record type and BITSIZE is narrower than
9289 	       the mode of OP0, an integral mode, and this is a big endian
9290 	       machine, we must put the field into the high-order bits.  */
9291 	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
9292 		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9293 		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
9294 	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9295 				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
9296 					    - bitsize),
9297 				  op0, 1);
9298 
9299 	    /* If the result type is BLKmode, store the data into a temporary
9300 	       of the appropriate type, but with the mode corresponding to the
9301 	       mode for the data we have (op0's mode).  It's tempting to make
9302 	       this a constant type, since we know it's only being stored once,
9303 	       but that can cause problems if we are taking the address of this
9304 	       COMPONENT_REF because the MEM of any reference via that address
9305 	       will have flags corresponding to the type, which will not
9306 	       necessarily be constant.  */
9307 	    if (mode == BLKmode)
9308 	      {
9309 		HOST_WIDE_INT size = GET_MODE_BITSIZE (ext_mode);
9310 		rtx new_rtx;
9311 
9312 		/* If the reference doesn't use the alias set of its type,
9313 		   we cannot create the temporary using that type.  */
9314 		if (component_uses_parent_alias_set (exp))
9315 		  {
9316 		    new_rtx = assign_stack_local (ext_mode, size, 0);
9317 		    set_mem_alias_set (new_rtx, get_alias_set (exp));
9318 		  }
9319 		else
9320 		  new_rtx = assign_stack_temp_for_type (ext_mode, size, 0, type);
9321 
9322 		emit_move_insn (new_rtx, op0);
9323 		op0 = copy_rtx (new_rtx);
9324 		PUT_MODE (op0, BLKmode);
9325 		set_mem_attributes (op0, exp, 1);
9326 	      }
9327 
9328 	    return op0;
9329 	  }
9330 
9331 	/* If the result is BLKmode, use that to access the object
9332 	   now as well.  */
9333 	if (mode == BLKmode)
9334 	  mode1 = BLKmode;
9335 
9336 	/* Get a reference to just this component.  */
9337 	if (modifier == EXPAND_CONST_ADDRESS
9338 	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9339 	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
9340 	else
9341 	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
9342 
9343 	if (op0 == orig_op0)
9344 	  op0 = copy_rtx (op0);
9345 
9346 	set_mem_attributes (op0, exp, 0);
9347 	if (REG_P (XEXP (op0, 0)))
9348 	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9349 
9350 	MEM_VOLATILE_P (op0) |= volatilep;
9351 	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
9352 	    || modifier == EXPAND_CONST_ADDRESS
9353 	    || modifier == EXPAND_INITIALIZER)
9354 	  return op0;
9355 	else if (target == 0)
9356 	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9357 
9358 	convert_move (target, op0, unsignedp);
9359 	return target;
9360       }
9361 
9362     case OBJ_TYPE_REF:
9363       return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
9364 
9365     case CALL_EXPR:
9366       /* All valid uses of __builtin_va_arg_pack () are removed during
9367 	 inlining.  */
9368       if (CALL_EXPR_VA_ARG_PACK (exp))
9369 	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9370       {
9371 	tree fndecl = get_callee_fndecl (exp), attr;
9372 
9373 	if (fndecl
9374 	    && (attr = lookup_attribute ("error",
9375 					 DECL_ATTRIBUTES (fndecl))) != NULL)
9376 	  error ("%Kcall to %qs declared with attribute error: %s",
9377 		 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9378 		 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9379 	if (fndecl
9380 	    && (attr = lookup_attribute ("warning",
9381 					 DECL_ATTRIBUTES (fndecl))) != NULL)
9382 	  warning_at (tree_nonartificial_location (exp),
9383 		      0, "%Kcall to %qs declared with attribute warning: %s",
9384 		      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
9385 		      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
9386 
9387 	/* Check for a built-in function.  */
9388 	if (fndecl && DECL_BUILT_IN (fndecl))
9389 	  {
9390 	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
9391 	    return expand_builtin (exp, target, subtarget, tmode, ignore);
9392 	  }
9393       }
9394       return expand_call (exp, target, ignore);
9395 
9396     case VIEW_CONVERT_EXPR:
9397       op0 = NULL_RTX;
9398 
9399       /* If we are converting to BLKmode, try to avoid an intermediate
9400 	 temporary by fetching an inner memory reference.  */
9401       if (mode == BLKmode
9402 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
9403 	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
9404 	  && handled_component_p (treeop0))
9405       {
9406 	enum machine_mode mode1;
9407 	HOST_WIDE_INT bitsize, bitpos;
9408 	tree offset;
9409 	int unsignedp;
9410 	int volatilep = 0;
9411 	tree tem
9412 	  = get_inner_reference (treeop0, &bitsize, &bitpos,
9413 				 &offset, &mode1, &unsignedp, &volatilep,
9414 				 true);
9415 	rtx orig_op0;
9416 
9417 	/* ??? We should work harder and deal with non-zero offsets.  */
9418 	if (!offset
9419 	    && (bitpos % BITS_PER_UNIT) == 0
9420 	    && bitsize >= 0
9421 	    && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) == 0)
9422 	  {
9423 	    /* See the normal_inner_ref case for the rationale.  */
9424 	    orig_op0
9425 	      = expand_expr (tem,
9426 			     (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
9427 			      && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
9428 				  != INTEGER_CST)
9429 			      && modifier != EXPAND_STACK_PARM
9430 			      ? target : NULL_RTX),
9431 			     VOIDmode,
9432 			     (modifier == EXPAND_INITIALIZER
9433 			      || modifier == EXPAND_CONST_ADDRESS
9434 			      || modifier == EXPAND_STACK_PARM)
9435 			     ? modifier : EXPAND_NORMAL);
9436 
9437 	    if (MEM_P (orig_op0))
9438 	      {
9439 		op0 = orig_op0;
9440 
9441 		/* Get a reference to just this component.  */
9442 		if (modifier == EXPAND_CONST_ADDRESS
9443 		    || modifier == EXPAND_SUM
9444 		    || modifier == EXPAND_INITIALIZER)
9445 		  op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
9446 		else
9447 		  op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
9448 
9449 		if (op0 == orig_op0)
9450 		  op0 = copy_rtx (op0);
9451 
9452 		set_mem_attributes (op0, treeop0, 0);
9453 		if (REG_P (XEXP (op0, 0)))
9454 		  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
9455 
9456 		MEM_VOLATILE_P (op0) |= volatilep;
9457 	      }
9458 	  }
9459       }
9460 
9461       if (!op0)
9462 	op0 = expand_expr (treeop0,
9463 			   NULL_RTX, VOIDmode, modifier);
9464 
9465       /* If the input and output modes are both the same, we are done.  */
9466       if (mode == GET_MODE (op0))
9467 	;
9468       /* If neither mode is BLKmode, and both modes are the same size
9469 	 then we can use gen_lowpart.  */
9470       else if (mode != BLKmode && GET_MODE (op0) != BLKmode
9471 	       && GET_MODE_SIZE (mode) == GET_MODE_SIZE (GET_MODE (op0))
9472 	       && !COMPLEX_MODE_P (GET_MODE (op0)))
9473 	{
9474 	  if (GET_CODE (op0) == SUBREG)
9475 	    op0 = force_reg (GET_MODE (op0), op0);
9476 	  temp = gen_lowpart_common (mode, op0);
9477 	  if (temp)
9478 	    op0 = temp;
9479 	  else
9480 	    {
9481 	      if (!REG_P (op0) && !MEM_P (op0))
9482 		op0 = force_reg (GET_MODE (op0), op0);
9483 	      op0 = gen_lowpart (mode, op0);
9484 	    }
9485 	}
9486       /* If both modes are integral, then we can convert from one to the
9487 	 other.  */
9488       else if (SCALAR_INT_MODE_P (GET_MODE (op0)) && SCALAR_INT_MODE_P (mode))
9489 	op0 = convert_modes (mode, GET_MODE (op0), op0,
9490 			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9491       /* As a last resort, spill op0 to memory, and reload it in a
9492 	 different mode.  */
9493       else if (!MEM_P (op0))
9494 	{
9495 	  /* If the operand is not a MEM, force it into memory.  Since we
9496 	     are going to be changing the mode of the MEM, don't call
9497 	     force_const_mem for constants because we don't allow pool
9498 	     constants to change mode.  */
9499 	  tree inner_type = TREE_TYPE (treeop0);
9500 
9501 	  gcc_assert (!TREE_ADDRESSABLE (exp));
9502 
9503 	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
9504 	    target
9505 	      = assign_stack_temp_for_type
9506 		(TYPE_MODE (inner_type),
9507 		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
9508 
9509 	  emit_move_insn (target, op0);
9510 	  op0 = target;
9511 	}
9512 
9513       /* At this point, OP0 is in the correct mode.  If the output type is
9514 	 such that the operand is known to be aligned, indicate that it is.
9515 	 Otherwise, we need only be concerned about alignment for non-BLKmode
9516 	 results.  */
9517       if (MEM_P (op0))
9518 	{
9519 	  enum insn_code icode;
9520 	  op0 = copy_rtx (op0);
9521 
9522 	  if (TYPE_ALIGN_OK (type))
9523 	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
9524 	  else if (mode != BLKmode
9525 		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode)
9526 		   /* If the target does have special handling for unaligned
9527 		      loads of mode then use them.  */
9528 		   && ((icode = optab_handler (movmisalign_optab,
9529 					       mode)->insn_code)
9530 		       != CODE_FOR_nothing))
9531 	      {
9532 		rtx reg, insn;
9533 
9534 		op0 = adjust_address (op0, mode, 0);
9535 		/* We've already validated the memory, and we're creating a
9536 		   new pseudo destination.  The predicates really can't
9537 		   fail.  */
9538 		reg = gen_reg_rtx (mode);
9539 
9540 		/* Nor can the insn generator.  */
9541 		insn = GEN_FCN (icode) (reg, op0);
9542 		emit_insn (insn);
9543 		return reg;
9544 	      }
9545 	  else if (STRICT_ALIGNMENT
9546 		   && mode != BLKmode
9547 		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
9548 	    {
9549 	      tree inner_type = TREE_TYPE (treeop0);
9550 	      HOST_WIDE_INT temp_size
9551 		= MAX (int_size_in_bytes (inner_type),
9552 		       (HOST_WIDE_INT) GET_MODE_SIZE (mode));
9553 	      rtx new_rtx
9554 		= assign_stack_temp_for_type (mode, temp_size, 0, type);
9555 	      rtx new_with_op0_mode
9556 		= adjust_address (new_rtx, GET_MODE (op0), 0);
9557 
9558 	      gcc_assert (!TREE_ADDRESSABLE (exp));
9559 
9560 	      if (GET_MODE (op0) == BLKmode)
9561 		emit_block_move (new_with_op0_mode, op0,
9562 				 GEN_INT (GET_MODE_SIZE (mode)),
9563 				 (modifier == EXPAND_STACK_PARM
9564 				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9565 	      else
9566 		emit_move_insn (new_with_op0_mode, op0);
9567 
9568 	      op0 = new_rtx;
9569 	    }
9570 
9571 	  op0 = adjust_address (op0, mode, 0);
9572 	}
9573 
9574       return op0;
9575 
9576       /* Use a compare and a jump for BLKmode comparisons, or for function
9577 	 type comparisons is HAVE_canonicalize_funcptr_for_compare.  */
9578 
9579       /* Although TRUTH_{AND,OR}IF_EXPR aren't present in GIMPLE, they
9580 	 are occassionally created by folding during expansion.  */
9581     case TRUTH_ANDIF_EXPR:
9582     case TRUTH_ORIF_EXPR:
9583       if (! ignore
9584 	  && (target == 0
9585 	      || modifier == EXPAND_STACK_PARM
9586 	      || ! safe_from_p (target, treeop0, 1)
9587 	      || ! safe_from_p (target, treeop1, 1)
9588 	      /* Make sure we don't have a hard reg (such as function's return
9589 		 value) live across basic blocks, if not optimizing.  */
9590 	      || (!optimize && REG_P (target)
9591 		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9592 	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9593 
9594       if (target)
9595 	emit_move_insn (target, const0_rtx);
9596 
9597       op1 = gen_label_rtx ();
9598       jumpifnot_1 (code, treeop0, treeop1, op1, -1);
9599 
9600       if (target)
9601 	emit_move_insn (target, const1_rtx);
9602 
9603       emit_label (op1);
9604       return ignore ? const0_rtx : target;
9605 
9606     case STATEMENT_LIST:
9607       {
9608 	tree_stmt_iterator iter;
9609 
9610 	gcc_assert (ignore);
9611 
9612 	for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
9613 	  expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
9614       }
9615       return const0_rtx;
9616 
9617     case COND_EXPR:
9618       /* A COND_EXPR with its type being VOID_TYPE represents a
9619 	 conditional jump and is handled in
9620 	 expand_gimple_cond_expr.  */
9621       gcc_assert (!VOID_TYPE_P (type));
9622 
9623         /* Note that COND_EXPRs whose type is a structure or union
9624   	 are required to be constructed to contain assignments of
9625   	 a temporary variable, so that we can evaluate them here
9626   	 for side effect only.  If type is void, we must do likewise.  */
9627 
9628         gcc_assert (!TREE_ADDRESSABLE (type)
9629 		    && !ignore
9630 		    && TREE_TYPE (treeop1) != void_type_node
9631 		    && TREE_TYPE (treeop2) != void_type_node);
9632 
9633        /* If we are not to produce a result, we have no target.  Otherwise,
9634  	 if a target was specified use it; it will not be used as an
9635  	 intermediate target unless it is safe.  If no target, use a
9636  	 temporary.  */
9637 
9638        if (modifier != EXPAND_STACK_PARM
9639  	  && original_target
9640  	  && safe_from_p (original_target, treeop0, 1)
9641  	  && GET_MODE (original_target) == mode
9642 #ifdef HAVE_conditional_move
9643  	  && (! can_conditionally_move_p (mode)
9644  	      || REG_P (original_target))
9645 #endif
9646  	  && !MEM_P (original_target))
9647  	temp = original_target;
9648        else
9649  	temp = assign_temp (type, 0, 0, 1);
9650 
9651        do_pending_stack_adjust ();
9652        NO_DEFER_POP;
9653        op0 = gen_label_rtx ();
9654        op1 = gen_label_rtx ();
9655        jumpifnot (treeop0, op0, -1);
9656        store_expr (treeop1, temp,
9657  		  modifier == EXPAND_STACK_PARM,
9658 		  false);
9659 
9660        emit_jump_insn (gen_jump (op1));
9661        emit_barrier ();
9662        emit_label (op0);
9663        store_expr (treeop2, temp,
9664  		  modifier == EXPAND_STACK_PARM,
9665 		  false);
9666 
9667        emit_label (op1);
9668        OK_DEFER_POP;
9669        return temp;
9670 
9671     case VEC_COND_EXPR:
9672       target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9673       return target;
9674 
9675     case MODIFY_EXPR:
9676       {
9677 	tree lhs = treeop0;
9678 	tree rhs = treeop1;
9679 	gcc_assert (ignore);
9680 
9681 	/* Check for |= or &= of a bitfield of size one into another bitfield
9682 	   of size 1.  In this case, (unless we need the result of the
9683 	   assignment) we can do this more efficiently with a
9684 	   test followed by an assignment, if necessary.
9685 
9686 	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
9687 	   things change so we do, this code should be enhanced to
9688 	   support it.  */
9689 	if (TREE_CODE (lhs) == COMPONENT_REF
9690 	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
9691 		|| TREE_CODE (rhs) == BIT_AND_EXPR)
9692 	    && TREE_OPERAND (rhs, 0) == lhs
9693 	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
9694 	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9695 	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
9696 	  {
9697 	    rtx label = gen_label_rtx ();
9698 	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
9699 	    do_jump (TREE_OPERAND (rhs, 1),
9700 		     value ? label : 0,
9701 		     value ? 0 : label, -1);
9702 	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
9703 			       MOVE_NONTEMPORAL (exp));
9704 	    do_pending_stack_adjust ();
9705 	    emit_label (label);
9706 	    return const0_rtx;
9707 	  }
9708 
9709 	expand_assignment (lhs, rhs, MOVE_NONTEMPORAL (exp));
9710 	return const0_rtx;
9711       }
9712 
9713     case ADDR_EXPR:
9714       return expand_expr_addr_expr (exp, target, tmode, modifier);
9715 
9716     case REALPART_EXPR:
9717       op0 = expand_normal (treeop0);
9718       return read_complex_part (op0, false);
9719 
9720     case IMAGPART_EXPR:
9721       op0 = expand_normal (treeop0);
9722       return read_complex_part (op0, true);
9723 
9724     case RETURN_EXPR:
9725     case LABEL_EXPR:
9726     case GOTO_EXPR:
9727     case SWITCH_EXPR:
9728     case ASM_EXPR:
9729       /* Expanded in cfgexpand.c.  */
9730       gcc_unreachable ();
9731 
9732     case TRY_CATCH_EXPR:
9733     case CATCH_EXPR:
9734     case EH_FILTER_EXPR:
9735     case TRY_FINALLY_EXPR:
9736       /* Lowered by tree-eh.c.  */
9737       gcc_unreachable ();
9738 
9739     case WITH_CLEANUP_EXPR:
9740     case CLEANUP_POINT_EXPR:
9741     case TARGET_EXPR:
9742     case CASE_LABEL_EXPR:
9743     case VA_ARG_EXPR:
9744     case BIND_EXPR:
9745     case INIT_EXPR:
9746     case CONJ_EXPR:
9747     case COMPOUND_EXPR:
9748     case PREINCREMENT_EXPR:
9749     case PREDECREMENT_EXPR:
9750     case POSTINCREMENT_EXPR:
9751     case POSTDECREMENT_EXPR:
9752     case LOOP_EXPR:
9753     case EXIT_EXPR:
9754       /* Lowered by gimplify.c.  */
9755       gcc_unreachable ();
9756 
9757     case FDESC_EXPR:
9758       /* Function descriptors are not valid except for as
9759 	 initialization constants, and should not be expanded.  */
9760       gcc_unreachable ();
9761 
9762     case WITH_SIZE_EXPR:
9763       /* WITH_SIZE_EXPR expands to its first argument.  The caller should
9764 	 have pulled out the size to use in whatever context it needed.  */
9765       return expand_expr_real (treeop0, original_target, tmode,
9766 			       modifier, alt_rtl);
9767 
9768     case REALIGN_LOAD_EXPR:
9769       {
9770         tree oprnd0 = treeop0;
9771         tree oprnd1 = treeop1;
9772         tree oprnd2 = treeop2;
9773         rtx op2;
9774 
9775         this_optab = optab_for_tree_code (code, type, optab_default);
9776         expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9777         op2 = expand_normal (oprnd2);
9778         temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9779 				  target, unsignedp);
9780         gcc_assert (temp);
9781         return temp;
9782       }
9783 
9784     case DOT_PROD_EXPR:
9785       {
9786 	tree oprnd0 = treeop0;
9787 	tree oprnd1 = treeop1;
9788 	tree oprnd2 = treeop2;
9789 	rtx op2;
9790 
9791 	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9792 	op2 = expand_normal (oprnd2);
9793 	target = expand_widen_pattern_expr (&ops, op0, op1, op2,
9794 					    target, unsignedp);
9795 	return target;
9796       }
9797 
9798     case COMPOUND_LITERAL_EXPR:
9799       {
9800 	/* Initialize the anonymous variable declared in the compound
9801 	   literal, then return the variable.  */
9802 	tree decl = COMPOUND_LITERAL_EXPR_DECL (exp);
9803 
9804 	/* Create RTL for this variable.  */
9805 	if (!DECL_RTL_SET_P (decl))
9806 	  {
9807 	    if (DECL_HARD_REGISTER (decl))
9808 	      /* The user specified an assembler name for this variable.
9809 	         Set that up now.  */
9810 	      rest_of_decl_compilation (decl, 0, 0);
9811 	    else
9812 	      expand_decl (decl);
9813 	  }
9814 
9815 	return expand_expr_real (decl, original_target, tmode,
9816 				 modifier, alt_rtl);
9817       }
9818 
9819     default:
9820       return expand_expr_real_2 (&ops, target, tmode, modifier);
9821     }
9822 }
9823 
9824 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
9825    signedness of TYPE), possibly returning the result in TARGET.  */
9826 static rtx
9827 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
9828 {
9829   HOST_WIDE_INT prec = TYPE_PRECISION (type);
9830   if (target && GET_MODE (target) != GET_MODE (exp))
9831     target = 0;
9832   /* For constant values, reduce using build_int_cst_type. */
9833   if (CONST_INT_P (exp))
9834     {
9835       HOST_WIDE_INT value = INTVAL (exp);
9836       tree t = build_int_cst_type (type, value);
9837       return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
9838     }
9839   else if (TYPE_UNSIGNED (type))
9840     {
9841       rtx mask;
9842       if (prec < HOST_BITS_PER_WIDE_INT)
9843 	mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
9844 				   GET_MODE (exp));
9845       else
9846 	mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
9847 				   ((unsigned HOST_WIDE_INT) 1
9848 				    << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
9849 				   GET_MODE (exp));
9850       return expand_and (GET_MODE (exp), exp, mask, target);
9851     }
9852   else
9853     {
9854       tree count = build_int_cst (NULL_TREE,
9855 				  GET_MODE_BITSIZE (GET_MODE (exp)) - prec);
9856       exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9857       return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
9858     }
9859 }
9860 
9861 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9862    when applied to the address of EXP produces an address known to be
9863    aligned more than BIGGEST_ALIGNMENT.  */
9864 
9865 static int
9866 is_aligning_offset (const_tree offset, const_tree exp)
9867 {
9868   /* Strip off any conversions.  */
9869   while (CONVERT_EXPR_P (offset))
9870     offset = TREE_OPERAND (offset, 0);
9871 
9872   /* We must now have a BIT_AND_EXPR with a constant that is one less than
9873      power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
9874   if (TREE_CODE (offset) != BIT_AND_EXPR
9875       || !host_integerp (TREE_OPERAND (offset, 1), 1)
9876       || compare_tree_int (TREE_OPERAND (offset, 1),
9877 			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9878       || !exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9879     return 0;
9880 
9881   /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9882      It must be NEGATE_EXPR.  Then strip any more conversions.  */
9883   offset = TREE_OPERAND (offset, 0);
9884   while (CONVERT_EXPR_P (offset))
9885     offset = TREE_OPERAND (offset, 0);
9886 
9887   if (TREE_CODE (offset) != NEGATE_EXPR)
9888     return 0;
9889 
9890   offset = TREE_OPERAND (offset, 0);
9891   while (CONVERT_EXPR_P (offset))
9892     offset = TREE_OPERAND (offset, 0);
9893 
9894   /* This must now be the address of EXP.  */
9895   return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9896 }
9897 
9898 /* Return the tree node if an ARG corresponds to a string constant or zero
9899    if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
9900    in bytes within the string that ARG is accessing.  The type of the
9901    offset will be `sizetype'.  */
9902 
9903 tree
9904 string_constant (tree arg, tree *ptr_offset)
9905 {
9906   tree array, offset, lower_bound;
9907   STRIP_NOPS (arg);
9908 
9909   if (TREE_CODE (arg) == ADDR_EXPR)
9910     {
9911       if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9912 	{
9913 	  *ptr_offset = size_zero_node;
9914 	  return TREE_OPERAND (arg, 0);
9915 	}
9916       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
9917 	{
9918 	  array = TREE_OPERAND (arg, 0);
9919 	  offset = size_zero_node;
9920 	}
9921       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
9922 	{
9923 	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9924 	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
9925 	  if (TREE_CODE (array) != STRING_CST
9926 	      && TREE_CODE (array) != VAR_DECL)
9927 	    return 0;
9928 
9929 	  /* Check if the array has a nonzero lower bound.  */
9930 	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
9931 	  if (!integer_zerop (lower_bound))
9932 	    {
9933 	      /* If the offset and base aren't both constants, return 0.  */
9934 	      if (TREE_CODE (lower_bound) != INTEGER_CST)
9935 	        return 0;
9936 	      if (TREE_CODE (offset) != INTEGER_CST)
9937 		return 0;
9938 	      /* Adjust offset by the lower bound.  */
9939 	      offset = size_diffop (fold_convert (sizetype, offset),
9940 				    fold_convert (sizetype, lower_bound));
9941 	    }
9942 	}
9943       else
9944 	return 0;
9945     }
9946   else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
9947     {
9948       tree arg0 = TREE_OPERAND (arg, 0);
9949       tree arg1 = TREE_OPERAND (arg, 1);
9950 
9951       STRIP_NOPS (arg0);
9952       STRIP_NOPS (arg1);
9953 
9954       if (TREE_CODE (arg0) == ADDR_EXPR
9955 	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
9956 	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
9957 	{
9958 	  array = TREE_OPERAND (arg0, 0);
9959 	  offset = arg1;
9960 	}
9961       else if (TREE_CODE (arg1) == ADDR_EXPR
9962 	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
9963 		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
9964 	{
9965 	  array = TREE_OPERAND (arg1, 0);
9966 	  offset = arg0;
9967 	}
9968       else
9969 	return 0;
9970     }
9971   else
9972     return 0;
9973 
9974   if (TREE_CODE (array) == STRING_CST)
9975     {
9976       *ptr_offset = fold_convert (sizetype, offset);
9977       return array;
9978     }
9979   else if (TREE_CODE (array) == VAR_DECL)
9980     {
9981       int length;
9982 
9983       /* Variables initialized to string literals can be handled too.  */
9984       if (DECL_INITIAL (array) == NULL_TREE
9985 	  || TREE_CODE (DECL_INITIAL (array)) != STRING_CST)
9986 	return 0;
9987 
9988       /* If they are read-only, non-volatile and bind locally.  */
9989       if (! TREE_READONLY (array)
9990 	  || TREE_SIDE_EFFECTS (array)
9991 	  || ! targetm.binds_local_p (array))
9992 	return 0;
9993 
9994       /* Avoid const char foo[4] = "abcde";  */
9995       if (DECL_SIZE_UNIT (array) == NULL_TREE
9996 	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
9997 	  || (length = TREE_STRING_LENGTH (DECL_INITIAL (array))) <= 0
9998 	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
9999 	return 0;
10000 
10001       /* If variable is bigger than the string literal, OFFSET must be constant
10002 	 and inside of the bounds of the string literal.  */
10003       offset = fold_convert (sizetype, offset);
10004       if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
10005 	  && (! host_integerp (offset, 1)
10006 	      || compare_tree_int (offset, length) >= 0))
10007 	return 0;
10008 
10009       *ptr_offset = offset;
10010       return DECL_INITIAL (array);
10011     }
10012 
10013   return 0;
10014 }
10015 
10016 /* Generate code to calculate OPS, and exploded expression
10017    using a store-flag instruction and return an rtx for the result.
10018    OPS reflects a comparison.
10019 
10020    If TARGET is nonzero, store the result there if convenient.
10021 
10022    Return zero if there is no suitable set-flag instruction
10023    available on this machine.
10024 
10025    Once expand_expr has been called on the arguments of the comparison,
10026    we are committed to doing the store flag, since it is not safe to
10027    re-evaluate the expression.  We emit the store-flag insn by calling
10028    emit_store_flag, but only expand the arguments if we have a reason
10029    to believe that emit_store_flag will be successful.  If we think that
10030    it will, but it isn't, we have to simulate the store-flag with a
10031    set/jump/set sequence.  */
10032 
10033 static rtx
10034 do_store_flag (sepops ops, rtx target, enum machine_mode mode)
10035 {
10036   enum rtx_code code;
10037   tree arg0, arg1, type;
10038   tree tem;
10039   enum machine_mode operand_mode;
10040   int unsignedp;
10041   rtx op0, op1;
10042   rtx subtarget = target;
10043   location_t loc = ops->location;
10044 
10045   arg0 = ops->op0;
10046   arg1 = ops->op1;
10047 
10048   /* Don't crash if the comparison was erroneous.  */
10049   if (arg0 == error_mark_node || arg1 == error_mark_node)
10050     return const0_rtx;
10051 
10052   type = TREE_TYPE (arg0);
10053   operand_mode = TYPE_MODE (type);
10054   unsignedp = TYPE_UNSIGNED (type);
10055 
10056   /* We won't bother with BLKmode store-flag operations because it would mean
10057      passing a lot of information to emit_store_flag.  */
10058   if (operand_mode == BLKmode)
10059     return 0;
10060 
10061   /* We won't bother with store-flag operations involving function pointers
10062      when function pointers must be canonicalized before comparisons.  */
10063 #ifdef HAVE_canonicalize_funcptr_for_compare
10064   if (HAVE_canonicalize_funcptr_for_compare
10065       && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
10066 	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
10067 	       == FUNCTION_TYPE))
10068 	  || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
10069 	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
10070 		  == FUNCTION_TYPE))))
10071     return 0;
10072 #endif
10073 
10074   STRIP_NOPS (arg0);
10075   STRIP_NOPS (arg1);
10076 
10077   /* Get the rtx comparison code to use.  We know that EXP is a comparison
10078      operation of some type.  Some comparisons against 1 and -1 can be
10079      converted to comparisons with zero.  Do so here so that the tests
10080      below will be aware that we have a comparison with zero.   These
10081      tests will not catch constants in the first operand, but constants
10082      are rarely passed as the first operand.  */
10083 
10084   switch (ops->code)
10085     {
10086     case EQ_EXPR:
10087       code = EQ;
10088       break;
10089     case NE_EXPR:
10090       code = NE;
10091       break;
10092     case LT_EXPR:
10093       if (integer_onep (arg1))
10094 	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10095       else
10096 	code = unsignedp ? LTU : LT;
10097       break;
10098     case LE_EXPR:
10099       if (! unsignedp && integer_all_onesp (arg1))
10100 	arg1 = integer_zero_node, code = LT;
10101       else
10102 	code = unsignedp ? LEU : LE;
10103       break;
10104     case GT_EXPR:
10105       if (! unsignedp && integer_all_onesp (arg1))
10106 	arg1 = integer_zero_node, code = GE;
10107       else
10108 	code = unsignedp ? GTU : GT;
10109       break;
10110     case GE_EXPR:
10111       if (integer_onep (arg1))
10112 	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10113       else
10114 	code = unsignedp ? GEU : GE;
10115       break;
10116 
10117     case UNORDERED_EXPR:
10118       code = UNORDERED;
10119       break;
10120     case ORDERED_EXPR:
10121       code = ORDERED;
10122       break;
10123     case UNLT_EXPR:
10124       code = UNLT;
10125       break;
10126     case UNLE_EXPR:
10127       code = UNLE;
10128       break;
10129     case UNGT_EXPR:
10130       code = UNGT;
10131       break;
10132     case UNGE_EXPR:
10133       code = UNGE;
10134       break;
10135     case UNEQ_EXPR:
10136       code = UNEQ;
10137       break;
10138     case LTGT_EXPR:
10139       code = LTGT;
10140       break;
10141 
10142     default:
10143       gcc_unreachable ();
10144     }
10145 
10146   /* Put a constant second.  */
10147   if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
10148       || TREE_CODE (arg0) == FIXED_CST)
10149     {
10150       tem = arg0; arg0 = arg1; arg1 = tem;
10151       code = swap_condition (code);
10152     }
10153 
10154   /* If this is an equality or inequality test of a single bit, we can
10155      do this by shifting the bit being tested to the low-order bit and
10156      masking the result with the constant 1.  If the condition was EQ,
10157      we xor it with 1.  This does not require an scc insn and is faster
10158      than an scc insn even if we have it.
10159 
10160      The code to make this transformation was moved into fold_single_bit_test,
10161      so we just call into the folder and expand its result.  */
10162 
10163   if ((code == NE || code == EQ)
10164       && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10165       && integer_pow2p (TREE_OPERAND (arg0, 1))
10166       && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
10167     {
10168       tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
10169       return expand_expr (fold_single_bit_test (loc,
10170 						code == NE ? NE_EXPR : EQ_EXPR,
10171 						arg0, arg1, type),
10172 			  target, VOIDmode, EXPAND_NORMAL);
10173     }
10174 
10175   if (! get_subtarget (target)
10176       || GET_MODE (subtarget) != operand_mode)
10177     subtarget = 0;
10178 
10179   expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
10180 
10181   if (target == 0)
10182     target = gen_reg_rtx (mode);
10183 
10184   /* Try a cstore if possible.  */
10185   return emit_store_flag_force (target, code, op0, op1,
10186 				operand_mode, unsignedp,
10187 				(TYPE_PRECISION (ops->type) == 1
10188 				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
10189 }
10190 
10191 
10192 /* Stubs in case we haven't got a casesi insn.  */
10193 #ifndef HAVE_casesi
10194 # define HAVE_casesi 0
10195 # define gen_casesi(a, b, c, d, e) (0)
10196 # define CODE_FOR_casesi CODE_FOR_nothing
10197 #endif
10198 
10199 /* Attempt to generate a casesi instruction.  Returns 1 if successful,
10200    0 otherwise (i.e. if there is no casesi instruction).  */
10201 int
10202 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
10203 	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label,
10204 	    rtx fallback_label ATTRIBUTE_UNUSED)
10205 {
10206   enum machine_mode index_mode = SImode;
10207   int index_bits = GET_MODE_BITSIZE (index_mode);
10208   rtx op1, op2, index;
10209   enum machine_mode op_mode;
10210 
10211   if (! HAVE_casesi)
10212     return 0;
10213 
10214   /* Convert the index to SImode.  */
10215   if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10216     {
10217       enum machine_mode omode = TYPE_MODE (index_type);
10218       rtx rangertx = expand_normal (range);
10219 
10220       /* We must handle the endpoints in the original mode.  */
10221       index_expr = build2 (MINUS_EXPR, index_type,
10222 			   index_expr, minval);
10223       minval = integer_zero_node;
10224       index = expand_normal (index_expr);
10225       if (default_label)
10226         emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
10227 				 omode, 1, default_label);
10228       /* Now we can safely truncate.  */
10229       index = convert_to_mode (index_mode, index, 0);
10230     }
10231   else
10232     {
10233       if (TYPE_MODE (index_type) != index_mode)
10234 	{
10235 	  index_type = lang_hooks.types.type_for_size (index_bits, 0);
10236 	  index_expr = fold_convert (index_type, index_expr);
10237 	}
10238 
10239       index = expand_normal (index_expr);
10240     }
10241 
10242   do_pending_stack_adjust ();
10243 
10244   op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10245   if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10246       (index, op_mode))
10247     index = copy_to_mode_reg (op_mode, index);
10248 
10249   op1 = expand_normal (minval);
10250 
10251   op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10252   op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10253 		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
10254   if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10255       (op1, op_mode))
10256     op1 = copy_to_mode_reg (op_mode, op1);
10257 
10258   op2 = expand_normal (range);
10259 
10260   op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10261   op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10262 		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
10263   if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10264       (op2, op_mode))
10265     op2 = copy_to_mode_reg (op_mode, op2);
10266 
10267   emit_jump_insn (gen_casesi (index, op1, op2,
10268 			      table_label, !default_label
10269 					   ? fallback_label : default_label));
10270   return 1;
10271 }
10272 
10273 /* Attempt to generate a tablejump instruction; same concept.  */
10274 #ifndef HAVE_tablejump
10275 #define HAVE_tablejump 0
10276 #define gen_tablejump(x, y) (0)
10277 #endif
10278 
10279 /* Subroutine of the next function.
10280 
10281    INDEX is the value being switched on, with the lowest value
10282    in the table already subtracted.
10283    MODE is its expected mode (needed if INDEX is constant).
10284    RANGE is the length of the jump table.
10285    TABLE_LABEL is a CODE_LABEL rtx for the table itself.
10286 
10287    DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10288    index value is out of range.  */
10289 
10290 static void
10291 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
10292 	      rtx default_label)
10293 {
10294   rtx temp, vector;
10295 
10296   if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
10297     cfun->cfg->max_jumptable_ents = INTVAL (range);
10298 
10299   /* Do an unsigned comparison (in the proper mode) between the index
10300      expression and the value which represents the length of the range.
10301      Since we just finished subtracting the lower bound of the range
10302      from the index expression, this comparison allows us to simultaneously
10303      check that the original index expression value is both greater than
10304      or equal to the minimum value of the range and less than or equal to
10305      the maximum value of the range.  */
10306 
10307   if (default_label)
10308     emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
10309 			     default_label);
10310 
10311   /* If index is in range, it must fit in Pmode.
10312      Convert to Pmode so we can index with it.  */
10313   if (mode != Pmode)
10314     index = convert_to_mode (Pmode, index, 1);
10315 
10316   /* Don't let a MEM slip through, because then INDEX that comes
10317      out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10318      and break_out_memory_refs will go to work on it and mess it up.  */
10319 #ifdef PIC_CASE_VECTOR_ADDRESS
10320   if (flag_pic && !REG_P (index))
10321     index = copy_to_mode_reg (Pmode, index);
10322 #endif
10323 
10324   /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10325      GET_MODE_SIZE, because this indicates how large insns are.  The other
10326      uses should all be Pmode, because they are addresses.  This code
10327      could fail if addresses and insns are not the same size.  */
10328   index = gen_rtx_PLUS (Pmode,
10329 			gen_rtx_MULT (Pmode, index,
10330 				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10331 			gen_rtx_LABEL_REF (Pmode, table_label));
10332 #ifdef PIC_CASE_VECTOR_ADDRESS
10333   if (flag_pic)
10334     index = PIC_CASE_VECTOR_ADDRESS (index);
10335   else
10336 #endif
10337     index = memory_address (CASE_VECTOR_MODE, index);
10338   temp = gen_reg_rtx (CASE_VECTOR_MODE);
10339   vector = gen_const_mem (CASE_VECTOR_MODE, index);
10340   convert_move (temp, vector, 0);
10341 
10342   emit_jump_insn (gen_tablejump (temp, table_label));
10343 
10344   /* If we are generating PIC code or if the table is PC-relative, the
10345      table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
10346   if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10347     emit_barrier ();
10348 }
10349 
10350 int
10351 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
10352 	       rtx table_label, rtx default_label)
10353 {
10354   rtx index;
10355 
10356   if (! HAVE_tablejump)
10357     return 0;
10358 
10359   index_expr = fold_build2 (MINUS_EXPR, index_type,
10360 			    fold_convert (index_type, index_expr),
10361 			    fold_convert (index_type, minval));
10362   index = expand_normal (index_expr);
10363   do_pending_stack_adjust ();
10364 
10365   do_tablejump (index, TYPE_MODE (index_type),
10366 		convert_modes (TYPE_MODE (index_type),
10367 			       TYPE_MODE (TREE_TYPE (range)),
10368 			       expand_normal (range),
10369 			       TYPE_UNSIGNED (TREE_TYPE (range))),
10370 		table_label, default_label);
10371   return 1;
10372 }
10373 
10374 /* Nonzero if the mode is a valid vector mode for this architecture.
10375    This returns nonzero even if there is no hardware support for the
10376    vector mode, but we can emulate with narrower modes.  */
10377 
10378 int
10379 vector_mode_valid_p (enum machine_mode mode)
10380 {
10381   enum mode_class mclass = GET_MODE_CLASS (mode);
10382   enum machine_mode innermode;
10383 
10384   /* Doh!  What's going on?  */
10385   if (mclass != MODE_VECTOR_INT
10386       && mclass != MODE_VECTOR_FLOAT
10387       && mclass != MODE_VECTOR_FRACT
10388       && mclass != MODE_VECTOR_UFRACT
10389       && mclass != MODE_VECTOR_ACCUM
10390       && mclass != MODE_VECTOR_UACCUM)
10391     return 0;
10392 
10393   /* Hardware support.  Woo hoo!  */
10394   if (targetm.vector_mode_supported_p (mode))
10395     return 1;
10396 
10397   innermode = GET_MODE_INNER (mode);
10398 
10399   /* We should probably return 1 if requesting V4DI and we have no DI,
10400      but we have V2DI, but this is probably very unlikely.  */
10401 
10402   /* If we have support for the inner mode, we can safely emulate it.
10403      We may not have V2DI, but me can emulate with a pair of DIs.  */
10404   return targetm.scalar_mode_supported_p (innermode);
10405 }
10406 
10407 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
10408 static rtx
10409 const_vector_from_tree (tree exp)
10410 {
10411   rtvec v;
10412   int units, i;
10413   tree link, elt;
10414   enum machine_mode inner, mode;
10415 
10416   mode = TYPE_MODE (TREE_TYPE (exp));
10417 
10418   if (initializer_zerop (exp))
10419     return CONST0_RTX (mode);
10420 
10421   units = GET_MODE_NUNITS (mode);
10422   inner = GET_MODE_INNER (mode);
10423 
10424   v = rtvec_alloc (units);
10425 
10426   link = TREE_VECTOR_CST_ELTS (exp);
10427   for (i = 0; link; link = TREE_CHAIN (link), ++i)
10428     {
10429       elt = TREE_VALUE (link);
10430 
10431       if (TREE_CODE (elt) == REAL_CST)
10432 	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10433 							 inner);
10434       else if (TREE_CODE (elt) == FIXED_CST)
10435 	RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
10436 							 inner);
10437       else
10438 	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10439 					       TREE_INT_CST_HIGH (elt),
10440 					       inner);
10441     }
10442 
10443   /* Initialize remaining elements to 0.  */
10444   for (; i < units; ++i)
10445     RTVEC_ELT (v, i) = CONST0_RTX (inner);
10446 
10447   return gen_rtx_CONST_VECTOR (mode, v);
10448 }
10449 
10450 
10451 /* Build a decl for a EH personality function named NAME. */
10452 
10453 tree
10454 build_personality_function (const char *name)
10455 {
10456   tree decl, type;
10457 
10458   type = build_function_type_list (integer_type_node, integer_type_node,
10459 				   long_long_unsigned_type_node,
10460 				   ptr_type_node, ptr_type_node, NULL_TREE);
10461   decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
10462 		     get_identifier (name), type);
10463   DECL_ARTIFICIAL (decl) = 1;
10464   DECL_EXTERNAL (decl) = 1;
10465   TREE_PUBLIC (decl) = 1;
10466 
10467   /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
10468      are the flags assigned by targetm.encode_section_info.  */
10469   SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
10470 
10471   return decl;
10472 }
10473 
10474 /* Extracts the personality function of DECL and returns the corresponding
10475    libfunc.  */
10476 
10477 rtx
10478 get_personality_function (tree decl)
10479 {
10480   tree personality = DECL_FUNCTION_PERSONALITY (decl);
10481   enum eh_personality_kind pk;
10482 
10483   pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
10484   if (pk == eh_personality_none)
10485     return NULL;
10486 
10487   if (!personality
10488       && pk == eh_personality_any)
10489     personality = lang_hooks.eh_personality ();
10490 
10491   if (pk == eh_personality_lang)
10492     gcc_assert (personality != NULL_TREE);
10493 
10494   return XEXP (DECL_RTL (personality), 0);
10495 }
10496 
10497 #include "gt-expr.h"
10498