xref: /netbsd-src/external/gpl3/gcc.old/dist/gcc/expr.c (revision ea67e31f69307fc0e2e16d6620631aeb4f6de316)
1 /* Convert tree expression to rtl instructions, for GNU compiler.
2    Copyright (C) 1988-2015 Free Software Foundation, Inc.
3 
4 This file is part of GCC.
5 
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10 
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
14 for more details.
15 
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3.  If not see
18 <http://www.gnu.org/licenses/>.  */
19 
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "machmode.h"
25 #include "rtl.h"
26 #include "hash-set.h"
27 #include "vec.h"
28 #include "double-int.h"
29 #include "input.h"
30 #include "alias.h"
31 #include "symtab.h"
32 #include "wide-int.h"
33 #include "inchash.h"
34 #include "tree.h"
35 #include "fold-const.h"
36 #include "stringpool.h"
37 #include "stor-layout.h"
38 #include "attribs.h"
39 #include "varasm.h"
40 #include "flags.h"
41 #include "regs.h"
42 #include "hard-reg-set.h"
43 #include "except.h"
44 #include "function.h"
45 #include "insn-config.h"
46 #include "insn-attr.h"
47 #include "hashtab.h"
48 #include "statistics.h"
49 #include "real.h"
50 #include "fixed-value.h"
51 #include "expmed.h"
52 #include "dojump.h"
53 #include "explow.h"
54 #include "calls.h"
55 #include "emit-rtl.h"
56 #include "stmt.h"
57 /* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
58 #include "expr.h"
59 #include "insn-codes.h"
60 #include "optabs.h"
61 #include "libfuncs.h"
62 #include "recog.h"
63 #include "reload.h"
64 #include "typeclass.h"
65 #include "toplev.h"
66 #include "langhooks.h"
67 #include "intl.h"
68 #include "tm_p.h"
69 #include "tree-iterator.h"
70 #include "predict.h"
71 #include "dominance.h"
72 #include "cfg.h"
73 #include "basic-block.h"
74 #include "tree-ssa-alias.h"
75 #include "internal-fn.h"
76 #include "gimple-expr.h"
77 #include "is-a.h"
78 #include "gimple.h"
79 #include "gimple-ssa.h"
80 #include "hash-map.h"
81 #include "plugin-api.h"
82 #include "ipa-ref.h"
83 #include "cgraph.h"
84 #include "tree-ssanames.h"
85 #include "target.h"
86 #include "common/common-target.h"
87 #include "timevar.h"
88 #include "df.h"
89 #include "diagnostic.h"
90 #include "tree-ssa-live.h"
91 #include "tree-outof-ssa.h"
92 #include "target-globals.h"
93 #include "params.h"
94 #include "tree-ssa-address.h"
95 #include "cfgexpand.h"
96 #include "builtins.h"
97 #include "tree-chkp.h"
98 #include "rtl-chkp.h"
99 #include "ccmp.h"
100 
101 #ifndef STACK_PUSH_CODE
102 #ifdef STACK_GROWS_DOWNWARD
103 #define STACK_PUSH_CODE PRE_DEC
104 #else
105 #define STACK_PUSH_CODE PRE_INC
106 #endif
107 #endif
108 
109 
110 /* If this is nonzero, we do not bother generating VOLATILE
111    around volatile memory references, and we are willing to
112    output indirect addresses.  If cse is to follow, we reject
113    indirect addresses so a useful potential cse is generated;
114    if it is used only once, instruction combination will produce
115    the same indirect address eventually.  */
116 int cse_not_expected;
117 
118 /* This structure is used by move_by_pieces to describe the move to
119    be performed.  */
120 struct move_by_pieces_d
121 {
122   rtx to;
123   rtx to_addr;
124   int autinc_to;
125   int explicit_inc_to;
126   rtx from;
127   rtx from_addr;
128   int autinc_from;
129   int explicit_inc_from;
130   unsigned HOST_WIDE_INT len;
131   HOST_WIDE_INT offset;
132   int reverse;
133 };
134 
135 /* This structure is used by store_by_pieces to describe the store or
136    clear to be performed.  */
137 
138 struct store_by_pieces_d
139 {
140   rtx to;
141   rtx to_addr;
142   int autinc_to;
143   int explicit_inc_to;
144   unsigned HOST_WIDE_INT len;
145   HOST_WIDE_INT offset;
146   rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode);
147   void *constfundata;
148   int reverse;
149 };
150 
151 static void move_by_pieces_1 (insn_gen_fn, machine_mode,
152 			      struct move_by_pieces_d *);
153 static bool block_move_libcall_safe_for_call_parm (void);
154 static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned, unsigned, HOST_WIDE_INT,
155 					unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
156 					unsigned HOST_WIDE_INT);
157 static tree emit_block_move_libcall_fn (int);
158 static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
159 static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, machine_mode);
160 static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
161 static void store_by_pieces_1 (struct store_by_pieces_d *, unsigned int);
162 static void store_by_pieces_2 (insn_gen_fn, machine_mode,
163 			       struct store_by_pieces_d *);
164 static tree clear_storage_libcall_fn (int);
165 static rtx_insn *compress_float_constant (rtx, rtx);
166 static rtx get_subtarget (rtx);
167 static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
168 				     HOST_WIDE_INT, unsigned HOST_WIDE_INT,
169 				     unsigned HOST_WIDE_INT, machine_mode,
170 				     tree, int, alias_set_type);
171 static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
172 static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT,
173 			unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
174 			machine_mode, tree, alias_set_type, bool);
175 
176 static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (const_tree, const_tree);
177 
178 static int is_aligning_offset (const_tree, const_tree);
179 static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
180 static rtx do_store_flag (sepops, rtx, machine_mode);
181 #ifdef PUSH_ROUNDING
182 static void emit_single_push_insn (machine_mode, rtx, tree);
183 #endif
184 static void do_tablejump (rtx, machine_mode, rtx, rtx, rtx, int);
185 static rtx const_vector_from_tree (tree);
186 static tree tree_expr_size (const_tree);
187 static HOST_WIDE_INT int_expr_size (tree);
188 
189 
190 /* This is run to set up which modes can be used
191    directly in memory and to initialize the block move optab.  It is run
192    at the beginning of compilation and when the target is reinitialized.  */
193 
194 void
195 init_expr_target (void)
196 {
197   rtx insn, pat;
198   machine_mode mode;
199   int num_clobbers;
200   rtx mem, mem1;
201   rtx reg;
202 
203   /* Try indexing by frame ptr and try by stack ptr.
204      It is known that on the Convex the stack ptr isn't a valid index.
205      With luck, one or the other is valid on any machine.  */
206   mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
207   mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
208 
209   /* A scratch register we can modify in-place below to avoid
210      useless RTL allocations.  */
211   reg = gen_rtx_REG (VOIDmode, -1);
212 
213   insn = rtx_alloc (INSN);
214   pat = gen_rtx_SET (VOIDmode, NULL_RTX, NULL_RTX);
215   PATTERN (insn) = pat;
216 
217   for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
218        mode = (machine_mode) ((int) mode + 1))
219     {
220       int regno;
221 
222       direct_load[(int) mode] = direct_store[(int) mode] = 0;
223       PUT_MODE (mem, mode);
224       PUT_MODE (mem1, mode);
225       PUT_MODE (reg, mode);
226 
227       /* See if there is some register that can be used in this mode and
228 	 directly loaded or stored from memory.  */
229 
230       if (mode != VOIDmode && mode != BLKmode)
231 	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
232 	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
233 	     regno++)
234 	  {
235 	    if (! HARD_REGNO_MODE_OK (regno, mode))
236 	      continue;
237 
238 	    SET_REGNO (reg, regno);
239 
240 	    SET_SRC (pat) = mem;
241 	    SET_DEST (pat) = reg;
242 	    if (recog (pat, insn, &num_clobbers) >= 0)
243 	      direct_load[(int) mode] = 1;
244 
245 	    SET_SRC (pat) = mem1;
246 	    SET_DEST (pat) = reg;
247 	    if (recog (pat, insn, &num_clobbers) >= 0)
248 	      direct_load[(int) mode] = 1;
249 
250 	    SET_SRC (pat) = reg;
251 	    SET_DEST (pat) = mem;
252 	    if (recog (pat, insn, &num_clobbers) >= 0)
253 	      direct_store[(int) mode] = 1;
254 
255 	    SET_SRC (pat) = reg;
256 	    SET_DEST (pat) = mem1;
257 	    if (recog (pat, insn, &num_clobbers) >= 0)
258 	      direct_store[(int) mode] = 1;
259 	  }
260     }
261 
262   mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));
263 
264   for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
265        mode = GET_MODE_WIDER_MODE (mode))
266     {
267       machine_mode srcmode;
268       for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
269 	   srcmode = GET_MODE_WIDER_MODE (srcmode))
270 	{
271 	  enum insn_code ic;
272 
273 	  ic = can_extend_p (mode, srcmode, 0);
274 	  if (ic == CODE_FOR_nothing)
275 	    continue;
276 
277 	  PUT_MODE (mem, srcmode);
278 
279 	  if (insn_operand_matches (ic, 1, mem))
280 	    float_extend_from_mem[mode][srcmode] = true;
281 	}
282     }
283 }
284 
285 /* This is run at the start of compiling a function.  */
286 
287 void
288 init_expr (void)
289 {
290   memset (&crtl->expr, 0, sizeof (crtl->expr));
291 }
292 
293 /* Copy data from FROM to TO, where the machine modes are not the same.
294    Both modes may be integer, or both may be floating, or both may be
295    fixed-point.
296    UNSIGNEDP should be nonzero if FROM is an unsigned type.
297    This causes zero-extension instead of sign-extension.  */
298 
299 void
300 convert_move (rtx to, rtx from, int unsignedp)
301 {
302   machine_mode to_mode = GET_MODE (to);
303   machine_mode from_mode = GET_MODE (from);
304   int to_real = SCALAR_FLOAT_MODE_P (to_mode);
305   int from_real = SCALAR_FLOAT_MODE_P (from_mode);
306   enum insn_code code;
307   rtx libcall;
308 
309   /* rtx code for making an equivalent value.  */
310   enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
311 			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));
312 
313 
314   gcc_assert (to_real == from_real);
315   gcc_assert (to_mode != BLKmode);
316   gcc_assert (from_mode != BLKmode);
317 
318   /* If the source and destination are already the same, then there's
319      nothing to do.  */
320   if (to == from)
321     return;
322 
323   /* If FROM is a SUBREG that indicates that we have already done at least
324      the required extension, strip it.  We don't handle such SUBREGs as
325      TO here.  */
326 
327   if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
328       && (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (from)))
329 	  >= GET_MODE_PRECISION (to_mode))
330       && SUBREG_CHECK_PROMOTED_SIGN (from, unsignedp))
331     from = gen_lowpart (to_mode, from), from_mode = to_mode;
332 
333   gcc_assert (GET_CODE (to) != SUBREG || !SUBREG_PROMOTED_VAR_P (to));
334 
335   if (to_mode == from_mode
336       || (from_mode == VOIDmode && CONSTANT_P (from)))
337     {
338       emit_move_insn (to, from);
339       return;
340     }
341 
342   if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
343     {
344       gcc_assert (GET_MODE_BITSIZE (from_mode) == GET_MODE_BITSIZE (to_mode));
345 
346       if (VECTOR_MODE_P (to_mode))
347 	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
348       else
349 	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);
350 
351       emit_move_insn (to, from);
352       return;
353     }
354 
355   if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
356     {
357       convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
358       convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
359       return;
360     }
361 
362   if (to_real)
363     {
364       rtx value;
365       rtx_insn *insns;
366       convert_optab tab;
367 
368       gcc_assert ((GET_MODE_PRECISION (from_mode)
369 		   != GET_MODE_PRECISION (to_mode))
370 		  || (DECIMAL_FLOAT_MODE_P (from_mode)
371 		      != DECIMAL_FLOAT_MODE_P (to_mode)));
372 
373       if (GET_MODE_PRECISION (from_mode) == GET_MODE_PRECISION (to_mode))
374 	/* Conversion between decimal float and binary float, same size.  */
375 	tab = DECIMAL_FLOAT_MODE_P (from_mode) ? trunc_optab : sext_optab;
376       else if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
377 	tab = sext_optab;
378       else
379 	tab = trunc_optab;
380 
381       /* Try converting directly if the insn is supported.  */
382 
383       code = convert_optab_handler (tab, to_mode, from_mode);
384       if (code != CODE_FOR_nothing)
385 	{
386 	  emit_unop_insn (code, to, from,
387 			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
388 	  return;
389 	}
390 
391       /* Otherwise use a libcall.  */
392       libcall = convert_optab_libfunc (tab, to_mode, from_mode);
393 
394       /* Is this conversion implemented yet?  */
395       gcc_assert (libcall);
396 
397       start_sequence ();
398       value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
399 				       1, from, from_mode);
400       insns = get_insns ();
401       end_sequence ();
402       emit_libcall_block (insns, to, value,
403 			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
404 								       from)
405 			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
406       return;
407     }
408 
409   /* Handle pointer conversion.  */			/* SPEE 900220.  */
410   /* If the target has a converter from FROM_MODE to TO_MODE, use it.  */
411   {
412     convert_optab ctab;
413 
414     if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
415       ctab = trunc_optab;
416     else if (unsignedp)
417       ctab = zext_optab;
418     else
419       ctab = sext_optab;
420 
421     if (convert_optab_handler (ctab, to_mode, from_mode)
422 	!= CODE_FOR_nothing)
423       {
424 	emit_unop_insn (convert_optab_handler (ctab, to_mode, from_mode),
425 			to, from, UNKNOWN);
426 	return;
427       }
428   }
429 
430   /* Targets are expected to provide conversion insns between PxImode and
431      xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
432   if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
433     {
434       machine_mode full_mode
435 	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);
436 
437       gcc_assert (convert_optab_handler (trunc_optab, to_mode, full_mode)
438 		  != CODE_FOR_nothing);
439 
440       if (full_mode != from_mode)
441 	from = convert_to_mode (full_mode, from, unsignedp);
442       emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, full_mode),
443 		      to, from, UNKNOWN);
444       return;
445     }
446   if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
447     {
448       rtx new_from;
449       machine_mode full_mode
450 	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);
451       convert_optab ctab = unsignedp ? zext_optab : sext_optab;
452       enum insn_code icode;
453 
454       icode = convert_optab_handler (ctab, full_mode, from_mode);
455       gcc_assert (icode != CODE_FOR_nothing);
456 
457       if (to_mode == full_mode)
458 	{
459 	  emit_unop_insn (icode, to, from, UNKNOWN);
460 	  return;
461 	}
462 
463       new_from = gen_reg_rtx (full_mode);
464       emit_unop_insn (icode, new_from, from, UNKNOWN);
465 
466       /* else proceed to integer conversions below.  */
467       from_mode = full_mode;
468       from = new_from;
469     }
470 
471    /* Make sure both are fixed-point modes or both are not.  */
472    gcc_assert (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode) ==
473 	       ALL_SCALAR_FIXED_POINT_MODE_P (to_mode));
474    if (ALL_SCALAR_FIXED_POINT_MODE_P (from_mode))
475     {
476       /* If we widen from_mode to to_mode and they are in the same class,
477 	 we won't saturate the result.
478 	 Otherwise, always saturate the result to play it safe.  */
479       if (GET_MODE_CLASS (from_mode) == GET_MODE_CLASS (to_mode)
480 	  && GET_MODE_SIZE (from_mode) < GET_MODE_SIZE (to_mode))
481 	expand_fixed_convert (to, from, 0, 0);
482       else
483 	expand_fixed_convert (to, from, 0, 1);
484       return;
485     }
486 
487   /* Now both modes are integers.  */
488 
489   /* Handle expanding beyond a word.  */
490   if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode)
491       && GET_MODE_PRECISION (to_mode) > BITS_PER_WORD)
492     {
493       rtx_insn *insns;
494       rtx lowpart;
495       rtx fill_value;
496       rtx lowfrom;
497       int i;
498       machine_mode lowpart_mode;
499       int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
500 
501       /* Try converting directly if the insn is supported.  */
502       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
503 	  != CODE_FOR_nothing)
504 	{
505 	  /* If FROM is a SUBREG, put it into a register.  Do this
506 	     so that we always generate the same set of insns for
507 	     better cse'ing; if an intermediate assignment occurred,
508 	     we won't be doing the operation directly on the SUBREG.  */
509 	  if (optimize > 0 && GET_CODE (from) == SUBREG)
510 	    from = force_reg (from_mode, from);
511 	  emit_unop_insn (code, to, from, equiv_code);
512 	  return;
513 	}
514       /* Next, try converting via full word.  */
515       else if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD
516 	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
517 		   != CODE_FOR_nothing))
518 	{
519 	  rtx word_to = gen_reg_rtx (word_mode);
520 	  if (REG_P (to))
521 	    {
522 	      if (reg_overlap_mentioned_p (to, from))
523 		from = force_reg (from_mode, from);
524 	      emit_clobber (to);
525 	    }
526 	  convert_move (word_to, from, unsignedp);
527 	  emit_unop_insn (code, to, word_to, equiv_code);
528 	  return;
529 	}
530 
531       /* No special multiword conversion insn; do it by hand.  */
532       start_sequence ();
533       /* Since we will turn this into a no conflict block, we must ensure
534          that the source does not overlap the target, so force it into an
535          isolated register when it might.  Likewise for any MEM input, since the
536          register when maybe so.  Likewise for any MEM input, since the
537          conversion sequence might require several references to it and we
538          must ensure we're getting the same value every time.  */
539 
540       if (MEM_P (from) || reg_overlap_mentioned_p (to, from))
541 	from = force_reg (from_mode, from);
542 
543       /* Get a copy of FROM widened to a word, if necessary.  */
544       if (GET_MODE_PRECISION (from_mode) < BITS_PER_WORD)
545 	lowpart_mode = word_mode;
546       else
547 	lowpart_mode = from_mode;
548 
549       lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
550 
551       lowpart = gen_lowpart (lowpart_mode, to);
552       emit_move_insn (lowpart, lowfrom);
553 
554       /* Compute the value to put in each remaining word.  */
555       if (unsignedp)
556 	fill_value = const0_rtx;
557       else
558 	fill_value = emit_store_flag_force (gen_reg_rtx (word_mode),
559 					    LT, lowfrom, const0_rtx,
560 					    lowpart_mode, 0, -1);
561 
562       /* Fill the remaining words.  */
563       for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
564 	{
565 	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
566 	  rtx subword = operand_subword (to, index, 1, to_mode);
567 
568 	  gcc_assert (subword);
569 
570 	  if (fill_value != subword)
571 	    emit_move_insn (subword, fill_value);
572 	}
573 
574       insns = get_insns ();
575       end_sequence ();
576 
577       emit_insn (insns);
578       return;
579     }
580 
581   /* Truncating multi-word to a word or less.  */
582   if (GET_MODE_PRECISION (from_mode) > BITS_PER_WORD
583       && GET_MODE_PRECISION (to_mode) <= BITS_PER_WORD)
584     {
585       if (!((MEM_P (from)
586 	     && ! MEM_VOLATILE_P (from)
587 	     && direct_load[(int) to_mode]
588 	     && ! mode_dependent_address_p (XEXP (from, 0),
589 					    MEM_ADDR_SPACE (from)))
590 	    || REG_P (from)
591 	    || GET_CODE (from) == SUBREG))
592 	from = force_reg (from_mode, from);
593       convert_move (to, gen_lowpart (word_mode, from), 0);
594       return;
595     }
596 
597   /* Now follow all the conversions between integers
598      no more than a word long.  */
599 
600   /* For truncation, usually we can just refer to FROM in a narrower mode.  */
601   if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
602       && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, from_mode))
603     {
604       if (!((MEM_P (from)
605 	     && ! MEM_VOLATILE_P (from)
606 	     && direct_load[(int) to_mode]
607 	     && ! mode_dependent_address_p (XEXP (from, 0),
608 					    MEM_ADDR_SPACE (from)))
609 	    || REG_P (from)
610 	    || GET_CODE (from) == SUBREG))
611 	from = force_reg (from_mode, from);
612       if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
613 	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
614 	from = copy_to_reg (from);
615       emit_move_insn (to, gen_lowpart (to_mode, from));
616       return;
617     }
618 
619   /* Handle extension.  */
620   if (GET_MODE_PRECISION (to_mode) > GET_MODE_PRECISION (from_mode))
621     {
622       /* Convert directly if that works.  */
623       if ((code = can_extend_p (to_mode, from_mode, unsignedp))
624 	  != CODE_FOR_nothing)
625 	{
626 	  emit_unop_insn (code, to, from, equiv_code);
627 	  return;
628 	}
629       else
630 	{
631 	  machine_mode intermediate;
632 	  rtx tmp;
633 	  int shift_amount;
634 
635 	  /* Search for a mode to convert via.  */
636 	  for (intermediate = from_mode; intermediate != VOIDmode;
637 	       intermediate = GET_MODE_WIDER_MODE (intermediate))
638 	    if (((can_extend_p (to_mode, intermediate, unsignedp)
639 		  != CODE_FOR_nothing)
640 		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
641 		     && TRULY_NOOP_TRUNCATION_MODES_P (to_mode, intermediate)))
642 		&& (can_extend_p (intermediate, from_mode, unsignedp)
643 		    != CODE_FOR_nothing))
644 	      {
645 		convert_move (to, convert_to_mode (intermediate, from,
646 						   unsignedp), unsignedp);
647 		return;
648 	      }
649 
650 	  /* No suitable intermediate mode.
651 	     Generate what we need with shifts.  */
652 	  shift_amount = (GET_MODE_PRECISION (to_mode)
653 			  - GET_MODE_PRECISION (from_mode));
654 	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
655 	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
656 			      to, unsignedp);
657 	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
658 			      to, unsignedp);
659 	  if (tmp != to)
660 	    emit_move_insn (to, tmp);
661 	  return;
662 	}
663     }
664 
665   /* Support special truncate insns for certain modes.  */
666   if (convert_optab_handler (trunc_optab, to_mode,
667 			     from_mode) != CODE_FOR_nothing)
668     {
669       emit_unop_insn (convert_optab_handler (trunc_optab, to_mode, from_mode),
670 		      to, from, UNKNOWN);
671       return;
672     }
673 
674   /* Handle truncation of volatile memrefs, and so on;
675      the things that couldn't be truncated directly,
676      and for which there was no special instruction.
677 
678      ??? Code above formerly short-circuited this, for most integer
679      mode pairs, with a force_reg in from_mode followed by a recursive
680      call to this routine.  Appears always to have been wrong.  */
681   if (GET_MODE_PRECISION (to_mode) < GET_MODE_PRECISION (from_mode))
682     {
683       rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
684       emit_move_insn (to, temp);
685       return;
686     }
687 
688   /* Mode combination is not recognized.  */
689   gcc_unreachable ();
690 }
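
/* A minimal usage sketch for convert_move; the caller context and the pseudo
   registers below are hypothetical, for illustration only:

     rtx src = gen_reg_rtx (SImode);
     rtx dst = gen_reg_rtx (DImode);

     convert_move (dst, src, 1);     widen SImode to DImode, zero-extending
     convert_move (src, dst, 0);     narrow DImode back to SImode, truncating

   Whether each call expands to a single extend or truncate insn, a multiword
   sequence, or (for floating modes) a libcall depends on what the target
   provides, as handled by the cases above.  */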
691 
692 /* Return an rtx for a value that would result
693    from converting X to mode MODE.
694    Both X and MODE may be floating, or both integer.
695    UNSIGNEDP is nonzero if X is an unsigned value.
696    This can be done by referring to a part of X in place
697    or by copying to a new temporary with conversion.  */
698 
699 rtx
700 convert_to_mode (machine_mode mode, rtx x, int unsignedp)
701 {
702   return convert_modes (mode, VOIDmode, x, unsignedp);
703 }
704 
705 /* Return an rtx for a value that would result
706    from converting X from mode OLDMODE to mode MODE.
707    Both modes may be floating, or both integer.
708    UNSIGNEDP is nonzero if X is an unsigned value.
709 
710    This can be done by referring to a part of X in place
711    or by copying to a new temporary with conversion.
712 
713    You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.  */
714 
715 rtx
716 convert_modes (machine_mode mode, machine_mode oldmode, rtx x, int unsignedp)
717 {
718   rtx temp;
719 
720   /* If FROM is a SUBREG that indicates that we have already done at least
721      the required extension, strip it.  */
722 
723   if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
724       && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
725       && SUBREG_CHECK_PROMOTED_SIGN (x, unsignedp))
726     x = gen_lowpart (mode, SUBREG_REG (x));
727 
728   if (GET_MODE (x) != VOIDmode)
729     oldmode = GET_MODE (x);
730 
731   if (mode == oldmode)
732     return x;
733 
734   if (CONST_SCALAR_INT_P (x) && GET_MODE_CLASS (mode) == MODE_INT)
735     {
736       /* If the caller did not tell us the old mode, then there is not
737 	 much to do with respect to canonicalization.  We have to
738 	 assume that all the bits are significant.  */
739       if (GET_MODE_CLASS (oldmode) != MODE_INT)
740 	oldmode = MAX_MODE_INT;
741       wide_int w = wide_int::from (std::make_pair (x, oldmode),
742 				   GET_MODE_PRECISION (mode),
743 				   unsignedp ? UNSIGNED : SIGNED);
744       return immed_wide_int_const (w, mode);
745     }
746 
747   /* We can do this with a gen_lowpart if both desired and current modes
748      are integer, and this is either a constant integer, a register, or a
749      non-volatile MEM. */
750   if (GET_MODE_CLASS (mode) == MODE_INT
751       && GET_MODE_CLASS (oldmode) == MODE_INT
752       && GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (oldmode)
753       && ((MEM_P (x) && !MEM_VOLATILE_P (x) && direct_load[(int) mode])
754           || (REG_P (x)
755               && (!HARD_REGISTER_P (x)
756                   || HARD_REGNO_MODE_OK (REGNO (x), mode))
757               && TRULY_NOOP_TRUNCATION_MODES_P (mode, GET_MODE (x)))))
758 
759     return gen_lowpart (mode, x);
760 
761   /* Converting from an integer constant into MODE is always equivalent to
762      a subreg operation.  */
763   if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
764     {
765       gcc_assert (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (oldmode));
766       return simplify_gen_subreg (mode, x, oldmode, 0);
767     }
768 
769   temp = gen_reg_rtx (mode);
770   convert_move (temp, x, unsignedp);
771   return temp;
772 }
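
/* A small sketch of the difference between the two entry points above; the
   constant and modes are illustrative only:

     rtx c = GEN_INT (0xff);
     rtx a = convert_to_mode (SImode, c, 0);        no old mode is known, so
                                                    all bits are taken as
                                                    significant: A is 255
     rtx b = convert_modes (SImode, QImode, c, 0);  the caller states that C
                                                    was a QImode value, so it
                                                    is sign-extended from 8
                                                    bits: B is -1

   Passing OLDMODE lets constants be canonicalized for the mode they actually
   came from instead of being treated as full-width values.  */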
773 
774 /* Return the largest alignment we can use for doing a move (or store)
775    of MAX_PIECES.  ALIGN is the largest alignment we could use.  */
776 
777 static unsigned int
778 alignment_for_piecewise_move (unsigned int max_pieces, unsigned int align)
779 {
780   machine_mode tmode;
781 
782   tmode = mode_for_size (max_pieces * BITS_PER_UNIT, MODE_INT, 1);
783   if (align >= GET_MODE_ALIGNMENT (tmode))
784     align = GET_MODE_ALIGNMENT (tmode);
785   else
786     {
787       machine_mode tmode, xmode;
788 
789       for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT), xmode = tmode;
790 	   tmode != VOIDmode;
791 	   xmode = tmode, tmode = GET_MODE_WIDER_MODE (tmode))
792 	if (GET_MODE_SIZE (tmode) > max_pieces
793 	    || SLOW_UNALIGNED_ACCESS (tmode, align))
794 	  break;
795 
796       align = MAX (align, GET_MODE_ALIGNMENT (xmode));
797     }
798 
799   return align;
800 }
801 
802 /* Return the widest integer mode whose size is strictly less than SIZE
803    (an exclusive bound).  If no such mode can be found, return VOIDmode.  */
804 
805 static machine_mode
806 widest_int_mode_for_size (unsigned int size)
807 {
808   machine_mode tmode, mode = VOIDmode;
809 
810   for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
811        tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
812     if (GET_MODE_SIZE (tmode) < size)
813       mode = tmode;
814 
815   return mode;
816 }
817 
818 /* Determine whether LEN bytes can be moved by using several move
819    instructions.  Return nonzero if a call to move_by_pieces should
820    succeed.  */
821 
822 int
823 can_move_by_pieces (unsigned HOST_WIDE_INT len,
824 		    unsigned int align)
825 {
826   return targetm.use_by_pieces_infrastructure_p (len, align, MOVE_BY_PIECES,
827 						 optimize_insn_for_speed_p ());
828 }
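
/* The usual caller pattern, mirroring emit_block_move_hints below (SRC, DST
   and LEN stand for the caller's operands):

     if (CONST_INT_P (len) && can_move_by_pieces (INTVAL (len), align))
       move_by_pieces (dst, src, INTVAL (len), align, 0);
     else
       ... fall back to a movmem pattern or a memcpy libcall ...  */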
829 
830 /* Generate several move instructions to copy LEN bytes from block FROM to
831    block TO.  (These are MEM rtx's with BLKmode).
832 
833    If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
834    used to push FROM to the stack.
835 
836    ALIGN is maximum stack alignment we can assume.
837 
838    If ENDP is 0 return TO, if ENDP is 1 return memory at the end a la
839    mempcpy, and if ENDP is 2 return memory at the end minus one byte a la
840    stpcpy.  */
841 
842 rtx
843 move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
844 		unsigned int align, int endp)
845 {
846   struct move_by_pieces_d data;
847   machine_mode to_addr_mode;
848   machine_mode from_addr_mode = get_address_mode (from);
849   rtx to_addr, from_addr = XEXP (from, 0);
850   unsigned int max_size = MOVE_MAX_PIECES + 1;
851   enum insn_code icode;
852 
853   align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));
854 
855   data.offset = 0;
856   data.from_addr = from_addr;
857   if (to)
858     {
859       to_addr_mode = get_address_mode (to);
860       to_addr = XEXP (to, 0);
861       data.to = to;
862       data.autinc_to
863 	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
864 	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
865       data.reverse
866 	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
867     }
868   else
869     {
870       to_addr_mode = VOIDmode;
871       to_addr = NULL_RTX;
872       data.to = NULL_RTX;
873       data.autinc_to = 1;
874 #ifdef STACK_GROWS_DOWNWARD
875       data.reverse = 1;
876 #else
877       data.reverse = 0;
878 #endif
879     }
880   data.to_addr = to_addr;
881   data.from = from;
882   data.autinc_from
883     = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
884        || GET_CODE (from_addr) == POST_INC
885        || GET_CODE (from_addr) == POST_DEC);
886 
887   data.explicit_inc_from = 0;
888   data.explicit_inc_to = 0;
889   if (data.reverse) data.offset = len;
890   data.len = len;
891 
892   /* If copying requires more than two move insns,
893      copy addresses to registers (to make displacements shorter)
894      and use post-increment if available.  */
895   if (!(data.autinc_from && data.autinc_to)
896       && move_by_pieces_ninsns (len, align, max_size) > 2)
897     {
898       /* Find the mode of the largest move...
899 	 MODE might not be used depending on the definitions of the
900 	 USE_* macros below.  */
901       machine_mode mode ATTRIBUTE_UNUSED
902 	= widest_int_mode_for_size (max_size);
903 
904       if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
905 	{
906 	  data.from_addr = copy_to_mode_reg (from_addr_mode,
907 					     plus_constant (from_addr_mode,
908 							    from_addr, len));
909 	  data.autinc_from = 1;
910 	  data.explicit_inc_from = -1;
911 	}
912       if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
913 	{
914 	  data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
915 	  data.autinc_from = 1;
916 	  data.explicit_inc_from = 1;
917 	}
918       if (!data.autinc_from && CONSTANT_P (from_addr))
919 	data.from_addr = copy_to_mode_reg (from_addr_mode, from_addr);
920       if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
921 	{
922 	  data.to_addr = copy_to_mode_reg (to_addr_mode,
923 					   plus_constant (to_addr_mode,
924 							  to_addr, len));
925 	  data.autinc_to = 1;
926 	  data.explicit_inc_to = -1;
927 	}
928       if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
929 	{
930 	  data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
931 	  data.autinc_to = 1;
932 	  data.explicit_inc_to = 1;
933 	}
934       if (!data.autinc_to && CONSTANT_P (to_addr))
935 	data.to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
936     }
937 
938   align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
939 
940   /* First move what we can in the largest integer mode, then go to
941      successively smaller modes.  */
942 
943   while (max_size > 1 && data.len > 0)
944     {
945       machine_mode mode = widest_int_mode_for_size (max_size);
946 
947       if (mode == VOIDmode)
948 	break;
949 
950       icode = optab_handler (mov_optab, mode);
951       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
952 	move_by_pieces_1 (GEN_FCN (icode), mode, &data);
953 
954       max_size = GET_MODE_SIZE (mode);
955     }
956 
957   /* The code above should have handled everything.  */
958   gcc_assert (!data.len);
959 
960   if (endp)
961     {
962       rtx to1;
963 
964       gcc_assert (!data.reverse);
965       if (data.autinc_to)
966 	{
967 	  if (endp == 2)
968 	    {
969 	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
970 		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
971 	      else
972 		data.to_addr = copy_to_mode_reg (to_addr_mode,
973 						 plus_constant (to_addr_mode,
974 								data.to_addr,
975 								-1));
976 	    }
977 	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
978 					   data.offset);
979 	}
980       else
981 	{
982 	  if (endp == 2)
983 	    --data.offset;
984 	  to1 = adjust_address (data.to, QImode, data.offset);
985 	}
986       return to1;
987     }
988   else
989     return data.to;
990 }
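
/* Sketch of the ENDP convention for move_by_pieces, for a hypothetical
   mempcpy-style expansion that needs the address just past the copied block:

     rtx end = move_by_pieces (dst, src, len, align, 1);

   END is then a QImode MEM whose address points one byte past the last byte
   written; ENDP == 2 instead yields the stpcpy-style MEM at the last byte
   itself, and ENDP == 0 simply returns DST.  */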
991 
992 /* Return number of insns required to move L bytes by pieces.
993    ALIGN (in bits) is maximum alignment we can assume.  */
994 
995 unsigned HOST_WIDE_INT
996 move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align,
997 		       unsigned int max_size)
998 {
999   unsigned HOST_WIDE_INT n_insns = 0;
1000 
1001   align = alignment_for_piecewise_move (MOVE_MAX_PIECES, align);
1002 
1003   while (max_size > 1 && l > 0)
1004     {
1005       machine_mode mode;
1006       enum insn_code icode;
1007 
1008       mode = widest_int_mode_for_size (max_size);
1009 
1010       if (mode == VOIDmode)
1011 	break;
1012 
1013       icode = optab_handler (mov_optab, mode);
1014       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
1015 	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1016 
1017       max_size = GET_MODE_SIZE (mode);
1018     }
1019 
1020   gcc_assert (!l);
1021   return n_insns;
1022 }
1023 
1024 /* Subroutine of move_by_pieces.  Move as many bytes as appropriate
1025    with move instructions for mode MODE.  GENFUN is the gen_... function
1026    to make a move insn for that mode.  DATA has all the other info.  */
1027 
1028 static void
1029 move_by_pieces_1 (insn_gen_fn genfun, machine_mode mode,
1030 		  struct move_by_pieces_d *data)
1031 {
1032   unsigned int size = GET_MODE_SIZE (mode);
1033   rtx to1 = NULL_RTX, from1;
1034 
1035   while (data->len >= size)
1036     {
1037       if (data->reverse)
1038 	data->offset -= size;
1039 
1040       if (data->to)
1041 	{
1042 	  if (data->autinc_to)
1043 	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
1044 					     data->offset);
1045 	  else
1046 	    to1 = adjust_address (data->to, mode, data->offset);
1047 	}
1048 
1049       if (data->autinc_from)
1050 	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
1051 					   data->offset);
1052       else
1053 	from1 = adjust_address (data->from, mode, data->offset);
1054 
1055       if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
1056 	emit_insn (gen_add2_insn (data->to_addr,
1057 				  gen_int_mode (-(HOST_WIDE_INT) size,
1058 						GET_MODE (data->to_addr))));
1059       if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
1060 	emit_insn (gen_add2_insn (data->from_addr,
1061 				  gen_int_mode (-(HOST_WIDE_INT) size,
1062 						GET_MODE (data->from_addr))));
1063 
1064       if (data->to)
1065 	emit_insn ((*genfun) (to1, from1));
1066       else
1067 	{
1068 #ifdef PUSH_ROUNDING
1069 	  emit_single_push_insn (mode, from1, NULL);
1070 #else
1071 	  gcc_unreachable ();
1072 #endif
1073 	}
1074 
1075       if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
1076 	emit_insn (gen_add2_insn (data->to_addr,
1077 				  gen_int_mode (size,
1078 						GET_MODE (data->to_addr))));
1079       if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
1080 	emit_insn (gen_add2_insn (data->from_addr,
1081 				  gen_int_mode (size,
1082 						GET_MODE (data->from_addr))));
1083 
1084       if (! data->reverse)
1085 	data->offset += size;
1086 
1087       data->len -= size;
1088     }
1089 }
1090 
1091 /* Emit code to move a block Y to a block X.  This may be done with
1092    string-move instructions, with multiple scalar move instructions,
1093    or with a library call.
1094 
1095    Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
1096    SIZE is an rtx that says how long they are.
1097    ALIGN is the maximum alignment we can assume they have.
1098    METHOD describes what kind of copy this is, and what mechanisms may be used.
1099    MIN_SIZE is the minimal size of block to move
1100    MIN_SIZE is the minimal size of the block to move.
1101    MAX_SIZE is the maximal size of the block to move; if it cannot be
1102    represented in unsigned HOST_WIDE_INT, then it is a mask of all ones.
1103    Return the address of the new block, if memcpy is called and returns it,
1104    0 otherwise.  */
1105 
1106 rtx
1107 emit_block_move_hints (rtx x, rtx y, rtx size, enum block_op_methods method,
1108 		       unsigned int expected_align, HOST_WIDE_INT expected_size,
1109 		       unsigned HOST_WIDE_INT min_size,
1110 		       unsigned HOST_WIDE_INT max_size,
1111 		       unsigned HOST_WIDE_INT probable_max_size)
1112 {
1113   bool may_use_call;
1114   rtx retval = 0;
1115   unsigned int align;
1116 
1117   gcc_assert (size);
1118   if (CONST_INT_P (size)
1119       && INTVAL (size) == 0)
1120     return 0;
1121 
1122   switch (method)
1123     {
1124     case BLOCK_OP_NORMAL:
1125     case BLOCK_OP_TAILCALL:
1126       may_use_call = true;
1127       break;
1128 
1129     case BLOCK_OP_CALL_PARM:
1130       may_use_call = block_move_libcall_safe_for_call_parm ();
1131 
1132       /* Make inhibit_defer_pop nonzero around the library call
1133 	 to force it to pop the arguments right away.  */
1134       NO_DEFER_POP;
1135       break;
1136 
1137     case BLOCK_OP_NO_LIBCALL:
1138       may_use_call = false;
1139       break;
1140 
1141     default:
1142       gcc_unreachable ();
1143     }
1144 
1145   gcc_assert (MEM_P (x) && MEM_P (y));
1146   align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1147   gcc_assert (align >= BITS_PER_UNIT);
1148 
1149   /* Make sure we've got BLKmode addresses; store_one_arg can decide that
1150      block copy is more efficient for other large modes, e.g. DCmode.  */
1151   x = adjust_address (x, BLKmode, 0);
1152   y = adjust_address (y, BLKmode, 0);
1153 
1154   /* Set MEM_SIZE as appropriate for this block copy.  The main place this
1155      can be incorrect is coming from __builtin_memcpy.  */
1156   if (CONST_INT_P (size))
1157     {
1158       x = shallow_copy_rtx (x);
1159       y = shallow_copy_rtx (y);
1160       set_mem_size (x, INTVAL (size));
1161       set_mem_size (y, INTVAL (size));
1162     }
1163 
1164   if (CONST_INT_P (size) && can_move_by_pieces (INTVAL (size), align))
1165     move_by_pieces (x, y, INTVAL (size), align, 0);
1166   else if (emit_block_move_via_movmem (x, y, size, align,
1167 				       expected_align, expected_size,
1168 				       min_size, max_size, probable_max_size))
1169     ;
1170   else if (may_use_call
1171 	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (x))
1172 	   && ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (y)))
1173     {
1174       /* Since x and y are passed to a libcall, mark the corresponding
1175 	 tree EXPR as addressable.  */
1176       tree y_expr = MEM_EXPR (y);
1177       tree x_expr = MEM_EXPR (x);
1178       if (y_expr)
1179 	mark_addressable (y_expr);
1180       if (x_expr)
1181 	mark_addressable (x_expr);
1182       retval = emit_block_move_via_libcall (x, y, size,
1183 					    method == BLOCK_OP_TAILCALL);
1184     }
1185 
1186   else
1187     emit_block_move_via_loop (x, y, size, align);
1188 
1189   if (method == BLOCK_OP_CALL_PARM)
1190     OK_DEFER_POP;
1191 
1192   return retval;
1193 }
1194 
1195 rtx
1196 emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
1197 {
1198   unsigned HOST_WIDE_INT max, min = 0;
1199   if (GET_CODE (size) == CONST_INT)
1200     min = max = UINTVAL (size);
1201   else
1202     max = GET_MODE_MASK (GET_MODE (size));
1203   return emit_block_move_hints (x, y, size, method, 0, -1,
1204 				min, max, max);
1205 }
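
/* A minimal usage sketch for the block-copy entry points; the addresses and
   the byte count are hypothetical, for illustration only:

     rtx dst = gen_rtx_MEM (BLKmode, dst_addr);
     rtx src = gen_rtx_MEM (BLKmode, src_addr);
     emit_block_move (dst, src, GEN_INT (32), BLOCK_OP_NORMAL);

   The wrapper above derives the MIN/MAX size hints from SIZE and defers to
   emit_block_move_hints, which then picks move_by_pieces, a movmem pattern,
   a memcpy libcall, or an explicit byte loop.  */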
1206 
1207 /* A subroutine of emit_block_move.  Returns true if calling the
1208    block move libcall will not clobber any parameters which may have
1209    already been placed on the stack.  */
1210 
1211 static bool
1212 block_move_libcall_safe_for_call_parm (void)
1213 {
1214 #if defined (REG_PARM_STACK_SPACE)
1215   tree fn;
1216 #endif
1217 
1218   /* If arguments are pushed on the stack, then they're safe.  */
1219   if (PUSH_ARGS)
1220     return true;
1221 
1222   /* If registers go on the stack anyway, any argument is sure to clobber
1223      an outgoing argument.  */
1224 #if defined (REG_PARM_STACK_SPACE)
1225   fn = emit_block_move_libcall_fn (false);
1226   /* Avoid set but not used warning if *REG_PARM_STACK_SPACE doesn't
1227      depend on its argument.  */
1228   (void) fn;
1229   if (OUTGOING_REG_PARM_STACK_SPACE ((!fn ? NULL_TREE : TREE_TYPE (fn)))
1230       && REG_PARM_STACK_SPACE (fn) != 0)
1231     return false;
1232 #endif
1233 
1234   /* If any argument goes in memory, then it might clobber an outgoing
1235      argument.  */
1236   {
1237     CUMULATIVE_ARGS args_so_far_v;
1238     cumulative_args_t args_so_far;
1239     tree fn, arg;
1240 
1241     fn = emit_block_move_libcall_fn (false);
1242     INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
1243     args_so_far = pack_cumulative_args (&args_so_far_v);
1244 
1245     arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1246     for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1247       {
1248 	machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1249 	rtx tmp = targetm.calls.function_arg (args_so_far, mode,
1250 					      NULL_TREE, true);
1251 	if (!tmp || !REG_P (tmp))
1252 	  return false;
1253 	if (targetm.calls.arg_partial_bytes (args_so_far, mode, NULL, 1))
1254 	  return false;
1255 	targetm.calls.function_arg_advance (args_so_far, mode,
1256 					    NULL_TREE, true);
1257       }
1258   }
1259   return true;
1260 }
1261 
1262 /* A subroutine of emit_block_move.  Expand a movmem pattern;
1263    return true if successful.  */
1264 
1265 static bool
1266 emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align,
1267 			    unsigned int expected_align, HOST_WIDE_INT expected_size,
1268 			    unsigned HOST_WIDE_INT min_size,
1269 			    unsigned HOST_WIDE_INT max_size,
1270 			    unsigned HOST_WIDE_INT probable_max_size)
1271 {
1272   int save_volatile_ok = volatile_ok;
1273   machine_mode mode;
1274 
1275   if (expected_align < align)
1276     expected_align = align;
1277   if (expected_size != -1)
1278     {
1279       if ((unsigned HOST_WIDE_INT)expected_size > probable_max_size)
1280 	expected_size = probable_max_size;
1281       if ((unsigned HOST_WIDE_INT)expected_size < min_size)
1282 	expected_size = min_size;
1283     }
1284 
1285   /* Since this is a move insn, we don't care about volatility.  */
1286   volatile_ok = 1;
1287 
1288   /* Try the most limited insn first, because there's no point
1289      including more than one in the machine description unless
1290      the more limited one has some advantage.  */
1291 
1292   for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1293        mode = GET_MODE_WIDER_MODE (mode))
1294     {
1295       enum insn_code code = direct_optab_handler (movmem_optab, mode);
1296 
1297       if (code != CODE_FOR_nothing
1298 	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1299 	     here because if SIZE is less than the mode mask, as it is
1300 	     returned by the macro, it will definitely be less than the
1301 	     actual mode mask.  Since SIZE is within the Pmode address
1302 	     space, we limit MODE to Pmode.  */
1303 	  && ((CONST_INT_P (size)
1304 	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
1305 		   <= (GET_MODE_MASK (mode) >> 1)))
1306 	      || max_size <= (GET_MODE_MASK (mode) >> 1)
1307 	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
1308 	{
1309 	  struct expand_operand ops[9];
1310 	  unsigned int nops;
1311 
1312 	  /* ??? When called via emit_block_move_for_call, it'd be
1313 	     nice if there were some way to inform the backend, so
1314 	     that it doesn't fail the expansion because it thinks
1315 	     emitting the libcall would be more efficient.  */
1316 	  nops = insn_data[(int) code].n_generator_args;
1317 	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
1318 
1319 	  create_fixed_operand (&ops[0], x);
1320 	  create_fixed_operand (&ops[1], y);
1321 	  /* The check above guarantees that this size conversion is valid.  */
1322 	  create_convert_operand_to (&ops[2], size, mode, true);
1323 	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
1324 	  if (nops >= 6)
1325 	    {
1326 	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
1327 	      create_integer_operand (&ops[5], expected_size);
1328 	    }
1329 	  if (nops >= 8)
1330 	    {
1331 	      create_integer_operand (&ops[6], min_size);
1332 	      /* If we cannot represent the maximal size,
1333 		 make the parameter NULL.  */
1334 	      if ((HOST_WIDE_INT) max_size != -1)
1335 	        create_integer_operand (&ops[7], max_size);
1336 	      else
1337 		create_fixed_operand (&ops[7], NULL);
1338 	    }
1339 	  if (nops == 9)
1340 	    {
1341 	      /* If we cannot represent the maximal size,
1342 		 make the parameter NULL.  */
1343 	      if ((HOST_WIDE_INT) probable_max_size != -1)
1344 	        create_integer_operand (&ops[8], probable_max_size);
1345 	      else
1346 		create_fixed_operand (&ops[8], NULL);
1347 	    }
1348 	  if (maybe_expand_insn (code, nops, ops))
1349 	    {
1350 	      volatile_ok = save_volatile_ok;
1351 	      return true;
1352 	    }
1353 	}
1354     }
1355 
1356   volatile_ok = save_volatile_ok;
1357   return false;
1358 }
1359 
1360 /* A subroutine of emit_block_move.  Expand a call to memcpy.
1361    Return the return value from memcpy, 0 otherwise.  */
1362 
1363 rtx
1364 emit_block_move_via_libcall (rtx dst, rtx src, rtx size, bool tailcall)
1365 {
1366   rtx dst_addr, src_addr;
1367   tree call_expr, fn, src_tree, dst_tree, size_tree;
1368   machine_mode size_mode;
1369   rtx retval;
1370 
1371   /* Emit code to copy the addresses of DST and SRC and SIZE into new
1372      pseudos.  We can then place those new pseudos into a VAR_DECL and
1373      use them later.  */
1374 
1375   dst_addr = copy_addr_to_reg (XEXP (dst, 0));
1376   src_addr = copy_addr_to_reg (XEXP (src, 0));
1377 
1378   dst_addr = convert_memory_address (ptr_mode, dst_addr);
1379   src_addr = convert_memory_address (ptr_mode, src_addr);
1380 
1381   dst_tree = make_tree (ptr_type_node, dst_addr);
1382   src_tree = make_tree (ptr_type_node, src_addr);
1383 
1384   size_mode = TYPE_MODE (sizetype);
1385 
1386   size = convert_to_mode (size_mode, size, 1);
1387   size = copy_to_mode_reg (size_mode, size);
1388 
1389   /* It is incorrect to use the libcall calling conventions to call
1390      memcpy in this context.  This could be a user call to memcpy and
1391      the user may wish to examine the return value from memcpy.  For
1392      targets where libcalls and normal calls have different conventions
1393      for returning pointers, we could end up generating incorrect code.  */
1394 
1395   size_tree = make_tree (sizetype, size);
1396 
1397   fn = emit_block_move_libcall_fn (true);
1398   call_expr = build_call_expr (fn, 3, dst_tree, src_tree, size_tree);
1399   CALL_EXPR_TAILCALL (call_expr) = tailcall;
1400 
1401   retval = expand_normal (call_expr);
1402 
1403   return retval;
1404 }
1405 
1406 /* A subroutine of emit_block_move_via_libcall.  Create the tree node
1407    for the function we use for block copies.  */
1408 
1409 static GTY(()) tree block_move_fn;
1410 
1411 void
1412 init_block_move_fn (const char *asmspec)
1413 {
1414   if (!block_move_fn)
1415     {
1416       tree args, fn, attrs, attr_args;
1417 
1418       fn = get_identifier ("memcpy");
1419       args = build_function_type_list (ptr_type_node, ptr_type_node,
1420 				       const_ptr_type_node, sizetype,
1421 				       NULL_TREE);
1422 
1423       fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
1424       DECL_EXTERNAL (fn) = 1;
1425       TREE_PUBLIC (fn) = 1;
1426       DECL_ARTIFICIAL (fn) = 1;
1427       TREE_NOTHROW (fn) = 1;
1428       DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
1429       DECL_VISIBILITY_SPECIFIED (fn) = 1;
1430 
1431       attr_args = build_tree_list (NULL_TREE, build_string (1, "1"));
1432       attrs = tree_cons (get_identifier ("fn spec"), attr_args, NULL);
1433 
1434       decl_attributes (&fn, attrs, ATTR_FLAG_BUILT_IN);
1435 
1436       block_move_fn = fn;
1437     }
1438 
1439   if (asmspec)
1440     set_user_assembler_name (block_move_fn, asmspec);
1441 }
1442 
1443 static tree
1444 emit_block_move_libcall_fn (int for_call)
1445 {
1446   static bool emitted_extern;
1447 
1448   if (!block_move_fn)
1449     init_block_move_fn (NULL);
1450 
1451   if (for_call && !emitted_extern)
1452     {
1453       emitted_extern = true;
1454       make_decl_rtl (block_move_fn);
1455     }
1456 
1457   return block_move_fn;
1458 }
1459 
1460 /* A subroutine of emit_block_move.  Copy the data via an explicit
1461    loop.  This is used only when libcalls are forbidden.  */
1462 /* ??? It'd be nice to copy in hunks larger than QImode.  */
1463 
1464 static void
1465 emit_block_move_via_loop (rtx x, rtx y, rtx size,
1466 			  unsigned int align ATTRIBUTE_UNUSED)
1467 {
1468   rtx_code_label *cmp_label, *top_label;
1469   rtx iter, x_addr, y_addr, tmp;
1470   machine_mode x_addr_mode = get_address_mode (x);
1471   machine_mode y_addr_mode = get_address_mode (y);
1472   machine_mode iter_mode;
1473 
1474   iter_mode = GET_MODE (size);
1475   if (iter_mode == VOIDmode)
1476     iter_mode = word_mode;
1477 
1478   top_label = gen_label_rtx ();
1479   cmp_label = gen_label_rtx ();
1480   iter = gen_reg_rtx (iter_mode);
1481 
1482   emit_move_insn (iter, const0_rtx);
1483 
1484   x_addr = force_operand (XEXP (x, 0), NULL_RTX);
1485   y_addr = force_operand (XEXP (y, 0), NULL_RTX);
1486   do_pending_stack_adjust ();
1487 
1488   emit_jump (cmp_label);
1489   emit_label (top_label);
1490 
1491   tmp = convert_modes (x_addr_mode, iter_mode, iter, true);
1492   x_addr = simplify_gen_binary (PLUS, x_addr_mode, x_addr, tmp);
1493 
1494   if (x_addr_mode != y_addr_mode)
1495     tmp = convert_modes (y_addr_mode, iter_mode, iter, true);
1496   y_addr = simplify_gen_binary (PLUS, y_addr_mode, y_addr, tmp);
1497 
1498   x = change_address (x, QImode, x_addr);
1499   y = change_address (y, QImode, y_addr);
1500 
1501   emit_move_insn (x, y);
1502 
1503   tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
1504 			     true, OPTAB_LIB_WIDEN);
1505   if (tmp != iter)
1506     emit_move_insn (iter, tmp);
1507 
1508   emit_label (cmp_label);
1509 
1510   emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
1511 			   true, top_label, REG_BR_PROB_BASE * 90 / 100);
1512 }
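
/* The loop emitted above has this shape, in C-like pseudocode:

     iter = 0;
     goto cmp;
   top:
     *(x + iter) = *(y + iter);     one byte (QImode) per iteration
     iter = iter + 1;
   cmp:
     if (iter < size) goto top;     unsigned comparison  */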
1513 
1514 /* Copy all or part of a value X into registers starting at REGNO.
1515    The number of registers to be filled is NREGS.  */
1516 
1517 void
1518 move_block_to_reg (int regno, rtx x, int nregs, machine_mode mode)
1519 {
1520   int i;
1521 #ifdef HAVE_load_multiple
1522   rtx pat;
1523   rtx_insn *last;
1524 #endif
1525 
1526   if (nregs == 0)
1527     return;
1528 
1529   if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
1530     x = validize_mem (force_const_mem (mode, x));
1531 
1532   /* See if the machine can do this with a load multiple insn.  */
1533 #ifdef HAVE_load_multiple
1534   if (HAVE_load_multiple)
1535     {
1536       last = get_last_insn ();
1537       pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1538 			       GEN_INT (nregs));
1539       if (pat)
1540 	{
1541 	  emit_insn (pat);
1542 	  return;
1543 	}
1544       else
1545 	delete_insns_since (last);
1546     }
1547 #endif
1548 
1549   for (i = 0; i < nregs; i++)
1550     emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1551 		    operand_subword_force (x, i, mode));
1552 }
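
/* Usage sketch (the register number is hypothetical): copying a DImode value
   into two consecutive word-sized hard registers on a 32-bit target, where X
   is the DImode source rtx:

     move_block_to_reg (3, x, 2, DImode);

   i.e. words 0 and 1 of X end up in hard registers 3 and 4, either via a
   load_multiple pattern or via individual word moves.  */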
1553 
1554 /* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1555    The number of registers to be filled is NREGS.  */
1556 
1557 void
1558 move_block_from_reg (int regno, rtx x, int nregs)
1559 {
1560   int i;
1561 
1562   if (nregs == 0)
1563     return;
1564 
1565   /* See if the machine can do this with a store multiple insn.  */
1566 #ifdef HAVE_store_multiple
1567   if (HAVE_store_multiple)
1568     {
1569       rtx_insn *last = get_last_insn ();
1570       rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1571 				    GEN_INT (nregs));
1572       if (pat)
1573 	{
1574 	  emit_insn (pat);
1575 	  return;
1576 	}
1577       else
1578 	delete_insns_since (last);
1579     }
1580 #endif
1581 
1582   for (i = 0; i < nregs; i++)
1583     {
1584       rtx tem = operand_subword (x, i, 1, BLKmode);
1585 
1586       gcc_assert (tem);
1587 
1588       emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1589     }
1590 }
1591 
1592 /* Generate a PARALLEL rtx for a new non-consecutive group of registers from
1593    ORIG, where ORIG is a non-consecutive group of registers represented by
1594    a PARALLEL.  The clone is identical to the original except in that the
1595    original set of registers is replaced by a new set of pseudo registers.
1596    The new set has the same modes as the original set.  */
1597 
1598 rtx
1599 gen_group_rtx (rtx orig)
1600 {
1601   int i, length;
1602   rtx *tmps;
1603 
1604   gcc_assert (GET_CODE (orig) == PARALLEL);
1605 
1606   length = XVECLEN (orig, 0);
1607   tmps = XALLOCAVEC (rtx, length);
1608 
1609   /* Skip a NULL entry in first slot.  */
1610   i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
1611 
1612   if (i)
1613     tmps[0] = 0;
1614 
1615   for (; i < length; i++)
1616     {
1617       machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
1618       rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
1619 
1620       tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
1621     }
1622 
1623   return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
1624 }
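
/* ORIG above has the usual register-group shape, e.g. for a value split
   across two registers (the register numbers are hypothetical):

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   The clone keeps the modes and byte offsets but replaces each hard register
   with a freshly allocated pseudo.  */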
1625 
1626 /* A subroutine of emit_group_load.  Arguments as for emit_group_load,
1627    except that values are placed in TMPS[i], and must later be moved
1628    into corresponding XEXP (XVECEXP (DST, 0, i), 0) element.  */
1629 
1630 static void
1631 emit_group_load_1 (rtx *tmps, rtx dst, rtx orig_src, tree type, int ssize)
1632 {
1633   rtx src;
1634   int start, i;
1635   machine_mode m = GET_MODE (orig_src);
1636 
1637   gcc_assert (GET_CODE (dst) == PARALLEL);
1638 
1639   if (m != VOIDmode
1640       && !SCALAR_INT_MODE_P (m)
1641       && !MEM_P (orig_src)
1642       && GET_CODE (orig_src) != CONCAT)
1643     {
1644       machine_mode imode = int_mode_for_mode (GET_MODE (orig_src));
1645       if (imode == BLKmode)
1646 	src = assign_stack_temp (GET_MODE (orig_src), ssize);
1647       else
1648 	src = gen_reg_rtx (imode);
1649       if (imode != BLKmode)
1650 	src = gen_lowpart (GET_MODE (orig_src), src);
1651       emit_move_insn (src, orig_src);
1652       /* ...and back again.  */
1653       if (imode != BLKmode)
1654 	src = gen_lowpart (imode, src);
1655       emit_group_load_1 (tmps, dst, src, type, ssize);
1656       return;
1657     }
1658 
1659   /* Check for a NULL entry, used to indicate that the parameter goes
1660      both on the stack and in registers.  */
1661   if (XEXP (XVECEXP (dst, 0, 0), 0))
1662     start = 0;
1663   else
1664     start = 1;
1665 
1666   /* Process the pieces.  */
1667   for (i = start; i < XVECLEN (dst, 0); i++)
1668     {
1669       machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1670       HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1671       unsigned int bytelen = GET_MODE_SIZE (mode);
1672       int shift = 0;
1673 
1674       /* Handle trailing fragments that run over the size of the struct.  */
1675       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1676 	{
1677 	  /* Arrange to shift the fragment to where it belongs.
1678 	     extract_bit_field loads to the lsb of the reg.  */
1679 	  if (
1680 #ifdef BLOCK_REG_PADDING
1681 	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1682 	      == (BYTES_BIG_ENDIAN ? upward : downward)
1683 #else
1684 	      BYTES_BIG_ENDIAN
1685 #endif
1686 	      )
1687 	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1688 	  bytelen = ssize - bytepos;
1689 	  gcc_assert (bytelen > 0);
1690 	}
1691 
1692       /* If we won't be loading directly from memory, protect the real source
1693 	 from strange tricks we might play; but make sure that the source can
1694 	 be loaded directly into the destination.  */
1695       src = orig_src;
1696       if (!MEM_P (orig_src)
1697 	  && (!CONSTANT_P (orig_src)
1698 	      || (GET_MODE (orig_src) != mode
1699 		  && GET_MODE (orig_src) != VOIDmode)))
1700 	{
1701 	  if (GET_MODE (orig_src) == VOIDmode)
1702 	    src = gen_reg_rtx (mode);
1703 	  else
1704 	    src = gen_reg_rtx (GET_MODE (orig_src));
1705 
1706 	  emit_move_insn (src, orig_src);
1707 	}
1708 
1709       /* Optimize the access just a bit.  */
1710       if (MEM_P (src)
1711 	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1712 	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1713 	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1714 	  && bytelen == GET_MODE_SIZE (mode))
1715 	{
1716 	  tmps[i] = gen_reg_rtx (mode);
1717 	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1718 	}
1719       else if (COMPLEX_MODE_P (mode)
1720 	       && GET_MODE (src) == mode
1721 	       && bytelen == GET_MODE_SIZE (mode))
1722 	/* Let emit_move_complex do the bulk of the work.  */
1723 	tmps[i] = src;
1724       else if (GET_CODE (src) == CONCAT)
1725 	{
1726 	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1727 	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1728 
1729 	  if ((bytepos == 0 && bytelen == slen0)
1730 	      || (bytepos != 0 && bytepos + bytelen <= slen))
1731 	    {
1732 	      /* The following assumes that the concatenated objects all
1733 		 have the same size.  In this case, a simple calculation
1734 		 can be used to determine the object and the bit field
1735 		 to be extracted.  */
1736 	      tmps[i] = XEXP (src, bytepos / slen0);
1737 	      if (! CONSTANT_P (tmps[i])
1738 		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1739 		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1740 					     (bytepos % slen0) * BITS_PER_UNIT,
1741 					     1, NULL_RTX, mode, mode);
1742 	    }
1743 	  else
1744 	    {
1745 	      rtx mem;
1746 
1747 	      gcc_assert (!bytepos);
1748 	      mem = assign_stack_temp (GET_MODE (src), slen);
1749 	      emit_move_insn (mem, src);
1750 	      tmps[i] = extract_bit_field (mem, bytelen * BITS_PER_UNIT,
1751 					   0, 1, NULL_RTX, mode, mode);
1752 	    }
1753 	}
1754       /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1755 	 SIMD register, which is currently broken.  Until we get GCC
1756 	 to emit proper RTL for these cases, let's dump to memory.  */
1757       else if (VECTOR_MODE_P (GET_MODE (dst))
1758 	       && REG_P (src))
1759 	{
1760 	  int slen = GET_MODE_SIZE (GET_MODE (src));
1761 	  rtx mem;
1762 
1763 	  mem = assign_stack_temp (GET_MODE (src), slen);
1764 	  emit_move_insn (mem, src);
1765 	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
1766 	}
1767       else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1768                && XVECLEN (dst, 0) > 1)
1769         tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
1770       else if (CONSTANT_P (src))
1771 	{
1772 	  HOST_WIDE_INT len = (HOST_WIDE_INT) bytelen;
1773 
1774 	  if (len == ssize)
1775 	    tmps[i] = src;
1776 	  else
1777 	    {
1778 	      rtx first, second;
1779 
1780 	      /* TODO: const_wide_int can have sizes other than this...  */
1781 	      gcc_assert (2 * len == ssize);
1782 	      split_double (src, &first, &second);
1783 	      if (i)
1784 		tmps[i] = second;
1785 	      else
1786 		tmps[i] = first;
1787 	    }
1788 	}
1789       else if (REG_P (src) && GET_MODE (src) == mode)
1790 	tmps[i] = src;
1791       else
1792 	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1793 				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1794 				     mode, mode);
1795 
1796       if (shift)
1797 	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
1798 				shift, tmps[i], 0);
1799     }
1800 }
1801 
1802 /* Emit code to move a block SRC of type TYPE to a block DST,
1803    where DST is non-consecutive registers represented by a PARALLEL.
1804    SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1805    if not known.  */
1806 
1807 void
1808 emit_group_load (rtx dst, rtx src, tree type, int ssize)
1809 {
1810   rtx *tmps;
1811   int i;
1812 
1813   tmps = XALLOCAVEC (rtx, XVECLEN (dst, 0));
1814   emit_group_load_1 (tmps, dst, src, type, ssize);
1815 
1816   /* Copy the extracted pieces into the proper (probable) hard regs.  */
1817   for (i = 0; i < XVECLEN (dst, 0); i++)
1818     {
1819       rtx d = XEXP (XVECEXP (dst, 0, i), 0);
1820       if (d == NULL)
1821 	continue;
1822       emit_move_insn (d, tmps[i]);
1823     }
1824 }
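
/* Illustrative sketch of loading a block of memory into a register group
   with emit_group_load.  TYPE names some small aggregate type and GROUP a
   PARALLEL destination such as the one sketched after gen_group_rtx above;
   both names are hypothetical.

     int size = int_size_in_bytes (type);
     rtx mem = assign_stack_temp (BLKmode, size);
     emit_group_load (group, mem, type, size);  */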
1825 
1826 /* Similar, but load SRC into new pseudos in a format that looks like
1827    PARALLEL.  This can later be fed to emit_group_move to get things
1828    in the right place.  */
1829 
1830 rtx
1831 emit_group_load_into_temps (rtx parallel, rtx src, tree type, int ssize)
1832 {
1833   rtvec vec;
1834   int i;
1835 
1836   vec = rtvec_alloc (XVECLEN (parallel, 0));
1837   emit_group_load_1 (&RTVEC_ELT (vec, 0), parallel, src, type, ssize);
1838 
1839   /* Convert the vector to look just like the original PARALLEL, except
1840      with the computed values.  */
1841   for (i = 0; i < XVECLEN (parallel, 0); i++)
1842     {
1843       rtx e = XVECEXP (parallel, 0, i);
1844       rtx d = XEXP (e, 0);
1845 
1846       if (d)
1847 	{
1848 	  d = force_reg (GET_MODE (d), RTVEC_ELT (vec, i));
1849 	  e = alloc_EXPR_LIST (REG_NOTE_KIND (e), d, XEXP (e, 1));
1850 	}
1851       RTVEC_ELT (vec, i) = e;
1852     }
1853 
1854   return gen_rtx_PARALLEL (GET_MODE (parallel), vec);
1855 }
1856 
1857 /* Emit code to move a block SRC to block DST, where SRC and DST are
1858    non-consecutive groups of registers, each represented by a PARALLEL.  */
1859 
1860 void
1861 emit_group_move (rtx dst, rtx src)
1862 {
1863   int i;
1864 
1865   gcc_assert (GET_CODE (src) == PARALLEL
1866 	      && GET_CODE (dst) == PARALLEL
1867 	      && XVECLEN (src, 0) == XVECLEN (dst, 0));
1868 
1869   /* Skip first entry if NULL.  */
1870   for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1871     emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1872 		    XEXP (XVECEXP (src, 0, i), 0));
1873 }
1874 
1875 /* Move a group of registers represented by a PARALLEL into pseudos.  */
1876 
1877 rtx
1878 emit_group_move_into_temps (rtx src)
1879 {
1880   rtvec vec = rtvec_alloc (XVECLEN (src, 0));
1881   int i;
1882 
1883   for (i = 0; i < XVECLEN (src, 0); i++)
1884     {
1885       rtx e = XVECEXP (src, 0, i);
1886       rtx d = XEXP (e, 0);
1887 
1888       if (d)
1889 	e = alloc_EXPR_LIST (REG_NOTE_KIND (e), copy_to_reg (d), XEXP (e, 1));
1890       RTVEC_ELT (vec, i) = e;
1891     }
1892 
1893   return gen_rtx_PARALLEL (GET_MODE (src), vec);
1894 }
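
/* Illustrative sketch of pairing the two helpers above: a group of hard
   registers (HARD_GROUP, hypothetical) is parked in pseudos while other
   code that may clobber the hard registers runs, and is moved back when
   the hard registers are needed again.

     rtx temps = emit_group_move_into_temps (hard_group);
     ... code that may clobber the hard registers ...
     emit_group_move (hard_group, temps);  */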
1895 
1896 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1897    where SRC is non-consecutive registers represented by a PARALLEL.
1898    SSIZE represents the total size of block ORIG_DST, or -1 if not
1899    known.  */
1900 
1901 void
1902 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
1903 {
1904   rtx *tmps, dst;
1905   int start, finish, i;
1906   machine_mode m = GET_MODE (orig_dst);
1907 
1908   gcc_assert (GET_CODE (src) == PARALLEL);
1909 
1910   if (!SCALAR_INT_MODE_P (m)
1911       && !MEM_P (orig_dst) && GET_CODE (orig_dst) != CONCAT)
1912     {
1913       machine_mode imode = int_mode_for_mode (GET_MODE (orig_dst));
1914       if (imode == BLKmode)
1915         dst = assign_stack_temp (GET_MODE (orig_dst), ssize);
1916       else
1917         dst = gen_reg_rtx (imode);
1918       emit_group_store (dst, src, type, ssize);
1919       if (imode != BLKmode)
1920         dst = gen_lowpart (GET_MODE (orig_dst), dst);
1921       emit_move_insn (orig_dst, dst);
1922       return;
1923     }
1924 
1925   /* Check for a NULL entry, used to indicate that the parameter goes
1926      both on the stack and in registers.  */
1927   if (XEXP (XVECEXP (src, 0, 0), 0))
1928     start = 0;
1929   else
1930     start = 1;
1931   finish = XVECLEN (src, 0);
1932 
1933   tmps = XALLOCAVEC (rtx, finish);
1934 
1935   /* Copy the (probable) hard regs into pseudos.  */
1936   for (i = start; i < finish; i++)
1937     {
1938       rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1939       if (!REG_P (reg) || REGNO (reg) < FIRST_PSEUDO_REGISTER)
1940 	{
1941 	  tmps[i] = gen_reg_rtx (GET_MODE (reg));
1942 	  emit_move_insn (tmps[i], reg);
1943 	}
1944       else
1945 	tmps[i] = reg;
1946     }
1947 
1948   /* If we won't be storing directly into memory, protect the real destination
1949      from strange tricks we might play.  */
1950   dst = orig_dst;
1951   if (GET_CODE (dst) == PARALLEL)
1952     {
1953       rtx temp;
1954 
1955       /* We can get a PARALLEL dst if there is a conditional expression in
1956 	 a return statement.  In that case, the dst and src are the same,
1957 	 so no action is necessary.  */
1958       if (rtx_equal_p (dst, src))
1959 	return;
1960 
1961       /* It is unclear if we can ever reach here, but we may as well handle
1962 	 it.  Allocate a temporary, and split this into a store/load to/from
1963 	 the temporary.  */
1964       temp = assign_stack_temp (GET_MODE (dst), ssize);
1965       emit_group_store (temp, src, type, ssize);
1966       emit_group_load (dst, temp, type, ssize);
1967       return;
1968     }
1969   else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
1970     {
1971       machine_mode outer = GET_MODE (dst);
1972       machine_mode inner;
1973       HOST_WIDE_INT bytepos;
1974       bool done = false;
1975       rtx temp;
1976 
1977       if (!REG_P (dst) || REGNO (dst) < FIRST_PSEUDO_REGISTER)
1978 	dst = gen_reg_rtx (outer);
1979 
1980       /* Make life a bit easier for combine.  */
1981       /* If the first element of the vector is the low part
1982 	 of the destination mode, use a paradoxical subreg to
1983 	 initialize the destination.  */
1984       if (start < finish)
1985 	{
1986 	  inner = GET_MODE (tmps[start]);
1987 	  bytepos = subreg_lowpart_offset (inner, outer);
1988 	  if (INTVAL (XEXP (XVECEXP (src, 0, start), 1)) == bytepos)
1989 	    {
1990 	      temp = simplify_gen_subreg (outer, tmps[start],
1991 					  inner, 0);
1992 	      if (temp)
1993 		{
1994 		  emit_move_insn (dst, temp);
1995 		  done = true;
1996 		  start++;
1997 		}
1998 	    }
1999 	}
2000 
2001       /* If the first element wasn't the low part, try the last.  */
2002       if (!done
2003 	  && start < finish - 1)
2004 	{
2005 	  inner = GET_MODE (tmps[finish - 1]);
2006 	  bytepos = subreg_lowpart_offset (inner, outer);
2007 	  if (INTVAL (XEXP (XVECEXP (src, 0, finish - 1), 1)) == bytepos)
2008 	    {
2009 	      temp = simplify_gen_subreg (outer, tmps[finish - 1],
2010 					  inner, 0);
2011 	      if (temp)
2012 		{
2013 		  emit_move_insn (dst, temp);
2014 		  done = true;
2015 		  finish--;
2016 		}
2017 	    }
2018 	}
2019 
2020       /* Otherwise, simply initialize the result to zero.  */
2021       if (!done)
2022         emit_move_insn (dst, CONST0_RTX (outer));
2023     }
2024 
2025   /* Process the pieces.  */
2026   for (i = start; i < finish; i++)
2027     {
2028       HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2029       machine_mode mode = GET_MODE (tmps[i]);
2030       unsigned int bytelen = GET_MODE_SIZE (mode);
2031       unsigned int adj_bytelen;
2032       rtx dest = dst;
2033 
2034       /* Handle trailing fragments that run over the size of the struct.  */
2035       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2036 	adj_bytelen = ssize - bytepos;
2037       else
2038 	adj_bytelen = bytelen;
2039 
2040       if (GET_CODE (dst) == CONCAT)
2041 	{
2042 	  if (bytepos + adj_bytelen
2043 	      <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2044 	    dest = XEXP (dst, 0);
2045 	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2046 	    {
2047 	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2048 	      dest = XEXP (dst, 1);
2049 	    }
2050 	  else
2051 	    {
2052 	      machine_mode dest_mode = GET_MODE (dest);
2053 	      machine_mode tmp_mode = GET_MODE (tmps[i]);
2054 
2055 	      gcc_assert (bytepos == 0 && XVECLEN (src, 0));
2056 
2057 	      if (GET_MODE_ALIGNMENT (dest_mode)
2058 		  >= GET_MODE_ALIGNMENT (tmp_mode))
2059 		{
2060 		  dest = assign_stack_temp (dest_mode,
2061 					    GET_MODE_SIZE (dest_mode));
2062 		  emit_move_insn (adjust_address (dest,
2063 						  tmp_mode,
2064 						  bytepos),
2065 				  tmps[i]);
2066 		  dst = dest;
2067 		}
2068 	      else
2069 		{
2070 		  dest = assign_stack_temp (tmp_mode,
2071 					    GET_MODE_SIZE (tmp_mode));
2072 		  emit_move_insn (dest, tmps[i]);
2073 		  dst = adjust_address (dest, dest_mode, bytepos);
2074 		}
2075 	      break;
2076 	    }
2077 	}
2078 
2079       /* Handle trailing fragments that run over the size of the struct.  */
2080       if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2081 	{
2082 	  /* store_bit_field always takes its value from the lsb.
2083 	     Move the fragment to the lsb if it's not already there.  */
2084 	  if (
2085 #ifdef BLOCK_REG_PADDING
2086 	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2087 	      == (BYTES_BIG_ENDIAN ? upward : downward)
2088 #else
2089 	      BYTES_BIG_ENDIAN
2090 #endif
2091 	      )
2092 	    {
2093 	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2094 	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
2095 				      shift, tmps[i], 0);
2096 	    }
2097 
2098 	  /* Make sure not to write past the end of the struct.  */
2099 	  store_bit_field (dest,
2100 			   adj_bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2101 			   bytepos * BITS_PER_UNIT, ssize * BITS_PER_UNIT - 1,
2102 			   VOIDmode, tmps[i]);
2103 	}
2104 
2105       /* Optimize the access just a bit.  */
2106       else if (MEM_P (dest)
2107 	       && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2108 		   || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2109 	       && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2110 	       && bytelen == GET_MODE_SIZE (mode))
2111 	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2112 
2113       else
2114 	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2115 			 0, 0, mode, tmps[i]);
2116     }
2117 
2118   /* Copy from the pseudo into the (probable) hard reg.  */
2119   if (orig_dst != dst)
2120     emit_move_insn (orig_dst, dst);
2121 }
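
/* Illustrative sketch of spilling a register group to memory with
   emit_group_store, reusing the hypothetical GROUP, TYPE and SIZE names
   from the sketches above.

     rtx mem = assign_stack_temp (BLKmode, size);
     emit_group_store (mem, group, type, size);  */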
2122 
2123 /* Return a form of X that does not use a PARALLEL.  TYPE is the type
2124    of the value stored in X.  */
2125 
2126 rtx
2127 maybe_emit_group_store (rtx x, tree type)
2128 {
2129   machine_mode mode = TYPE_MODE (type);
2130   gcc_checking_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
2131   if (GET_CODE (x) == PARALLEL)
2132     {
2133       rtx result = gen_reg_rtx (mode);
2134       emit_group_store (result, x, type, int_size_in_bytes (type));
2135       return result;
2136     }
2137   return x;
2138 }
2139 
2140 /* Copy a BLKmode object of TYPE out of a register SRCREG into TARGET.
2141 
2142    This is used on targets that return BLKmode values in registers.  */
2143 
2144 void
2145 copy_blkmode_from_reg (rtx target, rtx srcreg, tree type)
2146 {
2147   unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2148   rtx src = NULL, dst = NULL;
2149   unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2150   unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2151   machine_mode mode = GET_MODE (srcreg);
2152   machine_mode tmode = GET_MODE (target);
2153   machine_mode copy_mode;
2154 
2155   /* BLKmode registers created in the back-end shouldn't have survived.  */
2156   gcc_assert (mode != BLKmode);
2157 
2158   /* If the structure doesn't take up a whole number of words, see whether
2159      SRCREG is padded on the left or on the right.  If it's on the left,
2160      set PADDING_CORRECTION to the number of bits to skip.
2161 
2162      In most ABIs, the structure will be returned at the least significant
2163      end of the register, which translates to right padding on little-endian
2164      targets and left padding on big-endian targets.  The opposite
2165      holds if the structure is returned at the most significant
2166      end of the register.  */
2167   if (bytes % UNITS_PER_WORD != 0
2168       && (targetm.calls.return_in_msb (type)
2169 	  ? !BYTES_BIG_ENDIAN
2170 	  : BYTES_BIG_ENDIAN))
2171     padding_correction
2172       = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2173 
2174   /* We can use a single move if we have an exact mode for the size.  */
2175   else if (MEM_P (target)
2176 	   && (!SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
2177 	       || MEM_ALIGN (target) >= GET_MODE_ALIGNMENT (mode))
2178 	   && bytes == GET_MODE_SIZE (mode))
2179   {
2180     emit_move_insn (adjust_address (target, mode, 0), srcreg);
2181     return;
2182   }
2183 
2184   /* And if we additionally have the same mode for a register.  */
2185   else if (REG_P (target)
2186 	   && GET_MODE (target) == mode
2187 	   && bytes == GET_MODE_SIZE (mode))
2188   {
2189     emit_move_insn (target, srcreg);
2190     return;
2191   }
2192 
2193   /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2194      into a new pseudo which is a full word.  */
2195   if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2196     {
2197       srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2198       mode = word_mode;
2199     }
2200 
2201   /* Copy the structure BITSIZE bits at a time.  If the target lives in
2202      memory, take care of not reading/writing past its end by selecting
2203      a copy mode suited to BITSIZE.  This should always be possible given
2204      how it is computed.
2205 
2206      If the target lives in register, make sure not to select a copy mode
2207      larger than the mode of the register.
2208 
2209      We could probably emit more efficient code for machines which do not use
2210      strict alignment, but it doesn't seem worth the effort at the current
2211      time.  */
2212 
2213   copy_mode = word_mode;
2214   if (MEM_P (target))
2215     {
2216       machine_mode mem_mode = mode_for_size (bitsize, MODE_INT, 1);
2217       if (mem_mode != BLKmode)
2218 	copy_mode = mem_mode;
2219     }
2220   else if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2221     copy_mode = tmode;
2222 
2223   for (bitpos = 0, xbitpos = padding_correction;
2224        bitpos < bytes * BITS_PER_UNIT;
2225        bitpos += bitsize, xbitpos += bitsize)
2226     {
2227       /* We need a new source operand each time xbitpos is on a
2228 	 word boundary and when xbitpos == padding_correction
2229 	 (the first time through).  */
2230       if (xbitpos % BITS_PER_WORD == 0 || xbitpos == padding_correction)
2231 	src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, mode);
2232 
2233       /* We need a new destination operand each time bitpos is on
2234 	 a word boundary.  */
2235       if (REG_P (target) && GET_MODE_BITSIZE (tmode) < BITS_PER_WORD)
2236 	dst = target;
2237       else if (bitpos % BITS_PER_WORD == 0)
2238 	dst = operand_subword (target, bitpos / BITS_PER_WORD, 1, tmode);
2239 
2240       /* Use xbitpos for the source extraction (right justified) and
2241 	 bitpos for the destination store (left justified).  */
2242       store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, 0, 0, copy_mode,
2243 		       extract_bit_field (src, bitsize,
2244 					  xbitpos % BITS_PER_WORD, 1,
2245 					  NULL_RTX, copy_mode, copy_mode));
2246     }
2247 }
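
/* Illustrative sketch: unpacking a BLKmode value returned in a register.
   RET_REG stands for the (hypothetical) hard register holding the returned
   bits and TYPE for the aggregate return type; the value is copied into a
   fresh BLKmode stack slot.

     rtx slot = assign_stack_temp (BLKmode, int_size_in_bytes (type));
     copy_blkmode_from_reg (slot, ret_reg, type);  */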
2248 
2249 /* Copy BLKmode value SRC into a register of mode MODE.  Return the
2250    register if it contains any data, otherwise return null.
2251 
2252    This is used on targets that return BLKmode values in registers.  */
2253 
2254 rtx
2255 copy_blkmode_to_reg (machine_mode mode, tree src)
2256 {
2257   int i, n_regs;
2258   unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0, bytes;
2259   unsigned int bitsize;
2260   rtx *dst_words, dst, x, src_word = NULL_RTX, dst_word = NULL_RTX;
2261   machine_mode dst_mode;
2262 
2263   gcc_assert (TYPE_MODE (TREE_TYPE (src)) == BLKmode);
2264 
2265   x = expand_normal (src);
2266 
2267   bytes = int_size_in_bytes (TREE_TYPE (src));
2268   if (bytes == 0)
2269     return NULL_RTX;
2270 
2271   /* If the structure doesn't take up a whole number of words, see
2272      whether the register value should be padded on the left or on
2273      the right.  Set PADDING_CORRECTION to the number of padding
2274      bits needed on the left side.
2275 
2276      In most ABIs, the structure will be returned at the least significant
2277      end of the register, which translates to right padding on little-endian
2278      targets and left padding on big-endian targets.  The opposite
2279      holds if the structure is returned at the most significant
2280      end of the register.  */
2281   if (bytes % UNITS_PER_WORD != 0
2282       && (targetm.calls.return_in_msb (TREE_TYPE (src))
2283 	  ? !BYTES_BIG_ENDIAN
2284 	  : BYTES_BIG_ENDIAN))
2285     padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2286 					   * BITS_PER_UNIT));
2287 
2288   n_regs = (bytes + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
2289   dst_words = XALLOCAVEC (rtx, n_regs);
2290   bitsize = MIN (TYPE_ALIGN (TREE_TYPE (src)), BITS_PER_WORD);
2291 
2292   /* Copy the structure BITSIZE bits at a time.  */
2293   for (bitpos = 0, xbitpos = padding_correction;
2294        bitpos < bytes * BITS_PER_UNIT;
2295        bitpos += bitsize, xbitpos += bitsize)
2296     {
2297       /* We need a new destination pseudo each time xbitpos is
2298 	 on a word boundary and when xbitpos == padding_correction
2299 	 (the first time through).  */
2300       if (xbitpos % BITS_PER_WORD == 0
2301 	  || xbitpos == padding_correction)
2302 	{
2303 	  /* Generate an appropriate register.  */
2304 	  dst_word = gen_reg_rtx (word_mode);
2305 	  dst_words[xbitpos / BITS_PER_WORD] = dst_word;
2306 
2307 	  /* Clear the destination before we move anything into it.  */
2308 	  emit_move_insn (dst_word, CONST0_RTX (word_mode));
2309 	}
2310 
2311       /* We need a new source operand each time bitpos is on a word
2312 	 boundary.  */
2313       if (bitpos % BITS_PER_WORD == 0)
2314 	src_word = operand_subword_force (x, bitpos / BITS_PER_WORD, BLKmode);
2315 
2316       /* Use bitpos for the source extraction (left justified) and
2317 	 xbitpos for the destination store (right justified).  */
2318       store_bit_field (dst_word, bitsize, xbitpos % BITS_PER_WORD,
2319 		       0, 0, word_mode,
2320 		       extract_bit_field (src_word, bitsize,
2321 					  bitpos % BITS_PER_WORD, 1,
2322 					  NULL_RTX, word_mode, word_mode));
2323     }
2324 
2325   if (mode == BLKmode)
2326     {
2327       /* Find the smallest integer mode large enough to hold the
2328 	 entire structure.  */
2329       for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2330 	   mode != VOIDmode;
2331 	   mode = GET_MODE_WIDER_MODE (mode))
2332 	/* Have we found a large enough mode?  */
2333 	if (GET_MODE_SIZE (mode) >= bytes)
2334 	  break;
2335 
2336       /* A suitable mode should have been found.  */
2337       gcc_assert (mode != VOIDmode);
2338     }
2339 
2340   if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (word_mode))
2341     dst_mode = word_mode;
2342   else
2343     dst_mode = mode;
2344   dst = gen_reg_rtx (dst_mode);
2345 
2346   for (i = 0; i < n_regs; i++)
2347     emit_move_insn (operand_subword (dst, i, 0, dst_mode), dst_words[i]);
2348 
2349   if (mode != dst_mode)
2350     dst = gen_lowpart (mode, dst);
2351 
2352   return dst;
2353 }
2354 
2355 /* Add a USE expression for REG to the (possibly empty) list pointed
2356    to by CALL_FUSAGE.  REG must denote a hard register.  */
2357 
2358 void
2359 use_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2360 {
2361   gcc_assert (REG_P (reg));
2362 
2363   if (!HARD_REGISTER_P (reg))
2364     return;
2365 
2366   *call_fusage
2367     = gen_rtx_EXPR_LIST (mode, gen_rtx_USE (VOIDmode, reg), *call_fusage);
2368 }
2369 
2370 /* Add a CLOBBER expression for REG to the (possibly empty) list pointed
2371    to by CALL_FUSAGE.  REG must denote a hard register.  */
2372 
2373 void
2374 clobber_reg_mode (rtx *call_fusage, rtx reg, machine_mode mode)
2375 {
2376   gcc_assert (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER);
2377 
2378   *call_fusage
2379     = gen_rtx_EXPR_LIST (mode, gen_rtx_CLOBBER (VOIDmode, reg), *call_fusage);
2380 }
2381 
2382 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2383    starting at REGNO.  All of these registers must be hard registers.  */
2384 
2385 void
2386 use_regs (rtx *call_fusage, int regno, int nregs)
2387 {
2388   int i;
2389 
2390   gcc_assert (regno + nregs <= FIRST_PSEUDO_REGISTER);
2391 
2392   for (i = 0; i < nregs; i++)
2393     use_reg (call_fusage, regno_reg_rtx[regno + i]);
2394 }
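
/* Illustrative sketch of recording argument registers in a CALL_FUSAGE list
   (the list later attached to the call insn as CALL_INSN_FUNCTION_USAGE);
   the register number and count below are hypothetical.

     rtx call_fusage = NULL_RTX;
     use_regs (&call_fusage, 3, 2);   -- adds USEs of hard regs 3 and 4  */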
2395 
2396 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2397    PARALLEL REGS.  This is for calls that pass values in multiple
2398    non-contiguous locations.  The Irix 6 ABI has examples of this.  */
2399 
2400 void
2401 use_group_regs (rtx *call_fusage, rtx regs)
2402 {
2403   int i;
2404 
2405   for (i = 0; i < XVECLEN (regs, 0); i++)
2406     {
2407       rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2408 
2409       /* A NULL entry means the parameter goes both on the stack and in
2410 	 registers.  This can also be a MEM for targets that pass values
2411 	 partially on the stack and partially in registers.  */
2412       if (reg != 0 && REG_P (reg))
2413 	use_reg (call_fusage, reg);
2414     }
2415 }
2416 
2417 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2418    assignment and the code of the expression on the RHS is CODE.  Return
2419    NULL otherwise.  */
2420 
2421 static gimple
2422 get_def_for_expr (tree name, enum tree_code code)
2423 {
2424   gimple def_stmt;
2425 
2426   if (TREE_CODE (name) != SSA_NAME)
2427     return NULL;
2428 
2429   def_stmt = get_gimple_for_ssa_name (name);
2430   if (!def_stmt
2431       || gimple_assign_rhs_code (def_stmt) != code)
2432     return NULL;
2433 
2434   return def_stmt;
2435 }
2436 
2437 #ifdef HAVE_conditional_move
2438 /* Return the defining gimple statement for SSA_NAME NAME if it is an
2439    assignment and the class of the expression on the RHS is CLASS.  Return
2440    NULL otherwise.  */
2441 
2442 static gimple
2443 get_def_for_expr_class (tree name, enum tree_code_class tclass)
2444 {
2445   gimple def_stmt;
2446 
2447   if (TREE_CODE (name) != SSA_NAME)
2448     return NULL;
2449 
2450   def_stmt = get_gimple_for_ssa_name (name);
2451   if (!def_stmt
2452       || TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)) != tclass)
2453     return NULL;
2454 
2455   return def_stmt;
2456 }
2457 #endif
2458 
2459 
2460 /* Determine whether the LEN bytes generated by CONSTFUN can be
2461    stored to memory using several move instructions.  CONSTFUNDATA is
2462    a pointer which will be passed as argument in every CONSTFUN call.
2463    ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
2464    a memset operation and false if it's a copy of a constant string.
2465    Return nonzero if a call to store_by_pieces should succeed.  */
2466 
2467 int
2468 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2469 		     rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2470 		     void *constfundata, unsigned int align, bool memsetp)
2471 {
2472   unsigned HOST_WIDE_INT l;
2473   unsigned int max_size;
2474   HOST_WIDE_INT offset = 0;
2475   machine_mode mode;
2476   enum insn_code icode;
2477   int reverse;
2478   /* cst is set but not used if LEGITIMATE_CONSTANT doesn't use it.  */
2479   rtx cst ATTRIBUTE_UNUSED;
2480 
2481   if (len == 0)
2482     return 1;
2483 
2484   if (!targetm.use_by_pieces_infrastructure_p (len, align,
2485 					       memsetp
2486 						 ? SET_BY_PIECES
2487 						 : STORE_BY_PIECES,
2488 					       optimize_insn_for_speed_p ()))
2489     return 0;
2490 
2491   align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2492 
2493   /* We would first store what we can in the largest integer mode, then go to
2494      successively smaller modes.  */
2495 
2496   for (reverse = 0;
2497        reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2498        reverse++)
2499     {
2500       l = len;
2501       max_size = STORE_MAX_PIECES + 1;
2502       while (max_size > 1 && l > 0)
2503 	{
2504 	  mode = widest_int_mode_for_size (max_size);
2505 
2506 	  if (mode == VOIDmode)
2507 	    break;
2508 
2509 	  icode = optab_handler (mov_optab, mode);
2510 	  if (icode != CODE_FOR_nothing
2511 	      && align >= GET_MODE_ALIGNMENT (mode))
2512 	    {
2513 	      unsigned int size = GET_MODE_SIZE (mode);
2514 
2515 	      while (l >= size)
2516 		{
2517 		  if (reverse)
2518 		    offset -= size;
2519 
2520 		  cst = (*constfun) (constfundata, offset, mode);
2521 		  if (!targetm.legitimate_constant_p (mode, cst))
2522 		    return 0;
2523 
2524 		  if (!reverse)
2525 		    offset += size;
2526 
2527 		  l -= size;
2528 		}
2529 	    }
2530 
2531 	  max_size = GET_MODE_SIZE (mode);
2532 	}
2533 
2534       /* The code above should have handled everything.  */
2535       gcc_assert (!l);
2536     }
2537 
2538   return 1;
2539 }
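
/* Illustrative sketch of the CONSTFUN protocol shared by can_store_by_pieces
   and store_by_pieces: the callback returns, for a given OFFSET and MODE,
   the constant to be stored at that offset.  The callback below replicates
   a single byte, as a memset-style caller might, and is only meant for
   integer modes no wider than a HOST_WIDE_INT; its name and the surrounding
   variables are hypothetical.

     static rtx
     repeated_byte_cst (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                        machine_mode mode)
     {
       unsigned char c = *(unsigned char *) data;
       unsigned HOST_WIDE_INT v = 0;
       unsigned int i;

       for (i = 0; i < GET_MODE_SIZE (mode); i++)
         v = (v << 8) | c;
       return gen_int_mode (v, mode);
     }

     unsigned char byte = 0xaa;
     if (can_store_by_pieces (len, repeated_byte_cst, &byte, align, true))
       store_by_pieces (to, len, repeated_byte_cst, &byte, align, true, 0);  */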
2540 
2541 /* Generate several move instructions to store LEN bytes generated by
2542    CONSTFUN to block TO.  (A MEM rtx with BLKmode).  CONSTFUNDATA is a
2543    pointer which will be passed as argument in every CONSTFUN call.
2544    ALIGN is maximum alignment we can assume.  MEMSETP is true if this is
2545    a memset operation and false if it's a copy of a constant string.
2546    If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2547    If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2548    mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
2549 
2550 rtx
2551 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2552 		 rtx (*constfun) (void *, HOST_WIDE_INT, machine_mode),
2553 		 void *constfundata, unsigned int align, bool memsetp, int endp)
2554 {
2555   machine_mode to_addr_mode = get_address_mode (to);
2556   struct store_by_pieces_d data;
2557 
2558   if (len == 0)
2559     {
2560       gcc_assert (endp != 2);
2561       return to;
2562     }
2563 
2564   gcc_assert (targetm.use_by_pieces_infrastructure_p
2565 		(len, align,
2566 		 memsetp
2567 		   ? SET_BY_PIECES
2568 		   : STORE_BY_PIECES,
2569 		 optimize_insn_for_speed_p ()));
2570 
2571   data.constfun = constfun;
2572   data.constfundata = constfundata;
2573   data.len = len;
2574   data.to = to;
2575   store_by_pieces_1 (&data, align);
2576   if (endp)
2577     {
2578       rtx to1;
2579 
2580       gcc_assert (!data.reverse);
2581       if (data.autinc_to)
2582 	{
2583 	  if (endp == 2)
2584 	    {
2585 	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2586 		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2587 	      else
2588 		data.to_addr = copy_to_mode_reg (to_addr_mode,
2589 						 plus_constant (to_addr_mode,
2590 								data.to_addr,
2591 								-1));
2592 	    }
2593 	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2594 					   data.offset);
2595 	}
2596       else
2597 	{
2598 	  if (endp == 2)
2599 	    --data.offset;
2600 	  to1 = adjust_address (data.to, QImode, data.offset);
2601 	}
2602       return to1;
2603     }
2604   else
2605     return data.to;
2606 }
2607 
2608 /* Generate several move instructions to clear LEN bytes of block TO.  (A MEM
2609    rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2610 
2611 static void
2612 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2613 {
2614   struct store_by_pieces_d data;
2615 
2616   if (len == 0)
2617     return;
2618 
2619   data.constfun = clear_by_pieces_1;
2620   data.constfundata = NULL;
2621   data.len = len;
2622   data.to = to;
2623   store_by_pieces_1 (&data, align);
2624 }
2625 
2626 /* Callback routine for clear_by_pieces.
2627    Return const0_rtx unconditionally.  */
2628 
2629 static rtx
2630 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2631 		   HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2632 		   machine_mode mode ATTRIBUTE_UNUSED)
2633 {
2634   return const0_rtx;
2635 }
2636 
2637 /* Subroutine of clear_by_pieces and store_by_pieces.
2638    Generate several move instructions to store LEN bytes of block TO.  (A MEM
2639    rtx with BLKmode).  ALIGN is maximum alignment we can assume.  */
2640 
2641 static void
2642 store_by_pieces_1 (struct store_by_pieces_d *data ATTRIBUTE_UNUSED,
2643 		   unsigned int align ATTRIBUTE_UNUSED)
2644 {
2645   machine_mode to_addr_mode = get_address_mode (data->to);
2646   rtx to_addr = XEXP (data->to, 0);
2647   unsigned int max_size = STORE_MAX_PIECES + 1;
2648   enum insn_code icode;
2649 
2650   data->offset = 0;
2651   data->to_addr = to_addr;
2652   data->autinc_to
2653     = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2654        || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2655 
2656   data->explicit_inc_to = 0;
2657   data->reverse
2658     = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2659   if (data->reverse)
2660     data->offset = data->len;
2661 
2662   /* If storing requires more than two move insns,
2663      copy addresses to registers (to make displacements shorter)
2664      and use post-increment if available.  */
2665   if (!data->autinc_to
2666       && move_by_pieces_ninsns (data->len, align, max_size) > 2)
2667     {
2668       /* Determine the main mode we'll be using.
2669 	 MODE might not be used depending on the definitions of the
2670 	 USE_* macros below.  */
2671       machine_mode mode ATTRIBUTE_UNUSED
2672 	= widest_int_mode_for_size (max_size);
2673 
2674       if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2675 	{
2676 	  data->to_addr = copy_to_mode_reg (to_addr_mode,
2677 					    plus_constant (to_addr_mode,
2678 							   to_addr,
2679 							   data->len));
2680 	  data->autinc_to = 1;
2681 	  data->explicit_inc_to = -1;
2682 	}
2683 
2684       if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2685 	  && ! data->autinc_to)
2686 	{
2687 	  data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2688 	  data->autinc_to = 1;
2689 	  data->explicit_inc_to = 1;
2690 	}
2691 
2692       if ( !data->autinc_to && CONSTANT_P (to_addr))
2693 	data->to_addr = copy_to_mode_reg (to_addr_mode, to_addr);
2694     }
2695 
2696   align = alignment_for_piecewise_move (STORE_MAX_PIECES, align);
2697 
2698   /* First store what we can in the largest integer mode, then go to
2699      successively smaller modes.  */
2700 
2701   while (max_size > 1 && data->len > 0)
2702     {
2703       machine_mode mode = widest_int_mode_for_size (max_size);
2704 
2705       if (mode == VOIDmode)
2706 	break;
2707 
2708       icode = optab_handler (mov_optab, mode);
2709       if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2710 	store_by_pieces_2 (GEN_FCN (icode), mode, data);
2711 
2712       max_size = GET_MODE_SIZE (mode);
2713     }
2714 
2715   /* The code above should have handled everything.  */
2716   gcc_assert (!data->len);
2717 }
2718 
2719 /* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
2720    with move instructions for mode MODE.  GENFUN is the gen_... function
2721    to make a move insn for that mode.  DATA has all the other info.  */
2722 
2723 static void
2724 store_by_pieces_2 (insn_gen_fn genfun, machine_mode mode,
2725 		   struct store_by_pieces_d *data)
2726 {
2727   unsigned int size = GET_MODE_SIZE (mode);
2728   rtx to1, cst;
2729 
2730   while (data->len >= size)
2731     {
2732       if (data->reverse)
2733 	data->offset -= size;
2734 
2735       if (data->autinc_to)
2736 	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2737 					 data->offset);
2738       else
2739 	to1 = adjust_address (data->to, mode, data->offset);
2740 
2741       if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2742 	emit_insn (gen_add2_insn (data->to_addr,
2743 				  gen_int_mode (-(HOST_WIDE_INT) size,
2744 						GET_MODE (data->to_addr))));
2745 
2746       cst = (*data->constfun) (data->constfundata, data->offset, mode);
2747       emit_insn ((*genfun) (to1, cst));
2748 
2749       if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2750 	emit_insn (gen_add2_insn (data->to_addr,
2751 				  gen_int_mode (size,
2752 						GET_MODE (data->to_addr))));
2753 
2754       if (! data->reverse)
2755 	data->offset += size;
2756 
2757       data->len -= size;
2758     }
2759 }
2760 
2761 /* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
2762    its length in bytes.  */
2763 
2764 rtx
2765 clear_storage_hints (rtx object, rtx size, enum block_op_methods method,
2766 		     unsigned int expected_align, HOST_WIDE_INT expected_size,
2767 		     unsigned HOST_WIDE_INT min_size,
2768 		     unsigned HOST_WIDE_INT max_size,
2769 		     unsigned HOST_WIDE_INT probable_max_size)
2770 {
2771   machine_mode mode = GET_MODE (object);
2772   unsigned int align;
2773 
2774   gcc_assert (method == BLOCK_OP_NORMAL || method == BLOCK_OP_TAILCALL);
2775 
2776   /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2777      just move a zero.  Otherwise, do this a piece at a time.  */
2778   if (mode != BLKmode
2779       && CONST_INT_P (size)
2780       && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (mode))
2781     {
2782       rtx zero = CONST0_RTX (mode);
2783       if (zero != NULL)
2784 	{
2785 	  emit_move_insn (object, zero);
2786 	  return NULL;
2787 	}
2788 
2789       if (COMPLEX_MODE_P (mode))
2790 	{
2791 	  zero = CONST0_RTX (GET_MODE_INNER (mode));
2792 	  if (zero != NULL)
2793 	    {
2794 	      write_complex_part (object, zero, 0);
2795 	      write_complex_part (object, zero, 1);
2796 	      return NULL;
2797 	    }
2798 	}
2799     }
2800 
2801   if (size == const0_rtx)
2802     return NULL;
2803 
2804   align = MEM_ALIGN (object);
2805 
2806   if (CONST_INT_P (size)
2807       && targetm.use_by_pieces_infrastructure_p (INTVAL (size), align,
2808 						 CLEAR_BY_PIECES,
2809 						 optimize_insn_for_speed_p ()))
2810     clear_by_pieces (object, INTVAL (size), align);
2811   else if (set_storage_via_setmem (object, size, const0_rtx, align,
2812 				   expected_align, expected_size,
2813 				   min_size, max_size, probable_max_size))
2814     ;
2815   else if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (object)))
2816     return set_storage_via_libcall (object, size, const0_rtx,
2817 				    method == BLOCK_OP_TAILCALL);
2818   else
2819     gcc_unreachable ();
2820 
2821   return NULL;
2822 }
2823 
2824 rtx
2825 clear_storage (rtx object, rtx size, enum block_op_methods method)
2826 {
2827   unsigned HOST_WIDE_INT max, min = 0;
2828   if (GET_CODE (size) == CONST_INT)
2829     min = max = UINTVAL (size);
2830   else
2831     max = GET_MODE_MASK (GET_MODE (size));
2832   return clear_storage_hints (object, size, method, 0, -1, min, max, max);
2833 }
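
/* Illustrative sketch of clearing a block: for a BLKmode MEM the SIZE
   operand carries the length, so zeroing a (hypothetical) 64-byte stack
   slot looks like

     rtx slot = assign_stack_temp (BLKmode, 64);
     clear_storage (slot, GEN_INT (64), BLOCK_OP_NORMAL);  */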
2834 
2835 
2836 /* A subroutine of clear_storage.  Expand a call to memset.
2837    Return the return value of memset, 0 otherwise.  */
2838 
2839 rtx
2840 set_storage_via_libcall (rtx object, rtx size, rtx val, bool tailcall)
2841 {
2842   tree call_expr, fn, object_tree, size_tree, val_tree;
2843   machine_mode size_mode;
2844   rtx retval;
2845 
2846   /* Emit code to copy OBJECT and SIZE into new pseudos.  We can then
2847      place those new pseudos into a VAR_DECL and use them later.  */
2848 
2849   object = copy_addr_to_reg (XEXP (object, 0));
2850 
2851   size_mode = TYPE_MODE (sizetype);
2852   size = convert_to_mode (size_mode, size, 1);
2853   size = copy_to_mode_reg (size_mode, size);
2854 
2855   /* It is incorrect to use the libcall calling conventions to call
2856      memset in this context.  This could be a user call to memset and
2857      the user may wish to examine the return value from memset.  For
2858      targets where libcalls and normal calls have different conventions
2859      for returning pointers, we could end up generating incorrect code.  */
2860 
2861   object_tree = make_tree (ptr_type_node, object);
2862   if (!CONST_INT_P (val))
2863     val = convert_to_mode (TYPE_MODE (integer_type_node), val, 1);
2864   size_tree = make_tree (sizetype, size);
2865   val_tree = make_tree (integer_type_node, val);
2866 
2867   fn = clear_storage_libcall_fn (true);
2868   call_expr = build_call_expr (fn, 3, object_tree, val_tree, size_tree);
2869   CALL_EXPR_TAILCALL (call_expr) = tailcall;
2870 
2871   retval = expand_normal (call_expr);
2872 
2873   return retval;
2874 }
2875 
2876 /* A subroutine of set_storage_via_libcall.  Create the tree node
2877    for the function we use for block clears.  */
2878 
2879 tree block_clear_fn;
2880 
2881 void
2882 init_block_clear_fn (const char *asmspec)
2883 {
2884   if (!block_clear_fn)
2885     {
2886       tree fn, args;
2887 
2888       fn = get_identifier ("memset");
2889       args = build_function_type_list (ptr_type_node, ptr_type_node,
2890 				       integer_type_node, sizetype,
2891 				       NULL_TREE);
2892 
2893       fn = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, fn, args);
2894       DECL_EXTERNAL (fn) = 1;
2895       TREE_PUBLIC (fn) = 1;
2896       DECL_ARTIFICIAL (fn) = 1;
2897       TREE_NOTHROW (fn) = 1;
2898       DECL_VISIBILITY (fn) = VISIBILITY_DEFAULT;
2899       DECL_VISIBILITY_SPECIFIED (fn) = 1;
2900 
2901       block_clear_fn = fn;
2902     }
2903 
2904   if (asmspec)
2905     set_user_assembler_name (block_clear_fn, asmspec);
2906 }
2907 
2908 static tree
2909 clear_storage_libcall_fn (int for_call)
2910 {
2911   static bool emitted_extern;
2912 
2913   if (!block_clear_fn)
2914     init_block_clear_fn (NULL);
2915 
2916   if (for_call && !emitted_extern)
2917     {
2918       emitted_extern = true;
2919       make_decl_rtl (block_clear_fn);
2920     }
2921 
2922   return block_clear_fn;
2923 }
2924 
2925 /* Expand a setmem pattern; return true if successful.  */
2926 
2927 bool
2928 set_storage_via_setmem (rtx object, rtx size, rtx val, unsigned int align,
2929 			unsigned int expected_align, HOST_WIDE_INT expected_size,
2930 			unsigned HOST_WIDE_INT min_size,
2931 			unsigned HOST_WIDE_INT max_size,
2932 			unsigned HOST_WIDE_INT probable_max_size)
2933 {
2934   /* Try the most limited insn first, because there's no point
2935      including more than one in the machine description unless
2936      the more limited one has some advantage.  */
2937 
2938   machine_mode mode;
2939 
2940   if (expected_align < align)
2941     expected_align = align;
2942   if (expected_size != -1)
2943     {
2944       if ((unsigned HOST_WIDE_INT)expected_size > max_size)
2945 	expected_size = max_size;
2946       if ((unsigned HOST_WIDE_INT)expected_size < min_size)
2947 	expected_size = min_size;
2948     }
2949 
2950   for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2951        mode = GET_MODE_WIDER_MODE (mode))
2952     {
2953       enum insn_code code = direct_optab_handler (setmem_optab, mode);
2954 
2955       if (code != CODE_FOR_nothing
2956 	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
2957 	     here because if SIZE is less than the mode mask, as it is
2958 	     returned by the macro, it will definitely be less than the
2959 	     actual mode mask.  Since SIZE is within the Pmode address
2960 	     space, we limit MODE to Pmode.  */
2961 	  && ((CONST_INT_P (size)
2962 	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
2963 		   <= (GET_MODE_MASK (mode) >> 1)))
2964 	      || max_size <= (GET_MODE_MASK (mode) >> 1)
2965 	      || GET_MODE_BITSIZE (mode) >= GET_MODE_BITSIZE (Pmode)))
2966 	{
2967 	  struct expand_operand ops[9];
2968 	  unsigned int nops;
2969 
2970 	  nops = insn_data[(int) code].n_generator_args;
2971 	  gcc_assert (nops == 4 || nops == 6 || nops == 8 || nops == 9);
2972 
2973 	  create_fixed_operand (&ops[0], object);
2974 	  /* The check above guarantees that this size conversion is valid.  */
2975 	  create_convert_operand_to (&ops[1], size, mode, true);
2976 	  create_convert_operand_from (&ops[2], val, byte_mode, true);
2977 	  create_integer_operand (&ops[3], align / BITS_PER_UNIT);
2978 	  if (nops >= 6)
2979 	    {
2980 	      create_integer_operand (&ops[4], expected_align / BITS_PER_UNIT);
2981 	      create_integer_operand (&ops[5], expected_size);
2982 	    }
2983 	  if (nops >= 8)
2984 	    {
2985 	      create_integer_operand (&ops[6], min_size);
2986 	      /* If we cannot represent the maximal size,
2987 		 make the parameter NULL.  */
2988 	      if ((HOST_WIDE_INT) max_size != -1)
2989 	        create_integer_operand (&ops[7], max_size);
2990 	      else
2991 		create_fixed_operand (&ops[7], NULL);
2992 	    }
2993 	  if (nops == 9)
2994 	    {
2995 	      /* If we cannot represent the maximal size,
2996 		 make the parameter NULL.  */
2997 	      if ((HOST_WIDE_INT) probable_max_size != -1)
2998 	        create_integer_operand (&ops[8], probable_max_size);
2999 	      else
3000 		create_fixed_operand (&ops[8], NULL);
3001 	    }
3002 	  if (maybe_expand_insn (code, nops, ops))
3003 	    return true;
3004 	}
3005     }
3006 
3007   return false;
3008 }
3009 
3010 
3011 /* Write to one of the components of the complex value CPLX.  Write VAL to
3012    the real part if IMAG_P is false, and the imaginary part if it's true.  */
3013 
3014 void
3015 write_complex_part (rtx cplx, rtx val, bool imag_p)
3016 {
3017   machine_mode cmode;
3018   machine_mode imode;
3019   unsigned ibitsize;
3020 
3021   if (GET_CODE (cplx) == CONCAT)
3022     {
3023       emit_move_insn (XEXP (cplx, imag_p), val);
3024       return;
3025     }
3026 
3027   cmode = GET_MODE (cplx);
3028   imode = GET_MODE_INNER (cmode);
3029   ibitsize = GET_MODE_BITSIZE (imode);
3030 
3031   /* For MEMs simplify_gen_subreg may generate an invalid new address
3032      because, e.g., the original address is considered mode-dependent
3033      by the target, which restricts simplify_subreg from invoking
3034      adjust_address_nv.  Instead of preparing fallback support for an
3035      invalid address, we call adjust_address_nv directly.  */
3036   if (MEM_P (cplx))
3037     {
3038       emit_move_insn (adjust_address_nv (cplx, imode,
3039 					 imag_p ? GET_MODE_SIZE (imode) : 0),
3040 		      val);
3041       return;
3042     }
3043 
3044   /* If the sub-object is at least word sized, then we know that subregging
3045      will work.  This special case is important, since store_bit_field
3046      wants to operate on integer modes, and there's rarely an OImode to
3047      correspond to TCmode.  */
3048   if (ibitsize >= BITS_PER_WORD
3049       /* For hard regs we have exact predicates.  Assume we can split
3050 	 the original object if it spans an even number of hard regs.
3051 	 This special case is important for SCmode on 64-bit platforms
3052 	 where the natural size of floating-point regs is 32-bit.  */
3053       || (REG_P (cplx)
3054 	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3055 	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3056     {
3057       rtx part = simplify_gen_subreg (imode, cplx, cmode,
3058 				      imag_p ? GET_MODE_SIZE (imode) : 0);
3059       if (part)
3060         {
3061 	  emit_move_insn (part, val);
3062 	  return;
3063 	}
3064       else
3065 	/* simplify_gen_subreg may fail for sub-word MEMs.  */
3066 	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3067     }
3068 
3069   store_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0, 0, 0, imode, val);
3070 }
3071 
3072 /* Extract one of the components of the complex value CPLX.  Extract the
3073    real part if IMAG_P is false, and the imaginary part if it's true.  */
3074 
3075 static rtx
3076 read_complex_part (rtx cplx, bool imag_p)
3077 {
3078   machine_mode cmode, imode;
3079   unsigned ibitsize;
3080 
3081   if (GET_CODE (cplx) == CONCAT)
3082     return XEXP (cplx, imag_p);
3083 
3084   cmode = GET_MODE (cplx);
3085   imode = GET_MODE_INNER (cmode);
3086   ibitsize = GET_MODE_BITSIZE (imode);
3087 
3088   /* Special case reads from complex constants that got spilled to memory.  */
3089   if (MEM_P (cplx) && GET_CODE (XEXP (cplx, 0)) == SYMBOL_REF)
3090     {
3091       tree decl = SYMBOL_REF_DECL (XEXP (cplx, 0));
3092       if (decl && TREE_CODE (decl) == COMPLEX_CST)
3093 	{
3094 	  tree part = imag_p ? TREE_IMAGPART (decl) : TREE_REALPART (decl);
3095 	  if (CONSTANT_CLASS_P (part))
3096 	    return expand_expr (part, NULL_RTX, imode, EXPAND_NORMAL);
3097 	}
3098     }
3099 
3100   /* For MEMs simplify_gen_subreg may generate an invalid new address
3101      because, e.g., the original address is considered mode-dependent
3102      by the target, which restricts simplify_subreg from invoking
3103      adjust_address_nv.  Instead of preparing fallback support for an
3104      invalid address, we call adjust_address_nv directly.  */
3105   if (MEM_P (cplx))
3106     return adjust_address_nv (cplx, imode,
3107 			      imag_p ? GET_MODE_SIZE (imode) : 0);
3108 
3109   /* If the sub-object is at least word sized, then we know that subregging
3110      will work.  This special case is important, since extract_bit_field
3111      wants to operate on integer modes, and there's rarely an OImode to
3112      correspond to TCmode.  */
3113   if (ibitsize >= BITS_PER_WORD
3114       /* For hard regs we have exact predicates.  Assume we can split
3115 	 the original object if it spans an even number of hard regs.
3116 	 This special case is important for SCmode on 64-bit platforms
3117 	 where the natural size of floating-point regs is 32-bit.  */
3118       || (REG_P (cplx)
3119 	  && REGNO (cplx) < FIRST_PSEUDO_REGISTER
3120 	  && hard_regno_nregs[REGNO (cplx)][cmode] % 2 == 0))
3121     {
3122       rtx ret = simplify_gen_subreg (imode, cplx, cmode,
3123 				     imag_p ? GET_MODE_SIZE (imode) : 0);
3124       if (ret)
3125         return ret;
3126       else
3127 	/* simplify_gen_subreg may fail for sub-word MEMs.  */
3128 	gcc_assert (MEM_P (cplx) && ibitsize < BITS_PER_WORD);
3129     }
3130 
3131   return extract_bit_field (cplx, ibitsize, imag_p ? ibitsize : 0,
3132 			    true, NULL_RTX, imode, imode);
3133 }
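
/* Illustrative sketch using the helpers above: forcing the imaginary half
   of a (hypothetical) SCmode pseudo to zero while leaving the real half
   untouched.

     rtx c = gen_reg_rtx (SCmode);
     write_complex_part (c, CONST0_RTX (SFmode), true);  */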
3134 
3135 /* A subroutine of emit_move_insn_1.  Yet another lowpart generator.
3136    NEW_MODE and OLD_MODE are the same size.  Return NULL if X cannot be
3137    represented in NEW_MODE.  If FORCE is true, this will never happen, as
3138    we'll force-create a SUBREG if needed.  */
3139 
3140 static rtx
3141 emit_move_change_mode (machine_mode new_mode,
3142 		       machine_mode old_mode, rtx x, bool force)
3143 {
3144   rtx ret;
3145 
3146   if (push_operand (x, GET_MODE (x)))
3147     {
3148       ret = gen_rtx_MEM (new_mode, XEXP (x, 0));
3149       MEM_COPY_ATTRIBUTES (ret, x);
3150     }
3151   else if (MEM_P (x))
3152     {
3153       /* We don't have to worry about changing the address since the
3154 	 size in bytes is supposed to be the same.  */
3155       if (reload_in_progress)
3156 	{
3157 	  /* Copy the MEM to change the mode and move any
3158 	     substitutions from the old MEM to the new one.  */
3159 	  ret = adjust_address_nv (x, new_mode, 0);
3160 	  copy_replacements (x, ret);
3161 	}
3162       else
3163 	ret = adjust_address (x, new_mode, 0);
3164     }
3165   else
3166     {
3167       /* Note that we do want simplify_subreg's behavior of validating
3168 	 that the new mode is ok for a hard register.  If we were to use
3169 	 simplify_gen_subreg, we would create the subreg, but would
3170 	 probably run into the target not being able to implement it.  */
3171       /* Except, of course, when FORCE is true, when this is exactly what
3172 	 we want.  Which is needed for CCmodes on some targets.  */
3173       if (force)
3174 	ret = simplify_gen_subreg (new_mode, x, old_mode, 0);
3175       else
3176 	ret = simplify_subreg (new_mode, x, old_mode, 0);
3177     }
3178 
3179   return ret;
3180 }
3181 
3182 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X using
3183    an integer mode of the same size as MODE.  Returns the instruction
3184    emitted, or NULL if such a move could not be generated.  */
3185 
3186 static rtx_insn *
3187 emit_move_via_integer (machine_mode mode, rtx x, rtx y, bool force)
3188 {
3189   machine_mode imode;
3190   enum insn_code code;
3191 
3192   /* There must exist a mode of the exact size we require.  */
3193   imode = int_mode_for_mode (mode);
3194   if (imode == BLKmode)
3195     return NULL;
3196 
3197   /* The target must support moves in this mode.  */
3198   code = optab_handler (mov_optab, imode);
3199   if (code == CODE_FOR_nothing)
3200     return NULL;
3201 
3202   x = emit_move_change_mode (imode, mode, x, force);
3203   if (x == NULL_RTX)
3204     return NULL;
3205   y = emit_move_change_mode (imode, mode, y, force);
3206   if (y == NULL_RTX)
3207     return NULL;
3208   return emit_insn (GEN_FCN (code) (x, y));
3209 }
3210 
3211 /* A subroutine of emit_move_insn_1.  X is a push_operand in MODE.
3212    Return an equivalent MEM that does not use an auto-increment.  */
3213 
3214 rtx
3215 emit_move_resolve_push (machine_mode mode, rtx x)
3216 {
3217   enum rtx_code code = GET_CODE (XEXP (x, 0));
3218   HOST_WIDE_INT adjust;
3219   rtx temp;
3220 
3221   adjust = GET_MODE_SIZE (mode);
3222 #ifdef PUSH_ROUNDING
3223   adjust = PUSH_ROUNDING (adjust);
3224 #endif
3225   if (code == PRE_DEC || code == POST_DEC)
3226     adjust = -adjust;
3227   else if (code == PRE_MODIFY || code == POST_MODIFY)
3228     {
3229       rtx expr = XEXP (XEXP (x, 0), 1);
3230       HOST_WIDE_INT val;
3231 
3232       gcc_assert (GET_CODE (expr) == PLUS || GET_CODE (expr) == MINUS);
3233       gcc_assert (CONST_INT_P (XEXP (expr, 1)));
3234       val = INTVAL (XEXP (expr, 1));
3235       if (GET_CODE (expr) == MINUS)
3236 	val = -val;
3237       gcc_assert (adjust == val || adjust == -val);
3238       adjust = val;
3239     }
3240 
3241   /* Do not use anti_adjust_stack, since we don't want to update
3242      stack_pointer_delta.  */
3243   temp = expand_simple_binop (Pmode, PLUS, stack_pointer_rtx,
3244 			      gen_int_mode (adjust, Pmode), stack_pointer_rtx,
3245 			      0, OPTAB_LIB_WIDEN);
3246   if (temp != stack_pointer_rtx)
3247     emit_move_insn (stack_pointer_rtx, temp);
3248 
3249   switch (code)
3250     {
3251     case PRE_INC:
3252     case PRE_DEC:
3253     case PRE_MODIFY:
3254       temp = stack_pointer_rtx;
3255       break;
3256     case POST_INC:
3257     case POST_DEC:
3258     case POST_MODIFY:
3259       temp = plus_constant (Pmode, stack_pointer_rtx, -adjust);
3260       break;
3261     default:
3262       gcc_unreachable ();
3263     }
3264 
3265   return replace_equiv_address (x, temp);
3266 }
3267 
3268 /* A subroutine of emit_move_complex.  Generate a move from Y into X.
3269    X is known to satisfy push_operand, and MODE is known to be complex.
3270    Returns the last instruction emitted.  */
3271 
3272 rtx_insn *
3273 emit_move_complex_push (machine_mode mode, rtx x, rtx y)
3274 {
3275   machine_mode submode = GET_MODE_INNER (mode);
3276   bool imag_first;
3277 
3278 #ifdef PUSH_ROUNDING
3279   unsigned int submodesize = GET_MODE_SIZE (submode);
3280 
3281   /* If we are pushing to the stack but the size is not one the machine
3282      can push exactly, we have to fall back to move instructions.  */
3283   if (PUSH_ROUNDING (submodesize) != submodesize)
3284     {
3285       x = emit_move_resolve_push (mode, x);
3286       return emit_move_insn (x, y);
3287     }
3288 #endif
3289 
3290   /* Note that the real part always precedes the imag part in memory
3291      regardless of machine's endianness.  */
3292   switch (GET_CODE (XEXP (x, 0)))
3293     {
3294     case PRE_DEC:
3295     case POST_DEC:
3296       imag_first = true;
3297       break;
3298     case PRE_INC:
3299     case POST_INC:
3300       imag_first = false;
3301       break;
3302     default:
3303       gcc_unreachable ();
3304     }
3305 
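  /* Push the part that must end up at the higher address first: with a
     decrementing push the first push lands at the higher address, so the
     imaginary part (which follows the real part in memory) goes first.  */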
3306   emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3307 		  read_complex_part (y, imag_first));
3308   return emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
3309 			 read_complex_part (y, !imag_first));
3310 }
3311 
3312 /* A subroutine of emit_move_complex.  Perform the move from Y to X
3313    via two moves of the parts.  Returns the last instruction emitted.  */
3314 
3315 rtx_insn *
3316 emit_move_complex_parts (rtx x, rtx y)
3317 {
3318   /* Show the output dies here.  This is necessary for SUBREGs
3319      of pseudos since we cannot track their lifetimes correctly;
3320      hard regs shouldn't appear here except as return values.  */
3321   if (!reload_completed && !reload_in_progress
3322       && REG_P (x) && !reg_overlap_mentioned_p (x, y))
3323     emit_clobber (x);
3324 
3325   write_complex_part (x, read_complex_part (y, false), false);
3326   write_complex_part (x, read_complex_part (y, true), true);
3327 
3328   return get_last_insn ();
3329 }
3330 
3331 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3332    MODE is known to be complex.  Returns the last instruction emitted.  */
3333 
3334 static rtx_insn *
3335 emit_move_complex (machine_mode mode, rtx x, rtx y)
3336 {
3337   bool try_int;
3338 
3339   /* Need to take special care for pushes, to maintain proper ordering
3340      of the data and to handle any extra padding.  */
3341   if (push_operand (x, mode))
3342     return emit_move_complex_push (mode, x, y);
3343 
3344   /* See if we can coerce the target into moving both values at once, except
3345      for floating point where we favor moving as parts if this is easy.  */
3346   if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
3347       && optab_handler (mov_optab, GET_MODE_INNER (mode)) != CODE_FOR_nothing
3348       && !(REG_P (x)
3349 	   && HARD_REGISTER_P (x)
3350 	   && hard_regno_nregs[REGNO (x)][mode] == 1)
3351       && !(REG_P (y)
3352 	   && HARD_REGISTER_P (y)
3353 	   && hard_regno_nregs[REGNO (y)][mode] == 1))
3354     try_int = false;
3355   /* Not possible if the values are inherently not adjacent.  */
3356   else if (GET_CODE (x) == CONCAT || GET_CODE (y) == CONCAT)
3357     try_int = false;
3358   /* Is possible if both are registers (or subregs of registers).  */
3359   else if (register_operand (x, mode) && register_operand (y, mode))
3360     try_int = true;
3361   /* If one of the operands is a memory, and alignment constraints
3362      are friendly enough, we may be able to do combined memory operations.
3363      We do not attempt this if Y is a constant because that combination is
3364      usually better with the by-parts thing below.  */
3365   else if ((MEM_P (x) ? !CONSTANT_P (y) : MEM_P (y))
3366 	   && (!STRICT_ALIGNMENT
3367 	       || get_mode_alignment (mode) == BIGGEST_ALIGNMENT))
3368     try_int = true;
3369   else
3370     try_int = false;
3371 
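  /* When TRY_INT is set we attempt the move as one integer-mode chunk,
     e.g. an SCmode value (two SFmode halves) moved as a single DImode move
     (illustrative); if that fails we still fall back to the by-parts copy
     below.  */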
3372   if (try_int)
3373     {
3374       rtx_insn *ret;
3375 
3376       /* For memory to memory moves, optimal behavior can be had with the
3377 	 existing block move logic.  */
3378       if (MEM_P (x) && MEM_P (y))
3379 	{
3380 	  emit_block_move (x, y, GEN_INT (GET_MODE_SIZE (mode)),
3381 			   BLOCK_OP_NO_LIBCALL);
3382 	  return get_last_insn ();
3383 	}
3384 
3385       ret = emit_move_via_integer (mode, x, y, true);
3386       if (ret)
3387 	return ret;
3388     }
3389 
3390   return emit_move_complex_parts (x, y);
3391 }
3392 
3393 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3394    MODE is known to be MODE_CC.  Returns the last instruction emitted.  */
3395 
3396 static rtx_insn *
3397 emit_move_ccmode (machine_mode mode, rtx x, rtx y)
3398 {
3399   rtx_insn *ret;
3400 
3401   /* Assume all MODE_CC modes are equivalent; if we have movcc, use it.  */
3402   if (mode != CCmode)
3403     {
3404       enum insn_code code = optab_handler (mov_optab, CCmode);
3405       if (code != CODE_FOR_nothing)
3406 	{
3407 	  x = emit_move_change_mode (CCmode, mode, x, true);
3408 	  y = emit_move_change_mode (CCmode, mode, y, true);
3409 	  return emit_insn (GEN_FCN (code) (x, y));
3410 	}
3411     }
3412 
3413   /* Otherwise, find the MODE_INT mode of the same width.  */
3414   ret = emit_move_via_integer (mode, x, y, false);
3415   gcc_assert (ret != NULL);
3416   return ret;
3417 }
3418 
3419 /* Return true if word I of OP lies entirely in the
3420    undefined bits of a paradoxical subreg.  */
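/* For example, only one word of (subreg:DI (reg:SI x) 0) on a 32-bit target
   is backed by the SImode register; a move of the other, undefined word can
   simply be skipped (illustrative).  */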
3421 
3422 static bool
3423 undefined_operand_subword_p (const_rtx op, int i)
3424 {
3425   machine_mode innermode, innermostmode;
3426   int offset;
3427   if (GET_CODE (op) != SUBREG)
3428     return false;
3429   innermode = GET_MODE (op);
3430   innermostmode = GET_MODE (SUBREG_REG (op));
3431   offset = i * UNITS_PER_WORD + SUBREG_BYTE (op);
3432   /* The SUBREG_BYTE represents offset, as if the value were stored in
3433      memory, except for a paradoxical subreg where we define
3434      SUBREG_BYTE to be 0; undo this exception as in
3435      simplify_subreg.  */
3436   if (SUBREG_BYTE (op) == 0
3437       && GET_MODE_SIZE (innermostmode) < GET_MODE_SIZE (innermode))
3438     {
3439       int difference = (GET_MODE_SIZE (innermostmode) - GET_MODE_SIZE (innermode));
3440       if (WORDS_BIG_ENDIAN)
3441 	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
3442       if (BYTES_BIG_ENDIAN)
3443 	offset += difference % UNITS_PER_WORD;
3444     }
3445   if (offset >= GET_MODE_SIZE (innermostmode)
3446       || offset <= -GET_MODE_SIZE (word_mode))
3447     return true;
3448   return false;
3449 }
3450 
3451 /* A subroutine of emit_move_insn_1.  Generate a move from Y into X.
3452    MODE is any multi-word or full-word mode that lacks a move_insn
3453    pattern.  Note that you will get better code if you define such
3454    patterns, even if they must turn into multiple assembler instructions.  */
3455 
3456 static rtx_insn *
3457 emit_move_multi_word (machine_mode mode, rtx x, rtx y)
3458 {
3459   rtx_insn *last_insn = 0;
3460   rtx_insn *seq;
3461   rtx inner;
3462   bool need_clobber;
3463   int i;
3464 
3465   gcc_assert (GET_MODE_SIZE (mode) >= UNITS_PER_WORD);
3466 
3467   /* If X is a push on the stack, do the push now and replace
3468      X with a reference to the stack pointer.  */
3469   if (push_operand (x, mode))
3470     x = emit_move_resolve_push (mode, x);
3471 
3472   /* If we are in reload, see if either operand is a MEM whose address
3473      is scheduled for replacement.  */
3474   if (reload_in_progress && MEM_P (x)
3475       && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3476     x = replace_equiv_address_nv (x, inner);
3477   if (reload_in_progress && MEM_P (y)
3478       && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3479     y = replace_equiv_address_nv (y, inner);
3480 
3481   start_sequence ();
3482 
3483   need_clobber = false;
3484   for (i = 0;
3485        i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3486        i++)
3487     {
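      /* Grab word I of each operand; the third argument asks
	 operand_subword to validate any memory address it builds.  */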
3488       rtx xpart = operand_subword (x, i, 1, mode);
3489       rtx ypart;
3490 
3491       /* Do not generate code for a move if it would come entirely
3492 	 from the undefined bits of a paradoxical subreg.  */
3493       if (undefined_operand_subword_p (y, i))
3494 	continue;
3495 
3496       ypart = operand_subword (y, i, 1, mode);
3497 
3498       /* If we can't get a part of Y, put Y into memory if it is a
3499 	 constant.  Otherwise, force it into a register.  Then we must
3500 	 be able to get a part of Y.  */
3501       if (ypart == 0 && CONSTANT_P (y))
3502 	{
3503 	  y = use_anchored_address (force_const_mem (mode, y));
3504 	  ypart = operand_subword (y, i, 1, mode);
3505 	}
3506       else if (ypart == 0)
3507 	ypart = operand_subword_force (y, i, mode);
3508 
3509       gcc_assert (xpart && ypart);
3510 
3511       need_clobber |= (GET_CODE (xpart) == SUBREG);
3512 
3513       last_insn = emit_move_insn (xpart, ypart);
3514     }
3515 
3516   seq = get_insns ();
3517   end_sequence ();
3518 
3519   /* Show the output dies here.  This is necessary for SUBREGs
3520      of pseudos since we cannot track their lifetimes correctly;
3521      hard regs shouldn't appear here except as return values.
3522      We never want to emit such a clobber after reload.  */
3523   if (x != y
3524       && ! (reload_in_progress || reload_completed)
3525       && need_clobber != 0)
3526     emit_clobber (x);
3527 
3528   emit_insn (seq);
3529 
3530   return last_insn;
3531 }
3532 
3533 /* Low level part of emit_move_insn.
3534    Called just like emit_move_insn, but assumes X and Y
3535    are basically valid.  */
3536 
3537 rtx_insn *
3538 emit_move_insn_1 (rtx x, rtx y)
3539 {
3540   machine_mode mode = GET_MODE (x);
3541   enum insn_code code;
3542 
3543   gcc_assert ((unsigned int) mode < (unsigned int) MAX_MACHINE_MODE);
3544 
3545   code = optab_handler (mov_optab, mode);
3546   if (code != CODE_FOR_nothing)
3547     return emit_insn (GEN_FCN (code) (x, y));
3548 
3549   /* Expand complex moves by moving real part and imag part.  */
3550   if (COMPLEX_MODE_P (mode))
3551     return emit_move_complex (mode, x, y);
3552 
3553   if (GET_MODE_CLASS (mode) == MODE_DECIMAL_FLOAT
3554       || ALL_FIXED_POINT_MODE_P (mode))
3555     {
3556       rtx_insn *result = emit_move_via_integer (mode, x, y, true);
3557 
3558       /* If we can't find an integer mode, use multi words.  */
3559       if (result)
3560 	return result;
3561       else
3562 	return emit_move_multi_word (mode, x, y);
3563     }
3564 
3565   if (GET_MODE_CLASS (mode) == MODE_CC)
3566     return emit_move_ccmode (mode, x, y);
3567 
3568   /* Try using a move pattern for the corresponding integer mode.  This is
3569      only safe when simplify_subreg can convert MODE constants into integer
3570      constants.  At present, it can only do this reliably if the value
3571      fits within a HOST_WIDE_INT.  */
3572   if (!CONSTANT_P (y) || GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3573     {
3574       rtx_insn *ret = emit_move_via_integer (mode, x, y, lra_in_progress);
3575 
3576       if (ret)
3577 	{
3578 	  if (! lra_in_progress || recog (PATTERN (ret), ret, 0) >= 0)
3579 	    return ret;
3580 	}
3581     }
3582 
3583   return emit_move_multi_word (mode, x, y);
3584 }
3585 
3586 /* Generate code to copy Y into X.
3587    Both Y and X must have the same mode, except that
3588    Y can be a constant with VOIDmode.
3589    This mode cannot be BLKmode; use emit_block_move for that.
3590 
3591    Return the last instruction emitted.  */
3592 
3593 rtx_insn *
3594 emit_move_insn (rtx x, rtx y)
3595 {
3596   machine_mode mode = GET_MODE (x);
3597   rtx y_cst = NULL_RTX;
3598   rtx_insn *last_insn;
3599   rtx set;
3600 
3601   gcc_assert (mode != BLKmode
3602 	      && (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode));
3603 
3604   if (CONSTANT_P (y))
3605     {
3606       if (optimize
3607 	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3608 	  && (last_insn = compress_float_constant (x, y)))
3609 	return last_insn;
3610 
3611       y_cst = y;
3612 
3613       if (!targetm.legitimate_constant_p (mode, y))
3614 	{
3615 	  y = force_const_mem (mode, y);
3616 
3617 	  /* If the target's cannot_force_const_mem prevented the spill,
3618 	     assume that the target's move expanders will also take care
3619 	     of the non-legitimate constant.  */
3620 	  if (!y)
3621 	    y = y_cst;
3622 	  else
3623 	    y = use_anchored_address (y);
3624 	}
3625     }
3626 
3627   /* If X or Y are memory references, verify that their addresses are valid
3628      for the machine.  */
3629   if (MEM_P (x)
3630       && (! memory_address_addr_space_p (GET_MODE (x), XEXP (x, 0),
3631 					 MEM_ADDR_SPACE (x))
3632 	  && ! push_operand (x, GET_MODE (x))))
3633     x = validize_mem (x);
3634 
3635   if (MEM_P (y)
3636       && ! memory_address_addr_space_p (GET_MODE (y), XEXP (y, 0),
3637 					MEM_ADDR_SPACE (y)))
3638     y = validize_mem (y);
3639 
3640   gcc_assert (mode != BLKmode);
3641 
3642   last_insn = emit_move_insn_1 (x, y);
3643 
3644   if (y_cst && REG_P (x)
3645       && (set = single_set (last_insn)) != NULL_RTX
3646       && SET_DEST (set) == x
3647       && ! rtx_equal_p (y_cst, SET_SRC (set)))
3648     set_unique_reg_note (last_insn, REG_EQUAL, copy_rtx (y_cst));
3649 
3650   return last_insn;
3651 }
3652 
3653 /* Generate the body of an instruction to copy Y into X.
3654    It may be a list of insns, if one insn isn't enough.  */
3655 
3656 rtx
3657 gen_move_insn (rtx x, rtx y)
3658 {
3659   rtx_insn *seq;
3660 
3661   start_sequence ();
3662   emit_move_insn_1 (x, y);
3663   seq = get_insns ();
3664   end_sequence ();
3665   return seq;
3666 }
3667 
3668 /* If Y is representable exactly in a narrower mode, and the target can
3669    perform the extension directly from constant or memory, then emit the
3670    move as an extension.  */
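/* E.g. a DFmode constant that is exactly representable in SFmode may be
   cheaper to materialize as a FLOAT_EXTEND of an SFmode constant or
   constant-pool load, when the target can do the extension directly
   (illustrative).  */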
3671 
3672 static rtx_insn *
3673 compress_float_constant (rtx x, rtx y)
3674 {
3675   machine_mode dstmode = GET_MODE (x);
3676   machine_mode orig_srcmode = GET_MODE (y);
3677   machine_mode srcmode;
3678   REAL_VALUE_TYPE r;
3679   int oldcost, newcost;
3680   bool speed = optimize_insn_for_speed_p ();
3681 
3682   REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3683 
3684   if (targetm.legitimate_constant_p (dstmode, y))
3685     oldcost = set_src_cost (y, speed);
3686   else
3687     oldcost = set_src_cost (force_const_mem (dstmode, y), speed);
3688 
3689   for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3690        srcmode != orig_srcmode;
3691        srcmode = GET_MODE_WIDER_MODE (srcmode))
3692     {
3693       enum insn_code ic;
3694       rtx trunc_y;
3695       rtx_insn *last_insn;
3696 
3697       /* Skip if the target can't extend this way.  */
3698       ic = can_extend_p (dstmode, srcmode, 0);
3699       if (ic == CODE_FOR_nothing)
3700 	continue;
3701 
3702       /* Skip if the narrowed value isn't exact.  */
3703       if (! exact_real_truncate (srcmode, &r))
3704 	continue;
3705 
3706       trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3707 
3708       if (targetm.legitimate_constant_p (srcmode, trunc_y))
3709 	{
3710 	  /* Skip if the target needs extra instructions to perform
3711 	     the extension.  */
3712 	  if (!insn_operand_matches (ic, 1, trunc_y))
3713 	    continue;
3714 	  /* This is valid, but may not be cheaper than the original. */
3715 	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3716 				  speed);
3717 	  if (oldcost < newcost)
3718 	    continue;
3719 	}
3720       else if (float_extend_from_mem[dstmode][srcmode])
3721 	{
3722 	  trunc_y = force_const_mem (srcmode, trunc_y);
3723 	  /* This is valid, but may not be cheaper than the original. */
3724 	  newcost = set_src_cost (gen_rtx_FLOAT_EXTEND (dstmode, trunc_y),
3725 				  speed);
3726 	  if (oldcost < newcost)
3727 	    continue;
3728 	  trunc_y = validize_mem (trunc_y);
3729 	}
3730       else
3731 	continue;
3732 
3733       /* For CSE's benefit, force the compressed constant pool entry
3734 	 into a new pseudo.  This constant may be used in different modes,
3735 	 and if not, combine will put things back together for us.  */
3736       trunc_y = force_reg (srcmode, trunc_y);
3737 
3738       /* If x is a hard register, perform the extension into a pseudo,
3739 	 so that e.g. stack realignment code is aware of it.  */
3740       rtx target = x;
3741       if (REG_P (x) && HARD_REGISTER_P (x))
3742 	target = gen_reg_rtx (dstmode);
3743 
3744       emit_unop_insn (ic, target, trunc_y, UNKNOWN);
3745       last_insn = get_last_insn ();
3746 
3747       if (REG_P (target))
3748 	set_unique_reg_note (last_insn, REG_EQUAL, y);
3749 
3750       if (target != x)
3751 	return emit_move_insn (x, target);
3752       return last_insn;
3753     }
3754 
3755   return NULL;
3756 }
3757 
3758 /* Pushing data onto the stack.  */
3759 
3760 /* Push a block of length SIZE (perhaps variable)
3761    and return an rtx to address the beginning of the block.
3762    The value may be virtual_outgoing_args_rtx.
3763 
3764    EXTRA is the number of bytes of padding to push in addition to SIZE.
3765    BELOW nonzero means this padding comes at low addresses;
3766    otherwise, the padding comes at high addresses.  */
3767 
3768 rtx
3769 push_block (rtx size, int extra, int below)
3770 {
3771   rtx temp;
3772 
3773   size = convert_modes (Pmode, ptr_mode, size, 1);
3774   if (CONSTANT_P (size))
3775     anti_adjust_stack (plus_constant (Pmode, size, extra));
3776   else if (REG_P (size) && extra == 0)
3777     anti_adjust_stack (size);
3778   else
3779     {
3780       temp = copy_to_mode_reg (Pmode, size);
3781       if (extra != 0)
3782 	temp = expand_binop (Pmode, add_optab, temp,
3783 			     gen_int_mode (extra, Pmode),
3784 			     temp, 0, OPTAB_LIB_WIDEN);
3785       anti_adjust_stack (temp);
3786     }
3787 
3788 #ifndef STACK_GROWS_DOWNWARD
3789   if (0)
3790 #else
3791   if (1)
3792 #endif
3793     {
3794       temp = virtual_outgoing_args_rtx;
3795       if (extra != 0 && below)
3796 	temp = plus_constant (Pmode, temp, extra);
3797     }
3798   else
3799     {
3800       if (CONST_INT_P (size))
3801 	temp = plus_constant (Pmode, virtual_outgoing_args_rtx,
3802 			      -INTVAL (size) - (below ? 0 : extra));
3803       else if (extra != 0 && !below)
3804 	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3805 			     negate_rtx (Pmode, plus_constant (Pmode, size,
3806 							       extra)));
3807       else
3808 	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3809 			     negate_rtx (Pmode, size));
3810     }
3811 
3812   return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3813 }
3814 
3815 /* A utility routine that returns the base of an auto-inc memory, or NULL.  */
3816 
3817 static rtx
3818 mem_autoinc_base (rtx mem)
3819 {
3820   if (MEM_P (mem))
3821     {
3822       rtx addr = XEXP (mem, 0);
3823       if (GET_RTX_CLASS (GET_CODE (addr)) == RTX_AUTOINC)
3824 	return XEXP (addr, 0);
3825     }
3826   return NULL;
3827 }
3828 
3829 /* A utility routine used here, in reload, and in try_split.  The insns
3830    after PREV up to and including LAST are known to adjust the stack,
3831    with a final value of END_ARGS_SIZE.  Iterate backward from LAST
3832    placing notes as appropriate.  PREV may be NULL, indicating the
3833    entire insn sequence prior to LAST should be scanned.
3834 
3835    The set of allowed stack pointer modifications is small:
3836      (1) One or more auto-inc style memory references (aka pushes),
3837      (2) One or more addition/subtraction with the SP as destination,
3838      (3) A single move insn with the SP as destination,
3839      (4) A call_pop insn,
3840      (5) Noreturn call insns if !ACCUMULATE_OUTGOING_ARGS.
3841 
3842    Insns in the sequence that do not modify the SP are ignored,
3843    except for noreturn calls.
3844 
3845    The return value is the amount of adjustment that can be trivially
3846    verified, via immediate operand or auto-inc.  If the adjustment
3847    cannot be trivially extracted, the return value is INT_MIN.  */
3848 
3849 HOST_WIDE_INT
3850 find_args_size_adjust (rtx_insn *insn)
3851 {
3852   rtx dest, set, pat;
3853   int i;
3854 
3855   pat = PATTERN (insn);
3856   set = NULL;
3857 
3858   /* Look for a call_pop pattern.  */
3859   if (CALL_P (insn))
3860     {
3861       /* We have to allow non-call_pop patterns for the case
3862 	 of emit_single_push_insn of a TLS address.  */
3863       if (GET_CODE (pat) != PARALLEL)
3864 	return 0;
3865 
3866       /* All call_pop have a stack pointer adjust in the parallel.
3867 	 The call itself is always first, and the stack adjust is
3868 	 usually last, so search from the end.  */
3869       for (i = XVECLEN (pat, 0) - 1; i > 0; --i)
3870 	{
3871 	  set = XVECEXP (pat, 0, i);
3872 	  if (GET_CODE (set) != SET)
3873 	    continue;
3874 	  dest = SET_DEST (set);
3875 	  if (dest == stack_pointer_rtx)
3876 	    break;
3877 	}
3878       /* We'd better have found the stack pointer adjust.  */
3879       if (i == 0)
3880 	return 0;
3881       /* Fall through to process the extracted SET and DEST
3882 	 as if it were a standalone insn.  */
3883     }
3884   else if (GET_CODE (pat) == SET)
3885     set = pat;
3886   else if ((set = single_set (insn)) != NULL)
3887     ;
3888   else if (GET_CODE (pat) == PARALLEL)
3889     {
3890       /* ??? Some older ports use a parallel with a stack adjust
3891 	 and a store for a PUSH_ROUNDING pattern, rather than a
3892 	 PRE/POST_MODIFY rtx.  Don't force them to update yet...  */
3893       /* ??? See h8300 and m68k, pushqi1.  */
3894       for (i = XVECLEN (pat, 0) - 1; i >= 0; --i)
3895 	{
3896 	  set = XVECEXP (pat, 0, i);
3897 	  if (GET_CODE (set) != SET)
3898 	    continue;
3899 	  dest = SET_DEST (set);
3900 	  if (dest == stack_pointer_rtx)
3901 	    break;
3902 
3903 	  /* We do not expect an auto-inc of the sp in the parallel.  */
3904 	  gcc_checking_assert (mem_autoinc_base (dest) != stack_pointer_rtx);
3905 	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3906 			       != stack_pointer_rtx);
3907 	}
3908       if (i < 0)
3909 	return 0;
3910     }
3911   else
3912     return 0;
3913 
3914   dest = SET_DEST (set);
3915 
3916   /* Look for direct modifications of the stack pointer.  */
3917   if (REG_P (dest) && REGNO (dest) == STACK_POINTER_REGNUM)
3918     {
3919       /* Look for a trivial adjustment, otherwise assume nothing.  */
3920       /* Note that the SPU restore_stack_block pattern refers to
3921 	 the stack pointer in V4SImode.  Consider that non-trivial.  */
3922       if (SCALAR_INT_MODE_P (GET_MODE (dest))
3923 	  && GET_CODE (SET_SRC (set)) == PLUS
3924 	  && XEXP (SET_SRC (set), 0) == stack_pointer_rtx
3925 	  && CONST_INT_P (XEXP (SET_SRC (set), 1)))
3926 	return INTVAL (XEXP (SET_SRC (set), 1));
3927       /* ??? Reload can generate no-op moves, which will be cleaned
3928 	 up later.  Recognize it and continue searching.  */
3929       else if (rtx_equal_p (dest, SET_SRC (set)))
3930 	return 0;
3931       else
3932 	return HOST_WIDE_INT_MIN;
3933     }
3934   else
3935     {
3936       rtx mem, addr;
3937 
3938       /* Otherwise only think about autoinc patterns.  */
3939       if (mem_autoinc_base (dest) == stack_pointer_rtx)
3940 	{
3941 	  mem = dest;
3942 	  gcc_checking_assert (mem_autoinc_base (SET_SRC (set))
3943 			       != stack_pointer_rtx);
3944 	}
3945       else if (mem_autoinc_base (SET_SRC (set)) == stack_pointer_rtx)
3946 	mem = SET_SRC (set);
3947       else
3948 	return 0;
3949 
3950       addr = XEXP (mem, 0);
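      /* E.g. a push such as (set (mem:SI (pre_dec:P (reg sp))) ...) yields
	 an adjustment of -4 on a 32-bit target (illustrative).  */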
3951       switch (GET_CODE (addr))
3952 	{
3953 	case PRE_INC:
3954 	case POST_INC:
3955 	  return GET_MODE_SIZE (GET_MODE (mem));
3956 	case PRE_DEC:
3957 	case POST_DEC:
3958 	  return -GET_MODE_SIZE (GET_MODE (mem));
3959 	case PRE_MODIFY:
3960 	case POST_MODIFY:
3961 	  addr = XEXP (addr, 1);
3962 	  gcc_assert (GET_CODE (addr) == PLUS);
3963 	  gcc_assert (XEXP (addr, 0) == stack_pointer_rtx);
3964 	  gcc_assert (CONST_INT_P (XEXP (addr, 1)));
3965 	  return INTVAL (XEXP (addr, 1));
3966 	default:
3967 	  gcc_unreachable ();
3968 	}
3969     }
3970 }
3971 
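/* See the comment before find_args_size_adjust above: walk backward from
   LAST to PREV attaching REG_ARGS_SIZE notes, and return the args size in
   effect at PREV, or INT_MIN if some adjustment could not be determined.  */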
3972 int
3973 fixup_args_size_notes (rtx_insn *prev, rtx_insn *last, int end_args_size)
3974 {
3975   int args_size = end_args_size;
3976   bool saw_unknown = false;
3977   rtx_insn *insn;
3978 
3979   for (insn = last; insn != prev; insn = PREV_INSN (insn))
3980     {
3981       HOST_WIDE_INT this_delta;
3982 
3983       if (!NONDEBUG_INSN_P (insn))
3984 	continue;
3985 
3986       this_delta = find_args_size_adjust (insn);
3987       if (this_delta == 0)
3988 	{
3989 	  if (!CALL_P (insn)
3990 	      || ACCUMULATE_OUTGOING_ARGS
3991 	      || find_reg_note (insn, REG_NORETURN, NULL_RTX) == NULL_RTX)
3992 	    continue;
3993 	}
3994 
3995       gcc_assert (!saw_unknown);
3996       if (this_delta == HOST_WIDE_INT_MIN)
3997 	saw_unknown = true;
3998 
3999       add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (args_size));
4000 #ifdef STACK_GROWS_DOWNWARD
4001       this_delta = -(unsigned HOST_WIDE_INT) this_delta;
4002 #endif
4003       args_size -= this_delta;
4004     }
4005 
4006   return saw_unknown ? INT_MIN : args_size;
4007 }
4008 
4009 #ifdef PUSH_ROUNDING
4010 /* Emit single push insn.  */
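/* If the target provides a pattern for push_optab it is used directly;
   otherwise the common case produces a single auto-modify store such as
   (set (mem:M (pre_dec:P (reg sp))) X), and the remaining paths below deal
   with argument padding (illustrative summary).  */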
4011 
4012 static void
4013 emit_single_push_insn_1 (machine_mode mode, rtx x, tree type)
4014 {
4015   rtx dest_addr;
4016   unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
4017   rtx dest;
4018   enum insn_code icode;
4019 
4020   stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
4021   /* If there is a push pattern, use it.  Otherwise fall back to the old way
4022      of handing a MEM representing the push operation to the move expander.  */
4023   icode = optab_handler (push_optab, mode);
4024   if (icode != CODE_FOR_nothing)
4025     {
4026       struct expand_operand ops[1];
4027 
4028       create_input_operand (&ops[0], x, mode);
4029       if (maybe_expand_insn (icode, 1, ops))
4030 	return;
4031     }
4032   if (GET_MODE_SIZE (mode) == rounded_size)
4033     dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
4034   /* If we are to pad downward, adjust the stack pointer first and
4035      then store X into the stack location using an offset.  This is
4036      because emit_move_insn does not know how to pad; it does not have
4037      access to the type.  */
4038   else if (FUNCTION_ARG_PADDING (mode, type) == downward)
4039     {
4040       unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
4041       HOST_WIDE_INT offset;
4042 
4043       emit_move_insn (stack_pointer_rtx,
4044 		      expand_binop (Pmode,
4045 #ifdef STACK_GROWS_DOWNWARD
4046 				    sub_optab,
4047 #else
4048 				    add_optab,
4049 #endif
4050 				    stack_pointer_rtx,
4051 				    gen_int_mode (rounded_size, Pmode),
4052 				    NULL_RTX, 0, OPTAB_LIB_WIDEN));
4053 
4054       offset = (HOST_WIDE_INT) padding_size;
4055 #ifdef STACK_GROWS_DOWNWARD
4056       if (STACK_PUSH_CODE == POST_DEC)
4057 	/* We have already decremented the stack pointer, so get the
4058 	   previous value.  */
4059 	offset += (HOST_WIDE_INT) rounded_size;
4060 #else
4061       if (STACK_PUSH_CODE == POST_INC)
4062 	/* We have already incremented the stack pointer, so get the
4063 	   previous value.  */
4064 	offset -= (HOST_WIDE_INT) rounded_size;
4065 #endif
4066       dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4067 				gen_int_mode (offset, Pmode));
4068     }
4069   else
4070     {
4071 #ifdef STACK_GROWS_DOWNWARD
4072       /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC.  */
4073       dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4074 				gen_int_mode (-(HOST_WIDE_INT) rounded_size,
4075 					      Pmode));
4076 #else
4077       /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
4078       dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
4079 				gen_int_mode (rounded_size, Pmode));
4080 #endif
4081       dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
4082     }
4083 
4084   dest = gen_rtx_MEM (mode, dest_addr);
4085 
4086   if (type != 0)
4087     {
4088       set_mem_attributes (dest, type, 1);
4089 
4090       if (cfun->tail_call_marked)
4091 	/* Function incoming arguments may overlap with sibling call
4092 	   outgoing arguments and we cannot allow reordering of reads
4093 	   from function arguments with stores to outgoing arguments
4094 	   of sibling calls.  */
4095 	set_mem_alias_set (dest, 0);
4096     }
4097   emit_move_insn (dest, x);
4098 }
4099 
4100 /* Emit and annotate a single push insn.  */
4101 
4102 static void
4103 emit_single_push_insn (machine_mode mode, rtx x, tree type)
4104 {
4105   int delta, old_delta = stack_pointer_delta;
4106   rtx_insn *prev = get_last_insn ();
4107   rtx_insn *last;
4108 
4109   emit_single_push_insn_1 (mode, x, type);
4110 
4111   last = get_last_insn ();
4112 
4113   /* Notice the common case where we emitted exactly one insn.  */
4114   if (PREV_INSN (last) == prev)
4115     {
4116       add_reg_note (last, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
4117       return;
4118     }
4119 
4120   delta = fixup_args_size_notes (prev, last, stack_pointer_delta);
4121   gcc_assert (delta == INT_MIN || delta == old_delta);
4122 }
4123 #endif
4124 
4125 /* Generate code to push X onto the stack, assuming it has mode MODE and
4126    type TYPE.
4127    MODE is redundant except when X is a CONST_INT (since they don't
4128    carry mode info).
4129    SIZE is an rtx for the size of data to be copied (in bytes),
4130    needed only if X is BLKmode.
4131 
4132    ALIGN (in bits) is maximum alignment we can assume.
4133 
4134    If PARTIAL and REG are both nonzero, then copy that many of the first
4135    bytes of X into registers starting with REG, and push the rest of X.
4136    The amount of space pushed is decreased by PARTIAL bytes.
4137    REG must be a hard register in this case.
4138    If REG is zero but PARTIAL is not, take all the other actions for an
4139    argument partially in registers, but do not actually load any
4140    registers.
4141 
4142    EXTRA is the amount in bytes of extra space to leave next to this arg.
4143    This is ignored if an argument block has already been allocated.
4144 
4145    On a machine that lacks real push insns, ARGS_ADDR is the address of
4146    the bottom of the argument block for this call.  We use indexing off there
4147    to store the arg.  On machines with push insns, ARGS_ADDR is 0 when a
4148    argument block has not been preallocated.
4149 
4150    ARGS_SO_FAR is the size of args previously pushed for this call.
4151 
4152    REG_PARM_STACK_SPACE is nonzero if functions require stack space
4153    for arguments passed in registers.  If nonzero, it will be the number
4154    of bytes required.  */
4155 
4156 void
4157 emit_push_insn (rtx x, machine_mode mode, tree type, rtx size,
4158 		unsigned int align, int partial, rtx reg, int extra,
4159 		rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
4160 		rtx alignment_pad)
4161 {
4162   rtx xinner;
4163   enum direction stack_direction
4164 #ifdef STACK_GROWS_DOWNWARD
4165     = downward;
4166 #else
4167     = upward;
4168 #endif
4169 
4170   /* Decide where to pad the argument: `downward' for below,
4171      `upward' for above, or `none' for no padding at all.
4172      Default is below for small data on big-endian machines; else above.  */
4173   enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
4174 
4175   /* Invert direction if stack is post-decrement.
4176      FIXME: why?  */
4177   if (STACK_PUSH_CODE == POST_DEC)
4178     if (where_pad != none)
4179       where_pad = (where_pad == downward ? upward : downward);
4180 
4181   xinner = x;
4182 
4183   if (mode == BLKmode
4184       || (STRICT_ALIGNMENT && align < GET_MODE_ALIGNMENT (mode)
4185 	  && type != NULL_TREE))
4186     {
4187       /* Copy a block into the stack, entirely or partially.  */
4188 
4189       rtx temp;
4190       int used;
4191       int offset;
4192       int skip;
4193 
4194       offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4195       used = partial - offset;
4196 
4197       if (mode != BLKmode)
4198 	{
4199 	  /* A value is to be stored in an insufficiently aligned
4200 	     stack slot; copy via a suitably aligned slot if
4201 	     necessary.  */
4202 	  size = GEN_INT (GET_MODE_SIZE (mode));
4203 	  if (!MEM_P (xinner))
4204 	    {
4205 	      temp = assign_temp (type, 1, 1);
4206 	      emit_move_insn (temp, xinner);
4207 	      xinner = temp;
4208 	    }
4209 	}
4210 
4211       gcc_assert (size);
4212 
4213       /* USED is now the # of bytes we need not copy to the stack
4214 	 because registers will take care of them.  */
4215 
4216       if (partial != 0)
4217 	xinner = adjust_address (xinner, BLKmode, used);
4218 
4219       /* If the partial register-part of the arg counts in its stack size,
4220 	 skip the part of stack space corresponding to the registers.
4221 	 Otherwise, start copying to the beginning of the stack space,
4222 	 by setting SKIP to 0.  */
4223       skip = (reg_parm_stack_space == 0) ? 0 : used;
4224 
4225 #ifdef PUSH_ROUNDING
4226       /* Do it with several push insns if that doesn't take lots of insns
4227 	 and if there is no difficulty with push insns that skip bytes
4228 	 on the stack for alignment purposes.  */
4229       if (args_addr == 0
4230 	  && PUSH_ARGS
4231 	  && CONST_INT_P (size)
4232 	  && skip == 0
4233 	  && MEM_ALIGN (xinner) >= align
4234 	  && can_move_by_pieces ((unsigned) INTVAL (size) - used, align)
4235 	  /* Here we avoid the case of a structure whose weak alignment
4236 	     would force many pushes of small amounts of data, since the
4237 	     rounding done by such small pushes causes trouble.  */
4238 	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
4239 	      || align >= BIGGEST_ALIGNMENT
4240 	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
4241 		  == (align / BITS_PER_UNIT)))
4242 	  && (HOST_WIDE_INT) PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
4243 	{
4244 	  /* Push padding now if padding above and stack grows down,
4245 	     or if padding below and stack grows up.
4246 	     But if space already allocated, this has already been done.  */
4247 	  if (extra && args_addr == 0
4248 	      && where_pad != none && where_pad != stack_direction)
4249 	    anti_adjust_stack (GEN_INT (extra));
4250 
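	  /* A NULL destination tells move_by_pieces to push the pieces
	     instead of storing them (see its header comment).  */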
4251 	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
4252 	}
4253       else
4254 #endif /* PUSH_ROUNDING  */
4255 	{
4256 	  rtx target;
4257 
4258 	  /* Otherwise make space on the stack and copy the data
4259 	     to the address of that space.  */
4260 
4261 	  /* Deduct words put into registers from the size we must copy.  */
4262 	  if (partial != 0)
4263 	    {
4264 	      if (CONST_INT_P (size))
4265 		size = GEN_INT (INTVAL (size) - used);
4266 	      else
4267 		size = expand_binop (GET_MODE (size), sub_optab, size,
4268 				     gen_int_mode (used, GET_MODE (size)),
4269 				     NULL_RTX, 0, OPTAB_LIB_WIDEN);
4270 	    }
4271 
4272 	  /* Get the address of the stack space.
4273 	     In this case, we do not deal with EXTRA separately.
4274 	     A single stack adjust will do.  */
4275 	  if (! args_addr)
4276 	    {
4277 	      temp = push_block (size, extra, where_pad == downward);
4278 	      extra = 0;
4279 	    }
4280 	  else if (CONST_INT_P (args_so_far))
4281 	    temp = memory_address (BLKmode,
4282 				   plus_constant (Pmode, args_addr,
4283 						  skip + INTVAL (args_so_far)));
4284 	  else
4285 	    temp = memory_address (BLKmode,
4286 				   plus_constant (Pmode,
4287 						  gen_rtx_PLUS (Pmode,
4288 								args_addr,
4289 								args_so_far),
4290 						  skip));
4291 
4292 	  if (!ACCUMULATE_OUTGOING_ARGS)
4293 	    {
4294 	      /* If the source is referenced relative to the stack pointer,
4295 		 copy it to another register to stabilize it.  We do not need
4296 		 to do this if we know that we won't be changing sp.  */
4297 
4298 	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4299 		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4300 		temp = copy_to_reg (temp);
4301 	    }
4302 
4303 	  target = gen_rtx_MEM (BLKmode, temp);
4304 
4305 	  /* We do *not* set_mem_attributes here, because incoming arguments
4306 	     may overlap with sibling call outgoing arguments and we cannot
4307 	     allow reordering of reads from function arguments with stores
4308 	     to outgoing arguments of sibling calls.  We do, however, want
4309 	     to record the alignment of the stack slot.  */
4310 	  /* ALIGN may well be better aligned than TYPE, e.g. due to
4311 	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
4312 	  set_mem_align (target, align);
4313 
4314 	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4315 	}
4316     }
4317   else if (partial > 0)
4318     {
4319       /* Scalar partly in registers.  */
4320 
4321       int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4322       int i;
4323       int not_stack;
4324       /* # bytes of start of argument
4325 	 that we must make space for but need not store.  */
4326       int offset = partial % (PARM_BOUNDARY / BITS_PER_UNIT);
4327       int args_offset = INTVAL (args_so_far);
4328       int skip;
4329 
4330       /* Push padding now if padding above and stack grows down,
4331 	 or if padding below and stack grows up.
4332 	 But if space already allocated, this has already been done.  */
4333       if (extra && args_addr == 0
4334 	  && where_pad != none && where_pad != stack_direction)
4335 	anti_adjust_stack (GEN_INT (extra));
4336 
4337       /* If we make space by pushing it, we might as well push
4338 	 the real data.  Otherwise, we can leave OFFSET nonzero
4339 	 and leave the space uninitialized.  */
4340       if (args_addr == 0)
4341 	offset = 0;
4342 
4343       /* Now NOT_STACK gets the number of words that we don't need to
4344 	 allocate on the stack.  Convert OFFSET to words too.  */
4345       not_stack = (partial - offset) / UNITS_PER_WORD;
4346       offset /= UNITS_PER_WORD;
4347 
4348       /* If the partial register-part of the arg counts in its stack size,
4349 	 skip the part of stack space corresponding to the registers.
4350 	 Otherwise, start copying to the beginning of the stack space,
4351 	 by setting SKIP to 0.  */
4352       skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4353 
4354       if (CONSTANT_P (x) && !targetm.legitimate_constant_p (mode, x))
4355 	x = validize_mem (force_const_mem (mode, x));
4356 
4357       /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4358 	 SUBREGs of such registers are not allowed.  */
4359       if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
4360 	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4361 	x = copy_to_reg (x);
4362 
4363       /* Loop over all the words allocated on the stack for this arg.  */
4364       /* We can do it by words, because any scalar bigger than a word
4365 	 has a size a multiple of a word.  */
4366       for (i = size - 1; i >= not_stack; i--)
4367 	if (i >= not_stack + offset)
4368 	  emit_push_insn (operand_subword_force (x, i, mode),
4369 			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4370 			  0, args_addr,
4371 			  GEN_INT (args_offset + ((i - not_stack + skip)
4372 						  * UNITS_PER_WORD)),
4373 			  reg_parm_stack_space, alignment_pad);
4374     }
4375   else
4376     {
4377       rtx addr;
4378       rtx dest;
4379 
4380       /* Push padding now if padding above and stack grows down,
4381 	 or if padding below and stack grows up.
4382 	 But if space already allocated, this has already been done.  */
4383       if (extra && args_addr == 0
4384 	  && where_pad != none && where_pad != stack_direction)
4385 	anti_adjust_stack (GEN_INT (extra));
4386 
4387 #ifdef PUSH_ROUNDING
4388       if (args_addr == 0 && PUSH_ARGS)
4389 	emit_single_push_insn (mode, x, type);
4390       else
4391 #endif
4392 	{
4393 	  if (CONST_INT_P (args_so_far))
4394 	    addr
4395 	      = memory_address (mode,
4396 				plus_constant (Pmode, args_addr,
4397 					       INTVAL (args_so_far)));
4398 	  else
4399 	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4400 						       args_so_far));
4401 	  dest = gen_rtx_MEM (mode, addr);
4402 
4403 	  /* We do *not* set_mem_attributes here, because incoming arguments
4404 	     may overlap with sibling call outgoing arguments and we cannot
4405 	     allow reordering of reads from function arguments with stores
4406 	     to outgoing arguments of sibling calls.  We do, however, want
4407 	     to record the alignment of the stack slot.  */
4408 	  /* ALIGN may well be better aligned than TYPE, e.g. due to
4409 	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
4410 	  set_mem_align (dest, align);
4411 
4412 	  emit_move_insn (dest, x);
4413 	}
4414     }
4415 
4416   /* If part should go in registers, copy that part
4417      into the appropriate registers.  Do this now, at the end,
4418      since mem-to-mem copies above may do function calls.  */
4419   if (partial > 0 && reg != 0)
4420     {
4421       /* Handle calls that pass values in multiple non-contiguous locations.
4422 	 The Irix 6 ABI has examples of this.  */
4423       if (GET_CODE (reg) == PARALLEL)
4424 	emit_group_load (reg, x, type, -1);
4425       else
4426 	{
4427 	  gcc_assert (partial % UNITS_PER_WORD == 0);
4428 	  move_block_to_reg (REGNO (reg), x, partial / UNITS_PER_WORD, mode);
4429 	}
4430     }
4431 
4432   if (extra && args_addr == 0 && where_pad == stack_direction)
4433     anti_adjust_stack (GEN_INT (extra));
4434 
4435   if (alignment_pad && args_addr == 0)
4436     anti_adjust_stack (alignment_pad);
4437 }
4438 
4439 /* Return X if X can be used as a subtarget in a sequence of arithmetic
4440    operations.  */
4441 
4442 static rtx
4443 get_subtarget (rtx x)
4444 {
4445   return (optimize
4446           || x == 0
4447 	  /* Only registers can be subtargets.  */
4448 	  || !REG_P (x)
4449 	  /* Don't use hard regs to avoid extending their life.  */
4450 	  || REGNO (x) < FIRST_PSEUDO_REGISTER
4451 	  ? 0 : x);
4452 }
4453 
4454 /* A subroutine of expand_assignment.  Optimize FIELD op= VAL, where
4455    FIELD is a bitfield.  Returns true if the optimization was successful,
4456    and there's nothing else to do.  */
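/* E.g. for "s.bf |= 3" the constant can be masked, shifted into place and
   IORed into the word containing the bitfield directly, avoiding a separate
   extract/modify/insert sequence (illustrative).  */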
4457 
4458 static bool
4459 optimize_bitfield_assignment_op (unsigned HOST_WIDE_INT bitsize,
4460 				 unsigned HOST_WIDE_INT bitpos,
4461 				 unsigned HOST_WIDE_INT bitregion_start,
4462 				 unsigned HOST_WIDE_INT bitregion_end,
4463 				 machine_mode mode1, rtx str_rtx,
4464 				 tree to, tree src)
4465 {
4466   machine_mode str_mode = GET_MODE (str_rtx);
4467   unsigned int str_bitsize = GET_MODE_BITSIZE (str_mode);
4468   tree op0, op1;
4469   rtx value, result;
4470   optab binop;
4471   gimple srcstmt;
4472   enum tree_code code;
4473 
4474   if (mode1 != VOIDmode
4475       || bitsize >= BITS_PER_WORD
4476       || str_bitsize > BITS_PER_WORD
4477       || TREE_SIDE_EFFECTS (to)
4478       || TREE_THIS_VOLATILE (to))
4479     return false;
4480 
4481   STRIP_NOPS (src);
4482   if (TREE_CODE (src) != SSA_NAME)
4483     return false;
4484   if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE)
4485     return false;
4486 
4487   srcstmt = get_gimple_for_ssa_name (src);
4488   if (!srcstmt
4489       || TREE_CODE_CLASS (gimple_assign_rhs_code (srcstmt)) != tcc_binary)
4490     return false;
4491 
4492   code = gimple_assign_rhs_code (srcstmt);
4493 
4494   op0 = gimple_assign_rhs1 (srcstmt);
4495 
4496   /* If OP0 is an SSA_NAME, then we want to walk the use-def chain
4497      to find its initialization.  Hopefully the initialization will
4498      be from a bitfield load.  */
4499   if (TREE_CODE (op0) == SSA_NAME)
4500     {
4501       gimple op0stmt = get_gimple_for_ssa_name (op0);
4502 
4503       /* We want to eventually have OP0 be the same as TO, which
4504 	 should be a bitfield.  */
4505       if (!op0stmt
4506 	  || !is_gimple_assign (op0stmt)
4507 	  || gimple_assign_rhs_code (op0stmt) != TREE_CODE (to))
4508 	return false;
4509       op0 = gimple_assign_rhs1 (op0stmt);
4510     }
4511 
4512   op1 = gimple_assign_rhs2 (srcstmt);
4513 
4514   if (!operand_equal_p (to, op0, 0))
4515     return false;
4516 
4517   if (MEM_P (str_rtx))
4518     {
4519       unsigned HOST_WIDE_INT offset1;
4520 
4521       if (str_bitsize == 0 || str_bitsize > BITS_PER_WORD)
4522 	str_mode = word_mode;
4523       str_mode = get_best_mode (bitsize, bitpos,
4524 				bitregion_start, bitregion_end,
4525 				MEM_ALIGN (str_rtx), str_mode, 0);
4526       if (str_mode == VOIDmode)
4527 	return false;
4528       str_bitsize = GET_MODE_BITSIZE (str_mode);
4529 
4530       offset1 = bitpos;
4531       bitpos %= str_bitsize;
4532       offset1 = (offset1 - bitpos) / BITS_PER_UNIT;
4533       str_rtx = adjust_address (str_rtx, str_mode, offset1);
4534     }
4535   else if (!REG_P (str_rtx) && GET_CODE (str_rtx) != SUBREG)
4536     return false;
4537 
4538   /* If the bit field covers the whole REG/MEM, store_field
4539      will likely generate better code.  */
4540   if (bitsize >= str_bitsize)
4541     return false;
4542 
4543   /* We can't handle fields split across multiple entities.  */
4544   if (bitpos + bitsize > str_bitsize)
4545     return false;
4546 
4547   if (BYTES_BIG_ENDIAN)
4548     bitpos = str_bitsize - bitpos - bitsize;
4549 
4550   switch (code)
4551     {
4552     case PLUS_EXPR:
4553     case MINUS_EXPR:
4554       /* For now, just optimize the case of the topmost bitfield,
4555 	 where we don't need to do any masking, and also
4556 	 1-bit bitfields, where xor can be used.
4557 	 We might win by one instruction for the other bitfields
4558 	 too if insv/extv instructions aren't used, so that
4559 	 can be added later.  */
4560       if (bitpos + bitsize != str_bitsize
4561 	  && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
4562 	break;
4563 
4564       value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4565       value = convert_modes (str_mode,
4566 			     TYPE_MODE (TREE_TYPE (op1)), value,
4567 			     TYPE_UNSIGNED (TREE_TYPE (op1)));
4568 
4569       /* We may be accessing data outside the field, which means
4570 	 we can alias adjacent data.  */
4571       if (MEM_P (str_rtx))
4572 	{
4573 	  str_rtx = shallow_copy_rtx (str_rtx);
4574 	  set_mem_alias_set (str_rtx, 0);
4575 	  set_mem_expr (str_rtx, 0);
4576 	}
4577 
4578       binop = code == PLUS_EXPR ? add_optab : sub_optab;
4579       if (bitsize == 1 && bitpos + bitsize != str_bitsize)
4580 	{
4581 	  value = expand_and (str_mode, value, const1_rtx, NULL);
4582 	  binop = xor_optab;
4583 	}
4584       value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4585       result = expand_binop (str_mode, binop, str_rtx,
4586 			     value, str_rtx, 1, OPTAB_WIDEN);
4587       if (result != str_rtx)
4588 	emit_move_insn (str_rtx, result);
4589       return true;
4590 
4591     case BIT_IOR_EXPR:
4592     case BIT_XOR_EXPR:
4593       if (TREE_CODE (op1) != INTEGER_CST)
4594 	break;
4595       value = expand_expr (op1, NULL_RTX, str_mode, EXPAND_NORMAL);
4596       value = convert_modes (str_mode,
4597 			     TYPE_MODE (TREE_TYPE (op1)), value,
4598 			     TYPE_UNSIGNED (TREE_TYPE (op1)));
4599 
4600       /* We may be accessing data outside the field, which means
4601 	 we can alias adjacent data.  */
4602       if (MEM_P (str_rtx))
4603 	{
4604 	  str_rtx = shallow_copy_rtx (str_rtx);
4605 	  set_mem_alias_set (str_rtx, 0);
4606 	  set_mem_expr (str_rtx, 0);
4607 	}
4608 
4609       binop = code == BIT_IOR_EXPR ? ior_optab : xor_optab;
4610       if (bitpos + bitsize != str_bitsize)
4611 	{
4612 	  rtx mask = gen_int_mode (((unsigned HOST_WIDE_INT) 1 << bitsize) - 1,
4613 				   str_mode);
4614 	  value = expand_and (str_mode, value, mask, NULL_RTX);
4615 	}
4616       value = expand_shift (LSHIFT_EXPR, str_mode, value, bitpos, NULL_RTX, 1);
4617       result = expand_binop (str_mode, binop, str_rtx,
4618 			     value, str_rtx, 1, OPTAB_WIDEN);
4619       if (result != str_rtx)
4620 	emit_move_insn (str_rtx, result);
4621       return true;
4622 
4623     default:
4624       break;
4625     }
4626 
4627   return false;
4628 }
4629 
4630 /* In the C++ memory model, consecutive bit fields in a structure are
4631    considered one memory location.
4632 
4633    Given a COMPONENT_REF EXP at position (BITPOS, OFFSET), this function
4634    returns the bit range of consecutive bits in which this COMPONENT_REF
4635    belongs.  The values are returned in *BITSTART and *BITEND.  *BITPOS
4636    and *OFFSET may be adjusted in the process.
4637 
4638    If the access does not need to be restricted, 0 is returned in both
4639    *BITSTART and *BITEND.  */
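/* For example, given "struct { int a : 7; int b : 9; char c; }", the
   bitfields a and b share one memory location, so a store to a may use an
   access that covers both, but must not touch c (illustrative).  */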
4640 
4641 static void
4642 get_bit_range (unsigned HOST_WIDE_INT *bitstart,
4643 	       unsigned HOST_WIDE_INT *bitend,
4644 	       tree exp,
4645 	       HOST_WIDE_INT *bitpos,
4646 	       tree *offset)
4647 {
4648   HOST_WIDE_INT bitoffset;
4649   tree field, repr;
4650 
4651   gcc_assert (TREE_CODE (exp) == COMPONENT_REF);
4652 
4653   field = TREE_OPERAND (exp, 1);
4654   repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
4655   /* If we do not have a DECL_BIT_FIELD_REPRESENTATIVE there is no
4656      need to limit the range we can access.  */
4657   if (!repr)
4658     {
4659       *bitstart = *bitend = 0;
4660       return;
4661     }
4662 
4663   /* If we have a DECL_BIT_FIELD_REPRESENTATIVE but the enclosing record is
4664      part of a larger bit field, then the representative does not serve any
4665      useful purpose.  This can occur in Ada.  */
4666   if (handled_component_p (TREE_OPERAND (exp, 0)))
4667     {
4668       machine_mode rmode;
4669       HOST_WIDE_INT rbitsize, rbitpos;
4670       tree roffset;
4671       int unsignedp;
4672       int volatilep = 0;
4673       get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
4674 			   &roffset, &rmode, &unsignedp, &volatilep, false);
4675       if ((rbitpos % BITS_PER_UNIT) != 0)
4676 	{
4677 	  *bitstart = *bitend = 0;
4678 	  return;
4679 	}
4680     }
4681 
4682   /* Compute the adjustment to bitpos from the offset of the field
4683      relative to the representative.  DECL_FIELD_OFFSET of field and
4684      repr are the same by construction if they are not constants,
4685      see finish_bitfield_layout.  */
4686   if (tree_fits_uhwi_p (DECL_FIELD_OFFSET (field))
4687       && tree_fits_uhwi_p (DECL_FIELD_OFFSET (repr)))
4688     bitoffset = (tree_to_uhwi (DECL_FIELD_OFFSET (field))
4689 		 - tree_to_uhwi (DECL_FIELD_OFFSET (repr))) * BITS_PER_UNIT;
4690   else
4691     bitoffset = 0;
4692   bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
4693 		- tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
4694 
4695   /* If the adjustment is larger than bitpos, we would have a negative bit
4696      position for the lower bound and this may wreak havoc later.  Adjust
4697      offset and bitpos to make the lower bound non-negative in that case.  */
4698   if (bitoffset > *bitpos)
4699     {
4700       HOST_WIDE_INT adjust = bitoffset - *bitpos;
4701       gcc_assert ((adjust % BITS_PER_UNIT) == 0);
4702 
4703       *bitpos += adjust;
4704       if (*offset == NULL_TREE)
4705 	*offset = size_int (-adjust / BITS_PER_UNIT);
4706       else
4707 	*offset
4708 	  = size_binop (MINUS_EXPR, *offset, size_int (adjust / BITS_PER_UNIT));
4709       *bitstart = 0;
4710     }
4711   else
4712     *bitstart = *bitpos - bitoffset;
4713 
4714   *bitend = *bitstart + tree_to_uhwi (DECL_SIZE (repr)) - 1;
4715 }
4716 
4717 /* Returns true if ADDR is an ADDR_EXPR of a DECL that does not reside
4718    in memory and has non-BLKmode.  DECL_RTL must not be a MEM; if
4719    DECL_RTL was not set yet, return NORTL.  */
4720 
4721 static inline bool
4722 addr_expr_of_non_mem_decl_p_1 (tree addr, bool nortl)
4723 {
4724   if (TREE_CODE (addr) != ADDR_EXPR)
4725     return false;
4726 
4727   tree base = TREE_OPERAND (addr, 0);
4728 
4729   if (!DECL_P (base)
4730       || TREE_ADDRESSABLE (base)
4731       || DECL_MODE (base) == BLKmode)
4732     return false;
4733 
4734   if (!DECL_RTL_SET_P (base))
4735     return nortl;
4736 
4737   return (!MEM_P (DECL_RTL (base)));
4738 }
4739 
4740 /* Returns true if the MEM_REF REF refers to an object that does not
4741    reside in memory and has non-BLKmode.  */
4742 
4743 static inline bool
4744 mem_ref_refers_to_non_mem_p (tree ref)
4745 {
4746   tree base = TREE_OPERAND (ref, 0);
4747   return addr_expr_of_non_mem_decl_p_1 (base, false);
4748 }
4749 
4750 /* Expand an assignment that stores the value of FROM into TO.  If NONTEMPORAL
4751    is true, try generating a nontemporal store.  */
4752 
4753 void
4754 expand_assignment (tree to, tree from, bool nontemporal)
4755 {
4756   rtx to_rtx = 0;
4757   rtx result;
4758   machine_mode mode;
4759   unsigned int align;
4760   enum insn_code icode;
4761 
4762   /* Don't crash if the lhs of the assignment was erroneous.  */
4763   if (TREE_CODE (to) == ERROR_MARK)
4764     {
4765       expand_normal (from);
4766       return;
4767     }
4768 
4769   /* Optimize away no-op moves without side-effects.  */
4770   if (operand_equal_p (to, from, 0))
4771     return;
4772 
4773   /* Handle misaligned stores.  */
4774   mode = TYPE_MODE (TREE_TYPE (to));
4775   if ((TREE_CODE (to) == MEM_REF
4776        || TREE_CODE (to) == TARGET_MEM_REF)
4777       && mode != BLKmode
4778       && !mem_ref_refers_to_non_mem_p (to)
4779       && ((align = get_object_alignment (to))
4780 	  < GET_MODE_ALIGNMENT (mode))
4781       && (((icode = optab_handler (movmisalign_optab, mode))
4782 	   != CODE_FOR_nothing)
4783 	  || SLOW_UNALIGNED_ACCESS (mode, align)))
4784     {
4785       rtx reg, mem;
4786 
4787       reg = expand_expr (from, NULL_RTX, VOIDmode, EXPAND_NORMAL);
4788       reg = force_not_mem (reg);
4789       mem = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4790 
4791       if (icode != CODE_FOR_nothing)
4792 	{
4793 	  struct expand_operand ops[2];
4794 
4795 	  create_fixed_operand (&ops[0], mem);
4796 	  create_input_operand (&ops[1], reg, mode);
4797 	  /* The movmisalign<mode> pattern cannot fail, else the assignment
4798 	     would silently be omitted.  */
4799 	  expand_insn (icode, 2, ops);
4800 	}
4801       else
4802 	store_bit_field (mem, GET_MODE_BITSIZE (mode), 0, 0, 0, mode, reg);
4803       return;
4804     }
4805 
4806   /* Assignment of a structure component needs special treatment
4807      if the structure component's rtx is not simply a MEM.
4808      Assignment of an array element at a constant index, and assignment of
4809      an array element in an unaligned packed structure field, has the same
4810      problem.  Same for (partially) storing into a non-memory object.  */
4811   if (handled_component_p (to)
4812       || (TREE_CODE (to) == MEM_REF
4813 	  && mem_ref_refers_to_non_mem_p (to))
4814       || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4815     {
4816       machine_mode mode1;
4817       HOST_WIDE_INT bitsize, bitpos;
4818       unsigned HOST_WIDE_INT bitregion_start = 0;
4819       unsigned HOST_WIDE_INT bitregion_end = 0;
4820       tree offset;
4821       int unsignedp;
4822       int volatilep = 0;
4823       tree tem;
4824 
4825       push_temp_slots ();
4826       tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4827 				 &unsignedp, &volatilep, true);
4828 
4829       /* Make sure bitpos is not negative, it can wreak havoc later.  */
4830       if (bitpos < 0)
4831 	{
4832 	  gcc_assert (offset == NULL_TREE);
4833 	  offset = size_int (bitpos >> (BITS_PER_UNIT == 8
4834 					? 3 : exact_log2 (BITS_PER_UNIT)));
4835 	  bitpos &= BITS_PER_UNIT - 1;
4836 	}
4837 
4838       if (TREE_CODE (to) == COMPONENT_REF
4839 	  && DECL_BIT_FIELD_TYPE (TREE_OPERAND (to, 1)))
4840 	get_bit_range (&bitregion_start, &bitregion_end, to, &bitpos, &offset);
4841       /* The C++ memory model naturally applies to byte-aligned fields.
4842 	 However, if we do not have a DECL_BIT_FIELD_TYPE but BITPOS or
4843 	 BITSIZE are not byte-aligned, there is no need to limit the range
4844 	 we can access.  This can occur with packed structures in Ada.  */
4845       else if (bitsize > 0
4846 	       && bitsize % BITS_PER_UNIT == 0
4847 	       && bitpos % BITS_PER_UNIT == 0)
4848 	{
4849 	  bitregion_start = bitpos;
4850 	  bitregion_end = bitpos + bitsize - 1;
4851 	}
4852 
4853       to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_WRITE);
4854 
4855       /* If the field has a mode, we want to access it in the
4856 	 field's mode, not the computed mode.
4857 	 If a MEM has VOIDmode (external with incomplete type),
4858 	 use BLKmode for it instead.  */
4859       if (MEM_P (to_rtx))
4860 	{
4861 	  if (mode1 != VOIDmode)
4862 	    to_rtx = adjust_address (to_rtx, mode1, 0);
4863 	  else if (GET_MODE (to_rtx) == VOIDmode)
4864 	    to_rtx = adjust_address (to_rtx, BLKmode, 0);
4865 	}
4866 
4867       if (offset != 0)
4868 	{
4869 	  machine_mode address_mode;
4870 	  rtx offset_rtx;
4871 
4872 	  if (!MEM_P (to_rtx))
4873 	    {
4874 	      /* We can get constant negative offsets into arrays with broken
4875 		 user code.  Translate this to a trap instead of ICEing.  */
4876 	      gcc_assert (TREE_CODE (offset) == INTEGER_CST);
4877 	      expand_builtin_trap ();
4878 	      to_rtx = gen_rtx_MEM (BLKmode, const0_rtx);
4879 	    }
4880 
4881 	  offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4882 	  address_mode = get_address_mode (to_rtx);
4883 	  if (GET_MODE (offset_rtx) != address_mode)
4884 	    offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
4885 
4886 	  /* If we have an expression in OFFSET_RTX and a non-zero
4887 	     byte offset in BITPOS, adding the byte offset before the
4888 	     OFFSET_RTX results in better intermediate code, which makes
4889 	     later rtl optimization passes perform better.
4890 
4891 	     We prefer intermediate code like this:
4892 
4893 	     r124:DI=r123:DI+0x18
4894 	     [r124:DI]=r121:DI
4895 
4896 	     ... instead of ...
4897 
4898 	     r124:DI=r123:DI+0x10
4899 	     [r124:DI+0x8]=r121:DI
4900 
4901 	     This is only done for aligned data values, as these can
4902 	     be expected to result in single move instructions.  */
4903 	  if (mode1 != VOIDmode
4904 	      && bitpos != 0
4905 	      && bitsize > 0
4906 	      && (bitpos % bitsize) == 0
4907 	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4908 	      && MEM_ALIGN (to_rtx) >= GET_MODE_ALIGNMENT (mode1))
4909 	    {
4910 	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4911 	      bitregion_start = 0;
4912 	      if (bitregion_end >= (unsigned HOST_WIDE_INT) bitpos)
4913 		bitregion_end -= bitpos;
4914 	      bitpos = 0;
4915 	    }
4916 
4917 	  to_rtx = offset_address (to_rtx, offset_rtx,
4918 				   highest_pow2_factor_for_target (to,
4919 				   				   offset));
4920 	}
4921 
4922       /* No action is needed if the target is not a memory and the field
4923 	 lies completely outside that target.  This can occur if the source
4924 	 code contains an out-of-bounds access to a small array.  */
4925       if (!MEM_P (to_rtx)
4926 	  && GET_MODE (to_rtx) != BLKmode
4927 	  && (unsigned HOST_WIDE_INT) bitpos
4928 	     >= GET_MODE_PRECISION (GET_MODE (to_rtx)))
4929 	{
4930 	  expand_normal (from);
4931 	  result = NULL;
4932 	}
4933       /* Handle expand_expr of a complex value returning a CONCAT.  */
4934       else if (GET_CODE (to_rtx) == CONCAT)
4935 	{
4936 	  unsigned short mode_bitsize = GET_MODE_BITSIZE (GET_MODE (to_rtx));
4937 	  if (COMPLEX_MODE_P (TYPE_MODE (TREE_TYPE (from)))
4938 	      && bitpos == 0
4939 	      && bitsize == mode_bitsize)
4940 	    result = store_expr (from, to_rtx, false, nontemporal);
4941 	  else if (bitsize == mode_bitsize / 2
4942 		   && (bitpos == 0 || bitpos == mode_bitsize / 2))
4943 	    result = store_expr (from, XEXP (to_rtx, bitpos != 0), false,
4944 				 nontemporal);
4945 	  else if (bitpos + bitsize <= mode_bitsize / 2)
4946 	    result = store_field (XEXP (to_rtx, 0), bitsize, bitpos,
4947 				  bitregion_start, bitregion_end,
4948 				  mode1, from,
4949 				  get_alias_set (to), nontemporal);
4950 	  else if (bitpos >= mode_bitsize / 2)
4951 	    result = store_field (XEXP (to_rtx, 1), bitsize,
4952 				  bitpos - mode_bitsize / 2,
4953 				  bitregion_start, bitregion_end,
4954 				  mode1, from,
4955 				  get_alias_set (to), nontemporal);
4956 	  else if (bitpos == 0 && bitsize == mode_bitsize)
4957 	    {
4958 	      rtx from_rtx;
4959 	      result = expand_normal (from);
4960 	      from_rtx = simplify_gen_subreg (GET_MODE (to_rtx), result,
4961 					      TYPE_MODE (TREE_TYPE (from)), 0);
4962 	      emit_move_insn (XEXP (to_rtx, 0),
4963 			      read_complex_part (from_rtx, false));
4964 	      emit_move_insn (XEXP (to_rtx, 1),
4965 			      read_complex_part (from_rtx, true));
4966 	    }
4967 	  else
4968 	    {
4969 	      rtx temp = assign_stack_temp (GET_MODE (to_rtx),
4970 					    GET_MODE_SIZE (GET_MODE (to_rtx)));
4971 	      write_complex_part (temp, XEXP (to_rtx, 0), false);
4972 	      write_complex_part (temp, XEXP (to_rtx, 1), true);
4973 	      result = store_field (temp, bitsize, bitpos,
4974 				    bitregion_start, bitregion_end,
4975 				    mode1, from,
4976 				    get_alias_set (to), nontemporal);
4977 	      emit_move_insn (XEXP (to_rtx, 0), read_complex_part (temp, false));
4978 	      emit_move_insn (XEXP (to_rtx, 1), read_complex_part (temp, true));
4979 	    }
4980 	}
4981       else
4982 	{
4983 	  if (MEM_P (to_rtx))
4984 	    {
4985 	      /* If the field is at offset zero, we could have been given the
4986 		 DECL_RTX of the parent struct.  Don't munge it.  */
4987 	      to_rtx = shallow_copy_rtx (to_rtx);
4988 	      set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4989 	      if (volatilep)
4990 		MEM_VOLATILE_P (to_rtx) = 1;
4991 	    }
4992 
4993 	  if (optimize_bitfield_assignment_op (bitsize, bitpos,
4994 					       bitregion_start, bitregion_end,
4995 					       mode1,
4996 					       to_rtx, to, from))
4997 	    result = NULL;
4998 	  else
4999 	    result = store_field (to_rtx, bitsize, bitpos,
5000 				  bitregion_start, bitregion_end,
5001 				  mode1, from,
5002 				  get_alias_set (to), nontemporal);
5003 	}
5004 
5005       if (result)
5006 	preserve_temp_slots (result);
5007       pop_temp_slots ();
5008       return;
5009     }
5010 
5011   /* If the rhs is a function call and its value is not an aggregate,
5012      call the function before we start to compute the lhs.
5013      This is needed for correct code for cases such as
5014      val = setjmp (buf) on machines where reference to val
5015      requires loading up part of an address in a separate insn.
5016 
5017      Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is a REG,
5018      since it might be a promoted variable where the zero- or sign-extension
5019      needs to be done.  Handling this in the normal way is safe because no
5020      computation is done before the call.  The same is true for SSA names.  */
5021   if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
5022       && COMPLETE_TYPE_P (TREE_TYPE (from))
5023       && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
5024       && ! (((TREE_CODE (to) == VAR_DECL
5025 	      || TREE_CODE (to) == PARM_DECL
5026 	      || TREE_CODE (to) == RESULT_DECL)
5027 	     && REG_P (DECL_RTL (to)))
5028 	    || TREE_CODE (to) == SSA_NAME))
5029     {
5030       rtx value;
5031       rtx bounds;
5032 
5033       push_temp_slots ();
5034       value = expand_normal (from);
5035 
5036       /* Split value and bounds to store them separately.  */
5037       chkp_split_slot (value, &value, &bounds);
5038 
5039       if (to_rtx == 0)
5040 	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5041 
5042       /* Handle calls that return values in multiple non-contiguous locations.
5043 	 The Irix 6 ABI has examples of this.  */
5044       if (GET_CODE (to_rtx) == PARALLEL)
5045 	{
5046 	  if (GET_CODE (value) == PARALLEL)
5047 	    emit_group_move (to_rtx, value);
5048 	  else
5049 	    emit_group_load (to_rtx, value, TREE_TYPE (from),
5050 			     int_size_in_bytes (TREE_TYPE (from)));
5051 	}
5052       else if (GET_CODE (value) == PARALLEL)
5053 	emit_group_store (to_rtx, value, TREE_TYPE (from),
5054 			  int_size_in_bytes (TREE_TYPE (from)));
5055       else if (GET_MODE (to_rtx) == BLKmode)
5056 	{
5057 	  /* Handle calls that return BLKmode values in registers.  */
5058 	  if (REG_P (value))
5059 	    copy_blkmode_from_reg (to_rtx, value, TREE_TYPE (from));
5060 	  else
5061 	    emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
5062 	}
5063       else
5064 	{
5065 	  if (POINTER_TYPE_P (TREE_TYPE (to)))
5066 	    value = convert_memory_address_addr_space
5067 		      (GET_MODE (to_rtx), value,
5068 		       TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (to))));
5069 
5070 	  emit_move_insn (to_rtx, value);
5071 	}
5072 
5073       /* Store bounds if required.  */
5074       if (bounds
5075 	  && (BOUNDED_P (to) || chkp_type_has_pointer (TREE_TYPE (to))))
5076 	{
5077 	  gcc_assert (MEM_P (to_rtx));
5078 	  chkp_emit_bounds_store (bounds, value, to_rtx);
5079 	}
5080 
5081       preserve_temp_slots (to_rtx);
5082       pop_temp_slots ();
5083       return;
5084     }
5085 
5086   /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.  */
5087   to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
5088 
5089   /* Don't move directly into a return register.  */
5090   if (TREE_CODE (to) == RESULT_DECL
5091       && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
5092     {
5093       rtx temp;
5094 
5095       push_temp_slots ();
5096 
5097       /* If the source is itself a return value, it is still in a pseudo at
5098 	 this point so we can move it back to the return register directly.  */
5099       if (REG_P (to_rtx)
5100 	  && TYPE_MODE (TREE_TYPE (from)) == BLKmode
5101 	  && TREE_CODE (from) != CALL_EXPR)
5102 	temp = copy_blkmode_to_reg (GET_MODE (to_rtx), from);
5103       else
5104 	temp = expand_expr (from, NULL_RTX, GET_MODE (to_rtx), EXPAND_NORMAL);
5105 
5106       /* Handle calls that return values in multiple non-contiguous locations.
5107 	 The Irix 6 ABI has examples of this.  */
5108       if (GET_CODE (to_rtx) == PARALLEL)
5109 	{
5110 	  if (GET_CODE (temp) == PARALLEL)
5111 	    emit_group_move (to_rtx, temp);
5112 	  else
5113 	    emit_group_load (to_rtx, temp, TREE_TYPE (from),
5114 			     int_size_in_bytes (TREE_TYPE (from)));
5115 	}
5116       else if (temp)
5117 	emit_move_insn (to_rtx, temp);
5118 
5119       preserve_temp_slots (to_rtx);
5120       pop_temp_slots ();
5121       return;
5122     }
5123 
5124   /* In case we are returning the contents of an object which overlaps
5125      the place the value is being stored, use a safe function when copying
5126      a value through a pointer into a structure value return block.  */
5127   if (TREE_CODE (to) == RESULT_DECL
5128       && TREE_CODE (from) == INDIRECT_REF
5129       && ADDR_SPACE_GENERIC_P
5130 	   (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (from, 0)))))
5131       && refs_may_alias_p (to, from)
5132       && cfun->returns_struct
5133       && !cfun->returns_pcc_struct)
5134     {
5135       rtx from_rtx, size;
5136 
5137       push_temp_slots ();
5138       size = expr_size (from);
5139       from_rtx = expand_normal (from);
5140 
5141       emit_library_call (memmove_libfunc, LCT_NORMAL,
5142 			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
5143 			 XEXP (from_rtx, 0), Pmode,
5144 			 convert_to_mode (TYPE_MODE (sizetype),
5145 					  size, TYPE_UNSIGNED (sizetype)),
5146 			 TYPE_MODE (sizetype));
5147 
5148       preserve_temp_slots (to_rtx);
5149       pop_temp_slots ();
5150       return;
5151     }
5152 
5153   /* Compute FROM and store the value in the rtx we got.  */
5154 
5155   push_temp_slots ();
5156   result = store_expr_with_bounds (from, to_rtx, 0, nontemporal, to);
5157   preserve_temp_slots (result);
5158   pop_temp_slots ();
5159   return;
5160 }
5161 
5162 /* Emits nontemporal store insn that moves FROM to TO.  Returns true if this
5163    succeeded, false otherwise.  */
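
/* A usage sketch (see store_expr_with_bounds below): when a nontemporal
   store is requested the caller tries

     if (nontemporal && emit_storent_insn (target, temp))
       ;  nothing more to do
     else
       ... fall back to an ordinary move ...

   If the target machine has no storent<mode> pattern, the optab handler
   is CODE_FOR_nothing and we simply return false.  */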
5164 
5165 bool
5166 emit_storent_insn (rtx to, rtx from)
5167 {
5168   struct expand_operand ops[2];
5169   machine_mode mode = GET_MODE (to);
5170   enum insn_code code = optab_handler (storent_optab, mode);
5171 
5172   if (code == CODE_FOR_nothing)
5173     return false;
5174 
5175   create_fixed_operand (&ops[0], to);
5176   create_input_operand (&ops[1], from, mode);
5177   return maybe_expand_insn (code, 2, ops);
5178 }
5179 
5180 /* Generate code for computing expression EXP,
5181    and storing the value into TARGET.
5182 
5183    If the mode is BLKmode then we may return TARGET itself.
5184    It turns out that in BLKmode it doesn't cause a problem,
5185    because C has no operators that could combine two different
5186    assignments into the same BLKmode object with different values
5187    with no sequence point.  Will other languages need this to
5188    be more thorough?
5189 
5190    If CALL_PARAM_P is nonzero, this is a store into a call param on the
5191    stack, and block moves may need to be treated specially.
5192 
5193    If NONTEMPORAL is true, try using a nontemporal store instruction.
5194 
5195    If BTARGET is not NULL then computed bounds of EXP are
5196    associated with BTARGET.  */
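
/* As an illustrative sketch, an automatic array initialization such as

     char buf[16] = "hi";

   can reach the STRING_CST case below once the left-hand side has been
   expanded to a MEM: the string bytes are copied with store_by_pieces
   and the remaining part of the buffer is zeroed with clear_storage,
   instead of expanding the initializer as a generic block move.  */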
5197 
5198 rtx
5199 store_expr_with_bounds (tree exp, rtx target, int call_param_p,
5200 			bool nontemporal, tree btarget)
5201 {
5202   rtx temp;
5203   rtx alt_rtl = NULL_RTX;
5204   location_t loc = curr_insn_location ();
5205 
5206   if (VOID_TYPE_P (TREE_TYPE (exp)))
5207     {
5208       /* C++ can generate ?: expressions with a throw expression in one
5209 	 branch and an rvalue in the other. Here, we resolve attempts to
5210 	 store the throw expression's nonexistent result.  */
5211       gcc_assert (!call_param_p);
5212       expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
5213       return NULL_RTX;
5214     }
5215   if (TREE_CODE (exp) == COMPOUND_EXPR)
5216     {
5217       /* Perform first part of compound expression, then assign from second
5218 	 part.  */
5219       expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5220 		   call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5221       return store_expr_with_bounds (TREE_OPERAND (exp, 1), target,
5222 				     call_param_p, nontemporal, btarget);
5223     }
5224   else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
5225     {
5226       /* For a conditional expression, get a safe form of the target.  Then
5227 	 test the condition, doing the appropriate assignment on either
5228 	 side.  This avoids the creation of unnecessary temporaries.
5229 	 For non-BLKmode, it is more efficient not to do this.  */
5230 
5231       rtx_code_label *lab1 = gen_label_rtx (), *lab2 = gen_label_rtx ();
5232 
5233       do_pending_stack_adjust ();
5234       NO_DEFER_POP;
5235       jumpifnot (TREE_OPERAND (exp, 0), lab1, -1);
5236       store_expr_with_bounds (TREE_OPERAND (exp, 1), target, call_param_p,
5237 			      nontemporal, btarget);
5238       emit_jump_insn (gen_jump (lab2));
5239       emit_barrier ();
5240       emit_label (lab1);
5241       store_expr_with_bounds (TREE_OPERAND (exp, 2), target, call_param_p,
5242 			      nontemporal, btarget);
5243       emit_label (lab2);
5244       OK_DEFER_POP;
5245 
5246       return NULL_RTX;
5247     }
5248   else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
5249     /* If this is a scalar in a register that is stored in a wider mode
5250        than the declared mode, compute the result into its declared mode
5251        and then convert to the wider mode.  Our value is the computed
5252        expression.  */
5253     {
5254       rtx inner_target = 0;
5255 
5256       /* We can do the conversion inside EXP, which will often result
5257 	 in some optimizations.  Do the conversion in two steps: first
5258 	 change the signedness, if needed, then the extend.  But don't
5259 	 do this if the type of EXP is a subtype of something else
5260 	 since then the conversion might involve more than just
5261 	 converting modes.  */
5262       if (INTEGRAL_TYPE_P (TREE_TYPE (exp))
5263 	  && TREE_TYPE (TREE_TYPE (exp)) == 0
5264 	  && GET_MODE_PRECISION (GET_MODE (target))
5265 	     == TYPE_PRECISION (TREE_TYPE (exp)))
5266 	{
5267 	  if (!SUBREG_CHECK_PROMOTED_SIGN (target,
5268 					  TYPE_UNSIGNED (TREE_TYPE (exp))))
5269 	    {
5270 	      /* Some types, e.g. Fortran's logical*4, won't have a signed
5271 		 version, so use the mode instead.  */
5272 	      tree ntype
5273 		= (signed_or_unsigned_type_for
5274 		   (SUBREG_PROMOTED_SIGN (target), TREE_TYPE (exp)));
5275 	      if (ntype == NULL)
5276 		ntype = lang_hooks.types.type_for_mode
5277 		  (TYPE_MODE (TREE_TYPE (exp)),
5278 		   SUBREG_PROMOTED_SIGN (target));
5279 
5280 	      exp = fold_convert_loc (loc, ntype, exp);
5281 	    }
5282 
5283 	  exp = fold_convert_loc (loc, lang_hooks.types.type_for_mode
5284 				  (GET_MODE (SUBREG_REG (target)),
5285 				   SUBREG_PROMOTED_SIGN (target)),
5286 				  exp);
5287 
5288 	  inner_target = SUBREG_REG (target);
5289 	}
5290 
5291       temp = expand_expr (exp, inner_target, VOIDmode,
5292 			  call_param_p ? EXPAND_STACK_PARM : EXPAND_NORMAL);
5293 
5294       /* Handle bounds returned by call.  */
5295       if (TREE_CODE (exp) == CALL_EXPR)
5296 	{
5297 	  rtx bounds;
5298 	  chkp_split_slot (temp, &temp, &bounds);
5299 	  if (bounds && btarget)
5300 	    {
5301 	      gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5302 	      rtx tmp = targetm.calls.load_returned_bounds (bounds);
5303 	      chkp_set_rtl_bounds (btarget, tmp);
5304 	    }
5305 	}
5306 
5307       /* If TEMP is a VOIDmode constant, use convert_modes to make
5308 	 sure that we properly convert it.  */
5309       if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
5310 	{
5311 	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5312 				temp, SUBREG_PROMOTED_SIGN (target));
5313 	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
5314 			        GET_MODE (target), temp,
5315 				SUBREG_PROMOTED_SIGN (target));
5316 	}
5317 
5318       convert_move (SUBREG_REG (target), temp,
5319 		    SUBREG_PROMOTED_SIGN (target));
5320 
5321       return NULL_RTX;
5322     }
5323   else if ((TREE_CODE (exp) == STRING_CST
5324 	    || (TREE_CODE (exp) == MEM_REF
5325 		&& TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
5326 		&& TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5327 		   == STRING_CST
5328 		&& integer_zerop (TREE_OPERAND (exp, 1))))
5329 	   && !nontemporal && !call_param_p
5330 	   && MEM_P (target))
5331     {
5332       /* Optimize initialization of an array with a STRING_CST.  */
5333       HOST_WIDE_INT exp_len, str_copy_len;
5334       rtx dest_mem;
5335       tree str = TREE_CODE (exp) == STRING_CST
5336 		 ? exp : TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5337 
5338       exp_len = int_expr_size (exp);
5339       if (exp_len <= 0)
5340 	goto normal_expr;
5341 
5342       if (TREE_STRING_LENGTH (str) <= 0)
5343 	goto normal_expr;
5344 
5345       str_copy_len = strlen (TREE_STRING_POINTER (str));
5346       if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
5347 	goto normal_expr;
5348 
5349       str_copy_len = TREE_STRING_LENGTH (str);
5350       if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0
5351 	  && TREE_STRING_POINTER (str)[TREE_STRING_LENGTH (str) - 1] == '\0')
5352 	{
5353 	  str_copy_len += STORE_MAX_PIECES - 1;
5354 	  str_copy_len &= ~(STORE_MAX_PIECES - 1);
5355 	}
5356       str_copy_len = MIN (str_copy_len, exp_len);
5357       if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
5358 				CONST_CAST (char *, TREE_STRING_POINTER (str)),
5359 				MEM_ALIGN (target), false))
5360 	goto normal_expr;
5361 
5362       dest_mem = target;
5363 
5364       dest_mem = store_by_pieces (dest_mem,
5365 				  str_copy_len, builtin_strncpy_read_str,
5366 				  CONST_CAST (char *,
5367 					      TREE_STRING_POINTER (str)),
5368 				  MEM_ALIGN (target), false,
5369 				  exp_len > str_copy_len ? 1 : 0);
5370       if (exp_len > str_copy_len)
5371 	clear_storage (adjust_address (dest_mem, BLKmode, 0),
5372 		       GEN_INT (exp_len - str_copy_len),
5373 		       BLOCK_OP_NORMAL);
5374       return NULL_RTX;
5375     }
5376   else
5377     {
5378       rtx tmp_target;
5379 
5380   normal_expr:
5381       /* If we want to use a nontemporal store, force the value into
5382 	 a register first.  */
5383       tmp_target = nontemporal ? NULL_RTX : target;
5384       temp = expand_expr_real (exp, tmp_target, GET_MODE (target),
5385 			       (call_param_p
5386 				? EXPAND_STACK_PARM : EXPAND_NORMAL),
5387 			       &alt_rtl, false);
5388 
5389       /* Handle bounds returned by call.  */
5390       if (TREE_CODE (exp) == CALL_EXPR)
5391 	{
5392 	  rtx bounds;
5393 	  chkp_split_slot (temp, &temp, &bounds);
5394 	  if (bounds && btarget)
5395 	    {
5396 	      gcc_assert (TREE_CODE (btarget) == SSA_NAME);
5397 	      rtx tmp = targetm.calls.load_returned_bounds (bounds);
5398 	      chkp_set_rtl_bounds (btarget, tmp);
5399 	    }
5400 	}
5401     }
5402 
5403   /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
5404      the same as that of TARGET, adjust the constant.  This is needed, for
5405      example, in case it is a CONST_DOUBLE or CONST_WIDE_INT and we want
5406      only a word-sized value.  */
5407   if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
5408       && TREE_CODE (exp) != ERROR_MARK
5409       && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
5410     temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
5411 			  temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5412 
5413   /* If the value was not generated in the target, store it there.
5414      Convert the value to TARGET's type first if necessary and emit any
5415      pending increments that were queued while expanding EXP.
5416      Note that we cannot emit the whole queue blindly because this will
5417      effectively disable the POST_INC optimization later.
5418 
5419      If TEMP and TARGET compare equal according to rtx_equal_p, but
5420      one or both of them are volatile memory refs, we have to distinguish
5421      two cases:
5422      - expand_expr has used TARGET.  In this case, we must not generate
5423        another copy.  This can be detected by TARGET being equal according
5424        to == .
5425      - expand_expr has not used TARGET - that means that the source just
5426        happens to have the same RTX form.  Since temp will have been created
5427        by expand_expr, it will compare unequal according to == .
5428        We must generate a copy in this case, to reach the correct number
5429        of volatile memory references.  */
5430 
5431   if ((! rtx_equal_p (temp, target)
5432        || (temp != target && (side_effects_p (temp)
5433 			      || side_effects_p (target))))
5434       && TREE_CODE (exp) != ERROR_MARK
5435       /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
5436 	 but TARGET is not a valid memory reference, TEMP will differ
5437 	 from TARGET although it is really the same location.  */
5438       && !(alt_rtl
5439 	   && rtx_equal_p (alt_rtl, target)
5440 	   && !side_effects_p (alt_rtl)
5441 	   && !side_effects_p (target))
5442       /* If there's nothing to copy, don't bother.  Don't call
5443 	 expr_size unless necessary, because the expr_size hook of some
5444 	 front ends (e.g. C++) must not be given objects that are not
5445 	 supposed to be bit-copied or bit-initialized.  */
5446       && expr_size (exp) != const0_rtx)
5447     {
5448       if (GET_MODE (temp) != GET_MODE (target) && GET_MODE (temp) != VOIDmode)
5449 	{
5450 	  if (GET_MODE (target) == BLKmode)
5451 	    {
5452 	      /* Handle calls that return BLKmode values in registers.  */
5453 	      if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
5454 		copy_blkmode_from_reg (target, temp, TREE_TYPE (exp));
5455 	      else
5456 		store_bit_field (target,
5457 				 INTVAL (expr_size (exp)) * BITS_PER_UNIT,
5458 				 0, 0, 0, GET_MODE (temp), temp);
5459 	    }
5460 	  else
5461 	    convert_move (target, temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
5462 	}
5463 
5464       else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
5465 	{
5466 	  /* Handle copying a string constant into an array.  The string
5467 	     constant may be shorter than the array.  So copy just the string's
5468 	     actual length, and clear the rest.  First get the size of the data
5469 	     type of the string, which is actually the size of the target.  */
5470 	  rtx size = expr_size (exp);
5471 
5472 	  if (CONST_INT_P (size)
5473 	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
5474 	    emit_block_move (target, temp, size,
5475 			     (call_param_p
5476 			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5477 	  else
5478 	    {
5479 	      machine_mode pointer_mode
5480 		= targetm.addr_space.pointer_mode (MEM_ADDR_SPACE (target));
5481 	      machine_mode address_mode = get_address_mode (target);
5482 
5483 	      /* Compute the size of the data to copy from the string.  */
5484 	      tree copy_size
5485 		= size_binop_loc (loc, MIN_EXPR,
5486 				  make_tree (sizetype, size),
5487 				  size_int (TREE_STRING_LENGTH (exp)));
5488 	      rtx copy_size_rtx
5489 		= expand_expr (copy_size, NULL_RTX, VOIDmode,
5490 			       (call_param_p
5491 				? EXPAND_STACK_PARM : EXPAND_NORMAL));
5492 	      rtx_code_label *label = 0;
5493 
5494 	      /* Copy that much.  */
5495 	      copy_size_rtx = convert_to_mode (pointer_mode, copy_size_rtx,
5496 					       TYPE_UNSIGNED (sizetype));
5497 	      emit_block_move (target, temp, copy_size_rtx,
5498 			       (call_param_p
5499 				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5500 
5501 	      /* Figure out how much is left in TARGET that we have to clear.
5502 		 Do all calculations in pointer_mode.  */
5503 	      if (CONST_INT_P (copy_size_rtx))
5504 		{
5505 		  size = plus_constant (address_mode, size,
5506 					-INTVAL (copy_size_rtx));
5507 		  target = adjust_address (target, BLKmode,
5508 					   INTVAL (copy_size_rtx));
5509 		}
5510 	      else
5511 		{
5512 		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
5513 				       copy_size_rtx, NULL_RTX, 0,
5514 				       OPTAB_LIB_WIDEN);
5515 
5516 		  if (GET_MODE (copy_size_rtx) != address_mode)
5517 		    copy_size_rtx = convert_to_mode (address_mode,
5518 						     copy_size_rtx,
5519 						     TYPE_UNSIGNED (sizetype));
5520 
5521 		  target = offset_address (target, copy_size_rtx,
5522 					   highest_pow2_factor (copy_size));
5523 		  label = gen_label_rtx ();
5524 		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
5525 					   GET_MODE (size), 0, label);
5526 		}
5527 
5528 	      if (size != const0_rtx)
5529 		clear_storage (target, size, BLOCK_OP_NORMAL);
5530 
5531 	      if (label)
5532 		emit_label (label);
5533 	    }
5534 	}
5535       /* Handle calls that return values in multiple non-contiguous locations.
5536 	 The Irix 6 ABI has examples of this.  */
5537       else if (GET_CODE (target) == PARALLEL)
5538 	{
5539 	  if (GET_CODE (temp) == PARALLEL)
5540 	    emit_group_move (target, temp);
5541 	  else
5542 	    emit_group_load (target, temp, TREE_TYPE (exp),
5543 			     int_size_in_bytes (TREE_TYPE (exp)));
5544 	}
5545       else if (GET_CODE (temp) == PARALLEL)
5546 	emit_group_store (target, temp, TREE_TYPE (exp),
5547 			  int_size_in_bytes (TREE_TYPE (exp)));
5548       else if (GET_MODE (temp) == BLKmode)
5549 	emit_block_move (target, temp, expr_size (exp),
5550 			 (call_param_p
5551 			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
5552       /* If we emit a nontemporal store, there is nothing else to do.  */
5553       else if (nontemporal && emit_storent_insn (target, temp))
5554 	;
5555       else
5556 	{
5557 	  temp = force_operand (temp, target);
5558 	  if (temp != target)
5559 	    emit_move_insn (target, temp);
5560 	}
5561     }
5562 
5563   return NULL_RTX;
5564 }
5565 
5566 /* Same as store_expr_with_bounds but ignoring bounds of EXP.  */
5567 rtx
5568 store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
5569 {
5570   return store_expr_with_bounds (exp, target, call_param_p, nontemporal, NULL);
5571 }
5572 
5573 /* Return true if field F of structure TYPE is a flexible array.  */
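
/* For example, given

     struct msg { int len; char data[]; };

   the FIELD_DECL for DATA is a flexible array member: it is the last
   field, its type is an ARRAY_TYPE whose domain has a zero lower bound
   and no upper bound, and the enclosing struct still has a known
   constant size.  */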
5574 
5575 static bool
5576 flexible_array_member_p (const_tree f, const_tree type)
5577 {
5578   const_tree tf;
5579 
5580   tf = TREE_TYPE (f);
5581   return (DECL_CHAIN (f) == NULL
5582 	  && TREE_CODE (tf) == ARRAY_TYPE
5583 	  && TYPE_DOMAIN (tf)
5584 	  && TYPE_MIN_VALUE (TYPE_DOMAIN (tf))
5585 	  && integer_zerop (TYPE_MIN_VALUE (TYPE_DOMAIN (tf)))
5586 	  && !TYPE_MAX_VALUE (TYPE_DOMAIN (tf))
5587 	  && int_size_in_bytes (type) >= 0);
5588 }
5589 
5590 /* If FOR_CTOR_P, return the number of top-level elements that a constructor
5591    must have in order for it to completely initialize a value of type TYPE.
5592    Return -1 if the number isn't known.
5593 
5594    If !FOR_CTOR_P, return an estimate of the number of scalars in TYPE.  */
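
/* A small illustration, assuming the usual C layout:

     struct point { int x; int y; };	(a RECORD_TYPE)
     struct point tri[3];		(an ARRAY_TYPE)

   For the RECORD_TYPE the result is 2 in either mode.  For the ARRAY_TYPE
   it is 3 when FOR_CTOR_P (one constructor element per array element) and
   3 * 2 = 6 when estimating scalars.  A COMPLEX_TYPE counts as 2 and a
   VECTOR_TYPE as its number of subparts.  */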
5595 
5596 static HOST_WIDE_INT
5597 count_type_elements (const_tree type, bool for_ctor_p)
5598 {
5599   switch (TREE_CODE (type))
5600     {
5601     case ARRAY_TYPE:
5602       {
5603 	tree nelts;
5604 
5605 	nelts = array_type_nelts (type);
5606 	if (nelts && tree_fits_uhwi_p (nelts))
5607 	  {
5608 	    unsigned HOST_WIDE_INT n;
5609 
5610 	    n = tree_to_uhwi (nelts) + 1;
5611 	    if (n == 0 || for_ctor_p)
5612 	      return n;
5613 	    else
5614 	      return n * count_type_elements (TREE_TYPE (type), false);
5615 	  }
5616 	return for_ctor_p ? -1 : 1;
5617       }
5618 
5619     case RECORD_TYPE:
5620       {
5621 	unsigned HOST_WIDE_INT n;
5622 	tree f;
5623 
5624 	n = 0;
5625 	for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5626 	  if (TREE_CODE (f) == FIELD_DECL)
5627 	    {
5628 	      if (!for_ctor_p)
5629 		n += count_type_elements (TREE_TYPE (f), false);
5630 	      else if (!flexible_array_member_p (f, type))
5631 		/* Don't count flexible arrays, which are not supposed
5632 		   to be initialized.  */
5633 		n += 1;
5634 	    }
5635 
5636 	return n;
5637       }
5638 
5639     case UNION_TYPE:
5640     case QUAL_UNION_TYPE:
5641       {
5642 	tree f;
5643 	HOST_WIDE_INT n, m;
5644 
5645 	gcc_assert (!for_ctor_p);
5646 	/* Estimate the number of scalars in each field and pick the
5647 	   maximum.  Other estimates would do instead; the idea is simply
5648 	   to make sure that the estimate is not sensitive to the ordering
5649 	   of the fields.  */
5650 	n = 1;
5651 	for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
5652 	  if (TREE_CODE (f) == FIELD_DECL)
5653 	    {
5654 	      m = count_type_elements (TREE_TYPE (f), false);
5655 	      /* If the field doesn't span the whole union, add an extra
5656 		 scalar for the rest.  */
5657 	      if (simple_cst_equal (TYPE_SIZE (TREE_TYPE (f)),
5658 				    TYPE_SIZE (type)) != 1)
5659 		m++;
5660 	      if (n < m)
5661 		n = m;
5662 	    }
5663 	return n;
5664       }
5665 
5666     case COMPLEX_TYPE:
5667       return 2;
5668 
5669     case VECTOR_TYPE:
5670       return TYPE_VECTOR_SUBPARTS (type);
5671 
5672     case INTEGER_TYPE:
5673     case REAL_TYPE:
5674     case FIXED_POINT_TYPE:
5675     case ENUMERAL_TYPE:
5676     case BOOLEAN_TYPE:
5677     case POINTER_TYPE:
5678     case OFFSET_TYPE:
5679     case REFERENCE_TYPE:
5680     case NULLPTR_TYPE:
5681       return 1;
5682 
5683     case ERROR_MARK:
5684       return 0;
5685 
5686     case VOID_TYPE:
5687     case METHOD_TYPE:
5688     case FUNCTION_TYPE:
5689     case LANG_TYPE:
5690     default:
5691       gcc_unreachable ();
5692     }
5693 }
5694 
5695 /* Helper for categorize_ctor_elements.  Identical interface.  */
5696 
5697 static bool
5698 categorize_ctor_elements_1 (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5699 			    HOST_WIDE_INT *p_init_elts, bool *p_complete)
5700 {
5701   unsigned HOST_WIDE_INT idx;
5702   HOST_WIDE_INT nz_elts, init_elts, num_fields;
5703   tree value, purpose, elt_type;
5704 
5705   /* Whether CTOR is a valid constant initializer, in accordance with what
5706      initializer_constant_valid_p does.  If inferred from the constructor
5707      elements, true until proven otherwise.  */
5708   bool const_from_elts_p = constructor_static_from_elts_p (ctor);
5709   bool const_p = const_from_elts_p ? true : TREE_STATIC (ctor);
5710 
5711   nz_elts = 0;
5712   init_elts = 0;
5713   num_fields = 0;
5714   elt_type = NULL_TREE;
5715 
5716   FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), idx, purpose, value)
5717     {
5718       HOST_WIDE_INT mult = 1;
5719 
5720       if (purpose && TREE_CODE (purpose) == RANGE_EXPR)
5721 	{
5722 	  tree lo_index = TREE_OPERAND (purpose, 0);
5723 	  tree hi_index = TREE_OPERAND (purpose, 1);
5724 
5725 	  if (tree_fits_uhwi_p (lo_index) && tree_fits_uhwi_p (hi_index))
5726 	    mult = (tree_to_uhwi (hi_index)
5727 		    - tree_to_uhwi (lo_index) + 1);
5728 	}
5729       num_fields += mult;
5730       elt_type = TREE_TYPE (value);
5731 
5732       switch (TREE_CODE (value))
5733 	{
5734 	case CONSTRUCTOR:
5735 	  {
5736 	    HOST_WIDE_INT nz = 0, ic = 0;
5737 
5738 	    bool const_elt_p = categorize_ctor_elements_1 (value, &nz, &ic,
5739 							   p_complete);
5740 
5741 	    nz_elts += mult * nz;
5742 	    init_elts += mult * ic;
5743 
5744 	    if (const_from_elts_p && const_p)
5745 	      const_p = const_elt_p;
5746 	  }
5747 	  break;
5748 
5749 	case INTEGER_CST:
5750 	case REAL_CST:
5751 	case FIXED_CST:
5752 	  if (!initializer_zerop (value))
5753 	    nz_elts += mult;
5754 	  init_elts += mult;
5755 	  break;
5756 
5757 	case STRING_CST:
5758 	  nz_elts += mult * TREE_STRING_LENGTH (value);
5759 	  init_elts += mult * TREE_STRING_LENGTH (value);
5760 	  break;
5761 
5762 	case COMPLEX_CST:
5763 	  if (!initializer_zerop (TREE_REALPART (value)))
5764 	    nz_elts += mult;
5765 	  if (!initializer_zerop (TREE_IMAGPART (value)))
5766 	    nz_elts += mult;
5767 	  init_elts += mult;
5768 	  break;
5769 
5770 	case VECTOR_CST:
5771 	  {
5772 	    unsigned i;
5773 	    for (i = 0; i < VECTOR_CST_NELTS (value); ++i)
5774 	      {
5775 		tree v = VECTOR_CST_ELT (value, i);
5776 		if (!initializer_zerop (v))
5777 		  nz_elts += mult;
5778 		init_elts += mult;
5779 	      }
5780 	  }
5781 	  break;
5782 
5783 	default:
5784 	  {
5785 	    HOST_WIDE_INT tc = count_type_elements (elt_type, false);
5786 	    nz_elts += mult * tc;
5787 	    init_elts += mult * tc;
5788 
5789 	    if (const_from_elts_p && const_p)
5790 	      const_p = initializer_constant_valid_p (value, elt_type)
5791 			!= NULL_TREE;
5792 	  }
5793 	  break;
5794 	}
5795     }
5796 
5797   if (*p_complete && !complete_ctor_at_level_p (TREE_TYPE (ctor),
5798 						num_fields, elt_type))
5799     *p_complete = false;
5800 
5801   *p_nz_elts += nz_elts;
5802   *p_init_elts += init_elts;
5803 
5804   return const_p;
5805 }
5806 
5807 /* Examine CTOR to discover:
5808    * how many scalar fields are set to nonzero values,
5809      and place it in *P_NZ_ELTS;
5810    * how many scalar fields in total are in CTOR,
5811      and place it in *P_INIT_ELTS;
5812    * whether the constructor is complete -- in the sense that every
5813      meaningful byte is explicitly given a value --
5814      and place it in *P_COMPLETE.
5815 
5816    Return whether or not CTOR is a valid static constant initializer, the same
5817    as "initializer_constant_valid_p (CTOR, TREE_TYPE (CTOR)) != 0".  */
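
/* For instance, for the constructor in

     struct point { int x; int y; } p = { 0, 5 };

   we would report *P_NZ_ELTS == 1 (only Y is nonzero), *P_INIT_ELTS == 2
   and *P_COMPLETE == true, and the return value would be true since the
   two INTEGER_CST elements form a valid static initializer.  */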
5818 
5819 bool
5820 categorize_ctor_elements (const_tree ctor, HOST_WIDE_INT *p_nz_elts,
5821 			  HOST_WIDE_INT *p_init_elts, bool *p_complete)
5822 {
5823   *p_nz_elts = 0;
5824   *p_init_elts = 0;
5825   *p_complete = true;
5826 
5827   return categorize_ctor_elements_1 (ctor, p_nz_elts, p_init_elts, p_complete);
5828 }
5829 
5830 /* TYPE is initialized by a constructor with NUM_ELTS elements, the last
5831    of which had type LAST_TYPE.  Each element was itself a complete
5832    initializer, in the sense that every meaningful byte was explicitly
5833    given a value.  Return true if the same is true for the constructor
5834    as a whole.  */
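
/* For a union this means that the single initialized member must cover
   the whole union.  E.g. for

     union u { int i; char c; };

   an initializer of I is complete on typical targets (its size equals
   the union's size), whereas an initializer of C alone is not, since
   the remaining bytes are left unspecified.  */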
5835 
5836 bool
5837 complete_ctor_at_level_p (const_tree type, HOST_WIDE_INT num_elts,
5838 			  const_tree last_type)
5839 {
5840   if (TREE_CODE (type) == UNION_TYPE
5841       || TREE_CODE (type) == QUAL_UNION_TYPE)
5842     {
5843       if (num_elts == 0)
5844 	return false;
5845 
5846       gcc_assert (num_elts == 1 && last_type);
5847 
5848       /* ??? We could look at each element of the union and find the
5849 	 largest element, which would avoid comparing the size of the
5850 	 initialized element against any tail padding in the union.
5851 	 Doesn't seem worth the effort...  */
5852       return simple_cst_equal (TYPE_SIZE (type), TYPE_SIZE (last_type)) == 1;
5853     }
5854 
5855   return count_type_elements (type, true) == num_elts;
5856 }
5857 
5858 /* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
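
/* E.g. a complete constructor such as

     int v[8] = { 1, 0, 0, 0, 0, 0, 0, 0 };

   is mostly zeros: only 1 of its 8 initialized elements is nonzero,
   which is below the 1/4 threshold.  An incomplete constructor counts
   as mostly zeros as well.  */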
5859 
5860 static int
5861 mostly_zeros_p (const_tree exp)
5862 {
5863   if (TREE_CODE (exp) == CONSTRUCTOR)
5864     {
5865       HOST_WIDE_INT nz_elts, init_elts;
5866       bool complete_p;
5867 
5868       categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5869       return !complete_p || nz_elts < init_elts / 4;
5870     }
5871 
5872   return initializer_zerop (exp);
5873 }
5874 
5875 /* Return 1 if EXP contains all zeros.  */
5876 
5877 static int
5878 all_zeros_p (const_tree exp)
5879 {
5880   if (TREE_CODE (exp) == CONSTRUCTOR)
5881     {
5882       HOST_WIDE_INT nz_elts, init_elts;
5883       bool complete_p;
5884 
5885       categorize_ctor_elements (exp, &nz_elts, &init_elts, &complete_p);
5886       return nz_elts == 0;
5887     }
5888 
5889   return initializer_zerop (exp);
5890 }
5891 
5892 /* Helper function for store_constructor.
5893    TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
5894    CLEARED is as for store_constructor.
5895    ALIAS_SET is the alias set to use for any stores.
5896 
5897    This provides a recursive shortcut back to store_constructor when it isn't
5898    necessary to go through store_field.  This is so that we can pass through
5899    the cleared field to let store_constructor know that we may not have to
5900    clear a substructure if the outer structure has already been cleared.  */
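
/* For example, when a nested aggregate initializer such as

     struct outer { struct inner { int a, b; } in; int tail; }
       o = { { 1, 2 }, 3 };

   reaches the IN field, the inner CONSTRUCTOR starts and ends on a byte
   boundary, so we recurse into store_constructor directly and reuse
   CLEARED instead of clearing the substructure a second time.  */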
5901 
5902 static void
5903 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
5904 			 HOST_WIDE_INT bitpos,
5905 			 unsigned HOST_WIDE_INT bitregion_start,
5906 			 unsigned HOST_WIDE_INT bitregion_end,
5907 			 machine_mode mode,
5908 			 tree exp, int cleared, alias_set_type alias_set)
5909 {
5910   if (TREE_CODE (exp) == CONSTRUCTOR
5911       /* We can only call store_constructor recursively if the size and
5912 	 bit position are on a byte boundary.  */
5913       && bitpos % BITS_PER_UNIT == 0
5914       && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
5915       /* If we have a nonzero bitpos for a register target, then we just
5916 	 let store_field do the bitfield handling.  This is unlikely to
5917 	 generate unnecessary clear instructions anyway.  */
5918       && (bitpos == 0 || MEM_P (target)))
5919     {
5920       if (MEM_P (target))
5921 	target
5922 	  = adjust_address (target,
5923 			    GET_MODE (target) == BLKmode
5924 			    || 0 != (bitpos
5925 				     % GET_MODE_ALIGNMENT (GET_MODE (target)))
5926 			    ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
5927 
5928 
5929       /* Update the alias set, if required.  */
5930       if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
5931 	  && MEM_ALIAS_SET (target) != 0)
5932 	{
5933 	  target = copy_rtx (target);
5934 	  set_mem_alias_set (target, alias_set);
5935 	}
5936 
5937       store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
5938     }
5939   else
5940     store_field (target, bitsize, bitpos, bitregion_start, bitregion_end, mode,
5941 		 exp, alias_set, false);
5942 }
5943 
5944 
5945 /* Returns the number of FIELD_DECLs in TYPE.  */
5946 
5947 static int
5948 fields_length (const_tree type)
5949 {
5950   tree t = TYPE_FIELDS (type);
5951   int count = 0;
5952 
5953   for (; t; t = DECL_CHAIN (t))
5954     if (TREE_CODE (t) == FIELD_DECL)
5955       ++count;
5956 
5957   return count;
5958 }
5959 
5960 
5961 /* Store the value of constructor EXP into the rtx TARGET.
5962    TARGET is either a REG or a MEM; we know it cannot conflict, since
5963    safe_from_p has been called.
5964    CLEARED is true if TARGET is known to have been zeroed.
5965    SIZE is the number of bytes of TARGET we are allowed to modify: this
5966    may not be the same as the size of EXP if we are assigning to a field
5967    which has been packed to exclude padding bits.  */
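
/* As an illustration of the clearing strategy used below: if a sparse
   initializer such as

     int big[100] = { [3] = 7 };

   reaches this function as a CONSTRUCTOR, the whole object is cleared
   first (most elements are missing or zero) and only the explicitly
   nonzero element is stored afterwards.  */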
5968 
5969 static void
5970 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
5971 {
5972   tree type = TREE_TYPE (exp);
5973 #ifdef WORD_REGISTER_OPERATIONS
5974   HOST_WIDE_INT exp_size = int_size_in_bytes (type);
5975 #endif
5976   HOST_WIDE_INT bitregion_end = size > 0 ? size * BITS_PER_UNIT - 1 : 0;
5977 
5978   switch (TREE_CODE (type))
5979     {
5980     case RECORD_TYPE:
5981     case UNION_TYPE:
5982     case QUAL_UNION_TYPE:
5983       {
5984 	unsigned HOST_WIDE_INT idx;
5985 	tree field, value;
5986 
5987 	/* If size is zero or the target is already cleared, do nothing.  */
5988 	if (size == 0 || cleared)
5989 	  cleared = 1;
5990 	/* We either clear the aggregate or indicate the value is dead.  */
5991 	else if ((TREE_CODE (type) == UNION_TYPE
5992 		  || TREE_CODE (type) == QUAL_UNION_TYPE)
5993 		 && ! CONSTRUCTOR_ELTS (exp))
5994 	  /* If the constructor is empty, clear the union.  */
5995 	  {
5996 	    clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
5997 	    cleared = 1;
5998 	  }
5999 
6000 	/* If we are building a static constructor into a register,
6001 	   set the initial value as zero so we can fold the value into
6002 	   a constant.  But if more than one register is involved,
6003 	   this probably loses.  */
6004 	else if (REG_P (target) && TREE_STATIC (exp)
6005 		 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
6006 	  {
6007 	    emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6008 	    cleared = 1;
6009 	  }
6010 
6011         /* If the constructor has fewer fields than the structure or
6012 	   if we are initializing the structure to mostly zeros, clear
6013 	   the whole structure first.  Don't do this if TARGET is a
6014 	   register whose mode size isn't equal to SIZE since
6015 	   clear_storage can't handle this case.  */
6016 	else if (size > 0
6017 		 && (((int)vec_safe_length (CONSTRUCTOR_ELTS (exp))
6018 		      != fields_length (type))
6019 		     || mostly_zeros_p (exp))
6020 		 && (!REG_P (target)
6021 		     || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
6022 			 == size)))
6023 	  {
6024 	    clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6025 	    cleared = 1;
6026 	  }
6027 
6028 	if (REG_P (target) && !cleared)
6029 	  emit_clobber (target);
6030 
6031 	/* Store each element of the constructor into the
6032 	   corresponding field of TARGET.  */
6033 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, field, value)
6034 	  {
6035 	    machine_mode mode;
6036 	    HOST_WIDE_INT bitsize;
6037 	    HOST_WIDE_INT bitpos = 0;
6038 	    tree offset;
6039 	    rtx to_rtx = target;
6040 
6041 	    /* Just ignore missing fields.  We cleared the whole
6042 	       structure, above, if any fields are missing.  */
6043 	    if (field == 0)
6044 	      continue;
6045 
6046 	    if (cleared && initializer_zerop (value))
6047 	      continue;
6048 
6049 	    if (tree_fits_uhwi_p (DECL_SIZE (field)))
6050 	      bitsize = tree_to_uhwi (DECL_SIZE (field));
6051 	    else
6052 	      gcc_unreachable ();
6053 
6054 	    mode = DECL_MODE (field);
6055 	    if (DECL_BIT_FIELD (field))
6056 	      mode = VOIDmode;
6057 
6058 	    offset = DECL_FIELD_OFFSET (field);
6059 	    if (tree_fits_shwi_p (offset)
6060 		&& tree_fits_shwi_p (bit_position (field)))
6061 	      {
6062 		bitpos = int_bit_position (field);
6063 		offset = 0;
6064 	      }
6065 	    else
6066 	      gcc_unreachable ();
6067 
6068 #ifdef WORD_REGISTER_OPERATIONS
6069 	    /* If this initializes a field that is smaller than a
6070 	       word, at the start of a word, try to widen it to a full
6071 	       word.  This special case allows us to output C++ member
6072 	       function initializations in a form that the optimizers
6073 	       can understand.  */
6074 	    if (REG_P (target)
6075 		&& bitsize < BITS_PER_WORD
6076 		&& bitpos % BITS_PER_WORD == 0
6077 		&& GET_MODE_CLASS (mode) == MODE_INT
6078 		&& TREE_CODE (value) == INTEGER_CST
6079 		&& exp_size >= 0
6080 		&& bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
6081 	      {
6082 		tree type = TREE_TYPE (value);
6083 
6084 		if (TYPE_PRECISION (type) < BITS_PER_WORD)
6085 		  {
6086 		    type = lang_hooks.types.type_for_mode
6087 		      (word_mode, TYPE_UNSIGNED (type));
6088 		    value = fold_convert (type, value);
6089 		    /* Make sure the bits beyond the original bitsize are zero
6090 		       so that we can correctly avoid extra zeroing stores in
6091 		       later constructor elements.  */
6092 		    tree bitsize_mask
6093 		      = wide_int_to_tree (type, wi::mask (bitsize, false,
6094 							   BITS_PER_WORD));
6095 		    value = fold_build2 (BIT_AND_EXPR, type, value, bitsize_mask);
6096 		  }
6097 
6098 		if (BYTES_BIG_ENDIAN)
6099 		  value
6100 		   = fold_build2 (LSHIFT_EXPR, type, value,
6101 				   build_int_cst (type,
6102 						  BITS_PER_WORD - bitsize));
6103 		bitsize = BITS_PER_WORD;
6104 		mode = word_mode;
6105 	      }
6106 #endif
6107 
6108 	    if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
6109 		&& DECL_NONADDRESSABLE_P (field))
6110 	      {
6111 		to_rtx = copy_rtx (to_rtx);
6112 		MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
6113 	      }
6114 
6115 	    store_constructor_field (to_rtx, bitsize, bitpos,
6116 				     0, bitregion_end, mode,
6117 				     value, cleared,
6118 				     get_alias_set (TREE_TYPE (field)));
6119 	  }
6120 	break;
6121       }
6122     case ARRAY_TYPE:
6123       {
6124 	tree value, index;
6125 	unsigned HOST_WIDE_INT i;
6126 	int need_to_clear;
6127 	tree domain;
6128 	tree elttype = TREE_TYPE (type);
6129 	int const_bounds_p;
6130 	HOST_WIDE_INT minelt = 0;
6131 	HOST_WIDE_INT maxelt = 0;
6132 
6133 	domain = TYPE_DOMAIN (type);
6134 	const_bounds_p = (TYPE_MIN_VALUE (domain)
6135 			  && TYPE_MAX_VALUE (domain)
6136 			  && tree_fits_shwi_p (TYPE_MIN_VALUE (domain))
6137 			  && tree_fits_shwi_p (TYPE_MAX_VALUE (domain)));
6138 
6139 	/* If we have constant bounds for the range of the type, get them.  */
6140 	if (const_bounds_p)
6141 	  {
6142 	    minelt = tree_to_shwi (TYPE_MIN_VALUE (domain));
6143 	    maxelt = tree_to_shwi (TYPE_MAX_VALUE (domain));
6144 	  }
6145 
6146 	/* If the constructor has fewer elements than the array, clear
6147            the whole array first.  Similarly if this is a static
6148            constructor of a non-BLKmode object.  */
6149 	if (cleared)
6150 	  need_to_clear = 0;
6151 	else if (REG_P (target) && TREE_STATIC (exp))
6152 	  need_to_clear = 1;
6153 	else
6154 	  {
6155 	    unsigned HOST_WIDE_INT idx;
6156 	    tree index, value;
6157 	    HOST_WIDE_INT count = 0, zero_count = 0;
6158 	    need_to_clear = ! const_bounds_p;
6159 
6160 	    /* This loop is a more accurate version of the loop in
6161 	       mostly_zeros_p (it handles RANGE_EXPR in an index).  It
6162 	       is also needed to check for missing elements.  */
6163 	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), idx, index, value)
6164 	      {
6165 		HOST_WIDE_INT this_node_count;
6166 
6167 		if (need_to_clear)
6168 		  break;
6169 
6170 		if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6171 		  {
6172 		    tree lo_index = TREE_OPERAND (index, 0);
6173 		    tree hi_index = TREE_OPERAND (index, 1);
6174 
6175 		    if (! tree_fits_uhwi_p (lo_index)
6176 			|| ! tree_fits_uhwi_p (hi_index))
6177 		      {
6178 			need_to_clear = 1;
6179 			break;
6180 		      }
6181 
6182 		    this_node_count = (tree_to_uhwi (hi_index)
6183 				       - tree_to_uhwi (lo_index) + 1);
6184 		  }
6185 		else
6186 		  this_node_count = 1;
6187 
6188 		count += this_node_count;
6189 		if (mostly_zeros_p (value))
6190 		  zero_count += this_node_count;
6191 	      }
6192 
6193 	    /* Clear the entire array first if there are any missing
6194 	       elements, or if the incidence of zero elements is >=
6195 	       75%.  */
6196 	    if (! need_to_clear
6197 		&& (count < maxelt - minelt + 1
6198 		    || 4 * zero_count >= 3 * count))
6199 	      need_to_clear = 1;
6200 	  }
6201 
6202 	if (need_to_clear && size > 0)
6203 	  {
6204 	    if (REG_P (target))
6205 	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6206 	    else
6207 	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6208 	    cleared = 1;
6209 	  }
6210 
6211 	if (!cleared && REG_P (target))
6212 	  /* Inform later passes that the old value is dead.  */
6213 	  emit_clobber (target);
6214 
6215 	/* Store each element of the constructor into the
6216 	   corresponding element of TARGET, determined by counting the
6217 	   elements.  */
6218 	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (exp), i, index, value)
6219 	  {
6220 	    machine_mode mode;
6221 	    HOST_WIDE_INT bitsize;
6222 	    HOST_WIDE_INT bitpos;
6223 	    rtx xtarget = target;
6224 
6225 	    if (cleared && initializer_zerop (value))
6226 	      continue;
6227 
6228 	    mode = TYPE_MODE (elttype);
6229 	    if (mode == BLKmode)
6230 	      bitsize = (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6231 			 ? tree_to_uhwi (TYPE_SIZE (elttype))
6232 			 : -1);
6233 	    else
6234 	      bitsize = GET_MODE_BITSIZE (mode);
6235 
6236 	    if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
6237 	      {
6238 		tree lo_index = TREE_OPERAND (index, 0);
6239 		tree hi_index = TREE_OPERAND (index, 1);
6240 		rtx index_r, pos_rtx;
6241 		HOST_WIDE_INT lo, hi, count;
6242 		tree position;
6243 
6244 		/* If the range is constant and "small", unroll the loop.  */
6245 		if (const_bounds_p
6246 		    && tree_fits_shwi_p (lo_index)
6247 		    && tree_fits_shwi_p (hi_index)
6248 		    && (lo = tree_to_shwi (lo_index),
6249 			hi = tree_to_shwi (hi_index),
6250 			count = hi - lo + 1,
6251 			(!MEM_P (target)
6252 			 || count <= 2
6253 			 || (tree_fits_uhwi_p (TYPE_SIZE (elttype))
6254 			     && (tree_to_uhwi (TYPE_SIZE (elttype)) * count
6255 				 <= 40 * 8)))))
6256 		  {
6257 		    lo -= minelt;  hi -= minelt;
6258 		    for (; lo <= hi; lo++)
6259 		      {
6260 			bitpos = lo * tree_to_shwi (TYPE_SIZE (elttype));
6261 
6262 			if (MEM_P (target)
6263 			    && !MEM_KEEP_ALIAS_SET_P (target)
6264 			    && TREE_CODE (type) == ARRAY_TYPE
6265 			    && TYPE_NONALIASED_COMPONENT (type))
6266 			  {
6267 			    target = copy_rtx (target);
6268 			    MEM_KEEP_ALIAS_SET_P (target) = 1;
6269 			  }
6270 
6271 			store_constructor_field
6272 			  (target, bitsize, bitpos, 0, bitregion_end,
6273 			   mode, value, cleared,
6274 			   get_alias_set (elttype));
6275 		      }
6276 		  }
6277 		else
6278 		  {
6279 		    rtx_code_label *loop_start = gen_label_rtx ();
6280 		    rtx_code_label *loop_end = gen_label_rtx ();
6281 		    tree exit_cond;
6282 
6283 		    expand_normal (hi_index);
6284 
6285 		    index = build_decl (EXPR_LOCATION (exp),
6286 					VAR_DECL, NULL_TREE, domain);
6287 		    index_r = gen_reg_rtx (promote_decl_mode (index, NULL));
6288 		    SET_DECL_RTL (index, index_r);
6289 		    store_expr (lo_index, index_r, 0, false);
6290 
6291 		    /* Build the head of the loop.  */
6292 		    do_pending_stack_adjust ();
6293 		    emit_label (loop_start);
6294 
6295 		    /* Assign value to element index.  */
6296 		    position =
6297 		      fold_convert (ssizetype,
6298 				    fold_build2 (MINUS_EXPR,
6299 						 TREE_TYPE (index),
6300 						 index,
6301 						 TYPE_MIN_VALUE (domain)));
6302 
6303 		    position =
6304 			size_binop (MULT_EXPR, position,
6305 				    fold_convert (ssizetype,
6306 						  TYPE_SIZE_UNIT (elttype)));
6307 
6308 		    pos_rtx = expand_normal (position);
6309 		    xtarget = offset_address (target, pos_rtx,
6310 					      highest_pow2_factor (position));
6311 		    xtarget = adjust_address (xtarget, mode, 0);
6312 		    if (TREE_CODE (value) == CONSTRUCTOR)
6313 		      store_constructor (value, xtarget, cleared,
6314 					 bitsize / BITS_PER_UNIT);
6315 		    else
6316 		      store_expr (value, xtarget, 0, false);
6317 
6318 		    /* Generate a conditional jump to exit the loop.  */
6319 		    exit_cond = build2 (LT_EXPR, integer_type_node,
6320 					index, hi_index);
6321 		    jumpif (exit_cond, loop_end, -1);
6322 
6323 		    /* Update the loop counter, and jump to the head of
6324 		       the loop.  */
6325 		    expand_assignment (index,
6326 				       build2 (PLUS_EXPR, TREE_TYPE (index),
6327 					       index, integer_one_node),
6328 				       false);
6329 
6330 		    emit_jump (loop_start);
6331 
6332 		    /* Build the end of the loop.  */
6333 		    emit_label (loop_end);
6334 		  }
6335 	      }
6336 	    else if ((index != 0 && ! tree_fits_shwi_p (index))
6337 		     || ! tree_fits_uhwi_p (TYPE_SIZE (elttype)))
6338 	      {
6339 		tree position;
6340 
6341 		if (index == 0)
6342 		  index = ssize_int (1);
6343 
6344 		if (minelt)
6345 		  index = fold_convert (ssizetype,
6346 					fold_build2 (MINUS_EXPR,
6347 						     TREE_TYPE (index),
6348 						     index,
6349 						     TYPE_MIN_VALUE (domain)));
6350 
6351 		position =
6352 		  size_binop (MULT_EXPR, index,
6353 			      fold_convert (ssizetype,
6354 					    TYPE_SIZE_UNIT (elttype)));
6355 		xtarget = offset_address (target,
6356 					  expand_normal (position),
6357 					  highest_pow2_factor (position));
6358 		xtarget = adjust_address (xtarget, mode, 0);
6359 		store_expr (value, xtarget, 0, false);
6360 	      }
6361 	    else
6362 	      {
6363 		if (index != 0)
6364 		  bitpos = ((tree_to_shwi (index) - minelt)
6365 			    * tree_to_uhwi (TYPE_SIZE (elttype)));
6366 		else
6367 		  bitpos = (i * tree_to_uhwi (TYPE_SIZE (elttype)));
6368 
6369 		if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
6370 		    && TREE_CODE (type) == ARRAY_TYPE
6371 		    && TYPE_NONALIASED_COMPONENT (type))
6372 		  {
6373 		    target = copy_rtx (target);
6374 		    MEM_KEEP_ALIAS_SET_P (target) = 1;
6375 		  }
6376 		store_constructor_field (target, bitsize, bitpos, 0,
6377 					 bitregion_end, mode, value,
6378 					 cleared, get_alias_set (elttype));
6379 	      }
6380 	  }
6381 	break;
6382       }
6383 
6384     case VECTOR_TYPE:
6385       {
6386 	unsigned HOST_WIDE_INT idx;
6387 	constructor_elt *ce;
6388 	int i;
6389 	int need_to_clear;
6390 	int icode = CODE_FOR_nothing;
6391 	tree elttype = TREE_TYPE (type);
6392 	int elt_size = tree_to_uhwi (TYPE_SIZE (elttype));
6393 	machine_mode eltmode = TYPE_MODE (elttype);
6394 	HOST_WIDE_INT bitsize;
6395 	HOST_WIDE_INT bitpos;
6396 	rtvec vector = NULL;
6397 	unsigned n_elts;
6398 	alias_set_type alias;
6399 
6400 	gcc_assert (eltmode != BLKmode);
6401 
6402 	n_elts = TYPE_VECTOR_SUBPARTS (type);
6403 	if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
6404 	  {
6405 	    machine_mode mode = GET_MODE (target);
6406 
6407 	    icode = (int) optab_handler (vec_init_optab, mode);
6408 	    /* Don't use vec_init<mode> if some elements have VECTOR_TYPE.  */
6409 	    if (icode != CODE_FOR_nothing)
6410 	      {
6411 		tree value;
6412 
6413 		FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6414 		  if (TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE)
6415 		    {
6416 		      icode = CODE_FOR_nothing;
6417 		      break;
6418 		    }
6419 	      }
6420 	    if (icode != CODE_FOR_nothing)
6421 	      {
6422 		unsigned int i;
6423 
6424 		vector = rtvec_alloc (n_elts);
6425 		for (i = 0; i < n_elts; i++)
6426 		  RTVEC_ELT (vector, i) = CONST0_RTX (GET_MODE_INNER (mode));
6427 	      }
6428 	  }
6429 
6430 	/* If the constructor has fewer elements than the vector,
6431 	   clear the whole vector first.  Similarly if this is a static
6432 	   constructor of a non-BLKmode object.  */
6433 	if (cleared)
6434 	  need_to_clear = 0;
6435 	else if (REG_P (target) && TREE_STATIC (exp))
6436 	  need_to_clear = 1;
6437 	else
6438 	  {
6439 	    unsigned HOST_WIDE_INT count = 0, zero_count = 0;
6440 	    tree value;
6441 
6442 	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
6443 	      {
6444 		int n_elts_here = tree_to_uhwi
6445 		  (int_const_binop (TRUNC_DIV_EXPR,
6446 				    TYPE_SIZE (TREE_TYPE (value)),
6447 				    TYPE_SIZE (elttype)));
6448 
6449 		count += n_elts_here;
6450 		if (mostly_zeros_p (value))
6451 		  zero_count += n_elts_here;
6452 	      }
6453 
6454 	    /* Clear the entire vector first if there are any missing elements,
6455 	       or if the incidence of zero elements is >= 75%.  */
6456 	    need_to_clear = (count < n_elts || 4 * zero_count >= 3 * count);
6457 	  }
6458 
6459 	if (need_to_clear && size > 0 && !vector)
6460 	  {
6461 	    if (REG_P (target))
6462 	      emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6463 	    else
6464 	      clear_storage (target, GEN_INT (size), BLOCK_OP_NORMAL);
6465 	    cleared = 1;
6466 	  }
6467 
6468 	/* Inform later passes that the old value is dead.  */
6469 	if (!cleared && !vector && REG_P (target))
6470 	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
6471 
6472         if (MEM_P (target))
6473 	  alias = MEM_ALIAS_SET (target);
6474 	else
6475 	  alias = get_alias_set (elttype);
6476 
6477         /* Store each element of the constructor into the corresponding
6478 	   element of TARGET, determined by counting the elements.  */
6479 	for (idx = 0, i = 0;
6480 	     vec_safe_iterate (CONSTRUCTOR_ELTS (exp), idx, &ce);
6481 	     idx++, i += bitsize / elt_size)
6482 	  {
6483 	    HOST_WIDE_INT eltpos;
6484 	    tree value = ce->value;
6485 
6486 	    bitsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (value)));
6487 	    if (cleared && initializer_zerop (value))
6488 	      continue;
6489 
6490 	    if (ce->index)
6491 	      eltpos = tree_to_uhwi (ce->index);
6492 	    else
6493 	      eltpos = i;
6494 
6495 	    if (vector)
6496 	      {
6497 		/* vec_init<mode> should not be used if there are VECTOR_TYPE
6498 		   elements.  */
6499 		gcc_assert (TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE);
6500 		RTVEC_ELT (vector, eltpos)
6501 		  = expand_normal (value);
6502 	      }
6503 	    else
6504 	      {
6505 		machine_mode value_mode =
6506 		  TREE_CODE (TREE_TYPE (value)) == VECTOR_TYPE
6507 		  ? TYPE_MODE (TREE_TYPE (value))
6508 		  : eltmode;
6509 		bitpos = eltpos * elt_size;
6510 		store_constructor_field (target, bitsize, bitpos, 0,
6511 					 bitregion_end, value_mode,
6512 					 value, cleared, alias);
6513 	      }
6514 	  }
6515 
6516 	if (vector)
6517 	  emit_insn (GEN_FCN (icode)
6518 		     (target,
6519 		      gen_rtx_PARALLEL (GET_MODE (target), vector)));
6520 	break;
6521       }
6522 
6523     default:
6524       gcc_unreachable ();
6525     }
6526 }
6527 
6528 /* Store the value of EXP (an expression tree)
6529    into a subfield of TARGET which has mode MODE and occupies
6530    BITSIZE bits, starting BITPOS bits from the start of TARGET.
6531    If MODE is VOIDmode, it means that we are storing into a bit-field.
6532 
6533    BITREGION_START is bitpos of the first bitfield in this region.
6534    BITREGION_END is the bitpos of the ending bitfield in this region.
6535    These two fields are 0 if the C++ memory model does not apply,
6536    or if we are not interested in keeping track of bitfield regions.
6537 
6538    Always return const0_rtx unless we have something particular to
6539    return.
6540 
6541    ALIAS_SET is the alias set for the destination.  This value will
6542    (in general) be different from that for TARGET, since TARGET is a
6543    reference to the containing structure.
6544 
6545    If NONTEMPORAL is true, try generating a nontemporal store.  */
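
/* Illustrative usage sketch (added for exposition; not part of the original
   sources): a caller that has already decomposed a reference and obtained a
   MEM for the containing object in TO_RTX might store a 3-bit field located
   10 bits into it roughly as

     store_field (to_rtx, 3, 10, 0, 0, VOIDmode, rhs,
                  get_alias_set (TREE_TYPE (rhs)), false);

   Passing VOIDmode for MODE requests bit-field techniques, and the zero
   BITREGION_START/BITREGION_END mean no C++ memory-model bit region is
   being tracked.  The names TO_RTX and RHS are hypothetical.  */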
6546 
6547 static rtx
6548 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
6549 	     unsigned HOST_WIDE_INT bitregion_start,
6550 	     unsigned HOST_WIDE_INT bitregion_end,
6551 	     machine_mode mode, tree exp,
6552 	     alias_set_type alias_set, bool nontemporal)
6553 {
6554   if (TREE_CODE (exp) == ERROR_MARK)
6555     return const0_rtx;
6556 
6557   /* If we have nothing to store, do nothing unless the expression has
6558      side-effects.  */
6559   if (bitsize == 0)
6560     return expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);
6561 
6562   if (GET_CODE (target) == CONCAT)
6563     {
6564       /* We're storing into a struct containing a single __complex.  */
6565 
6566       gcc_assert (!bitpos);
6567       return store_expr (exp, target, 0, nontemporal);
6568     }
6569 
6570   /* If the structure is in a register or if the component
6571      is a bit field, we cannot use addressing to access it.
6572      Use bit-field techniques or SUBREG to store in it.  */
6573 
6574   if (mode == VOIDmode
6575       || (mode != BLKmode && ! direct_store[(int) mode]
6576 	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6577 	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6578       || REG_P (target)
6579       || GET_CODE (target) == SUBREG
6580       /* If the field isn't aligned enough to store as an ordinary memref,
6581 	 store it as a bit field.  */
6582       || (mode != BLKmode
6583 	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
6584 		|| bitpos % GET_MODE_ALIGNMENT (mode))
6585 	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
6586 	      || (bitpos % BITS_PER_UNIT != 0)))
6587       || (bitsize >= 0 && mode != BLKmode
6588 	  && GET_MODE_BITSIZE (mode) > bitsize)
6589       /* If the RHS and field are a constant size and the size of the
6590 	 RHS isn't the same size as the bitfield, we must use bitfield
6591 	 operations.  */
6592       || (bitsize >= 0
6593 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
6594 	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
6595       /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
6596          decl we must use bitfield operations.  */
6597       || (bitsize >= 0
6598 	  && TREE_CODE (exp) == MEM_REF
6599 	  && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6600 	  && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6601 	  && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6602 	  && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
6603     {
6604       rtx temp;
6605       gimple nop_def;
6606 
6607       /* If EXP is a NOP_EXPR of precision less than its mode, then that
6608 	 implies a mask operation.  If the precision is the same size as
6609 	 the field we're storing into, that mask is redundant.  This is
6610 	 particularly common with bit field assignments generated by the
6611 	 C front end.  */
6612       nop_def = get_def_for_expr (exp, NOP_EXPR);
6613       if (nop_def)
6614 	{
6615 	  tree type = TREE_TYPE (exp);
6616 	  if (INTEGRAL_TYPE_P (type)
6617 	      && TYPE_PRECISION (type) < GET_MODE_BITSIZE (TYPE_MODE (type))
6618 	      && bitsize == TYPE_PRECISION (type))
6619 	    {
6620 	      tree op = gimple_assign_rhs1 (nop_def);
6621 	      type = TREE_TYPE (op);
6622 	      if (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) >= bitsize)
6623 		exp = op;
6624 	    }
6625 	}
6626 
6627       temp = expand_normal (exp);
6628 
6629       /* If BITSIZE is narrower than the size of the type of EXP
6630 	 we will be narrowing TEMP.  Normally, what's wanted are the
6631 	 low-order bits.  However, if EXP's type is a record and this is a
6632 	 big-endian machine, we want the upper BITSIZE bits.  */
6633       if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
6634 	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
6635 	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
6636 	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
6637 			     GET_MODE_BITSIZE (GET_MODE (temp)) - bitsize,
6638 			     NULL_RTX, 1);
6639 
6640       /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE.  */
6641       if (mode != VOIDmode && mode != BLKmode
6642 	  && mode != TYPE_MODE (TREE_TYPE (exp)))
6643 	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
6644 
6645       /* If TEMP is not a PARALLEL (see below) and its mode and that of TARGET
6646 	 are both BLKmode, both must be in memory and BITPOS must be aligned
6647 	 on a byte boundary.  If so, we simply do a block copy.  Likewise for
6648 	 a BLKmode-like TARGET.  */
6649       if (GET_CODE (temp) != PARALLEL
6650 	  && GET_MODE (temp) == BLKmode
6651 	  && (GET_MODE (target) == BLKmode
6652 	      || (MEM_P (target)
6653 		  && GET_MODE_CLASS (GET_MODE (target)) == MODE_INT
6654 		  && (bitpos % BITS_PER_UNIT) == 0
6655 		  && (bitsize % BITS_PER_UNIT) == 0)))
6656 	{
6657 	  gcc_assert (MEM_P (target) && MEM_P (temp)
6658 		      && (bitpos % BITS_PER_UNIT) == 0);
6659 
6660 	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
6661 	  emit_block_move (target, temp,
6662 			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6663 				    / BITS_PER_UNIT),
6664 			   BLOCK_OP_NORMAL);
6665 
6666 	  return const0_rtx;
6667 	}
6668 
6669       /* Handle calls that return values in multiple non-contiguous locations.
6670 	 The Irix 6 ABI has examples of this.  */
6671       if (GET_CODE (temp) == PARALLEL)
6672 	{
6673 	  HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6674 	  rtx temp_target;
6675 	  if (mode == BLKmode || mode == VOIDmode)
6676 	    mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6677 	  temp_target = gen_reg_rtx (mode);
6678 	  emit_group_store (temp_target, temp, TREE_TYPE (exp), size);
6679 	  temp = temp_target;
6680 	}
6681       else if (mode == BLKmode)
6682 	{
6683 	  /* Handle calls that return BLKmode values in registers.  */
6684 	  if (REG_P (temp) && TREE_CODE (exp) == CALL_EXPR)
6685 	    {
6686 	      rtx temp_target = gen_reg_rtx (GET_MODE (temp));
6687 	      copy_blkmode_from_reg (temp_target, temp, TREE_TYPE (exp));
6688 	      temp = temp_target;
6689 	    }
6690 	  else
6691 	    {
6692 	      HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (exp));
6693 	      rtx temp_target;
6694 	      mode = smallest_mode_for_size (size * BITS_PER_UNIT, MODE_INT);
6695 	      temp_target = gen_reg_rtx (mode);
6696 	      temp_target
6697 	        = extract_bit_field (temp, size * BITS_PER_UNIT, 0, 1,
6698 				     temp_target, mode, mode);
6699 	      temp = temp_target;
6700 	    }
6701 	}
6702 
6703       /* Store the value in the bitfield.  */
6704       store_bit_field (target, bitsize, bitpos,
6705 		       bitregion_start, bitregion_end,
6706 		       mode, temp);
6707 
6708       return const0_rtx;
6709     }
6710   else
6711     {
6712       /* Now build a reference to just the desired component.  */
6713       rtx to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
6714 
6715       if (to_rtx == target)
6716 	to_rtx = copy_rtx (to_rtx);
6717 
6718       if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
6719 	set_mem_alias_set (to_rtx, alias_set);
6720 
6721       return store_expr (exp, to_rtx, 0, nontemporal);
6722     }
6723 }
6724 
6725 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
6726    an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
6727    codes and find the ultimate containing object, which we return.
6728 
6729    We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
6730    bit position, and *PUNSIGNEDP to the signedness of the field.
6731    If the position of the field is variable, we store a tree
6732    giving the variable offset (in units) in *POFFSET.
6733    This offset is in addition to the bit position.
6734    If the position is not variable, we store 0 in *POFFSET.
6735 
6736    If any of the extraction expressions is volatile,
6737    we store 1 in *PVOLATILEP.  Otherwise we don't change that.
6738 
6739    If the field is a non-BLKmode bit-field, *PMODE is set to VOIDmode.
6740    Otherwise, it is a mode that can be used to access the field.
6741 
6742    If the field describes a variable-sized object, *PMODE is set to
6743    BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
6744    this case, but the address of the object can be found.
6745 
6746    If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
6747    look through nodes that serve as markers of a greater alignment than
6748    the one that can be deduced from the expression.  These nodes make it
6749    possible for front-ends to prevent temporaries from being created by
6750    the middle-end on alignment considerations.  For that purpose, the
6751    normal operating mode at high-level is to always pass FALSE so that
6752    the ultimate containing object is really returned; moreover, the
6753    associated predicate handled_component_p will always return TRUE
6754    on these nodes, thus indicating that they are essentially handled
6755    by get_inner_reference.  TRUE should only be passed when the caller
6756    is scanning the expression in order to build another representation
6757    and specifically knows how to handle these nodes; as such, this is
6758    the normal operating mode in the RTL expanders.  */
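
/* Illustrative usage sketch (added for exposition; not from the original
   sources).  A typical caller decomposes a GENERIC reference such as
   x.a[i].b like this:

     HOST_WIDE_INT bitsize, bitpos;
     tree offset;
     machine_mode mode1;
     int unsignedp, volatilep = 0;
     tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                      &mode1, &unsignedp, &volatilep,
                                      false);

   On return BASE is the ultimate containing object (here the decl for `x'),
   BITSIZE and BITPOS give the constant part of the bit position, and any
   variable part contributed by the a[i] index comes back in OFFSET as a
   tree measured in bytes.  */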
6759 
6760 tree
6761 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
6762 		     HOST_WIDE_INT *pbitpos, tree *poffset,
6763 		     machine_mode *pmode, int *punsignedp,
6764 		     int *pvolatilep, bool keep_aligning)
6765 {
6766   tree size_tree = 0;
6767   machine_mode mode = VOIDmode;
6768   bool blkmode_bitfield = false;
6769   tree offset = size_zero_node;
6770   offset_int bit_offset = 0;
6771 
6772   /* First get the mode, signedness, and size.  We do this from just the
6773      outermost expression.  */
6774   *pbitsize = -1;
6775   if (TREE_CODE (exp) == COMPONENT_REF)
6776     {
6777       tree field = TREE_OPERAND (exp, 1);
6778       size_tree = DECL_SIZE (field);
6779       if (flag_strict_volatile_bitfields > 0
6780 	  && TREE_THIS_VOLATILE (exp)
6781 	  && DECL_BIT_FIELD_TYPE (field)
6782 	  && DECL_MODE (field) != BLKmode)
6783 	/* Volatile bitfields should be accessed in the mode of the
6784 	   field's type, not the mode computed based on the bit
6785 	   size.  */
6786 	mode = TYPE_MODE (DECL_BIT_FIELD_TYPE (field));
6787       else if (!DECL_BIT_FIELD (field))
6788 	mode = DECL_MODE (field);
6789       else if (DECL_MODE (field) == BLKmode)
6790 	blkmode_bitfield = true;
6791 
6792       *punsignedp = DECL_UNSIGNED (field);
6793     }
6794   else if (TREE_CODE (exp) == BIT_FIELD_REF)
6795     {
6796       size_tree = TREE_OPERAND (exp, 1);
6797       *punsignedp = (! INTEGRAL_TYPE_P (TREE_TYPE (exp))
6798 		     || TYPE_UNSIGNED (TREE_TYPE (exp)));
6799 
6800       /* For vector types, with the correct size of access, use the mode of
6801 	 inner type.  */
6802       if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == VECTOR_TYPE
6803 	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)))
6804 	  && tree_int_cst_equal (size_tree, TYPE_SIZE (TREE_TYPE (exp))))
6805         mode = TYPE_MODE (TREE_TYPE (exp));
6806     }
6807   else
6808     {
6809       mode = TYPE_MODE (TREE_TYPE (exp));
6810       *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
6811 
6812       if (mode == BLKmode)
6813 	size_tree = TYPE_SIZE (TREE_TYPE (exp));
6814       else
6815 	*pbitsize = GET_MODE_BITSIZE (mode);
6816     }
6817 
6818   if (size_tree != 0)
6819     {
6820       if (! tree_fits_uhwi_p (size_tree))
6821 	mode = BLKmode, *pbitsize = -1;
6822       else
6823 	*pbitsize = tree_to_uhwi (size_tree);
6824     }
6825 
6826   /* Compute cumulative bit-offset for nested component-refs and array-refs,
6827      and find the ultimate containing object.  */
6828   while (1)
6829     {
6830       switch (TREE_CODE (exp))
6831 	{
6832 	case BIT_FIELD_REF:
6833 	  bit_offset += wi::to_offset (TREE_OPERAND (exp, 2));
6834 	  break;
6835 
6836 	case COMPONENT_REF:
6837 	  {
6838 	    tree field = TREE_OPERAND (exp, 1);
6839 	    tree this_offset = component_ref_field_offset (exp);
6840 
6841 	    /* If this field hasn't been filled in yet, don't go past it.
6842 	       This should only happen when folding expressions made during
6843 	       type construction.  */
6844 	    if (this_offset == 0)
6845 	      break;
6846 
6847 	    offset = size_binop (PLUS_EXPR, offset, this_offset);
6848 	    bit_offset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
6849 
6850 	    /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
6851 	  }
6852 	  break;
6853 
6854 	case ARRAY_REF:
6855 	case ARRAY_RANGE_REF:
6856 	  {
6857 	    tree index = TREE_OPERAND (exp, 1);
6858 	    tree low_bound = array_ref_low_bound (exp);
6859 	    tree unit_size = array_ref_element_size (exp);
6860 
6861 	    /* We assume all arrays have sizes that are a multiple of a byte.
6862 	       First subtract the lower bound, if any, in the type of the
6863 	       index, then convert to sizetype and multiply by the size of
6864 	       the array element.  */
6865 	    if (! integer_zerop (low_bound))
6866 	      index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
6867 				   index, low_bound);
6868 
6869 	    offset = size_binop (PLUS_EXPR, offset,
6870 			         size_binop (MULT_EXPR,
6871 					     fold_convert (sizetype, index),
6872 					     unit_size));
6873 	  }
6874 	  break;
6875 
6876 	case REALPART_EXPR:
6877 	  break;
6878 
6879 	case IMAGPART_EXPR:
6880 	  bit_offset += *pbitsize;
6881 	  break;
6882 
6883 	case VIEW_CONVERT_EXPR:
6884 	  if (keep_aligning && STRICT_ALIGNMENT
6885 	      && (TYPE_ALIGN (TREE_TYPE (exp))
6886 	       > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
6887 	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
6888 		  < BIGGEST_ALIGNMENT)
6889 	      && (TYPE_ALIGN_OK (TREE_TYPE (exp))
6890 		  || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6891 	    goto done;
6892 	  break;
6893 
6894 	case MEM_REF:
6895 	  /* Hand back the decl for MEM[&decl, off].  */
6896 	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
6897 	    {
6898 	      tree off = TREE_OPERAND (exp, 1);
6899 	      if (!integer_zerop (off))
6900 		{
6901 		  offset_int boff, coff = mem_ref_offset (exp);
6902 		  boff = wi::lshift (coff, LOG2_BITS_PER_UNIT);
6903 		  bit_offset += boff;
6904 		}
6905 	      exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6906 	    }
6907 	  goto done;
6908 
6909 	default:
6910 	  goto done;
6911 	}
6912 
6913       /* If any reference in the chain is volatile, the effect is volatile.  */
6914       if (TREE_THIS_VOLATILE (exp))
6915 	*pvolatilep = 1;
6916 
6917       exp = TREE_OPERAND (exp, 0);
6918     }
6919  done:
6920 
6921   /* If OFFSET is constant, see if we can return the whole thing as a
6922      constant bit position.  Make sure to handle overflow during
6923      this conversion.  */
6924   if (TREE_CODE (offset) == INTEGER_CST)
6925     {
6926       offset_int tem = wi::sext (wi::to_offset (offset),
6927 				 TYPE_PRECISION (sizetype));
6928       tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
6929       tem += bit_offset;
6930       if (wi::fits_shwi_p (tem))
6931 	{
6932 	  *pbitpos = tem.to_shwi ();
6933 	  *poffset = offset = NULL_TREE;
6934 	}
6935     }
6936 
6937   /* Otherwise, split it up.  */
6938   if (offset)
6939     {
6940       /* Avoid returning a negative bitpos as this may wreak havoc later.  */
6941       if (wi::neg_p (bit_offset) || !wi::fits_shwi_p (bit_offset))
6942         {
6943 	  offset_int mask = wi::mask <offset_int> (LOG2_BITS_PER_UNIT, false);
6944 	  offset_int tem = bit_offset.and_not (mask);
6945 	  /* TEM is the bitpos rounded to BITS_PER_UNIT towards -Inf.
6946 	     Subtract it from BIT_OFFSET and add it (scaled) to OFFSET.  */
6947 	  bit_offset -= tem;
6948 	  tem = wi::arshift (tem, LOG2_BITS_PER_UNIT);
6949 	  offset = size_binop (PLUS_EXPR, offset,
6950 			       wide_int_to_tree (sizetype, tem));
6951 	}
6952 
6953       *pbitpos = bit_offset.to_shwi ();
6954       *poffset = offset;
6955     }
6956 
6957   /* We can use BLKmode for a byte-aligned BLKmode bitfield.  */
6958   if (mode == VOIDmode
6959       && blkmode_bitfield
6960       && (*pbitpos % BITS_PER_UNIT) == 0
6961       && (*pbitsize % BITS_PER_UNIT) == 0)
6962     *pmode = BLKmode;
6963   else
6964     *pmode = mode;
6965 
6966   return exp;
6967 }
6968 
6969 /* Return a tree of sizetype representing the size, in bytes, of the element
6970    of EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
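
/* For example (illustrative; assumes a target where double occupies 8
   bytes): for an ARRAY_REF of `double d[N]' this returns the sizetype
   constant 8, taken from TYPE_SIZE_UNIT of the element type unless the
   ARRAY_REF carries an explicit operand 3.  */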
6971 
6972 tree
6973 array_ref_element_size (tree exp)
6974 {
6975   tree aligned_size = TREE_OPERAND (exp, 3);
6976   tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6977   location_t loc = EXPR_LOCATION (exp);
6978 
6979   /* If a size was specified in the ARRAY_REF, it's the size measured
6980      in alignment units of the element type.  So multiply by that value.  */
6981   if (aligned_size)
6982     {
6983       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
6984 	 sizetype from another type of the same width and signedness.  */
6985       if (TREE_TYPE (aligned_size) != sizetype)
6986 	aligned_size = fold_convert_loc (loc, sizetype, aligned_size);
6987       return size_binop_loc (loc, MULT_EXPR, aligned_size,
6988 			     size_int (TYPE_ALIGN_UNIT (elmt_type)));
6989     }
6990 
6991   /* Otherwise, take the size from that of the element type.  Substitute
6992      any PLACEHOLDER_EXPR that we have.  */
6993   else
6994     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
6995 }
6996 
6997 /* Return a tree representing the lower bound of the array mentioned in
6998    EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
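
/* For example (illustrative): for a C array the domain's lower bound is 0,
   so a zero constant is returned; for languages such as Ada, where an array
   may start at 1, TYPE_MIN_VALUE of the domain (or operand 2 of the
   ARRAY_REF, if present) supplies the nonzero bound instead.  */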
6999 
7000 tree
7001 array_ref_low_bound (tree exp)
7002 {
7003   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
7004 
7005   /* If a lower bound is specified in EXP, use it.  */
7006   if (TREE_OPERAND (exp, 2))
7007     return TREE_OPERAND (exp, 2);
7008 
7009   /* Otherwise, if there is a domain type and it has a lower bound, use it,
7010      substituting for a PLACEHOLDER_EXPR as needed.  */
7011   if (domain_type && TYPE_MIN_VALUE (domain_type))
7012     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
7013 
7014   /* Otherwise, return a zero of the appropriate type.  */
7015   return build_int_cst (TREE_TYPE (TREE_OPERAND (exp, 1)), 0);
7016 }
7017 
7018 /* Returns true if REF is an array reference to an array at the end of
7019    a structure.  If this is the case, the array may be allocated larger
7020    than its upper bound implies.  */
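
/* Illustrative example (added for exposition):

     struct s { int n; int tail[1]; };
     ... ((struct s *) p)->tail[i] ...

   The ARRAY_REF of TAIL is the last field of its enclosing structure and
   the base is not a declared object, so this predicate returns true and
   the access may validly run past the declared one-element bound.  */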
7021 
7022 bool
7023 array_at_struct_end_p (tree ref)
7024 {
7025   if (TREE_CODE (ref) != ARRAY_REF
7026       && TREE_CODE (ref) != ARRAY_RANGE_REF)
7027     return false;
7028 
7029   while (handled_component_p (ref))
7030     {
7031       /* If the reference chain contains a component reference to a
7032          non-union type and another field follows it, the reference
7033 	 is not at the end of a structure.  */
7034       if (TREE_CODE (ref) == COMPONENT_REF
7035 	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0))) == RECORD_TYPE)
7036 	{
7037 	  tree nextf = DECL_CHAIN (TREE_OPERAND (ref, 1));
7038 	  while (nextf && TREE_CODE (nextf) != FIELD_DECL)
7039 	    nextf = DECL_CHAIN (nextf);
7040 	  if (nextf)
7041 	    return false;
7042 	}
7043 
7044       ref = TREE_OPERAND (ref, 0);
7045     }
7046 
7047   /* If the reference is based on a declared entity, the size of the array
7048      is constrained by its given domain.  */
7049   if (DECL_P (ref))
7050     return false;
7051 
7052   return true;
7053 }
7054 
7055 /* Return a tree representing the upper bound of the array mentioned in
7056    EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
7057 
7058 tree
7059 array_ref_up_bound (tree exp)
7060 {
7061   tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
7062 
7063   /* If there is a domain type and it has an upper bound, use it, substituting
7064      for a PLACEHOLDER_EXPR as needed.  */
7065   if (domain_type && TYPE_MAX_VALUE (domain_type))
7066     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
7067 
7068   /* Otherwise fail.  */
7069   return NULL_TREE;
7070 }
7071 
7072 /* Return a tree representing the offset, in bytes, of the field referenced
7073    by EXP.  This does not include any offset in DECL_FIELD_BIT_OFFSET.  */
7074 
7075 tree
7076 component_ref_field_offset (tree exp)
7077 {
7078   tree aligned_offset = TREE_OPERAND (exp, 2);
7079   tree field = TREE_OPERAND (exp, 1);
7080   location_t loc = EXPR_LOCATION (exp);
7081 
7082   /* If an offset was specified in the COMPONENT_REF, it's the offset measured
7083      in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT.  So multiply by that
7084      value.  */
7085   if (aligned_offset)
7086     {
7087       /* ??? tree_ssa_useless_type_conversion will eliminate casts to
7088 	 sizetype from another type of the same width and signedness.  */
7089       if (TREE_TYPE (aligned_offset) != sizetype)
7090 	aligned_offset = fold_convert_loc (loc, sizetype, aligned_offset);
7091       return size_binop_loc (loc, MULT_EXPR, aligned_offset,
7092 			     size_int (DECL_OFFSET_ALIGN (field)
7093 				       / BITS_PER_UNIT));
7094     }
7095 
7096   /* Otherwise, take the offset from that of the field.  Substitute
7097      any PLACEHOLDER_EXPR that we have.  */
7098   else
7099     return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
7100 }
7101 
7102 /* Alignment in bits the TARGET of an assignment may be assumed to have.  */
7103 
7104 static unsigned HOST_WIDE_INT
7105 target_align (const_tree target)
7106 {
7107   /* We might have a chain of nested references with intermediate misaligning
7108      bitfield components, so we need to recurse to find out.  */
7109 
7110   unsigned HOST_WIDE_INT this_align, outer_align;
7111 
7112   switch (TREE_CODE (target))
7113     {
7114     case BIT_FIELD_REF:
7115       return 1;
7116 
7117     case COMPONENT_REF:
7118       this_align = DECL_ALIGN (TREE_OPERAND (target, 1));
7119       outer_align = target_align (TREE_OPERAND (target, 0));
7120       return MIN (this_align, outer_align);
7121 
7122     case ARRAY_REF:
7123     case ARRAY_RANGE_REF:
7124       this_align = TYPE_ALIGN (TREE_TYPE (target));
7125       outer_align = target_align (TREE_OPERAND (target, 0));
7126       return MIN (this_align, outer_align);
7127 
7128     CASE_CONVERT:
7129     case NON_LVALUE_EXPR:
7130     case VIEW_CONVERT_EXPR:
7131       this_align = TYPE_ALIGN (TREE_TYPE (target));
7132       outer_align = target_align (TREE_OPERAND (target, 0));
7133       return MAX (this_align, outer_align);
7134 
7135     default:
7136       return TYPE_ALIGN (TREE_TYPE (target));
7137     }
7138 }
7139 
7140 
7141 /* Given an rtx VALUE that may contain additions and multiplications, return
7142    an equivalent value that just refers to a register, memory, or constant.
7143    This is done by generating instructions to perform the arithmetic and
7144    returning a pseudo-register containing the value.
7145 
7146    The returned value may be a REG, SUBREG, MEM or constant.  */
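
/* Minimal usage sketch (illustrative; BASE_REG is a hypothetical pseudo):

     rtx addr = gen_rtx_PLUS (Pmode, base_reg, GEN_INT (16));
     rtx op = force_operand (addr, NULL_RTX);

   The addition is carried out by emitted insns, and OP ends up as a REG,
   MEM or constant that later passes can use directly as an operand.  */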
7147 
7148 rtx
7149 force_operand (rtx value, rtx target)
7150 {
7151   rtx op1, op2;
7152   /* Use subtarget as the target for operand 0 of a binary operation.  */
7153   rtx subtarget = get_subtarget (target);
7154   enum rtx_code code = GET_CODE (value);
7155 
7156   /* Check for subreg applied to an expression produced by loop optimizer.  */
7157   if (code == SUBREG
7158       && !REG_P (SUBREG_REG (value))
7159       && !MEM_P (SUBREG_REG (value)))
7160     {
7161       value
7162 	= simplify_gen_subreg (GET_MODE (value),
7163 			       force_reg (GET_MODE (SUBREG_REG (value)),
7164 					  force_operand (SUBREG_REG (value),
7165 							 NULL_RTX)),
7166 			       GET_MODE (SUBREG_REG (value)),
7167 			       SUBREG_BYTE (value));
7168       code = GET_CODE (value);
7169     }
7170 
7171   /* Check for a PIC address load.  */
7172   if ((code == PLUS || code == MINUS)
7173       && XEXP (value, 0) == pic_offset_table_rtx
7174       && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
7175 	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
7176 	  || GET_CODE (XEXP (value, 1)) == CONST))
7177     {
7178       if (!subtarget)
7179 	subtarget = gen_reg_rtx (GET_MODE (value));
7180       emit_move_insn (subtarget, value);
7181       return subtarget;
7182     }
7183 
7184   if (ARITHMETIC_P (value))
7185     {
7186       op2 = XEXP (value, 1);
7187       if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
7188 	subtarget = 0;
7189       if (code == MINUS && CONST_INT_P (op2))
7190 	{
7191 	  code = PLUS;
7192 	  op2 = negate_rtx (GET_MODE (value), op2);
7193 	}
7194 
7195       /* Check for an addition with OP2 a constant integer and our first
7196          operand a PLUS of a virtual register and something else.  In that
7197          case, we want to emit the sum of the virtual register and the
7198          constant first and then add the other value.  This allows virtual
7199          register instantiation to simply modify the constant rather than
7200          creating another one around this addition.  */
7201       if (code == PLUS && CONST_INT_P (op2)
7202 	  && GET_CODE (XEXP (value, 0)) == PLUS
7203 	  && REG_P (XEXP (XEXP (value, 0), 0))
7204 	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
7205 	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
7206 	{
7207 	  rtx temp = expand_simple_binop (GET_MODE (value), code,
7208 					  XEXP (XEXP (value, 0), 0), op2,
7209 					  subtarget, 0, OPTAB_LIB_WIDEN);
7210 	  return expand_simple_binop (GET_MODE (value), code, temp,
7211 				      force_operand (XEXP (XEXP (value,
7212 								 0), 1), 0),
7213 				      target, 0, OPTAB_LIB_WIDEN);
7214 	}
7215 
7216       op1 = force_operand (XEXP (value, 0), subtarget);
7217       op2 = force_operand (op2, NULL_RTX);
7218       switch (code)
7219 	{
7220 	case MULT:
7221 	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
7222 	case DIV:
7223 	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
7224 	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
7225 					target, 1, OPTAB_LIB_WIDEN);
7226 	  else
7227 	    return expand_divmod (0,
7228 				  FLOAT_MODE_P (GET_MODE (value))
7229 				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
7230 				  GET_MODE (value), op1, op2, target, 0);
7231 	case MOD:
7232 	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7233 				target, 0);
7234 	case UDIV:
7235 	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
7236 				target, 1);
7237 	case UMOD:
7238 	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
7239 				target, 1);
7240 	case ASHIFTRT:
7241 	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
7242 				      target, 0, OPTAB_LIB_WIDEN);
7243 	default:
7244 	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
7245 				      target, 1, OPTAB_LIB_WIDEN);
7246 	}
7247     }
7248   if (UNARY_P (value))
7249     {
7250       if (!target)
7251 	target = gen_reg_rtx (GET_MODE (value));
7252       op1 = force_operand (XEXP (value, 0), NULL_RTX);
7253       switch (code)
7254 	{
7255 	case ZERO_EXTEND:
7256 	case SIGN_EXTEND:
7257 	case TRUNCATE:
7258 	case FLOAT_EXTEND:
7259 	case FLOAT_TRUNCATE:
7260 	  convert_move (target, op1, code == ZERO_EXTEND);
7261 	  return target;
7262 
7263 	case FIX:
7264 	case UNSIGNED_FIX:
7265 	  expand_fix (target, op1, code == UNSIGNED_FIX);
7266 	  return target;
7267 
7268 	case FLOAT:
7269 	case UNSIGNED_FLOAT:
7270 	  expand_float (target, op1, code == UNSIGNED_FLOAT);
7271 	  return target;
7272 
7273 	default:
7274 	  return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
7275 	}
7276     }
7277 
7278 #ifdef INSN_SCHEDULING
7279   /* On machines that have insn scheduling, we want all memory references to be
7280      explicit, so we need to deal with such paradoxical SUBREGs.  */
7281   if (paradoxical_subreg_p (value) && MEM_P (SUBREG_REG (value)))
7282     value
7283       = simplify_gen_subreg (GET_MODE (value),
7284 			     force_reg (GET_MODE (SUBREG_REG (value)),
7285 					force_operand (SUBREG_REG (value),
7286 						       NULL_RTX)),
7287 			     GET_MODE (SUBREG_REG (value)),
7288 			     SUBREG_BYTE (value));
7289 #endif
7290 
7291   return value;
7292 }
7293 
7294 /* Subroutine of expand_expr: return nonzero iff there is no way that
7295    EXP can reference X, which is being modified.  TOP_P is nonzero if this
7296    call is going to be used to determine whether we need a temporary
7297    for EXP, as opposed to a recursive call to this function.
7298 
7299    It is always safe for this routine to return zero since it merely
7300    searches for optimization opportunities.  */
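
/* Usage note (added for exposition): expand_operands below applies this as

     if (! safe_from_p (target, exp1, 1))
       target = 0;

   i.e. it discards a suggested target that the second operand might
   reference and falls back to a fresh temporary instead.  */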
7301 
7302 int
7303 safe_from_p (const_rtx x, tree exp, int top_p)
7304 {
7305   rtx exp_rtl = 0;
7306   int i, nops;
7307 
7308   if (x == 0
7309       /* If EXP has varying size, we MUST use a target since we currently
7310 	 have no way of allocating temporaries of variable size
7311 	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
7312 	 So we assume here that something at a higher level has prevented a
7313 	 clash.  This is somewhat bogus, but the best we can do.  Only
7314 	 do this when X is BLKmode and when we are at the top level.  */
7315       || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
7316 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
7317 	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
7318 	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
7319 	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
7320 	      != INTEGER_CST)
7321 	  && GET_MODE (x) == BLKmode)
7322       /* If X is in the outgoing argument area, it is always safe.  */
7323       || (MEM_P (x)
7324 	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
7325 	      || (GET_CODE (XEXP (x, 0)) == PLUS
7326 		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
7327     return 1;
7328 
7329   /* If this is a subreg of a hard register, declare it unsafe, otherwise,
7330      find the underlying pseudo.  */
7331   if (GET_CODE (x) == SUBREG)
7332     {
7333       x = SUBREG_REG (x);
7334       if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7335 	return 0;
7336     }
7337 
7338   /* Now look at our tree code and possibly recurse.  */
7339   switch (TREE_CODE_CLASS (TREE_CODE (exp)))
7340     {
7341     case tcc_declaration:
7342       exp_rtl = DECL_RTL_IF_SET (exp);
7343       break;
7344 
7345     case tcc_constant:
7346       return 1;
7347 
7348     case tcc_exceptional:
7349       if (TREE_CODE (exp) == TREE_LIST)
7350 	{
7351 	  while (1)
7352 	    {
7353 	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
7354 		return 0;
7355 	      exp = TREE_CHAIN (exp);
7356 	      if (!exp)
7357 		return 1;
7358 	      if (TREE_CODE (exp) != TREE_LIST)
7359 		return safe_from_p (x, exp, 0);
7360 	    }
7361 	}
7362       else if (TREE_CODE (exp) == CONSTRUCTOR)
7363 	{
7364 	  constructor_elt *ce;
7365 	  unsigned HOST_WIDE_INT idx;
7366 
7367 	  FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (exp), idx, ce)
7368 	    if ((ce->index != NULL_TREE && !safe_from_p (x, ce->index, 0))
7369 		|| !safe_from_p (x, ce->value, 0))
7370 	      return 0;
7371 	  return 1;
7372 	}
7373       else if (TREE_CODE (exp) == ERROR_MARK)
7374 	return 1;	/* An already-visited SAVE_EXPR? */
7375       else
7376 	return 0;
7377 
7378     case tcc_statement:
7379       /* The only case we look at here is the DECL_INITIAL inside a
7380 	 DECL_EXPR.  */
7381       return (TREE_CODE (exp) != DECL_EXPR
7382 	      || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
7383 	      || !DECL_INITIAL (DECL_EXPR_DECL (exp))
7384 	      || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
7385 
7386     case tcc_binary:
7387     case tcc_comparison:
7388       if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
7389 	return 0;
7390       /* Fall through.  */
7391 
7392     case tcc_unary:
7393       return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7394 
7395     case tcc_expression:
7396     case tcc_reference:
7397     case tcc_vl_exp:
7398       /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
7399 	 the expression.  If it is set, we conflict iff we are that rtx or
7400 	 both are in memory.  Otherwise, we check all operands of the
7401 	 expression recursively.  */
7402 
7403       switch (TREE_CODE (exp))
7404 	{
7405 	case ADDR_EXPR:
7406 	  /* If the operand is static or we are static, we can't conflict.
7407 	     Likewise if we don't conflict with the operand at all.  */
7408 	  if (staticp (TREE_OPERAND (exp, 0))
7409 	      || TREE_STATIC (exp)
7410 	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
7411 	    return 1;
7412 
7413 	  /* Otherwise, the only way this can conflict is if we are taking
7414 	     the address of a DECL whose address is part of X, which is
7415 	     very rare.  */
7416 	  exp = TREE_OPERAND (exp, 0);
7417 	  if (DECL_P (exp))
7418 	    {
7419 	      if (!DECL_RTL_SET_P (exp)
7420 		  || !MEM_P (DECL_RTL (exp)))
7421 		return 0;
7422 	      else
7423 		exp_rtl = XEXP (DECL_RTL (exp), 0);
7424 	    }
7425 	  break;
7426 
7427 	case MEM_REF:
7428 	  if (MEM_P (x)
7429 	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
7430 					get_alias_set (exp)))
7431 	    return 0;
7432 	  break;
7433 
7434 	case CALL_EXPR:
7435 	  /* Assume that the call will clobber all hard registers and
7436 	     all of memory.  */
7437 	  if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
7438 	      || MEM_P (x))
7439 	    return 0;
7440 	  break;
7441 
7442 	case WITH_CLEANUP_EXPR:
7443 	case CLEANUP_POINT_EXPR:
7444 	  /* Lowered by gimplify.c.  */
7445 	  gcc_unreachable ();
7446 
7447 	case SAVE_EXPR:
7448 	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
7449 
7450 	default:
7451 	  break;
7452 	}
7453 
7454       /* If we have an rtx, we do not need to scan our operands.  */
7455       if (exp_rtl)
7456 	break;
7457 
7458       nops = TREE_OPERAND_LENGTH (exp);
7459       for (i = 0; i < nops; i++)
7460 	if (TREE_OPERAND (exp, i) != 0
7461 	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
7462 	  return 0;
7463 
7464       break;
7465 
7466     case tcc_type:
7467       /* Should never get a type here.  */
7468       gcc_unreachable ();
7469     }
7470 
7471   /* If we have an rtl, find any enclosed object.  Then see if we conflict
7472      with it.  */
7473   if (exp_rtl)
7474     {
7475       if (GET_CODE (exp_rtl) == SUBREG)
7476 	{
7477 	  exp_rtl = SUBREG_REG (exp_rtl);
7478 	  if (REG_P (exp_rtl)
7479 	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
7480 	    return 0;
7481 	}
7482 
7483       /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
7484 	 are memory and they conflict.  */
7485       return ! (rtx_equal_p (x, exp_rtl)
7486 		|| (MEM_P (x) && MEM_P (exp_rtl)
7487 		    && true_dependence (exp_rtl, VOIDmode, x)));
7488     }
7489 
7490   /* If we reach here, it is safe.  */
7491   return 1;
7492 }
7493 
7494 
7495 /* Return the highest power of two that EXP is known to be a multiple of.
7496    This is used in updating alignment of MEMs in array references.  */
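
/* Worked example (illustrative): for an INTEGER_CST of value 24, tree_ctz
   reports 3 trailing zero bits, so

     highest_pow2_factor (size_int (24))

   returns 8; results larger than BIGGEST_ALIGNMENT are capped at
   BIGGEST_ALIGNMENT.  */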
7497 
7498 unsigned HOST_WIDE_INT
7499 highest_pow2_factor (const_tree exp)
7500 {
7501   unsigned HOST_WIDE_INT ret;
7502   int trailing_zeros = tree_ctz (exp);
7503   if (trailing_zeros >= HOST_BITS_PER_WIDE_INT)
7504     return BIGGEST_ALIGNMENT;
7505   ret = (unsigned HOST_WIDE_INT) 1 << trailing_zeros;
7506   if (ret > BIGGEST_ALIGNMENT)
7507     return BIGGEST_ALIGNMENT;
7508   return ret;
7509 }
7510 
7511 /* Similar, except that the alignment requirements of TARGET are
7512    taken into account.  Assume it is at least as aligned as its
7513    type, unless it is a COMPONENT_REF in which case the layout of
7514    the structure gives the alignment.  */
7515 
7516 static unsigned HOST_WIDE_INT
7517 highest_pow2_factor_for_target (const_tree target, const_tree exp)
7518 {
7519   unsigned HOST_WIDE_INT talign = target_align (target) / BITS_PER_UNIT;
7520   unsigned HOST_WIDE_INT factor = highest_pow2_factor (exp);
7521 
7522   return MAX (factor, talign);
7523 }
7524 
7525 #ifdef HAVE_conditional_move
7526 /* Convert the tree comparison code TCODE to the rtl one where the
7527    signedness is UNSIGNEDP.  */
7528 
7529 static enum rtx_code
7530 convert_tree_comp_to_rtx (enum tree_code tcode, int unsignedp)
7531 {
7532   enum rtx_code code;
7533   switch (tcode)
7534     {
7535     case EQ_EXPR:
7536       code = EQ;
7537       break;
7538     case NE_EXPR:
7539       code = NE;
7540       break;
7541     case LT_EXPR:
7542       code = unsignedp ? LTU : LT;
7543       break;
7544     case LE_EXPR:
7545       code = unsignedp ? LEU : LE;
7546       break;
7547     case GT_EXPR:
7548       code = unsignedp ? GTU : GT;
7549       break;
7550     case GE_EXPR:
7551       code = unsignedp ? GEU : GE;
7552       break;
7553     case UNORDERED_EXPR:
7554       code = UNORDERED;
7555       break;
7556     case ORDERED_EXPR:
7557       code = ORDERED;
7558       break;
7559     case UNLT_EXPR:
7560       code = UNLT;
7561       break;
7562     case UNLE_EXPR:
7563       code = UNLE;
7564       break;
7565     case UNGT_EXPR:
7566       code = UNGT;
7567       break;
7568     case UNGE_EXPR:
7569       code = UNGE;
7570       break;
7571     case UNEQ_EXPR:
7572       code = UNEQ;
7573       break;
7574     case LTGT_EXPR:
7575       code = LTGT;
7576       break;
7577 
7578     default:
7579       gcc_unreachable ();
7580     }
7581   return code;
7582 }
7583 #endif
7584 
7585 /* Subroutine of expand_expr.  Expand the two operands of a binary
7586    expression EXP0 and EXP1 placing the results in OP0 and OP1.
7587    The value may be stored in TARGET if TARGET is nonzero.  The
7588    MODIFIER argument is as documented by expand_expr.  */
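
/* Illustrative usage (added for exposition): binary expanders typically do

     rtx op0, op1;
     expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);

   after which OP0 and OP1 hold the expanded operands; when the two trees are
   operand_equal_p, the second rtx is simply a copy_rtx of the first.  */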
7589 
7590 void
7591 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
7592 		 enum expand_modifier modifier)
7593 {
7594   if (! safe_from_p (target, exp1, 1))
7595     target = 0;
7596   if (operand_equal_p (exp0, exp1, 0))
7597     {
7598       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7599       *op1 = copy_rtx (*op0);
7600     }
7601   else
7602     {
7603       /* If we need to preserve evaluation order, copy exp0 into its own
7604 	 temporary variable so that it can't be clobbered by exp1.  */
7605       if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
7606 	exp0 = save_expr (exp0);
7607       *op0 = expand_expr (exp0, target, VOIDmode, modifier);
7608       *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
7609     }
7610 }
7611 
7612 
7613 /* Return a MEM that contains constant EXP.  DEFER is as for
7614    output_constant_def and MODIFIER is as for expand_expr.  */
7615 
7616 static rtx
7617 expand_expr_constant (tree exp, int defer, enum expand_modifier modifier)
7618 {
7619   rtx mem;
7620 
7621   mem = output_constant_def (exp, defer);
7622   if (modifier != EXPAND_INITIALIZER)
7623     mem = use_anchored_address (mem);
7624   return mem;
7625 }
7626 
7627 /* A subroutine of expand_expr_addr_expr.  Evaluate the address of EXP.
7628    The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
7629 
7630 static rtx
7631 expand_expr_addr_expr_1 (tree exp, rtx target, machine_mode tmode,
7632 		         enum expand_modifier modifier, addr_space_t as)
7633 {
7634   rtx result, subtarget;
7635   tree inner, offset;
7636   HOST_WIDE_INT bitsize, bitpos;
7637   int volatilep, unsignedp;
7638   machine_mode mode1;
7639 
7640   /* If we are taking the address of a constant and are at the top level,
7641      we have to use output_constant_def since we can't call force_const_mem
7642      at top level.  */
7643   /* ??? This should be considered a front-end bug.  We should not be
7644      generating ADDR_EXPR of something that isn't an LVALUE.  The only
7645      exception here is STRING_CST.  */
7646   if (CONSTANT_CLASS_P (exp))
7647     {
7648       result = XEXP (expand_expr_constant (exp, 0, modifier), 0);
7649       if (modifier < EXPAND_SUM)
7650 	result = force_operand (result, target);
7651       return result;
7652     }
7653 
7654   /* Everything must be something allowed by is_gimple_addressable.  */
7655   switch (TREE_CODE (exp))
7656     {
7657     case INDIRECT_REF:
7658       /* This case will happen via recursion for &a->b.  */
7659       return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7660 
7661     case MEM_REF:
7662       {
7663 	tree tem = TREE_OPERAND (exp, 0);
7664 	if (!integer_zerop (TREE_OPERAND (exp, 1)))
7665 	  tem = fold_build_pointer_plus (tem, TREE_OPERAND (exp, 1));
7666 	return expand_expr (tem, target, tmode, modifier);
7667       }
7668 
7669     case CONST_DECL:
7670       /* Expand the initializer like constants above.  */
7671       result = XEXP (expand_expr_constant (DECL_INITIAL (exp),
7672 					   0, modifier), 0);
7673       if (modifier < EXPAND_SUM)
7674 	result = force_operand (result, target);
7675       return result;
7676 
7677     case REALPART_EXPR:
7678       /* The real part of the complex number is always first, therefore
7679 	 the address is the same as the address of the parent object.  */
7680       offset = 0;
7681       bitpos = 0;
7682       inner = TREE_OPERAND (exp, 0);
7683       break;
7684 
7685     case IMAGPART_EXPR:
7686       /* The imaginary part of the complex number is always second.
7687 	 The expression is therefore always offset by the size of the
7688 	 scalar type.  */
7689       offset = 0;
7690       bitpos = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (exp)));
7691       inner = TREE_OPERAND (exp, 0);
7692       break;
7693 
7694     case COMPOUND_LITERAL_EXPR:
7695       /* Allow COMPOUND_LITERAL_EXPR in initializers or coming from
7696 	 initializers, if e.g. rtl_for_decl_init is called on DECL_INITIAL
7697 	 with COMPOUND_LITERAL_EXPRs in it, or ARRAY_REF on a const static
7698 	 array with address of COMPOUND_LITERAL_EXPR in DECL_INITIAL;
7699 	 the initializers aren't gimplified.  */
7700       if (COMPOUND_LITERAL_EXPR_DECL (exp)
7701 	  && TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (exp)))
7702 	return expand_expr_addr_expr_1 (COMPOUND_LITERAL_EXPR_DECL (exp),
7703 					target, tmode, modifier, as);
7704       /* FALLTHRU */
7705     default:
7706       /* If the object is a DECL, then expand it for its rtl.  Don't bypass
7707 	 expand_expr, as that can have various side effects; LABEL_DECLs, for
7708 	 example, may not have their DECL_RTL set yet.  Expand the rtl of
7709 	 CONSTRUCTORs too, which should yield a memory reference for the
7710 	 constructor's contents.  Assume language specific tree nodes can
7711 	 be expanded in some interesting way.  */
7712       gcc_assert (TREE_CODE (exp) < LAST_AND_UNUSED_TREE_CODE);
7713       if (DECL_P (exp)
7714 	  || TREE_CODE (exp) == CONSTRUCTOR
7715 	  || TREE_CODE (exp) == COMPOUND_LITERAL_EXPR)
7716 	{
7717 	  result = expand_expr (exp, target, tmode,
7718 				modifier == EXPAND_INITIALIZER
7719 				? EXPAND_INITIALIZER : EXPAND_CONST_ADDRESS);
7720 
7721 	  /* If the DECL isn't in memory, then the DECL wasn't properly
7722 	     marked TREE_ADDRESSABLE, which will be either a front-end
7723 	     or a tree optimizer bug.  */
7724 
7725 	  if (TREE_ADDRESSABLE (exp)
7726 	      && ! MEM_P (result)
7727 	      && ! targetm.calls.allocate_stack_slots_for_args ())
7728 	    {
7729 	      error ("local frame unavailable (naked function?)");
7730 	      return result;
7731 	    }
7732 	  else
7733 	    gcc_assert (MEM_P (result));
7734 	  result = XEXP (result, 0);
7735 
7736 	  /* ??? Is this needed anymore?  */
7737 	  if (DECL_P (exp))
7738 	    TREE_USED (exp) = 1;
7739 
7740 	  if (modifier != EXPAND_INITIALIZER
7741 	      && modifier != EXPAND_CONST_ADDRESS
7742 	      && modifier != EXPAND_SUM)
7743 	    result = force_operand (result, target);
7744 	  return result;
7745 	}
7746 
7747       /* Pass FALSE as the last argument to get_inner_reference although
7748 	 we are expanding to RTL.  The rationale is that we know how to
7749 	 handle "aligning nodes" here: we can just bypass them because
7750 	 they won't change the final object whose address will be returned
7751 	 (they actually exist only for that purpose).  */
7752       inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7753 				   &mode1, &unsignedp, &volatilep, false);
7754       break;
7755     }
7756 
7757   /* We must have made progress.  */
7758   gcc_assert (inner != exp);
7759 
7760   subtarget = offset || bitpos ? NULL_RTX : target;
7761   /* For VIEW_CONVERT_EXPR, where the outer alignment is bigger than
7762      inner alignment, force the inner to be sufficiently aligned.  */
7763   if (CONSTANT_CLASS_P (inner)
7764       && TYPE_ALIGN (TREE_TYPE (inner)) < TYPE_ALIGN (TREE_TYPE (exp)))
7765     {
7766       inner = copy_node (inner);
7767       TREE_TYPE (inner) = copy_node (TREE_TYPE (inner));
7768       TYPE_ALIGN (TREE_TYPE (inner)) = TYPE_ALIGN (TREE_TYPE (exp));
7769       TYPE_USER_ALIGN (TREE_TYPE (inner)) = 1;
7770     }
7771   result = expand_expr_addr_expr_1 (inner, subtarget, tmode, modifier, as);
7772 
7773   if (offset)
7774     {
7775       rtx tmp;
7776 
7777       if (modifier != EXPAND_NORMAL)
7778 	result = force_operand (result, NULL);
7779       tmp = expand_expr (offset, NULL_RTX, tmode,
7780 			 modifier == EXPAND_INITIALIZER
7781 			  ? EXPAND_INITIALIZER : EXPAND_NORMAL);
7782 
7783       /* expand_expr is allowed to return an object in a mode other
7784 	 than TMODE.  If it did, we need to convert.  */
7785       if (GET_MODE (tmp) != VOIDmode && tmode != GET_MODE (tmp))
7786 	tmp = convert_modes (tmode, GET_MODE (tmp),
7787 			     tmp, TYPE_UNSIGNED (TREE_TYPE (offset)));
7788       result = convert_memory_address_addr_space (tmode, result, as);
7789       tmp = convert_memory_address_addr_space (tmode, tmp, as);
7790 
7791       if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7792 	result = simplify_gen_binary (PLUS, tmode, result, tmp);
7793       else
7794 	{
7795 	  subtarget = bitpos ? NULL_RTX : target;
7796 	  result = expand_simple_binop (tmode, PLUS, result, tmp, subtarget,
7797 					1, OPTAB_LIB_WIDEN);
7798 	}
7799     }
7800 
7801   if (bitpos)
7802     {
7803       /* Someone beforehand should have rejected taking the address
7804 	 of such an object.  */
7805       gcc_assert ((bitpos % BITS_PER_UNIT) == 0);
7806 
7807       result = convert_memory_address_addr_space (tmode, result, as);
7808       result = plus_constant (tmode, result, bitpos / BITS_PER_UNIT);
7809       if (modifier < EXPAND_SUM)
7810 	result = force_operand (result, target);
7811     }
7812 
7813   return result;
7814 }
7815 
7816 /* A subroutine of expand_expr.  Evaluate EXP, which is an ADDR_EXPR.
7817    The TARGET, TMODE and MODIFIER arguments are as for expand_expr.  */
7818 
7819 static rtx
7820 expand_expr_addr_expr (tree exp, rtx target, machine_mode tmode,
7821 		       enum expand_modifier modifier)
7822 {
7823   addr_space_t as = ADDR_SPACE_GENERIC;
7824   machine_mode address_mode = Pmode;
7825   machine_mode pointer_mode = ptr_mode;
7826   machine_mode rmode;
7827   rtx result;
7828 
7829   /* Target mode of VOIDmode says "whatever's natural".  */
7830   if (tmode == VOIDmode)
7831     tmode = TYPE_MODE (TREE_TYPE (exp));
7832 
7833   if (POINTER_TYPE_P (TREE_TYPE (exp)))
7834     {
7835       as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
7836       address_mode = targetm.addr_space.address_mode (as);
7837       pointer_mode = targetm.addr_space.pointer_mode (as);
7838     }
7839 
7840   /* We can get called with some Weird Things if the user does silliness
7841      like "(short) &a".  In that case, convert_memory_address won't do
7842      the right thing, so ignore the given target mode.  */
7843   if (tmode != address_mode && tmode != pointer_mode)
7844     tmode = address_mode;
7845 
7846   result = expand_expr_addr_expr_1 (TREE_OPERAND (exp, 0), target,
7847 				    tmode, modifier, as);
7848 
7849   /* Despite expand_expr's claims about ignoring TMODE when not
7850      strictly convenient, stuff breaks if we don't honor it.  Note
7851      that combined with the above, we only do this for pointer modes.  */
7852   rmode = GET_MODE (result);
7853   if (rmode == VOIDmode)
7854     rmode = tmode;
7855   if (rmode != tmode)
7856     result = convert_memory_address_addr_space (tmode, result, as);
7857 
7858   return result;
7859 }
7860 
7861 /* Generate code for computing CONSTRUCTOR EXP.
7862    An rtx for the computed value is returned.  If AVOID_TEMP_MEM
7863    is TRUE, then instead of creating a temporary variable in memory,
7864    NULL is returned and the caller needs to handle it differently.  */
7865 
7866 static rtx
7867 expand_constructor (tree exp, rtx target, enum expand_modifier modifier,
7868 		    bool avoid_temp_mem)
7869 {
7870   tree type = TREE_TYPE (exp);
7871   machine_mode mode = TYPE_MODE (type);
7872 
7873   /* Try to avoid creating a temporary at all.  This is possible
7874      if all of the initializer is zero.
7875      FIXME: try to handle all [0..255] initializers we can handle
7876      with memset.  */
7877   if (TREE_STATIC (exp)
7878       && !TREE_ADDRESSABLE (exp)
7879       && target != 0 && mode == BLKmode
7880       && all_zeros_p (exp))
7881     {
7882       clear_storage (target, expr_size (exp), BLOCK_OP_NORMAL);
7883       return target;
7884     }
7885 
7886   /* All elts simple constants => refer to a constant in memory.  But
7887      if this is a non-BLKmode mode, let it store a field at a time
7888      since that should make a CONST_INT, CONST_WIDE_INT or
7889      CONST_DOUBLE when we fold.  Likewise, if we have a target we can
7890      use, it is best to store directly into the target unless the type
7891      is large enough that memcpy will be used.  If we are making an
7892      initializer and all operands are constant, put it in memory as
7893      well.
7894 
7895      FIXME: Avoid trying to fill vector constructors piece-meal.
7896      Output them with output_constant_def below unless we're sure
7897      they're zeros.  This should go away when vector initializers
7898      are treated like VECTOR_CST instead of arrays.  */
7899   if ((TREE_STATIC (exp)
7900        && ((mode == BLKmode
7901 	    && ! (target != 0 && safe_from_p (target, exp, 1)))
7902 		  || TREE_ADDRESSABLE (exp)
7903 		  || (tree_fits_uhwi_p (TYPE_SIZE_UNIT (type))
7904 		      && (! can_move_by_pieces
7905 				     (tree_to_uhwi (TYPE_SIZE_UNIT (type)),
7906 				      TYPE_ALIGN (type)))
7907 		      && ! mostly_zeros_p (exp))))
7908       || ((modifier == EXPAND_INITIALIZER || modifier == EXPAND_CONST_ADDRESS)
7909 	  && TREE_CONSTANT (exp)))
7910     {
7911       rtx constructor;
7912 
7913       if (avoid_temp_mem)
7914 	return NULL_RTX;
7915 
7916       constructor = expand_expr_constant (exp, 1, modifier);
7917 
7918       if (modifier != EXPAND_CONST_ADDRESS
7919 	  && modifier != EXPAND_INITIALIZER
7920 	  && modifier != EXPAND_SUM)
7921 	constructor = validize_mem (constructor);
7922 
7923       return constructor;
7924     }
7925 
7926   /* Handle calls that pass values in multiple non-contiguous
7927      locations.  The Irix 6 ABI has examples of this.  */
7928   if (target == 0 || ! safe_from_p (target, exp, 1)
7929       || GET_CODE (target) == PARALLEL || modifier == EXPAND_STACK_PARM)
7930     {
7931       if (avoid_temp_mem)
7932 	return NULL_RTX;
7933 
7934       target = assign_temp (type, TREE_ADDRESSABLE (exp), 1);
7935     }
7936 
7937   store_constructor (exp, target, 0, int_expr_size (exp));
7938   return target;
7939 }
7940 
7941 
7942 /* expand_expr: generate code for computing expression EXP.
7943    An rtx for the computed value is returned.  The value is never null.
7944    In the case of a void EXP, const0_rtx is returned.
7945 
7946    The value may be stored in TARGET if TARGET is nonzero.
7947    TARGET is just a suggestion; callers must assume that
7948    the rtx returned may not be the same as TARGET.
7949 
7950    If TARGET is CONST0_RTX, it means that the value will be ignored.
7951 
7952    If TMODE is not VOIDmode, it suggests generating the
7953    result in mode TMODE.  But this is done only when convenient.
7954    Otherwise, TMODE is ignored and the value is generated in its natural mode.
7955    TMODE is just a suggestion; callers must assume that
7956    the rtx returned may not have mode TMODE.
7957 
7958    Note that TARGET may have neither TMODE nor MODE.  In that case, it
7959    probably will not be used.
7960 
7961    If MODIFIER is EXPAND_SUM then when EXP is an addition
7962    we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
7963    or a nest of (PLUS ...) and (MINUS ...) where the terms are
7964    products as above, or REG or MEM, or constant.
7965    Ordinarily in such cases we would output mul or add instructions
7966    and then return a pseudo reg containing the sum.
7967 
7968    EXPAND_INITIALIZER is much like EXPAND_SUM except that
7969    it also marks a label as absolutely required (it can't be dead).
7970    It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
7971    This is used for outputting expressions used in initializers.
7972 
7973    EXPAND_CONST_ADDRESS says that it is okay to return a MEM
7974    with a constant address even if that address is not normally legitimate.
7975    EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
7976 
7977    EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
7978    a call parameter.  Such targets require special care as we haven't yet
7979    marked TARGET so that it's safe from being trashed by libcalls.  We
7980    don't want to use TARGET for anything but the final result;
7981    intermediate values must go elsewhere.  Additionally, calls to
7982    emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
7983 
7984    If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
7985    address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
7986    DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
7987    COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
7988    recursively.
7989 
7990    If INNER_REFERENCE_P is true, we are expanding an inner reference.
7991    In this case, we don't adjust a returned MEM rtx that wouldn't be
7992    sufficiently aligned for its mode; instead, it's up to the caller
7993    to deal with it afterwards.  This is used to make sure that unaligned
7994    base objects for which out-of-bounds accesses are supported, for
7995    example record types with trailing arrays, aren't realigned behind
7996    the back of the caller.
7997    The normal operating mode is to pass FALSE for this parameter.  */
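/* Editorial sketch, not from the original sources: a minimal illustration of
   the EXPAND_SUM contract described above.  For an address computation such
   as the C expression &arr[i] (4-byte elements), a call made with EXPAND_SUM
   may legitimately return a bare sum such as

       (plus:SI (mult:SI (reg:SI 100) (const_int 4))
                (symbol_ref:SI "arr"))

   instead of a pseudo register already holding the computed address; callers
   that pass EXPAND_SUM must be prepared to handle either form.  The modes and
   the register number here are invented for illustration only.  */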
7998 
7999 rtx
8000 expand_expr_real (tree exp, rtx target, machine_mode tmode,
8001 		  enum expand_modifier modifier, rtx *alt_rtl,
8002 		  bool inner_reference_p)
8003 {
8004   rtx ret;
8005 
8006   /* Handle ERROR_MARK before anybody tries to access its type.  */
8007   if (TREE_CODE (exp) == ERROR_MARK
8008       || (TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK))
8009     {
8010       ret = CONST0_RTX (tmode);
8011       return ret ? ret : const0_rtx;
8012     }
8013 
8014   ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl,
8015 			    inner_reference_p);
8016   return ret;
8017 }
8018 
8019 /* Try to expand the conditional expression which is represented by
8020    TREEOP0 ? TREEOP1 : TREEOP2 using conditional moves.  If it succeeds,
8021    return the RTL register which represents the result.  Otherwise return
8022    NULL_RTX.  */
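/* Editorial sketch, not from the original sources: on a target with
   HAVE_conditional_move, a source expression such as

       x = a < b ? c : d;

   can be expanded by this helper into a comparison followed by a single
   conditional-move instruction, avoiding the compare-and-branch sequence
   that the generic COND_EXPR fallback below would otherwise emit.  */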
8023 
8024 static rtx
8025 expand_cond_expr_using_cmove (tree treeop0 ATTRIBUTE_UNUSED,
8026 			      tree treeop1 ATTRIBUTE_UNUSED,
8027 			      tree treeop2 ATTRIBUTE_UNUSED)
8028 {
8029 #ifdef HAVE_conditional_move
8030   rtx insn;
8031   rtx op00, op01, op1, op2;
8032   enum rtx_code comparison_code;
8033   machine_mode comparison_mode;
8034   gimple srcstmt;
8035   rtx temp;
8036   tree type = TREE_TYPE (treeop1);
8037   int unsignedp = TYPE_UNSIGNED (type);
8038   machine_mode mode = TYPE_MODE (type);
8039   machine_mode orig_mode = mode;
8040 
8041   /* If we cannot do a conditional move on the mode, try doing it
8042      with the promoted mode. */
8043   if (!can_conditionally_move_p (mode))
8044     {
8045       mode = promote_mode (type, mode, &unsignedp);
8046       if (!can_conditionally_move_p (mode))
8047 	return NULL_RTX;
8048       temp = assign_temp (type, 0, 0); /* Use promoted mode for temp.  */
8049     }
8050   else
8051     temp = assign_temp (type, 0, 1);
8052 
8053   start_sequence ();
8054   expand_operands (treeop1, treeop2,
8055 		   temp, &op1, &op2, EXPAND_NORMAL);
8056 
8057   if (TREE_CODE (treeop0) == SSA_NAME
8058       && (srcstmt = get_def_for_expr_class (treeop0, tcc_comparison)))
8059     {
8060       tree type = TREE_TYPE (gimple_assign_rhs1 (srcstmt));
8061       enum tree_code cmpcode = gimple_assign_rhs_code (srcstmt);
8062       op00 = expand_normal (gimple_assign_rhs1 (srcstmt));
8063       op01 = expand_normal (gimple_assign_rhs2 (srcstmt));
8064       comparison_mode = TYPE_MODE (type);
8065       unsignedp = TYPE_UNSIGNED (type);
8066       comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8067     }
8068   else if (TREE_CODE_CLASS (TREE_CODE (treeop0)) == tcc_comparison)
8069     {
8070       tree type = TREE_TYPE (TREE_OPERAND (treeop0, 0));
8071       enum tree_code cmpcode = TREE_CODE (treeop0);
8072       op00 = expand_normal (TREE_OPERAND (treeop0, 0));
8073       op01 = expand_normal (TREE_OPERAND (treeop0, 1));
8074       unsignedp = TYPE_UNSIGNED (type);
8075       comparison_mode = TYPE_MODE (type);
8076       comparison_code = convert_tree_comp_to_rtx (cmpcode, unsignedp);
8077     }
8078   else
8079     {
8080       op00 = expand_normal (treeop0);
8081       op01 = const0_rtx;
8082       comparison_code = NE;
8083       comparison_mode = GET_MODE (op00);
8084       if (comparison_mode == VOIDmode)
8085 	comparison_mode = TYPE_MODE (TREE_TYPE (treeop0));
8086     }
8087 
8088   if (GET_MODE (op1) != mode)
8089     op1 = gen_lowpart (mode, op1);
8090 
8091   if (GET_MODE (op2) != mode)
8092     op2 = gen_lowpart (mode, op2);
8093 
8094   /* Try to emit the conditional move.  */
8095   insn = emit_conditional_move (temp, comparison_code,
8096 				op00, op01, comparison_mode,
8097 				op1, op2, mode,
8098 				unsignedp);
8099 
8100   /* If we could do the conditional move, emit the sequence,
8101      and return.  */
8102   if (insn)
8103     {
8104       rtx_insn *seq = get_insns ();
8105       end_sequence ();
8106       emit_insn (seq);
8107       return convert_modes (orig_mode, mode, temp, 0);
8108     }
8109 
8110   /* Otherwise discard the sequence and fall back to code with
8111      branches.  */
8112   end_sequence ();
8113 #endif
8114   return NULL_RTX;
8115 }
8116 
8117 rtx
8118 expand_expr_real_2 (sepops ops, rtx target, machine_mode tmode,
8119 		    enum expand_modifier modifier)
8120 {
8121   rtx op0, op1, op2, temp;
8122   tree type;
8123   int unsignedp;
8124   machine_mode mode;
8125   enum tree_code code = ops->code;
8126   optab this_optab;
8127   rtx subtarget, original_target;
8128   int ignore;
8129   bool reduce_bit_field;
8130   location_t loc = ops->location;
8131   tree treeop0, treeop1, treeop2;
8132 #define REDUCE_BIT_FIELD(expr)	(reduce_bit_field			  \
8133 				 ? reduce_to_bit_field_precision ((expr), \
8134 								  target, \
8135 								  type)	  \
8136 				 : (expr))
8137 
8138   type = ops->type;
8139   mode = TYPE_MODE (type);
8140   unsignedp = TYPE_UNSIGNED (type);
8141 
8142   treeop0 = ops->op0;
8143   treeop1 = ops->op1;
8144   treeop2 = ops->op2;
8145 
8146   /* We should be called only on simple (binary or unary) expressions,
8147      exactly those that are valid in gimple expressions that aren't
8148      GIMPLE_SINGLE_RHS (or invalid).  */
8149   gcc_assert (get_gimple_rhs_class (code) == GIMPLE_UNARY_RHS
8150 	      || get_gimple_rhs_class (code) == GIMPLE_BINARY_RHS
8151 	      || get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS);
8152 
8153   ignore = (target == const0_rtx
8154 	    || ((CONVERT_EXPR_CODE_P (code)
8155 		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
8156 		&& TREE_CODE (type) == VOID_TYPE));
8157 
8158   /* We should be called only if we need the result.  */
8159   gcc_assert (!ignore);
8160 
8161   /* An operation in what may be a bit-field type needs the
8162      result to be reduced to the precision of the bit-field type,
8163      which is narrower than that of the type's mode.  */
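  /* Editorial example, not from the original sources: for a 3-bit unsigned
     bit-field computed in SImode, adding 7 + 2 yields 9 in the wide mode;
     REDUCE_BIT_FIELD masks that back to 9 & 7 == 1, matching the 3-bit
     wraparound the source type requires.  */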
8164   reduce_bit_field = (INTEGRAL_TYPE_P (type)
8165 		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
8166 
8167   if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
8168     target = 0;
8169 
8170   /* Use subtarget as the target for operand 0 of a binary operation.  */
8171   subtarget = get_subtarget (target);
8172   original_target = target;
8173 
8174   switch (code)
8175     {
8176     case NON_LVALUE_EXPR:
8177     case PAREN_EXPR:
8178     CASE_CONVERT:
8179       if (treeop0 == error_mark_node)
8180 	return const0_rtx;
8181 
8182       if (TREE_CODE (type) == UNION_TYPE)
8183 	{
8184 	  tree valtype = TREE_TYPE (treeop0);
8185 
8186 	  /* If both input and output are BLKmode, this conversion isn't doing
8187 	     anything except possibly changing memory attributes.  */
8188 	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
8189 	    {
8190 	      rtx result = expand_expr (treeop0, target, tmode,
8191 					modifier);
8192 
8193 	      result = copy_rtx (result);
8194 	      set_mem_attributes (result, type, 0);
8195 	      return result;
8196 	    }
8197 
8198 	  if (target == 0)
8199 	    {
8200 	      if (TYPE_MODE (type) != BLKmode)
8201 		target = gen_reg_rtx (TYPE_MODE (type));
8202 	      else
8203 		target = assign_temp (type, 1, 1);
8204 	    }
8205 
8206 	  if (MEM_P (target))
8207 	    /* Store data into beginning of memory target.  */
8208 	    store_expr (treeop0,
8209 			adjust_address (target, TYPE_MODE (valtype), 0),
8210 			modifier == EXPAND_STACK_PARM,
8211 			false);
8212 
8213 	  else
8214 	    {
8215 	      gcc_assert (REG_P (target));
8216 
8217 	      /* Store this field into a union of the proper type.  */
8218 	      store_field (target,
8219 			   MIN ((int_size_in_bytes (TREE_TYPE
8220 						    (treeop0))
8221 				 * BITS_PER_UNIT),
8222 				(HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
8223 			   0, 0, 0, TYPE_MODE (valtype), treeop0, 0, false);
8224 	    }
8225 
8226 	  /* Return the entire union.  */
8227 	  return target;
8228 	}
8229 
8230       if (mode == TYPE_MODE (TREE_TYPE (treeop0)))
8231 	{
8232 	  op0 = expand_expr (treeop0, target, VOIDmode,
8233 			     modifier);
8234 
8235 	  /* If the signedness of the conversion differs and OP0 is
8236 	     a promoted SUBREG, clear that indication since we now
8237 	     have to do the proper extension.  */
8238 	  if (TYPE_UNSIGNED (TREE_TYPE (treeop0)) != unsignedp
8239 	      && GET_CODE (op0) == SUBREG)
8240 	    SUBREG_PROMOTED_VAR_P (op0) = 0;
8241 
8242 	  return REDUCE_BIT_FIELD (op0);
8243 	}
8244 
8245       op0 = expand_expr (treeop0, NULL_RTX, mode,
8246 			 modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier);
8247       if (GET_MODE (op0) == mode)
8248 	;
8249 
8250       /* If OP0 is a constant, just convert it into the proper mode.  */
8251       else if (CONSTANT_P (op0))
8252 	{
8253 	  tree inner_type = TREE_TYPE (treeop0);
8254 	  machine_mode inner_mode = GET_MODE (op0);
8255 
8256 	  if (inner_mode == VOIDmode)
8257 	    inner_mode = TYPE_MODE (inner_type);
8258 
8259 	  if (modifier == EXPAND_INITIALIZER)
8260 	    op0 = simplify_gen_subreg (mode, op0, inner_mode,
8261 				       subreg_lowpart_offset (mode,
8262 							      inner_mode));
8263 	  else
8264 	    op0 = convert_modes (mode, inner_mode, op0,
8265 				 TYPE_UNSIGNED (inner_type));
8266 	}
8267 
8268       else if (modifier == EXPAND_INITIALIZER)
8269 	op0 = gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
8270 
8271       else if (target == 0)
8272 	op0 = convert_to_mode (mode, op0,
8273 			       TYPE_UNSIGNED (TREE_TYPE
8274 					      (treeop0)));
8275       else
8276 	{
8277 	  convert_move (target, op0,
8278 			TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8279 	  op0 = target;
8280 	}
8281 
8282       return REDUCE_BIT_FIELD (op0);
8283 
8284     case ADDR_SPACE_CONVERT_EXPR:
8285       {
8286 	tree treeop0_type = TREE_TYPE (treeop0);
8287 	addr_space_t as_to;
8288 	addr_space_t as_from;
8289 
8290 	gcc_assert (POINTER_TYPE_P (type));
8291 	gcc_assert (POINTER_TYPE_P (treeop0_type));
8292 
8293 	as_to = TYPE_ADDR_SPACE (TREE_TYPE (type));
8294 	as_from = TYPE_ADDR_SPACE (TREE_TYPE (treeop0_type));
8295 
8296         /* Conversions between pointers to the same address space should
8297 	   have been implemented via CONVERT_EXPR / NOP_EXPR.  */
8298 	gcc_assert (as_to != as_from);
8299 
8300         /* Ask target code to handle conversion between pointers
8301 	   to overlapping address spaces.  */
8302 	if (targetm.addr_space.subset_p (as_to, as_from)
8303 	    || targetm.addr_space.subset_p (as_from, as_to))
8304 	  {
8305 	    op0 = expand_expr (treeop0, NULL_RTX, VOIDmode, modifier);
8306 	    op0 = targetm.addr_space.convert (op0, treeop0_type, type);
8307 	    gcc_assert (op0);
8308 	    return op0;
8309 	  }
8310 
8311 	/* For disjoint address spaces, converting anything but
8312 	   a null pointer invokes undefined behaviour.  We simply
8313 	   always return a null pointer here.  */
8314 	return CONST0_RTX (mode);
8315       }
8316 
8317     case POINTER_PLUS_EXPR:
8318       /* Even though the sizetype mode and the pointer's mode can be different,
8319          expand is able to handle this correctly and get the correct result out
8320          of the PLUS_EXPR code.  */
8321       /* Make sure to sign-extend the sizetype offset in a POINTER_PLUS_EXPR
8322          if sizetype precision is smaller than pointer precision.  */
8323       if (TYPE_PRECISION (sizetype) < TYPE_PRECISION (type))
8324 	treeop1 = fold_convert_loc (loc, type,
8325 				    fold_convert_loc (loc, ssizetype,
8326 						      treeop1));
8327       /* If sizetype precision is larger than pointer precision, truncate the
8328 	 offset to have matching modes.  */
8329       else if (TYPE_PRECISION (sizetype) > TYPE_PRECISION (type))
8330 	treeop1 = fold_convert_loc (loc, type, treeop1);
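      /* Editorial example, hypothetical target, not from the original
	 sources: with 64-bit pointers but a 32-bit sizetype, an offset whose
	 bit pattern is 0xfffffffc is first converted to ssizetype above so
	 that it sign-extends and the sum behaves as p - 4 rather than
	 p + 0xfffffffc.  Control then falls through to the PLUS_EXPR code
	 below.  */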
8331 
8332     case PLUS_EXPR:
8333       /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
8334 	 something else, make sure we add the register to the constant and
8335 	 then to the other thing.  This case can occur during strength
8336 	 reduction and doing it this way will produce better code if the
8337 	 frame pointer or argument pointer is eliminated.
8338 
8339 	 fold-const.c will ensure that the constant is always in the inner
8340 	 PLUS_EXPR, so the only case we need to do anything about is if
8341 	 sp, ap, or fp is our second argument, in which case we must swap
8342 	 the innermost first argument and our second argument.  */
8343 
8344       if (TREE_CODE (treeop0) == PLUS_EXPR
8345 	  && TREE_CODE (TREE_OPERAND (treeop0, 1)) == INTEGER_CST
8346 	  && TREE_CODE (treeop1) == VAR_DECL
8347 	  && (DECL_RTL (treeop1) == frame_pointer_rtx
8348 	      || DECL_RTL (treeop1) == stack_pointer_rtx
8349 	      || DECL_RTL (treeop1) == arg_pointer_rtx))
8350 	{
8351 	  gcc_unreachable ();
8352 	}
8353 
8354       /* If the result is to be ptr_mode and we are adding an integer to
8355 	 something, we might be forming a constant.  So try to use
8356 	 plus_constant.  If it produces a sum and we can't accept it,
8357 	 use force_operand.  This allows P = &ARR[const] to generate
8358 	 efficient code on machines where a SYMBOL_REF is not a valid
8359 	 address.
8360 
8361 	 If this is an EXPAND_SUM call, always return the sum.  */
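      /* Editorial sketch, not from the original sources: for P = &ARR[5]
	 with 4-byte elements, plus_constant can fold the whole address into
	 a single constant such as

	     (const (plus (symbol_ref "ARR") (const_int 20)))

	 which is returned as-is for EXPAND_SUM / EXPAND_INITIALIZER callers
	 and otherwise run through force_operand below.  */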
8362       if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
8363 	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
8364 	{
8365 	  if (modifier == EXPAND_STACK_PARM)
8366 	    target = 0;
8367 	  if (TREE_CODE (treeop0) == INTEGER_CST
8368 	      && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8369 	      && TREE_CONSTANT (treeop1))
8370 	    {
8371 	      rtx constant_part;
8372 	      HOST_WIDE_INT wc;
8373 	      machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop1));
8374 
8375 	      op1 = expand_expr (treeop1, subtarget, VOIDmode,
8376 				 EXPAND_SUM);
8377 	      /* Use wi::shwi to ensure that the constant is
8378 		 truncated according to the mode of OP1, then sign extended
8379 		 to a HOST_WIDE_INT.  Using the constant directly can result
8380 		 in non-canonical RTL in a 64x32 cross compile.  */
8381 	      wc = TREE_INT_CST_LOW (treeop0);
8382 	      constant_part =
8383 		immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8384 	      op1 = plus_constant (mode, op1, INTVAL (constant_part));
8385 	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8386 		op1 = force_operand (op1, target);
8387 	      return REDUCE_BIT_FIELD (op1);
8388 	    }
8389 
8390 	  else if (TREE_CODE (treeop1) == INTEGER_CST
8391 		   && GET_MODE_PRECISION (mode) <= HOST_BITS_PER_WIDE_INT
8392 		   && TREE_CONSTANT (treeop0))
8393 	    {
8394 	      rtx constant_part;
8395 	      HOST_WIDE_INT wc;
8396 	      machine_mode wmode = TYPE_MODE (TREE_TYPE (treeop0));
8397 
8398 	      op0 = expand_expr (treeop0, subtarget, VOIDmode,
8399 				 (modifier == EXPAND_INITIALIZER
8400 				 ? EXPAND_INITIALIZER : EXPAND_SUM));
8401 	      if (! CONSTANT_P (op0))
8402 		{
8403 		  op1 = expand_expr (treeop1, NULL_RTX,
8404 				     VOIDmode, modifier);
8405 		  /* Return a PLUS if modifier says it's OK.  */
8406 		  if (modifier == EXPAND_SUM
8407 		      || modifier == EXPAND_INITIALIZER)
8408 		    return simplify_gen_binary (PLUS, mode, op0, op1);
8409 		  goto binop2;
8410 		}
8411 	      /* Use wi::shwi to ensure that the constant is
8412 		 truncated according to the mode of OP0, then sign extended
8413 		 to a HOST_WIDE_INT.  Using the constant directly can result
8414 		 in non-canonical RTL in a 64x32 cross compile.  */
8415 	      wc = TREE_INT_CST_LOW (treeop1);
8416 	      constant_part
8417 		= immed_wide_int_const (wi::shwi (wc, wmode), wmode);
8418 	      op0 = plus_constant (mode, op0, INTVAL (constant_part));
8419 	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8420 		op0 = force_operand (op0, target);
8421 	      return REDUCE_BIT_FIELD (op0);
8422 	    }
8423 	}
8424 
8425       /* Use TER to expand pointer addition of a negated value
8426 	 as pointer subtraction.  */
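      /* Editorial example, hypothetical GIMPLE, not from the original
	 sources: if treeop1 is an SSA name defined by t_1 = -n_2, the
	 pointer addition p_3 + t_1 is rewritten here as p_3 - n_2 and
	 expanded by the MINUS_EXPR code, so the negated value never has to
	 be materialized separately.  */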
8427       if ((POINTER_TYPE_P (TREE_TYPE (treeop0))
8428 	   || (TREE_CODE (TREE_TYPE (treeop0)) == VECTOR_TYPE
8429 	       && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (treeop0)))))
8430 	  && TREE_CODE (treeop1) == SSA_NAME
8431 	  && TYPE_MODE (TREE_TYPE (treeop0))
8432 	     == TYPE_MODE (TREE_TYPE (treeop1)))
8433 	{
8434 	  gimple def = get_def_for_expr (treeop1, NEGATE_EXPR);
8435 	  if (def)
8436 	    {
8437 	      treeop1 = gimple_assign_rhs1 (def);
8438 	      code = MINUS_EXPR;
8439 	      goto do_minus;
8440 	    }
8441 	}
8442 
8443       /* No sense saving up arithmetic to be done
8444 	 if it's all in the wrong mode to form part of an address.
8445 	 And force_operand won't know whether to sign-extend or
8446 	 zero-extend.  */
8447       if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8448 	  || mode != ptr_mode)
8449 	{
8450 	  expand_operands (treeop0, treeop1,
8451 			   subtarget, &op0, &op1, EXPAND_NORMAL);
8452 	  if (op0 == const0_rtx)
8453 	    return op1;
8454 	  if (op1 == const0_rtx)
8455 	    return op0;
8456 	  goto binop2;
8457 	}
8458 
8459       expand_operands (treeop0, treeop1,
8460 		       subtarget, &op0, &op1, modifier);
8461       return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8462 
8463     case MINUS_EXPR:
8464     do_minus:
8465       /* For initializers, we are allowed to return a MINUS of two
8466 	 symbolic constants.  Here we handle all cases when both operands
8467 	 are constant.  */
8468       /* Handle difference of two symbolic constants,
8469 	 for the sake of an initializer.  */
8470       if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8471 	  && really_constant_p (treeop0)
8472 	  && really_constant_p (treeop1))
8473 	{
8474 	  expand_operands (treeop0, treeop1,
8475 			   NULL_RTX, &op0, &op1, modifier);
8476 
8477 	  /* If the last operand is a CONST_INT, use plus_constant of
8478 	     the negated constant.  Else make the MINUS.  */
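	  /* Editorial example, not from the original sources: with a
	     CONST_INT second operand, say &a[8] - 16, everything folds via
	     plus_constant; with two symbolic addresses the MINUS is kept,
	     e.g. (minus (symbol_ref "b") (symbol_ref "a")), for the
	     assembler to resolve.  */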
8479 	  if (CONST_INT_P (op1))
8480 	    return REDUCE_BIT_FIELD (plus_constant (mode, op0,
8481 						    -INTVAL (op1)));
8482 	  else
8483 	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
8484 	}
8485 
8486       /* No sense saving up arithmetic to be done
8487 	 if it's all in the wrong mode to form part of an address.
8488 	 And force_operand won't know whether to sign-extend or
8489 	 zero-extend.  */
8490       if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8491 	  || mode != ptr_mode)
8492 	goto binop;
8493 
8494       expand_operands (treeop0, treeop1,
8495 		       subtarget, &op0, &op1, modifier);
8496 
8497       /* Convert A - const to A + (-const).  */
8498       if (CONST_INT_P (op1))
8499 	{
8500 	  op1 = negate_rtx (mode, op1);
8501 	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
8502 	}
8503 
8504       goto binop2;
8505 
8506     case WIDEN_MULT_PLUS_EXPR:
8507     case WIDEN_MULT_MINUS_EXPR:
8508       expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
8509       op2 = expand_normal (treeop2);
8510       target = expand_widen_pattern_expr (ops, op0, op1, op2,
8511 					  target, unsignedp);
8512       return target;
8513 
8514     case WIDEN_MULT_EXPR:
8515       /* If first operand is constant, swap them.
8516 	 Thus the following special case checks need only
8517 	 check the second operand.  */
8518       if (TREE_CODE (treeop0) == INTEGER_CST)
8519 	{
8520 	  tree t1 = treeop0;
8521 	  treeop0 = treeop1;
8522 	  treeop1 = t1;
8523 	}
8524 
8525       /* First, check if we have a multiplication of one signed and one
8526 	 unsigned operand.  */
8527       if (TREE_CODE (treeop1) != INTEGER_CST
8528 	  && (TYPE_UNSIGNED (TREE_TYPE (treeop0))
8529 	      != TYPE_UNSIGNED (TREE_TYPE (treeop1))))
8530 	{
8531 	  machine_mode innermode = TYPE_MODE (TREE_TYPE (treeop0));
8532 	  this_optab = usmul_widen_optab;
8533 	  if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8534 		!= CODE_FOR_nothing)
8535 	    {
8536 	      if (TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8537 		expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8538 				 EXPAND_NORMAL);
8539 	      else
8540 		expand_operands (treeop0, treeop1, NULL_RTX, &op1, &op0,
8541 				 EXPAND_NORMAL);
8542 	      /* op0 and op1 might still be constant, despite the above
8543 		 != INTEGER_CST check.  Handle it.  */
8544 	      if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8545 		{
8546 		  op0 = convert_modes (innermode, mode, op0, true);
8547 		  op1 = convert_modes (innermode, mode, op1, false);
8548 		  return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8549 							target, unsignedp));
8550 		}
8551 	      goto binop3;
8552 	    }
8553 	}
8554       /* Check for a multiplication with matching signedness.  */
8555       else if ((TREE_CODE (treeop1) == INTEGER_CST
8556 		&& int_fits_type_p (treeop1, TREE_TYPE (treeop0)))
8557 	       || (TYPE_UNSIGNED (TREE_TYPE (treeop1))
8558 		   == TYPE_UNSIGNED (TREE_TYPE (treeop0))))
8559 	{
8560 	  tree op0type = TREE_TYPE (treeop0);
8561 	  machine_mode innermode = TYPE_MODE (op0type);
8562 	  bool zextend_p = TYPE_UNSIGNED (op0type);
8563 	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8564 	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8565 
8566 	  if (TREE_CODE (treeop0) != INTEGER_CST)
8567 	    {
8568 	      if (find_widening_optab_handler (this_optab, mode, innermode, 0)
8569 		    != CODE_FOR_nothing)
8570 		{
8571 		  expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1,
8572 				   EXPAND_NORMAL);
8573 		  /* op0 and op1 might still be constant, despite the above
8574 		     != INTEGER_CST check.  Handle it.  */
8575 		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8576 		    {
8577 		     widen_mult_const:
8578 		      op0 = convert_modes (innermode, mode, op0, zextend_p);
8579 		      op1
8580 			= convert_modes (innermode, mode, op1,
8581 					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8582 		      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1,
8583 							    target,
8584 							    unsignedp));
8585 		    }
8586 		  temp = expand_widening_mult (mode, op0, op1, target,
8587 					       unsignedp, this_optab);
8588 		  return REDUCE_BIT_FIELD (temp);
8589 		}
8590 	      if (find_widening_optab_handler (other_optab, mode, innermode, 0)
8591 		    != CODE_FOR_nothing
8592 		  && innermode == word_mode)
8593 		{
8594 		  rtx htem, hipart;
8595 		  op0 = expand_normal (treeop0);
8596 		  if (TREE_CODE (treeop1) == INTEGER_CST)
8597 		    op1 = convert_modes (innermode, mode,
8598 					 expand_normal (treeop1),
8599 					 TYPE_UNSIGNED (TREE_TYPE (treeop1)));
8600 		  else
8601 		    op1 = expand_normal (treeop1);
8602 		  /* op0 and op1 might still be constant, despite the above
8603 		     != INTEGER_CST check.  Handle it.  */
8604 		  if (GET_MODE (op0) == VOIDmode && GET_MODE (op1) == VOIDmode)
8605 		    goto widen_mult_const;
8606 		  temp = expand_binop (mode, other_optab, op0, op1, target,
8607 				       unsignedp, OPTAB_LIB_WIDEN);
8608 		  hipart = gen_highpart (innermode, temp);
8609 		  htem = expand_mult_highpart_adjust (innermode, hipart,
8610 						      op0, op1, hipart,
8611 						      zextend_p);
8612 		  if (htem != hipart)
8613 		    emit_move_insn (hipart, htem);
8614 		  return REDUCE_BIT_FIELD (temp);
8615 		}
8616 	    }
8617 	}
8618       treeop0 = fold_build1 (CONVERT_EXPR, type, treeop0);
8619       treeop1 = fold_build1 (CONVERT_EXPR, type, treeop1);
8620       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8621       return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8622 
8623     case FMA_EXPR:
8624       {
8625 	optab opt = fma_optab;
8626 	gimple def0, def2;
8627 
8628 	/* If there is no insn for FMA, emit it as a __builtin_fma{,f,l}
8629 	   call.  */
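	/* Editorial sketch, not from the original sources: e.g. for double
	   operands on a target without an fma insn pattern for the mode,
	   an FMA_EXPR computing a * b + c is lowered below to a call to
	   the fma () built-in; otherwise the NEGATE_EXPR checks that
	   follow select the fms / fnma / fnms variants when an operand is
	   fed by a negation.  */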
8630 	if (optab_handler (fma_optab, mode) == CODE_FOR_nothing)
8631 	  {
8632 	    tree fn = mathfn_built_in (TREE_TYPE (treeop0), BUILT_IN_FMA);
8633 	    tree call_expr;
8634 
8635 	    gcc_assert (fn != NULL_TREE);
8636 	    call_expr = build_call_expr (fn, 3, treeop0, treeop1, treeop2);
8637 	    return expand_builtin (call_expr, target, subtarget, mode, false);
8638 	  }
8639 
8640 	def0 = get_def_for_expr (treeop0, NEGATE_EXPR);
8641 	/* The multiplication is commutative - look at its 2nd operand
8642 	   if the first isn't fed by a negate.  */
8643 	if (!def0)
8644 	  {
8645 	    def0 = get_def_for_expr (treeop1, NEGATE_EXPR);
8646 	    /* Swap operands if the 2nd operand is fed by a negate.  */
8647 	    if (def0)
8648 	      {
8649 		tree tem = treeop0;
8650 		treeop0 = treeop1;
8651 		treeop1 = tem;
8652 	      }
8653 	  }
8654 	def2 = get_def_for_expr (treeop2, NEGATE_EXPR);
8655 
8656 	op0 = op2 = NULL;
8657 
8658 	if (def0 && def2
8659 	    && optab_handler (fnms_optab, mode) != CODE_FOR_nothing)
8660 	  {
8661 	    opt = fnms_optab;
8662 	    op0 = expand_normal (gimple_assign_rhs1 (def0));
8663 	    op2 = expand_normal (gimple_assign_rhs1 (def2));
8664 	  }
8665 	else if (def0
8666 		 && optab_handler (fnma_optab, mode) != CODE_FOR_nothing)
8667 	  {
8668 	    opt = fnma_optab;
8669 	    op0 = expand_normal (gimple_assign_rhs1 (def0));
8670 	  }
8671 	else if (def2
8672 		 && optab_handler (fms_optab, mode) != CODE_FOR_nothing)
8673 	  {
8674 	    opt = fms_optab;
8675 	    op2 = expand_normal (gimple_assign_rhs1 (def2));
8676 	  }
8677 
8678 	if (op0 == NULL)
8679 	  op0 = expand_expr (treeop0, subtarget, VOIDmode, EXPAND_NORMAL);
8680 	if (op2 == NULL)
8681 	  op2 = expand_normal (treeop2);
8682 	op1 = expand_normal (treeop1);
8683 
8684 	return expand_ternary_op (TYPE_MODE (type), opt,
8685 				  op0, op1, op2, target, 0);
8686       }
8687 
8688     case MULT_EXPR:
8689       /* If this is a fixed-point operation, then we cannot use the code
8690 	 below because "expand_mult" doesn't support sat/no-sat fixed-point
8691          multiplications.   */
8692       if (ALL_FIXED_POINT_MODE_P (mode))
8693 	goto binop;
8694 
8695       /* If first operand is constant, swap them.
8696 	 Thus the following special case checks need only
8697 	 check the second operand.  */
8698       if (TREE_CODE (treeop0) == INTEGER_CST)
8699 	{
8700 	  tree t1 = treeop0;
8701 	  treeop0 = treeop1;
8702 	  treeop1 = t1;
8703 	}
8704 
8705       /* Attempt to return something suitable for generating an
8706 	 indexed address, for machines that support that.  */
8707 
8708       if (modifier == EXPAND_SUM && mode == ptr_mode
8709 	  && tree_fits_shwi_p (treeop1))
8710 	{
8711 	  tree exp1 = treeop1;
8712 
8713 	  op0 = expand_expr (treeop0, subtarget, VOIDmode,
8714 			     EXPAND_SUM);
8715 
8716 	  if (!REG_P (op0))
8717 	    op0 = force_operand (op0, NULL_RTX);
8718 	  if (!REG_P (op0))
8719 	    op0 = copy_to_mode_reg (mode, op0);
8720 
8721 	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
8722 			       gen_int_mode (tree_to_shwi (exp1),
8723 					     TYPE_MODE (TREE_TYPE (exp1)))));
8724 	}
8725 
8726       if (modifier == EXPAND_STACK_PARM)
8727 	target = 0;
8728 
8729       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8730       return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
8731 
8732     case TRUNC_DIV_EXPR:
8733     case FLOOR_DIV_EXPR:
8734     case CEIL_DIV_EXPR:
8735     case ROUND_DIV_EXPR:
8736     case EXACT_DIV_EXPR:
8737       /* If this is a fixed-point operation, then we cannot use the code
8738 	 below because "expand_divmod" doesn't support sat/no-sat fixed-point
8739          divisions.   */
8740       if (ALL_FIXED_POINT_MODE_P (mode))
8741 	goto binop;
8742 
8743       if (modifier == EXPAND_STACK_PARM)
8744 	target = 0;
8745       /* Possible optimization: compute the dividend with EXPAND_SUM
8746 	 then, if the divisor is constant, optimize the case
8747 	 where some terms of the dividend have coefficients divisible by it.  */
8748       expand_operands (treeop0, treeop1,
8749 		       subtarget, &op0, &op1, EXPAND_NORMAL);
8750       return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8751 
8752     case RDIV_EXPR:
8753       goto binop;
8754 
8755     case MULT_HIGHPART_EXPR:
8756       expand_operands (treeop0, treeop1, subtarget, &op0, &op1, EXPAND_NORMAL);
8757       temp = expand_mult_highpart (mode, op0, op1, target, unsignedp);
8758       gcc_assert (temp);
8759       return temp;
8760 
8761     case TRUNC_MOD_EXPR:
8762     case FLOOR_MOD_EXPR:
8763     case CEIL_MOD_EXPR:
8764     case ROUND_MOD_EXPR:
8765       if (modifier == EXPAND_STACK_PARM)
8766 	target = 0;
8767       expand_operands (treeop0, treeop1,
8768 		       subtarget, &op0, &op1, EXPAND_NORMAL);
8769       return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8770 
8771     case FIXED_CONVERT_EXPR:
8772       op0 = expand_normal (treeop0);
8773       if (target == 0 || modifier == EXPAND_STACK_PARM)
8774 	target = gen_reg_rtx (mode);
8775 
8776       if ((TREE_CODE (TREE_TYPE (treeop0)) == INTEGER_TYPE
8777 	   && TYPE_UNSIGNED (TREE_TYPE (treeop0)))
8778           || (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type)))
8779 	expand_fixed_convert (target, op0, 1, TYPE_SATURATING (type));
8780       else
8781 	expand_fixed_convert (target, op0, 0, TYPE_SATURATING (type));
8782       return target;
8783 
8784     case FIX_TRUNC_EXPR:
8785       op0 = expand_normal (treeop0);
8786       if (target == 0 || modifier == EXPAND_STACK_PARM)
8787 	target = gen_reg_rtx (mode);
8788       expand_fix (target, op0, unsignedp);
8789       return target;
8790 
8791     case FLOAT_EXPR:
8792       op0 = expand_normal (treeop0);
8793       if (target == 0 || modifier == EXPAND_STACK_PARM)
8794 	target = gen_reg_rtx (mode);
8795       /* expand_float can't figure out what to do if FROM has VOIDmode.
8796 	 So give it the correct mode.  With -O, cse will optimize this.  */
8797       if (GET_MODE (op0) == VOIDmode)
8798 	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (treeop0)),
8799 				op0);
8800       expand_float (target, op0,
8801 		    TYPE_UNSIGNED (TREE_TYPE (treeop0)));
8802       return target;
8803 
8804     case NEGATE_EXPR:
8805       op0 = expand_expr (treeop0, subtarget,
8806 			 VOIDmode, EXPAND_NORMAL);
8807       if (modifier == EXPAND_STACK_PARM)
8808 	target = 0;
8809       temp = expand_unop (mode,
8810       			  optab_for_tree_code (NEGATE_EXPR, type,
8811 					       optab_default),
8812 			  op0, target, 0);
8813       gcc_assert (temp);
8814       return REDUCE_BIT_FIELD (temp);
8815 
8816     case ABS_EXPR:
8817       op0 = expand_expr (treeop0, subtarget,
8818 			 VOIDmode, EXPAND_NORMAL);
8819       if (modifier == EXPAND_STACK_PARM)
8820 	target = 0;
8821 
8822       /* ABS_EXPR is not valid for complex arguments.  */
8823       gcc_assert (GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
8824 		  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT);
8825 
8826       /* Unsigned abs is simply the operand.  Testing here means we don't
8827 	 risk generating incorrect code below.  */
8828       if (TYPE_UNSIGNED (type))
8829 	return op0;
8830 
8831       return expand_abs (mode, op0, target, unsignedp,
8832 			 safe_from_p (target, treeop0, 1));
8833 
8834     case MAX_EXPR:
8835     case MIN_EXPR:
8836       target = original_target;
8837       if (target == 0
8838 	  || modifier == EXPAND_STACK_PARM
8839 	  || (MEM_P (target) && MEM_VOLATILE_P (target))
8840 	  || GET_MODE (target) != mode
8841 	  || (REG_P (target)
8842 	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
8843 	target = gen_reg_rtx (mode);
8844       expand_operands (treeop0, treeop1,
8845 		       target, &op0, &op1, EXPAND_NORMAL);
8846 
8847       /* First try to do it with a special MIN or MAX instruction.
8848 	 If that does not win, use a conditional jump to select the proper
8849 	 value.  */
8850       this_optab = optab_for_tree_code (code, type, optab_default);
8851       temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8852 			   OPTAB_WIDEN);
8853       if (temp != 0)
8854 	return temp;
8855 
8856       /* For vector MIN <x, y>, expand it as a VEC_COND_EXPR <x <= y, x, y>
8857 	 and similarly for MAX <x, y>.  */
8858       if (VECTOR_TYPE_P (type))
8859 	{
8860 	  tree t0 = make_tree (type, op0);
8861 	  tree t1 = make_tree (type, op1);
8862 	  tree comparison = build2 (code == MIN_EXPR ? LE_EXPR : GE_EXPR,
8863 				    type, t0, t1);
8864 	  return expand_vec_cond_expr (type, comparison, t0, t1,
8865 				       original_target);
8866 	}
8867 
8868       /* At this point, a MEM target is no longer useful; we will get better
8869 	 code without it.  */
8870 
8871       if (! REG_P (target))
8872 	target = gen_reg_rtx (mode);
8873 
8874       /* If op1 was placed in target, swap op0 and op1.  */
8875       if (target != op0 && target == op1)
8876 	{
8877 	  temp = op0;
8878 	  op0 = op1;
8879 	  op1 = temp;
8880 	}
8881 
8882       /* We generate better code and avoid problems with op1 mentioning
8883 	 target by forcing op1 into a pseudo if it isn't a constant.  */
8884       if (! CONSTANT_P (op1))
8885 	op1 = force_reg (mode, op1);
8886 
8887       {
8888 	enum rtx_code comparison_code;
8889 	rtx cmpop1 = op1;
8890 
8891 	if (code == MAX_EXPR)
8892 	  comparison_code = unsignedp ? GEU : GE;
8893 	else
8894 	  comparison_code = unsignedp ? LEU : LE;
8895 
8896 	/* Canonicalize to comparisons against 0.  */
8897 	if (op1 == const1_rtx)
8898 	  {
8899 	    /* Converting (a >= 1 ? a : 1) into (a > 0 ? a : 1)
8900 	       or (a != 0 ? a : 1) for unsigned.
8901 	       For MIN we are safe converting (a <= 1 ? a : 1)
8902 	       into (a <= 0 ? a : 1)  */
8903 	    cmpop1 = const0_rtx;
8904 	    if (code == MAX_EXPR)
8905 	      comparison_code = unsignedp ? NE : GT;
8906 	  }
8907 	if (op1 == constm1_rtx && !unsignedp)
8908 	  {
8909 	    /* Converting (a >= -1 ? a : -1) into (a >= 0 ? a : -1)
8910 	       and (a <= -1 ? a : -1) into (a < 0 ? a : -1) */
8911 	    cmpop1 = const0_rtx;
8912 	    if (code == MIN_EXPR)
8913 	      comparison_code = LT;
8914 	  }
8915 #ifdef HAVE_conditional_move
8916 	/* Use a conditional move if possible.  */
8917 	if (can_conditionally_move_p (mode))
8918 	  {
8919 	    rtx insn;
8920 
8921 	    start_sequence ();
8922 
8923 	    /* Try to emit the conditional move.  */
8924 	    insn = emit_conditional_move (target, comparison_code,
8925 					  op0, cmpop1, mode,
8926 					  op0, op1, mode,
8927 					  unsignedp);
8928 
8929 	    /* If we could do the conditional move, emit the sequence,
8930 	       and return.  */
8931 	    if (insn)
8932 	      {
8933 		rtx_insn *seq = get_insns ();
8934 		end_sequence ();
8935 		emit_insn (seq);
8936 		return target;
8937 	      }
8938 
8939 	    /* Otherwise discard the sequence and fall back to code with
8940 	       branches.  */
8941 	    end_sequence ();
8942 	  }
8943 #endif
8944 	if (target != op0)
8945 	  emit_move_insn (target, op0);
8946 
8947 	temp = gen_label_rtx ();
8948 	do_compare_rtx_and_jump (target, cmpop1, comparison_code,
8949 				 unsignedp, mode, NULL_RTX, NULL_RTX, temp,
8950 				 -1);
8951       }
8952       emit_move_insn (target, op1);
8953       emit_label (temp);
8954       return target;
8955 
8956     case BIT_NOT_EXPR:
8957       op0 = expand_expr (treeop0, subtarget,
8958 			 VOIDmode, EXPAND_NORMAL);
8959       if (modifier == EXPAND_STACK_PARM)
8960 	target = 0;
8961       /* In case we have to reduce the result to bitfield precision
8962 	 for an unsigned bitfield, expand this as XOR with a proper constant
8963 	 instead.  */
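      /* Editorial example, not from the original sources: for a 3-bit
	 unsigned bit-field held in SImode, ~x is emitted below as x ^ 7,
	 which complements the low three bits and keeps the result inside
	 the field's precision in a single operation.  */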
8964       if (reduce_bit_field && TYPE_UNSIGNED (type))
8965 	{
8966 	  wide_int mask = wi::mask (TYPE_PRECISION (type),
8967 				    false, GET_MODE_PRECISION (mode));
8968 
8969 	  temp = expand_binop (mode, xor_optab, op0,
8970 			       immed_wide_int_const (mask, mode),
8971 			       target, 1, OPTAB_LIB_WIDEN);
8972 	}
8973       else
8974 	temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8975       gcc_assert (temp);
8976       return temp;
8977 
8978       /* ??? Can optimize bitwise operations with one arg constant.
8979 	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8980 	 and (a bitwise1 b) bitwise2 b (etc)
8981 	 but that is probably not worth while.  */
8982 	 but that is probably not worthwhile.  */
8983     case BIT_AND_EXPR:
8984     case BIT_IOR_EXPR:
8985     case BIT_XOR_EXPR:
8986       goto binop;
8987 
8988     case LROTATE_EXPR:
8989     case RROTATE_EXPR:
8990       gcc_assert (VECTOR_MODE_P (TYPE_MODE (type))
8991 		  || (GET_MODE_PRECISION (TYPE_MODE (type))
8992 		      == TYPE_PRECISION (type)));
8993       /* fall through */
8994 
8995     case LSHIFT_EXPR:
8996     case RSHIFT_EXPR:
8997       /* If this is a fixed-point operation, then we cannot use the code
8998 	 below because "expand_shift" doesn't support sat/no-sat fixed-point
8999          shifts.   */
9000       if (ALL_FIXED_POINT_MODE_P (mode))
9001 	goto binop;
9002 
9003       if (! safe_from_p (subtarget, treeop1, 1))
9004 	subtarget = 0;
9005       if (modifier == EXPAND_STACK_PARM)
9006 	target = 0;
9007       op0 = expand_expr (treeop0, subtarget,
9008 			 VOIDmode, EXPAND_NORMAL);
9009       temp = expand_variable_shift (code, mode, op0, treeop1, target,
9010 				    unsignedp);
9011       if (code == LSHIFT_EXPR)
9012 	temp = REDUCE_BIT_FIELD (temp);
9013       return temp;
9014 
9015       /* Could determine the answer when only additive constants differ.  Also,
9016 	 the addition of one can be handled by changing the condition.  */
9017     case LT_EXPR:
9018     case LE_EXPR:
9019     case GT_EXPR:
9020     case GE_EXPR:
9021     case EQ_EXPR:
9022     case NE_EXPR:
9023     case UNORDERED_EXPR:
9024     case ORDERED_EXPR:
9025     case UNLT_EXPR:
9026     case UNLE_EXPR:
9027     case UNGT_EXPR:
9028     case UNGE_EXPR:
9029     case UNEQ_EXPR:
9030     case LTGT_EXPR:
9031       temp = do_store_flag (ops,
9032 			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
9033 			    tmode != VOIDmode ? tmode : mode);
9034       if (temp)
9035 	return temp;
9036 
9037       /* Use a compare and a jump for BLKmode comparisons, or for function
9038 	 type comparisons if HAVE_canonicalize_funcptr_for_compare.  */
9039 
9040       if ((target == 0
9041 	   || modifier == EXPAND_STACK_PARM
9042 	   || ! safe_from_p (target, treeop0, 1)
9043 	   || ! safe_from_p (target, treeop1, 1)
9044 	   /* Make sure we don't have a hard reg (such as function's return
9045 	      value) live across basic blocks, if not optimizing.  */
9046 	   || (!optimize && REG_P (target)
9047 	       && REGNO (target) < FIRST_PSEUDO_REGISTER)))
9048 	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
9049 
9050       emit_move_insn (target, const0_rtx);
9051 
9052       op1 = gen_label_rtx ();
9053       jumpifnot_1 (code, treeop0, treeop1, op1, -1);
9054 
9055       if (TYPE_PRECISION (type) == 1 && !TYPE_UNSIGNED (type))
9056 	emit_move_insn (target, constm1_rtx);
9057       else
9058 	emit_move_insn (target, const1_rtx);
9059 
9060       emit_label (op1);
9061       return target;
9062 
9063     case COMPLEX_EXPR:
9064       /* Get the rtx code of the operands.  */
9065       op0 = expand_normal (treeop0);
9066       op1 = expand_normal (treeop1);
9067 
9068       if (!target)
9069 	target = gen_reg_rtx (TYPE_MODE (type));
9070       else
9071 	/* If target overlaps with op1, then either we need to force
9072 	   op1 into a pseudo (if target also overlaps with op0),
9073 	   or write the complex parts in reverse order.  */
9074 	switch (GET_CODE (target))
9075 	  {
9076 	  case CONCAT:
9077 	    if (reg_overlap_mentioned_p (XEXP (target, 0), op1))
9078 	      {
9079 		if (reg_overlap_mentioned_p (XEXP (target, 1), op0))
9080 		  {
9081 		  complex_expr_force_op1:
9082 		    temp = gen_reg_rtx (GET_MODE_INNER (GET_MODE (target)));
9083 		    emit_move_insn (temp, op1);
9084 		    op1 = temp;
9085 		    break;
9086 		  }
9087 	      complex_expr_swap_order:
9088 		/* Move the imaginary (op1) and real (op0) parts to their
9089 		   location.  */
9090 		write_complex_part (target, op1, true);
9091 		write_complex_part (target, op0, false);
9092 
9093 		return target;
9094 	      }
9095 	    break;
9096 	  case MEM:
9097 	    temp = adjust_address_nv (target,
9098 				      GET_MODE_INNER (GET_MODE (target)), 0);
9099 	    if (reg_overlap_mentioned_p (temp, op1))
9100 	      {
9101 		machine_mode imode = GET_MODE_INNER (GET_MODE (target));
9102 		temp = adjust_address_nv (target, imode,
9103 					  GET_MODE_SIZE (imode));
9104 		if (reg_overlap_mentioned_p (temp, op0))
9105 		  goto complex_expr_force_op1;
9106 		goto complex_expr_swap_order;
9107 	      }
9108 	    break;
9109 	  default:
9110 	    if (reg_overlap_mentioned_p (target, op1))
9111 	      {
9112 		if (reg_overlap_mentioned_p (target, op0))
9113 		  goto complex_expr_force_op1;
9114 		goto complex_expr_swap_order;
9115 	      }
9116 	    break;
9117 	  }
9118 
9119       /* Move the real (op0) and imaginary (op1) parts to their location.  */
9120       write_complex_part (target, op0, false);
9121       write_complex_part (target, op1, true);
9122 
9123       return target;
9124 
9125     case WIDEN_SUM_EXPR:
9126       {
9127         tree oprnd0 = treeop0;
9128         tree oprnd1 = treeop1;
9129 
9130         expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9131         target = expand_widen_pattern_expr (ops, op0, NULL_RTX, op1,
9132                                             target, unsignedp);
9133         return target;
9134       }
9135 
9136     case REDUC_MAX_EXPR:
9137     case REDUC_MIN_EXPR:
9138     case REDUC_PLUS_EXPR:
9139       {
9140         op0 = expand_normal (treeop0);
9141         this_optab = optab_for_tree_code (code, type, optab_default);
9142         machine_mode vec_mode = TYPE_MODE (TREE_TYPE (treeop0));
9143 
9144 	if (optab_handler (this_optab, vec_mode) != CODE_FOR_nothing)
9145 	  {
9146 	    struct expand_operand ops[2];
9147 	    enum insn_code icode = optab_handler (this_optab, vec_mode);
9148 
9149 	    create_output_operand (&ops[0], target, mode);
9150 	    create_input_operand (&ops[1], op0, vec_mode);
9151 	    if (maybe_expand_insn (icode, 2, ops))
9152 	      {
9153 		target = ops[0].value;
9154 		if (GET_MODE (target) != mode)
9155 		  return gen_lowpart (tmode, target);
9156 		return target;
9157 	      }
9158 	  }
9159 	/* Fall back to optab with vector result, and then extract scalar.  */
9160 	this_optab = scalar_reduc_to_vector (this_optab, type);
9161         temp = expand_unop (vec_mode, this_optab, op0, NULL_RTX, unsignedp);
9162         gcc_assert (temp);
9163         /* The tree code produces a scalar result, but (somewhat by convention)
9164            the optab produces a vector with the result in element 0 if
9165            little-endian, or element N-1 if big-endian.  So pull the scalar
9166            result out of that element.  */
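        /* Editorial example, not from the original sources: a
           REDUC_PLUS_EXPR over a V4SI vector {1, 2, 3, 4} yields a V4SI
           whose element 0 (on a little-endian target) holds 10; the
           extract_bit_field call below pulls that 32-bit lane out as the
           scalar result.  */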
9167         int index = BYTES_BIG_ENDIAN ? GET_MODE_NUNITS (vec_mode) - 1 : 0;
9168         int bitsize = GET_MODE_BITSIZE (GET_MODE_INNER (vec_mode));
9169         temp = extract_bit_field (temp, bitsize, bitsize * index, unsignedp,
9170 				  target, mode, mode);
9171         gcc_assert (temp);
9172         return temp;
9173       }
9174 
9175     case VEC_UNPACK_HI_EXPR:
9176     case VEC_UNPACK_LO_EXPR:
9177       {
9178 	op0 = expand_normal (treeop0);
9179 	temp = expand_widen_pattern_expr (ops, op0, NULL_RTX, NULL_RTX,
9180 					  target, unsignedp);
9181 	gcc_assert (temp);
9182 	return temp;
9183       }
9184 
9185     case VEC_UNPACK_FLOAT_HI_EXPR:
9186     case VEC_UNPACK_FLOAT_LO_EXPR:
9187       {
9188 	op0 = expand_normal (treeop0);
9189 	/* The signedness is determined from input operand.  */
9190 	temp = expand_widen_pattern_expr
9191 	  (ops, op0, NULL_RTX, NULL_RTX,
9192 	   target, TYPE_UNSIGNED (TREE_TYPE (treeop0)));
9193 
9194 	gcc_assert (temp);
9195 	return temp;
9196       }
9197 
9198     case VEC_WIDEN_MULT_HI_EXPR:
9199     case VEC_WIDEN_MULT_LO_EXPR:
9200     case VEC_WIDEN_MULT_EVEN_EXPR:
9201     case VEC_WIDEN_MULT_ODD_EXPR:
9202     case VEC_WIDEN_LSHIFT_HI_EXPR:
9203     case VEC_WIDEN_LSHIFT_LO_EXPR:
9204       expand_operands (treeop0, treeop1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9205       target = expand_widen_pattern_expr (ops, op0, op1, NULL_RTX,
9206 					  target, unsignedp);
9207       gcc_assert (target);
9208       return target;
9209 
9210     case VEC_PACK_TRUNC_EXPR:
9211     case VEC_PACK_SAT_EXPR:
9212     case VEC_PACK_FIX_TRUNC_EXPR:
9213       mode = TYPE_MODE (TREE_TYPE (treeop0));
9214       goto binop;
9215 
9216     case VEC_PERM_EXPR:
9217       expand_operands (treeop0, treeop1, target, &op0, &op1, EXPAND_NORMAL);
9218       op2 = expand_normal (treeop2);
9219 
9220       /* Careful here: if the target doesn't support integral vector modes,
9221 	 a constant selection vector could wind up smooshed into a normal
9222 	 integral constant.  */
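      /* Editorial example, hypothetical target, not from the original
	 sources: a permutation selector such as {0, 2, 4, 6} may arrive
	 here folded into one wide integer constant if the target has no
	 integral vector modes; the code below rebuilds it as a CONST_VECTOR
	 in a suitable MODE_VECTOR_INT mode before calling expand_vec_perm.  */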
9223       if (CONSTANT_P (op2) && GET_CODE (op2) != CONST_VECTOR)
9224 	{
9225 	  tree sel_type = TREE_TYPE (treeop2);
9226 	  machine_mode vmode
9227 	    = mode_for_vector (TYPE_MODE (TREE_TYPE (sel_type)),
9228 			       TYPE_VECTOR_SUBPARTS (sel_type));
9229 	  gcc_assert (GET_MODE_CLASS (vmode) == MODE_VECTOR_INT);
9230 	  op2 = simplify_subreg (vmode, op2, TYPE_MODE (sel_type), 0);
9231 	  gcc_assert (op2 && GET_CODE (op2) == CONST_VECTOR);
9232 	}
9233       else
9234         gcc_assert (GET_MODE_CLASS (GET_MODE (op2)) == MODE_VECTOR_INT);
9235 
9236       temp = expand_vec_perm (mode, op0, op1, op2, target);
9237       gcc_assert (temp);
9238       return temp;
9239 
9240     case DOT_PROD_EXPR:
9241       {
9242 	tree oprnd0 = treeop0;
9243 	tree oprnd1 = treeop1;
9244 	tree oprnd2 = treeop2;
9245 	rtx op2;
9246 
9247 	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9248 	op2 = expand_normal (oprnd2);
9249 	target = expand_widen_pattern_expr (ops, op0, op1, op2,
9250 					    target, unsignedp);
9251 	return target;
9252       }
9253 
9254     case SAD_EXPR:
9255       {
9256 	tree oprnd0 = treeop0;
9257 	tree oprnd1 = treeop1;
9258 	tree oprnd2 = treeop2;
9259 	rtx op2;
9260 
9261 	expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9262 	op2 = expand_normal (oprnd2);
9263 	target = expand_widen_pattern_expr (ops, op0, op1, op2,
9264 					    target, unsignedp);
9265 	return target;
9266       }
9267 
9268     case REALIGN_LOAD_EXPR:
9269       {
9270         tree oprnd0 = treeop0;
9271         tree oprnd1 = treeop1;
9272         tree oprnd2 = treeop2;
9273         rtx op2;
9274 
9275         this_optab = optab_for_tree_code (code, type, optab_default);
9276         expand_operands (oprnd0, oprnd1, NULL_RTX, &op0, &op1, EXPAND_NORMAL);
9277         op2 = expand_normal (oprnd2);
9278         temp = expand_ternary_op (mode, this_optab, op0, op1, op2,
9279 				  target, unsignedp);
9280         gcc_assert (temp);
9281         return temp;
9282       }
9283 
9284     case COND_EXPR:
9285       /* A COND_EXPR with its type being VOID_TYPE represents a
9286 	 conditional jump and is handled in
9287 	 expand_gimple_cond_expr.  */
9288       gcc_assert (!VOID_TYPE_P (type));
9289 
9290       /* Note that COND_EXPRs whose type is a structure or union
9291 	 are required to be constructed to contain assignments of
9292 	 a temporary variable, so that we can evaluate them here
9293 	 for side effect only.  If type is void, we must do likewise.  */
9294 
9295       gcc_assert (!TREE_ADDRESSABLE (type)
9296 		  && !ignore
9297 		  && TREE_TYPE (treeop1) != void_type_node
9298 		  && TREE_TYPE (treeop2) != void_type_node);
9299 
9300       temp = expand_cond_expr_using_cmove (treeop0, treeop1, treeop2);
9301       if (temp)
9302 	return temp;
9303 
9304       /* If we are not to produce a result, we have no target.  Otherwise,
9305 	 if a target was specified use it; it will not be used as an
9306 	 intermediate target unless it is safe.  If no target, use a
9307 	 temporary.  */
9308 
9309       if (modifier != EXPAND_STACK_PARM
9310 	  && original_target
9311 	  && safe_from_p (original_target, treeop0, 1)
9312 	  && GET_MODE (original_target) == mode
9313 	  && !MEM_P (original_target))
9314 	temp = original_target;
9315       else
9316 	temp = assign_temp (type, 0, 1);
9317 
9318       do_pending_stack_adjust ();
9319       NO_DEFER_POP;
9320       op0 = gen_label_rtx ();
9321       op1 = gen_label_rtx ();
9322       jumpifnot (treeop0, op0, -1);
9323       store_expr (treeop1, temp,
9324 		  modifier == EXPAND_STACK_PARM,
9325 		  false);
9326 
9327       emit_jump_insn (gen_jump (op1));
9328       emit_barrier ();
9329       emit_label (op0);
9330       store_expr (treeop2, temp,
9331 		  modifier == EXPAND_STACK_PARM,
9332 		  false);
9333 
9334       emit_label (op1);
9335       OK_DEFER_POP;
9336       return temp;
9337 
9338     case VEC_COND_EXPR:
9339       target = expand_vec_cond_expr (type, treeop0, treeop1, treeop2, target);
9340       return target;
9341 
9342     default:
9343       gcc_unreachable ();
9344     }
9345 
9346   /* Here to do an ordinary binary operator.  */
9347  binop:
9348   expand_operands (treeop0, treeop1,
9349 		   subtarget, &op0, &op1, EXPAND_NORMAL);
9350  binop2:
9351   this_optab = optab_for_tree_code (code, type, optab_default);
9352  binop3:
9353   if (modifier == EXPAND_STACK_PARM)
9354     target = 0;
9355   temp = expand_binop (mode, this_optab, op0, op1, target,
9356 		       unsignedp, OPTAB_LIB_WIDEN);
9357   gcc_assert (temp);
9358   /* Bitwise operations do not need bitfield reduction as we expect their
9359      operands to be properly truncated.  */
9360   if (code == BIT_XOR_EXPR
9361       || code == BIT_AND_EXPR
9362       || code == BIT_IOR_EXPR)
9363     return temp;
9364   return REDUCE_BIT_FIELD (temp);
9365 }
9366 #undef REDUCE_BIT_FIELD
9367 
9368 
9369 /* Return TRUE if expression STMT is suitable for replacement.
9370    Never consider memory loads as replaceable, because those never lead
9371    to constant expressions.  */
9372 
9373 static bool
9374 stmt_is_replaceable_p (gimple stmt)
9375 {
9376   if (ssa_is_replaceable_p (stmt))
9377     {
9378       /* Don't move around loads.  */
9379       if (!gimple_assign_single_p (stmt)
9380 	  || is_gimple_val (gimple_assign_rhs1 (stmt)))
9381 	return true;
9382     }
9383   return false;
9384 }
9385 
9386 rtx
9387 expand_expr_real_1 (tree exp, rtx target, machine_mode tmode,
9388 		    enum expand_modifier modifier, rtx *alt_rtl,
9389 		    bool inner_reference_p)
9390 {
9391   rtx op0, op1, temp, decl_rtl;
9392   tree type;
9393   int unsignedp;
9394   machine_mode mode;
9395   enum tree_code code = TREE_CODE (exp);
9396   rtx subtarget, original_target;
9397   int ignore;
9398   tree context;
9399   bool reduce_bit_field;
9400   location_t loc = EXPR_LOCATION (exp);
9401   struct separate_ops ops;
9402   tree treeop0, treeop1, treeop2;
9403   tree ssa_name = NULL_TREE;
9404   gimple g;
9405 
9406   type = TREE_TYPE (exp);
9407   mode = TYPE_MODE (type);
9408   unsignedp = TYPE_UNSIGNED (type);
9409 
9410   treeop0 = treeop1 = treeop2 = NULL_TREE;
9411   if (!VL_EXP_CLASS_P (exp))
9412     switch (TREE_CODE_LENGTH (code))
9413       {
9414 	default:
9415 	case 3: treeop2 = TREE_OPERAND (exp, 2);
9416 	case 2: treeop1 = TREE_OPERAND (exp, 1);
9417 	case 1: treeop0 = TREE_OPERAND (exp, 0);
9418 	case 0: break;
9419       }
9420   ops.code = code;
9421   ops.type = type;
9422   ops.op0 = treeop0;
9423   ops.op1 = treeop1;
9424   ops.op2 = treeop2;
9425   ops.location = loc;
9426 
9427   ignore = (target == const0_rtx
9428 	    || ((CONVERT_EXPR_CODE_P (code)
9429 		 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
9430 		&& TREE_CODE (type) == VOID_TYPE));
9431 
9432   /* An operation in what may be a bit-field type needs the
9433      result to be reduced to the precision of the bit-field type,
9434      which is narrower than that of the type's mode.  */
9435   reduce_bit_field = (!ignore
9436 		      && INTEGRAL_TYPE_P (type)
9437 		      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type));
9438 
9439   /* If we are going to ignore this result, we need only do something
9440      if there is a side-effect somewhere in the expression.  If there
9441      is, short-circuit the most common cases here.  Note that we must
9442      not call expand_expr with anything but const0_rtx in case this
9443      is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */
9444 
9445   if (ignore)
9446     {
9447       if (! TREE_SIDE_EFFECTS (exp))
9448 	return const0_rtx;
9449 
9450       /* Ensure we reference a volatile object even if value is ignored, but
9451 	 don't do this if all we are doing is taking its address.  */
9452       if (TREE_THIS_VOLATILE (exp)
9453 	  && TREE_CODE (exp) != FUNCTION_DECL
9454 	  && mode != VOIDmode && mode != BLKmode
9455 	  && modifier != EXPAND_CONST_ADDRESS)
9456 	{
9457 	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
9458 	  if (MEM_P (temp))
9459 	    copy_to_reg (temp);
9460 	  return const0_rtx;
9461 	}
9462 
9463       if (TREE_CODE_CLASS (code) == tcc_unary
9464 	  || code == BIT_FIELD_REF
9465 	  || code == COMPONENT_REF
9466 	  || code == INDIRECT_REF)
9467 	return expand_expr (treeop0, const0_rtx, VOIDmode,
9468 			    modifier);
9469 
9470       else if (TREE_CODE_CLASS (code) == tcc_binary
9471 	       || TREE_CODE_CLASS (code) == tcc_comparison
9472 	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
9473 	{
9474 	  expand_expr (treeop0, const0_rtx, VOIDmode, modifier);
9475 	  expand_expr (treeop1, const0_rtx, VOIDmode, modifier);
9476 	  return const0_rtx;
9477 	}
9478 
9479       target = 0;
9480     }
9481 
9482   if (reduce_bit_field && modifier == EXPAND_STACK_PARM)
9483     target = 0;
9484 
9485   /* Use subtarget as the target for operand 0 of a binary operation.  */
9486   subtarget = get_subtarget (target);
9487   original_target = target;
9488 
9489   switch (code)
9490     {
9491     case LABEL_DECL:
9492       {
9493 	tree function = decl_function_context (exp);
9494 
9495 	temp = label_rtx (exp);
9496 	temp = gen_rtx_LABEL_REF (Pmode, temp);
9497 
9498 	if (function != current_function_decl
9499 	    && function != 0)
9500 	  LABEL_REF_NONLOCAL_P (temp) = 1;
9501 
9502 	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
9503 	return temp;
9504       }
9505 
9506     case SSA_NAME:
9507       /* ??? ivopts calls expander, without any preparation from
9508          out-of-ssa.  So fake instructions as if this were an access to the
9509 	 base variable.  This unnecessarily allocates a pseudo; see whether we
9510 	 can reuse it if partition base vars have it set already.  */
9511       if (!currently_expanding_to_rtl)
9512 	{
9513 	  tree var = SSA_NAME_VAR (exp);
9514 	  if (var && DECL_RTL_SET_P (var))
9515 	    return DECL_RTL (var);
9516 	  return gen_raw_REG (TYPE_MODE (TREE_TYPE (exp)),
9517 			      LAST_VIRTUAL_REGISTER + 1);
9518 	}
9519 
9520       g = get_gimple_for_ssa_name (exp);
9521       /* For EXPAND_INITIALIZER try harder to get something simpler.  */
9522       if (g == NULL
9523 	  && modifier == EXPAND_INITIALIZER
9524 	  && !SSA_NAME_IS_DEFAULT_DEF (exp)
9525 	  && (optimize || DECL_IGNORED_P (SSA_NAME_VAR (exp)))
9526 	  && stmt_is_replaceable_p (SSA_NAME_DEF_STMT (exp)))
9527 	g = SSA_NAME_DEF_STMT (exp);
9528       if (g)
9529 	{
9530 	  rtx r;
9531 	  ops.code = gimple_assign_rhs_code (g);
9532           switch (get_gimple_rhs_class (ops.code))
9533 	    {
9534 	    case GIMPLE_TERNARY_RHS:
9535 	      ops.op2 = gimple_assign_rhs3 (g);
9536 	      /* Fallthru */
9537 	    case GIMPLE_BINARY_RHS:
9538 	      ops.op1 = gimple_assign_rhs2 (g);
9539 
9540 	      /* Try to expand a conditional compare.  */
9541 	      if (targetm.gen_ccmp_first)
9542 		{
9543 		  gcc_checking_assert (targetm.gen_ccmp_next != NULL);
9544 		  r = expand_ccmp_expr (g);
9545 		  if (r)
9546 		    break;
9547 		}
9548 	      /* Fallthru */
9549 	    case GIMPLE_UNARY_RHS:
9550 	      ops.op0 = gimple_assign_rhs1 (g);
9551 	      ops.type = TREE_TYPE (gimple_assign_lhs (g));
9552 	      ops.location = gimple_location (g);
9553 	      r = expand_expr_real_2 (&ops, target, tmode, modifier);
9554 	      break;
9555 	    case GIMPLE_SINGLE_RHS:
9556 	      {
9557 		location_t saved_loc = curr_insn_location ();
9558 		set_curr_insn_location (gimple_location (g));
9559 		r = expand_expr_real (gimple_assign_rhs1 (g), target,
9560 				      tmode, modifier, NULL, inner_reference_p);
9561 		set_curr_insn_location (saved_loc);
9562 		break;
9563 	      }
9564 	    default:
9565 	      gcc_unreachable ();
9566 	    }
9567 	  if (REG_P (r) && !REG_EXPR (r))
9568 	    set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (exp), r);
9569 	  return r;
9570 	}
9571 
9572       ssa_name = exp;
9573       decl_rtl = get_rtx_for_ssa_name (ssa_name);
9574       exp = SSA_NAME_VAR (ssa_name);
9575       goto expand_decl_rtl;
9576 
9577     case PARM_DECL:
9578     case VAR_DECL:
9579       /* If a static var's type was incomplete when the decl was written,
9580 	 but the type is complete now, lay out the decl now.  */
9581       if (DECL_SIZE (exp) == 0
9582 	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
9583 	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
9584 	layout_decl (exp, 0);
9585 
9586       /* ... fall through ...  */
9587 
9588     case FUNCTION_DECL:
9589     case RESULT_DECL:
9590       decl_rtl = DECL_RTL (exp);
9591     expand_decl_rtl:
9592       gcc_assert (decl_rtl);
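      /* Work on a copy so that adjustments made below do not modify the
	 RTL stored in the declaration.  */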
9593       decl_rtl = copy_rtx (decl_rtl);
9594       /* Record writes to register variables.  */
9595       if (modifier == EXPAND_WRITE
9596 	  && REG_P (decl_rtl)
9597 	  && HARD_REGISTER_P (decl_rtl))
9598         add_to_hard_reg_set (&crtl->asm_clobbers,
9599 			     GET_MODE (decl_rtl), REGNO (decl_rtl));
9600 
9601       /* Ensure the variable is marked as used even if it doesn't go
9602 	 through a parser.  If it hasn't been used yet, write out an
9603 	 external definition.  */
9604       TREE_USED (exp) = 1;
9605 
9606       /* Show we haven't gotten RTL for this yet.  */
9607       temp = 0;
9608 
9609       /* Variables inherited from containing functions should have
9610 	 been lowered by this point.  */
9611       context = decl_function_context (exp);
9612       gcc_assert (SCOPE_FILE_SCOPE_P (context)
9613 		  || context == current_function_decl
9614 		  || TREE_STATIC (exp)
9615 		  || DECL_EXTERNAL (exp)
9616 		  /* ??? C++ creates functions that are not TREE_STATIC.  */
9617 		  || TREE_CODE (exp) == FUNCTION_DECL);
9618 
9619       /* This is the case of an array whose size is to be determined
9620 	 from its initializer, while the initializer is still being parsed.
9621 	 ??? We aren't parsing while expanding anymore.  */
9622 
9623       if (MEM_P (decl_rtl) && REG_P (XEXP (decl_rtl, 0)))
9624 	temp = validize_mem (decl_rtl);
9625 
9626       /* If DECL_RTL is memory, we are in the normal case; if the
9627 	 address is not valid, get the address into a register.  */
9628 
9629       else if (MEM_P (decl_rtl) && modifier != EXPAND_INITIALIZER)
9630 	{
9631 	  if (alt_rtl)
9632 	    *alt_rtl = decl_rtl;
9633 	  decl_rtl = use_anchored_address (decl_rtl);
9634 	  if (modifier != EXPAND_CONST_ADDRESS
9635 	      && modifier != EXPAND_SUM
9636 	      && !memory_address_addr_space_p (DECL_MODE (exp),
9637 					       XEXP (decl_rtl, 0),
9638 					       MEM_ADDR_SPACE (decl_rtl)))
9639 	    temp = replace_equiv_address (decl_rtl,
9640 					  copy_rtx (XEXP (decl_rtl, 0)));
9641 	}
9642 
9643       /* If we got something, return it.  But first, set the alignment
9644 	 if the address is a register.  */
9645       if (temp != 0)
9646 	{
9647 	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
9648 	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
9649 
9650 	  return temp;
9651 	}
9652 
9653       /* If the mode of DECL_RTL does not match that of the decl,
9654 	 there are two cases: we are dealing with a BLKmode value
9655 	 that is returned in a register, or we are dealing with
9656 	 a promoted value.  In the latter case, return a SUBREG
9657 	 of the wanted mode, but mark it so that we know that it
9658 	 was already extended.  */
9659       if (REG_P (decl_rtl)
9660 	  && DECL_MODE (exp) != BLKmode
9661 	  && GET_MODE (decl_rtl) != DECL_MODE (exp))
9662 	{
9663 	  machine_mode pmode;
9664 
9665 	  /* Get the signedness to be used for this variable.  Ensure we get
9666 	     the same mode we got when the variable was declared.  */
9667 	  if (code == SSA_NAME
9668 	      && (g = SSA_NAME_DEF_STMT (ssa_name))
9669 	      && gimple_code (g) == GIMPLE_CALL
9670 	      && !gimple_call_internal_p (g))
9671 	    pmode = promote_function_mode (type, mode, &unsignedp,
9672 					   gimple_call_fntype (g),
9673 					   2);
9674 	  else
9675 	    pmode = promote_decl_mode (exp, &unsignedp);
9676 	  gcc_assert (GET_MODE (decl_rtl) == pmode);
9677 
9678 	  temp = gen_lowpart_SUBREG (mode, decl_rtl);
9679 	  SUBREG_PROMOTED_VAR_P (temp) = 1;
9680 	  SUBREG_PROMOTED_SET (temp, unsignedp);
9681 	  return temp;
9682 	}
9683 
9684       return decl_rtl;
9685 
9686     case INTEGER_CST:
9687       /* Given that TYPE_PRECISION (type) is not always equal to
9688          GET_MODE_PRECISION (TYPE_MODE (type)), we need to extend from
9689          the former to the latter according to the signedness of the
9690          type. */
9691       temp = immed_wide_int_const (wide_int::from
9692 				   (exp,
9693 				    GET_MODE_PRECISION (TYPE_MODE (type)),
9694 				    TYPE_SIGN (type)),
9695 				   TYPE_MODE (type));
9696       return temp;
9697 
9698     case VECTOR_CST:
9699       {
9700 	tree tmp = NULL_TREE;
9701 	if (GET_MODE_CLASS (mode) == MODE_VECTOR_INT
9702 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
9703 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_FRACT
9704 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UFRACT
9705 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_ACCUM
9706 	    || GET_MODE_CLASS (mode) == MODE_VECTOR_UACCUM)
9707 	  return const_vector_from_tree (exp);
9708 	if (GET_MODE_CLASS (mode) == MODE_INT)
9709 	  {
9710 	    tree type_for_mode = lang_hooks.types.type_for_mode (mode, 1);
9711 	    if (type_for_mode)
9712 	      tmp = fold_unary_loc (loc, VIEW_CONVERT_EXPR, type_for_mode, exp);
9713 	  }
9714 	if (!tmp)
9715 	  {
9716 	    vec<constructor_elt, va_gc> *v;
9717 	    unsigned i;
9718 	    vec_alloc (v, VECTOR_CST_NELTS (exp));
9719 	    for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
9720 	      CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, VECTOR_CST_ELT (exp, i));
9721 	    tmp = build_constructor (type, v);
9722 	  }
9723 	return expand_expr (tmp, ignore ? const0_rtx : target,
9724 			    tmode, modifier);
9725       }
9726 
9727     case CONST_DECL:
9728       return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
9729 
9730     case REAL_CST:
9731       /* If optimized, generate immediate CONST_DOUBLE
9732 	 which will be turned into memory by reload if necessary.
9733 
9734 	 We used to force a register so that loop.c could see it.  But
9735 	 this does not allow gen_* patterns to perform optimizations with
9736 	 the constants.  It also produces two insns in cases like "x = 1.0;".
9737 	 On most machines, floating-point constants are not permitted in
9738 	 many insns, so we'd end up copying it to a register in any case.
9739 
9740 	 Now, we do the copying in expand_binop, if appropriate.  */
9741       return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
9742 					   TYPE_MODE (TREE_TYPE (exp)));
9743 
9744     case FIXED_CST:
9745       return CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (exp),
9746 					   TYPE_MODE (TREE_TYPE (exp)));
9747 
9748     case COMPLEX_CST:
9749       /* Handle evaluating a complex constant in a CONCAT target.  */
9750       if (original_target && GET_CODE (original_target) == CONCAT)
9751 	{
9752 	  machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9753 	  rtx rtarg, itarg;
9754 
9755 	  rtarg = XEXP (original_target, 0);
9756 	  itarg = XEXP (original_target, 1);
9757 
9758 	  /* Move the real and imaginary parts separately.  */
9759 	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, EXPAND_NORMAL);
9760 	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, EXPAND_NORMAL);
9761 
9762 	  if (op0 != rtarg)
9763 	    emit_move_insn (rtarg, op0);
9764 	  if (op1 != itarg)
9765 	    emit_move_insn (itarg, op1);
9766 
9767 	  return original_target;
9768 	}
9769 
9770       /* ... fall through ...  */
9771 
9772     case STRING_CST:
9773       temp = expand_expr_constant (exp, 1, modifier);
9774 
9775       /* temp contains a constant address.
9776 	 On RISC machines where a constant address isn't valid,
9777 	 make some insns to get that address into a register.  */
9778       if (modifier != EXPAND_CONST_ADDRESS
9779 	  && modifier != EXPAND_INITIALIZER
9780 	  && modifier != EXPAND_SUM
9781 	  && ! memory_address_addr_space_p (mode, XEXP (temp, 0),
9782 					    MEM_ADDR_SPACE (temp)))
9783 	return replace_equiv_address (temp,
9784 				      copy_rtx (XEXP (temp, 0)));
9785       return temp;
9786 
9787     case SAVE_EXPR:
9788       {
9789 	tree val = treeop0;
9790 	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl,
9791 				      inner_reference_p);
9792 
9793 	if (!SAVE_EXPR_RESOLVED_P (exp))
9794 	  {
9795 	    /* We can indeed still hit this case, typically via builtin
9796 	       expanders calling save_expr immediately before expanding
9797 	       something.  Assume this means that we only have to deal
9798 	       with non-BLKmode values.  */
9799 	    gcc_assert (GET_MODE (ret) != BLKmode);
9800 
9801 	    val = build_decl (curr_insn_location (),
9802 			      VAR_DECL, NULL, TREE_TYPE (exp));
9803 	    DECL_ARTIFICIAL (val) = 1;
9804 	    DECL_IGNORED_P (val) = 1;
9805 	    treeop0 = val;
9806 	    TREE_OPERAND (exp, 0) = treeop0;
9807 	    SAVE_EXPR_RESOLVED_P (exp) = 1;
9808 
9809 	    if (!CONSTANT_P (ret))
9810 	      ret = copy_to_reg (ret);
9811 	    SET_DECL_RTL (val, ret);
9812 	  }
9813 
9814         return ret;
9815       }
9816 
9817 
9818     case CONSTRUCTOR:
9819       /* If we don't need the result, just ensure we evaluate any
9820 	 subexpressions.  */
9821       if (ignore)
9822 	{
9823 	  unsigned HOST_WIDE_INT idx;
9824 	  tree value;
9825 
9826 	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), idx, value)
9827 	    expand_expr (value, const0_rtx, VOIDmode, EXPAND_NORMAL);
9828 
9829 	  return const0_rtx;
9830 	}
9831 
9832       return expand_constructor (exp, target, modifier, false);
9833 
9834     case TARGET_MEM_REF:
9835       {
9836 	addr_space_t as
9837 	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9838 	enum insn_code icode;
9839 	unsigned int align;
9840 
9841 	op0 = addr_for_mem_ref (exp, as, true);
9842 	op0 = memory_address_addr_space (mode, op0, as);
9843 	temp = gen_rtx_MEM (mode, op0);
9844 	set_mem_attributes (temp, exp, 0);
9845 	set_mem_addr_space (temp, as);
9846 	align = get_object_alignment (exp);
9847 	if (modifier != EXPAND_WRITE
9848 	    && modifier != EXPAND_MEMORY
9849 	    && mode != BLKmode
9850 	    && align < GET_MODE_ALIGNMENT (mode)
9851 	    /* If the target does not have special handling for unaligned
9852 	       loads of mode then it can use regular moves for them.  */
9853 	    && ((icode = optab_handler (movmisalign_optab, mode))
9854 		!= CODE_FOR_nothing))
9855 	  {
9856 	    struct expand_operand ops[2];
9857 
9858 	    /* We've already validated the memory, and we're creating a
9859 	       new pseudo destination.  The predicates really can't fail,
9860 	       nor can the generator.  */
9861 	    create_output_operand (&ops[0], NULL_RTX, mode);
9862 	    create_fixed_operand (&ops[1], temp);
9863 	    expand_insn (icode, 2, ops);
9864 	    temp = ops[0].value;
9865 	  }
9866 	return temp;
9867       }
9868 
9869     case MEM_REF:
9870       {
9871 	addr_space_t as
9872 	  = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
9873 	machine_mode address_mode;
9874 	tree base = TREE_OPERAND (exp, 0);
9875 	gimple def_stmt;
9876 	enum insn_code icode;
9877 	unsigned align;
9878 	/* Handle expansion of non-aliased memory with non-BLKmode.  That
9879 	   might end up in a register.  */
9880 	if (mem_ref_refers_to_non_mem_p (exp))
9881 	  {
9882 	    HOST_WIDE_INT offset = mem_ref_offset (exp).to_short_addr ();
9883 	    base = TREE_OPERAND (base, 0);
9884 	    if (offset == 0
9885 		&& tree_fits_uhwi_p (TYPE_SIZE (type))
9886 		&& (GET_MODE_BITSIZE (DECL_MODE (base))
9887 		    == tree_to_uhwi (TYPE_SIZE (type))))
9888 	      return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
9889 				  target, tmode, modifier);
9890 	    if (TYPE_MODE (type) == BLKmode)
9891 	      {
9892 		temp = assign_stack_temp (DECL_MODE (base),
9893 					  GET_MODE_SIZE (DECL_MODE (base)));
9894 		store_expr (base, temp, 0, false);
9895 		temp = adjust_address (temp, BLKmode, offset);
9896 		set_mem_size (temp, int_size_in_bytes (type));
9897 		return temp;
9898 	      }
9899 	    exp = build3 (BIT_FIELD_REF, type, base, TYPE_SIZE (type),
9900 			  bitsize_int (offset * BITS_PER_UNIT));
9901 	    return expand_expr (exp, target, tmode, modifier);
9902 	  }
9903 	address_mode = targetm.addr_space.address_mode (as);
9904 	base = TREE_OPERAND (exp, 0);
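	/* If the base pointer is defined by a BIT_AND_EXPR (typically an
	   alignment mask), fold that masking back into the address
	   expression so it is visible when the address is expanded.  */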
9905 	if ((def_stmt = get_def_for_expr (base, BIT_AND_EXPR)))
9906 	  {
9907 	    tree mask = gimple_assign_rhs2 (def_stmt);
9908 	    base = build2 (BIT_AND_EXPR, TREE_TYPE (base),
9909 			   gimple_assign_rhs1 (def_stmt), mask);
9910 	    TREE_OPERAND (exp, 0) = base;
9911 	  }
9912 	align = get_object_alignment (exp);
9913 	op0 = expand_expr (base, NULL_RTX, VOIDmode, EXPAND_SUM);
9914 	op0 = memory_address_addr_space (mode, op0, as);
9915 	if (!integer_zerop (TREE_OPERAND (exp, 1)))
9916 	  {
9917 	    rtx off = immed_wide_int_const (mem_ref_offset (exp), address_mode);
9918 	    op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
9919 	    op0 = memory_address_addr_space (mode, op0, as);
9920 	  }
9921 	temp = gen_rtx_MEM (mode, op0);
9922 	set_mem_attributes (temp, exp, 0);
9923 	set_mem_addr_space (temp, as);
9924 	if (TREE_THIS_VOLATILE (exp))
9925 	  MEM_VOLATILE_P (temp) = 1;
9926 	if (modifier != EXPAND_WRITE
9927 	    && modifier != EXPAND_MEMORY
9928 	    && !inner_reference_p
9929 	    && mode != BLKmode
9930 	    && align < GET_MODE_ALIGNMENT (mode))
9931 	  {
9932 	    if ((icode = optab_handler (movmisalign_optab, mode))
9933 		!= CODE_FOR_nothing)
9934 	      {
9935 		struct expand_operand ops[2];
9936 
9937 		/* We've already validated the memory, and we're creating a
9938 		   new pseudo destination.  The predicates really can't fail,
9939 		   nor can the generator.  */
9940 		create_output_operand (&ops[0], NULL_RTX, mode);
9941 		create_fixed_operand (&ops[1], temp);
9942 		expand_insn (icode, 2, ops);
9943 		temp = ops[0].value;
9944 	      }
9945 	    else if (SLOW_UNALIGNED_ACCESS (mode, align))
9946 	      temp = extract_bit_field (temp, GET_MODE_BITSIZE (mode),
9947 					0, TYPE_UNSIGNED (TREE_TYPE (exp)),
9948 					(modifier == EXPAND_STACK_PARM
9949 					 ? NULL_RTX : target),
9950 					mode, mode);
9951 	  }
9952 	return temp;
9953       }
9954 
9955     case ARRAY_REF:
9956 
9957       {
9958 	tree array = treeop0;
9959 	tree index = treeop1;
9960 	tree init;
9961 
9962 	/* Fold an expression like: "foo"[2].
9963 	   This is not done in fold so it won't happen inside &.
9964 	   Don't fold if this is for wide characters since it's too
9965 	   difficult to do correctly and this is a very rare case.  */
9966 
9967 	if (modifier != EXPAND_CONST_ADDRESS
9968 	    && modifier != EXPAND_INITIALIZER
9969 	    && modifier != EXPAND_MEMORY)
9970 	  {
9971 	    tree t = fold_read_from_constant_string (exp);
9972 
9973 	    if (t)
9974 	      return expand_expr (t, target, tmode, modifier);
9975 	  }
9976 
9977 	/* If this is a constant index into a constant array,
9978 	   just get the value from the array.  Handle both the cases when
9979 	   we have an explicit constructor and when our operand is a variable
9980 	   that was declared const.  */
9981 
9982 	if (modifier != EXPAND_CONST_ADDRESS
9983 	    && modifier != EXPAND_INITIALIZER
9984 	    && modifier != EXPAND_MEMORY
9985 	    && TREE_CODE (array) == CONSTRUCTOR
9986 	    && ! TREE_SIDE_EFFECTS (array)
9987 	    && TREE_CODE (index) == INTEGER_CST)
9988 	  {
9989 	    unsigned HOST_WIDE_INT ix;
9990 	    tree field, value;
9991 
9992 	    FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (array), ix,
9993 				      field, value)
9994 	      if (tree_int_cst_equal (field, index))
9995 		{
9996 		  if (!TREE_SIDE_EFFECTS (value))
9997 		    return expand_expr (fold (value), target, tmode, modifier);
9998 		  break;
9999 		}
10000 	  }
10001 
10002 	else if (optimize >= 1
10003 		 && modifier != EXPAND_CONST_ADDRESS
10004 		 && modifier != EXPAND_INITIALIZER
10005 		 && modifier != EXPAND_MEMORY
10006 		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
10007 		 && TREE_CODE (index) == INTEGER_CST
10008 		 && (TREE_CODE (array) == VAR_DECL
10009 		     || TREE_CODE (array) == CONST_DECL)
10010 		 && (init = ctor_for_folding (array)) != error_mark_node)
10011 	  {
10012 	    if (init == NULL_TREE)
10013 	      {
10014 		tree value = build_zero_cst (type);
10015 		if (TREE_CODE (value) == CONSTRUCTOR)
10016 		  {
10017 		    /* If VALUE is a CONSTRUCTOR, this optimization is only
10018 		       useful if this doesn't store the CONSTRUCTOR into
10019 		       memory.  If it does, it is more efficient to just
10020 		       load the data from the array directly.  */
10021 		    rtx ret = expand_constructor (value, target,
10022 						  modifier, true);
10023 		    if (ret == NULL_RTX)
10024 		      value = NULL_TREE;
10025 		  }
10026 
10027 		if (value)
10028 		  return expand_expr (value, target, tmode, modifier);
10029 	      }
10030 	    else if (TREE_CODE (init) == CONSTRUCTOR)
10031 	      {
10032 		unsigned HOST_WIDE_INT ix;
10033 		tree field, value;
10034 
10035 		FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), ix,
10036 					  field, value)
10037 		  if (tree_int_cst_equal (field, index))
10038 		    {
10039 		      if (TREE_SIDE_EFFECTS (value))
10040 			break;
10041 
10042 		      if (TREE_CODE (value) == CONSTRUCTOR)
10043 			{
10044 			  /* If VALUE is a CONSTRUCTOR, this
10045 			     optimization is only useful if
10046 			     this doesn't store the CONSTRUCTOR
10047 			     into memory.  If it does, it is more
10048 			     efficient to just load the data from
10049 			     the array directly.  */
10050 			  rtx ret = expand_constructor (value, target,
10051 							modifier, true);
10052 			  if (ret == NULL_RTX)
10053 			    break;
10054 			}
10055 
10056 		      return
10057 		        expand_expr (fold (value), target, tmode, modifier);
10058 		    }
10059 	      }
10060 	    else if (TREE_CODE (init) == STRING_CST)
10061 	      {
10062 		tree low_bound = array_ref_low_bound (exp);
10063 		tree index1 = fold_convert_loc (loc, sizetype, treeop1);
10064 
10065 		/* Optimize the special case of a zero lower bound.
10066 
10067 		   We convert the lower bound to sizetype to avoid problems
10068 		   with constant folding.  E.g. suppose the lower bound is
10069 		   1 and its mode is QI.  Without the conversion
10070 		      (ARRAY + (INDEX - (unsigned char)1))
10071 		   becomes
10072 		      (ARRAY + (-(unsigned char)1) + INDEX)
10073 		   which becomes
10074 		      (ARRAY + 255 + INDEX).  Oops!  */
10075 		if (!integer_zerop (low_bound))
10076 		  index1 = size_diffop_loc (loc, index1,
10077 					    fold_convert_loc (loc, sizetype,
10078 							      low_bound));
10079 
10080 		if (compare_tree_int (index1, TREE_STRING_LENGTH (init)) < 0)
10081 		  {
10082 		    tree type = TREE_TYPE (TREE_TYPE (init));
10083 		    machine_mode mode = TYPE_MODE (type);
10084 
10085 		    if (GET_MODE_CLASS (mode) == MODE_INT
10086 			&& GET_MODE_SIZE (mode) == 1)
10087 		      return gen_int_mode (TREE_STRING_POINTER (init)
10088 					   [TREE_INT_CST_LOW (index1)],
10089 					   mode);
10090 		  }
10091 	      }
10092 	  }
10093       }
10094       goto normal_inner_ref;
10095 
10096     case COMPONENT_REF:
10097       /* If the operand is a CONSTRUCTOR, we can just extract the
10098 	 appropriate field if it is present.  */
10099       if (TREE_CODE (treeop0) == CONSTRUCTOR)
10100 	{
10101 	  unsigned HOST_WIDE_INT idx;
10102 	  tree field, value;
10103 
10104 	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (treeop0),
10105 				    idx, field, value)
10106 	    if (field == treeop1
10107 		/* We can normally use the value of the field in the
10108 		   CONSTRUCTOR.  However, if this is a bitfield in
10109 		   an integral mode that we can fit in a HOST_WIDE_INT,
10110 		   we must mask only the number of bits in the bitfield,
10111 		   since this is done implicitly by the constructor.  If
10112 		   the bitfield does not meet either of those conditions,
10113 		   we can't do this optimization.  */
10114 		&& (! DECL_BIT_FIELD (field)
10115 		    || ((GET_MODE_CLASS (DECL_MODE (field)) == MODE_INT)
10116 			&& (GET_MODE_PRECISION (DECL_MODE (field))
10117 			    <= HOST_BITS_PER_WIDE_INT))))
10118 	      {
10119 		if (DECL_BIT_FIELD (field)
10120 		    && modifier == EXPAND_STACK_PARM)
10121 		  target = 0;
10122 		op0 = expand_expr (value, target, tmode, modifier);
10123 		if (DECL_BIT_FIELD (field))
10124 		  {
10125 		    HOST_WIDE_INT bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
10126 		    machine_mode imode = TYPE_MODE (TREE_TYPE (field));
10127 
10128 		    if (TYPE_UNSIGNED (TREE_TYPE (field)))
10129 		      {
10130 			op1 = gen_int_mode (((HOST_WIDE_INT) 1 << bitsize) - 1,
10131 					    imode);
10132 			op0 = expand_and (imode, op0, op1, target);
10133 		      }
10134 		    else
10135 		      {
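			/* Signed bit-field: sign-extend by shifting the
			   value to the top of IMODE and arithmetically
			   back down.  */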
10136 			int count = GET_MODE_PRECISION (imode) - bitsize;
10137 
10138 			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
10139 					    target, 0);
10140 			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
10141 					    target, 0);
10142 		      }
10143 		  }
10144 
10145 		return op0;
10146 	      }
10147 	}
10148       goto normal_inner_ref;
10149 
10150     case BIT_FIELD_REF:
10151     case ARRAY_RANGE_REF:
10152     normal_inner_ref:
10153       {
10154 	machine_mode mode1, mode2;
10155 	HOST_WIDE_INT bitsize, bitpos;
10156 	tree offset;
10157 	int volatilep = 0, must_force_mem;
10158 	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
10159 					&mode1, &unsignedp, &volatilep, true);
10160 	rtx orig_op0, memloc;
10161 	bool clear_mem_expr = false;
10162 
10163 	/* If we got back the original object, something is wrong.  Perhaps
10164 	   we are evaluating an expression too early.  In any event, don't
10165 	   infinitely recurse.  */
10166 	gcc_assert (tem != exp);
10167 
10168 	/* If TEM's type is a union of variable size, pass TARGET to the inner
10169 	   computation, since it will need a temporary and TARGET is known
10170 	   to suffice.  This occurs in unchecked conversion in Ada.  */
10171 	orig_op0 = op0
10172 	  = expand_expr_real (tem,
10173 			      (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10174 			       && COMPLETE_TYPE_P (TREE_TYPE (tem))
10175 			       && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10176 				   != INTEGER_CST)
10177 			       && modifier != EXPAND_STACK_PARM
10178 			       ? target : NULL_RTX),
10179 			      VOIDmode,
10180 			      modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10181 			      NULL, true);
10182 
10183 	/* If the field has a mode, we want to access it in the
10184 	   field's mode, not the computed mode.
10185 	   If a MEM has VOIDmode (external with incomplete type),
10186 	   use BLKmode for it instead.  */
10187 	if (MEM_P (op0))
10188 	  {
10189 	    if (mode1 != VOIDmode)
10190 	      op0 = adjust_address (op0, mode1, 0);
10191 	    else if (GET_MODE (op0) == VOIDmode)
10192 	      op0 = adjust_address (op0, BLKmode, 0);
10193 	  }
10194 
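	/* MODE2 is the mode of the underlying object; constant RTXes can
	   be VOIDmode, so for them take the mode from the tree type.  */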
10195 	mode2
10196 	  = CONSTANT_P (op0) ? TYPE_MODE (TREE_TYPE (tem)) : GET_MODE (op0);
10197 
10198 	/* If we have either an offset, a BLKmode result, or a reference
10199 	   outside the underlying object, we must force it to memory.
10200 	   Such a case can occur in Ada if we have unchecked conversion
10201 	   of an expression from a scalar type to an aggregate type or
10202 	   for an ARRAY_RANGE_REF whose type is BLKmode, or if we were
10203 	   passed a partially uninitialized object or a view-conversion
10204 	   to a larger size.  */
10205 	must_force_mem = (offset
10206 			  || mode1 == BLKmode
10207 			  || bitpos + bitsize > GET_MODE_BITSIZE (mode2));
10208 
10209 	/* Handle CONCAT first.  */
10210 	if (GET_CODE (op0) == CONCAT && !must_force_mem)
10211 	  {
10212 	    if (bitpos == 0
10213 		&& bitsize == GET_MODE_BITSIZE (GET_MODE (op0))
10214 		&& COMPLEX_MODE_P (mode1)
10215 		&& COMPLEX_MODE_P (GET_MODE (op0))
10216 		&& (GET_MODE_PRECISION (GET_MODE_INNER (mode1))
10217 		    == GET_MODE_PRECISION (GET_MODE_INNER (GET_MODE (op0)))))
10218 	      {
10219 		if (mode1 != GET_MODE (op0))
10220 		  {
10221 		    rtx parts[2];
10222 		    for (int i = 0; i < 2; i++)
10223 		      {
10224 			rtx op = read_complex_part (op0, i != 0);
10225 			if (GET_CODE (op) == SUBREG)
10226 			  op = force_reg (GET_MODE (op), op);
10227 			rtx temp = gen_lowpart_common (GET_MODE_INNER (mode1),
10228 						       op);
10229 			if (temp)
10230 			  op = temp;
10231 			else
10232 			  {
10233 			    if (!REG_P (op) && !MEM_P (op))
10234 			      op = force_reg (GET_MODE (op), op);
10235 			    op = gen_lowpart (GET_MODE_INNER (mode1), op);
10236 			  }
10237 			parts[i] = op;
10238 		      }
10239 		    op0 = gen_rtx_CONCAT (mode1, parts[0], parts[1]);
10240 		  }
10241 		return op0;
10242 	      }
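	    /* A reference covering exactly the first half of the CONCAT
	       selects its first operand; one covering exactly the second
	       half selects its second operand (for a complex value, the
	       real and imaginary parts respectively).  */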
10243 	    if (bitpos == 0
10244 		&& bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10245 		&& bitsize)
10246 	      {
10247 		op0 = XEXP (op0, 0);
10248 		mode2 = GET_MODE (op0);
10249 	      }
10250 	    else if (bitpos == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
10251 		     && bitsize == GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 1)))
10252 		     && bitpos
10253 		     && bitsize)
10254 	      {
10255 		op0 = XEXP (op0, 1);
10256 		bitpos = 0;
10257 		mode2 = GET_MODE (op0);
10258 	      }
10259 	    else
10260 	      /* Otherwise force into memory.  */
10261 	      must_force_mem = 1;
10262 	  }
10263 
10264 	/* If this is a constant, put it in a register if it is a legitimate
10265 	   constant and we don't need a memory reference.  */
10266 	if (CONSTANT_P (op0)
10267 	    && mode2 != BLKmode
10268 	    && targetm.legitimate_constant_p (mode2, op0)
10269 	    && !must_force_mem)
10270 	  op0 = force_reg (mode2, op0);
10271 
10272 	/* Otherwise, if this is a constant, try to force it to the constant
10273 	   pool.  Note that back-ends, e.g. MIPS, may refuse to do so if it
10274 	   is a legitimate constant.  */
10275 	else if (CONSTANT_P (op0) && (memloc = force_const_mem (mode2, op0)))
10276 	  op0 = validize_mem (memloc);
10277 
10278 	/* Otherwise, if this is a constant, or the object is not in memory
10279 	   and needs to be, put it there.  */
10280 	else if (CONSTANT_P (op0) || (!MEM_P (op0) && must_force_mem))
10281 	  {
10282 	    memloc = assign_temp (TREE_TYPE (tem), 1, 1);
10283 	    emit_move_insn (memloc, op0);
10284 	    op0 = memloc;
10285 	    clear_mem_expr = true;
10286 	  }
10287 
10288 	if (offset)
10289 	  {
10290 	    machine_mode address_mode;
10291 	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
10292 					  EXPAND_SUM);
10293 
10294 	    gcc_assert (MEM_P (op0));
10295 
10296 	    address_mode = get_address_mode (op0);
10297 	    if (GET_MODE (offset_rtx) != address_mode)
10298 	      offset_rtx = convert_to_mode (address_mode, offset_rtx, 0);
10299 
10300 	    /* See the comment in expand_assignment for the rationale.  */
10301 	    if (mode1 != VOIDmode
10302 		&& bitpos != 0
10303 		&& bitsize > 0
10304 		&& (bitpos % bitsize) == 0
10305 		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
10306 		&& MEM_ALIGN (op0) >= GET_MODE_ALIGNMENT (mode1))
10307 	      {
10308 		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10309 		bitpos = 0;
10310 	      }
10311 
10312 	    op0 = offset_address (op0, offset_rtx,
10313 				  highest_pow2_factor (offset));
10314 	  }
10315 
10316 	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
10317 	   record its alignment as BIGGEST_ALIGNMENT.  */
10318 	if (MEM_P (op0) && bitpos == 0 && offset != 0
10319 	    && is_aligning_offset (offset, tem))
10320 	  set_mem_align (op0, BIGGEST_ALIGNMENT);
10321 
10322 	/* Don't forget about volatility even if this is a bitfield.  */
10323 	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
10324 	  {
10325 	    if (op0 == orig_op0)
10326 	      op0 = copy_rtx (op0);
10327 
10328 	    MEM_VOLATILE_P (op0) = 1;
10329 	  }
10330 
10331 	/* In cases where an aligned union has an unaligned object
10332 	   as a field, we might be extracting a BLKmode value from
10333 	   an integer-mode (e.g., SImode) object.  Handle this case
10334 	   by doing the extract into an object as wide as the field
10335 	   (which we know to be the width of a basic mode), then
10336 	   storing into memory, and changing the mode to BLKmode.  */
10337 	if (mode1 == VOIDmode
10338 	    || REG_P (op0) || GET_CODE (op0) == SUBREG
10339 	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
10340 		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
10341 		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
10342 		&& modifier != EXPAND_CONST_ADDRESS
10343 		&& modifier != EXPAND_INITIALIZER
10344 		&& modifier != EXPAND_MEMORY)
10345 	    /* If the bitfield is volatile and the bitsize
10346 	       is narrower than the access size of the bitfield,
10347 	       we need to extract bitfields from the access.  */
10348 	    || (volatilep && TREE_CODE (exp) == COMPONENT_REF
10349 		&& DECL_BIT_FIELD_TYPE (TREE_OPERAND (exp, 1))
10350 		&& mode1 != BLKmode
10351 		&& bitsize < GET_MODE_SIZE (mode1) * BITS_PER_UNIT)
10352 	    /* If the field isn't aligned enough to fetch as a memref,
10353 	       fetch it as a bit field.  */
10354 	    || (mode1 != BLKmode
10355 		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
10356 		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
10357 		      || (MEM_P (op0)
10358 			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
10359 			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
10360 		     && modifier != EXPAND_MEMORY
10361 		     && ((modifier == EXPAND_CONST_ADDRESS
10362 			  || modifier == EXPAND_INITIALIZER)
10363 			 ? STRICT_ALIGNMENT
10364 			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
10365 		    || (bitpos % BITS_PER_UNIT != 0)))
10366 	    /* If the type and the field are a constant size and the
10367 	       size of the type isn't the same size as the bitfield,
10368 	       we must use bitfield operations.  */
10369 	    || (bitsize >= 0
10370 		&& TYPE_SIZE (TREE_TYPE (exp))
10371 		&& TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
10372 		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
10373 					  bitsize)))
10374 	  {
10375 	    machine_mode ext_mode = mode;
10376 
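	    /* If the expression has BLKmode, try to find an integer mode
	       of BITSIZE bits to extract into, unless the memory-to-memory
	       block move below can be used instead.  */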
10377 	    if (ext_mode == BLKmode
10378 		&& ! (target != 0 && MEM_P (op0)
10379 		      && MEM_P (target)
10380 		      && bitpos % BITS_PER_UNIT == 0))
10381 	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);
10382 
10383 	    if (ext_mode == BLKmode)
10384 	      {
10385 		if (target == 0)
10386 		  target = assign_temp (type, 1, 1);
10387 
10388 		/* ??? Unlike the similar test a few lines below, this one is
10389 		   very likely obsolete.  */
10390 		if (bitsize == 0)
10391 		  return target;
10392 
10393 		/* In this case, BITPOS must start at a byte boundary and
10394 		   TARGET, if specified, must be a MEM.  */
10395 		gcc_assert (MEM_P (op0)
10396 			    && (!target || MEM_P (target))
10397 			    && !(bitpos % BITS_PER_UNIT));
10398 
10399 		emit_block_move (target,
10400 				 adjust_address (op0, VOIDmode,
10401 						 bitpos / BITS_PER_UNIT),
10402 				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
10403 					  / BITS_PER_UNIT),
10404 				 (modifier == EXPAND_STACK_PARM
10405 				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10406 
10407 		return target;
10408 	      }
10409 
10410 	    /* If we have nothing to extract, the result will be 0 for targets
10411 	       with SHIFT_COUNT_TRUNCATED == 0 and garbage otherwise.  Always
10412 	       return 0 for the sake of consistency, as reading a zero-sized
10413 	       bitfield is valid in Ada and the value is fully specified.  */
10414 	    if (bitsize == 0)
10415 	      return const0_rtx;
10416 
10417 	    op0 = validize_mem (op0);
10418 
10419 	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
10420 	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10421 
10422 	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
10423 				     (modifier == EXPAND_STACK_PARM
10424 				      ? NULL_RTX : target),
10425 				     ext_mode, ext_mode);
10426 
10427 	    /* If the result is a record type and BITSIZE is narrower than
10428 	       the mode of OP0, an integral mode, and this is a big endian
10429 	       machine, we must put the field into the high-order bits.  */
10430 	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
10431 		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
10432 		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
10433 	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
10434 				  GET_MODE_BITSIZE (GET_MODE (op0))
10435 				  - bitsize, op0, 1);
10436 
10437 	    /* If the result type is BLKmode, store the data into a temporary
10438 	       of the appropriate type, but with the mode corresponding to the
10439 	       mode for the data we have (op0's mode).  */
10440 	    if (mode == BLKmode)
10441 	      {
10442 		rtx new_rtx
10443 		  = assign_stack_temp_for_type (ext_mode,
10444 						GET_MODE_BITSIZE (ext_mode),
10445 						type);
10446 		emit_move_insn (new_rtx, op0);
10447 		op0 = copy_rtx (new_rtx);
10448 		PUT_MODE (op0, BLKmode);
10449 	      }
10450 
10451 	    return op0;
10452 	  }
10453 
10454 	/* If the result is BLKmode, use that to access the object
10455 	   now as well.  */
10456 	if (mode == BLKmode)
10457 	  mode1 = BLKmode;
10458 
10459 	/* Get a reference to just this component.  */
10460 	if (modifier == EXPAND_CONST_ADDRESS
10461 	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
10462 	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
10463 	else
10464 	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
10465 
10466 	if (op0 == orig_op0)
10467 	  op0 = copy_rtx (op0);
10468 
10469 	set_mem_attributes (op0, exp, 0);
10470 
10471 	if (REG_P (XEXP (op0, 0)))
10472 	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10473 
10474 	/* If op0 is a temporary because the original expression was forced
10475 	   to memory, clear MEM_EXPR so that the original expression cannot
10476 	   be marked as addressable through the MEM_EXPR of the temporary.  */
10477 	if (clear_mem_expr)
10478 	  set_mem_expr (op0, NULL_TREE);
10479 
10480 	MEM_VOLATILE_P (op0) |= volatilep;
10481 	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
10482 	    || modifier == EXPAND_CONST_ADDRESS
10483 	    || modifier == EXPAND_INITIALIZER)
10484 	  return op0;
10485 
10486 	if (target == 0)
10487 	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
10488 
10489 	convert_move (target, op0, unsignedp);
10490 	return target;
10491       }
10492 
10493     case OBJ_TYPE_REF:
10494       return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
10495 
10496     case CALL_EXPR:
10497       /* All valid uses of __builtin_va_arg_pack () are removed during
10498 	 inlining.  */
10499       if (CALL_EXPR_VA_ARG_PACK (exp))
10500 	error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
10501       {
10502 	tree fndecl = get_callee_fndecl (exp), attr;
10503 
10504 	if (fndecl
10505 	    && (attr = lookup_attribute ("error",
10506 					 DECL_ATTRIBUTES (fndecl))) != NULL)
10507 	  error ("%Kcall to %qs declared with attribute error: %s",
10508 		 exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10509 		 TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10510 	if (fndecl
10511 	    && (attr = lookup_attribute ("warning",
10512 					 DECL_ATTRIBUTES (fndecl))) != NULL)
10513 	  warning_at (tree_nonartificial_location (exp),
10514 		      0, "%Kcall to %qs declared with attribute warning: %s",
10515 		      exp, identifier_to_locale (lang_hooks.decl_printable_name (fndecl, 1)),
10516 		      TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr))));
10517 
10518 	/* Check for a built-in function.  */
10519 	if (fndecl && DECL_BUILT_IN (fndecl))
10520 	  {
10521 	    gcc_assert (DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_FRONTEND);
10522 	    if (CALL_WITH_BOUNDS_P (exp))
10523 	      return expand_builtin_with_bounds (exp, target, subtarget,
10524 						 tmode, ignore);
10525 	    else
10526 	      return expand_builtin (exp, target, subtarget, tmode, ignore);
10527 	  }
10528       }
10529       return expand_call (exp, target, ignore);
10530 
10531     case VIEW_CONVERT_EXPR:
10532       op0 = NULL_RTX;
10533 
10534       /* If we are converting to BLKmode, try to avoid an intermediate
10535 	 temporary by fetching an inner memory reference.  */
10536       if (mode == BLKmode
10537 	  && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
10538 	  && TYPE_MODE (TREE_TYPE (treeop0)) != BLKmode
10539 	  && handled_component_p (treeop0))
10540       {
10541 	machine_mode mode1;
10542 	HOST_WIDE_INT bitsize, bitpos;
10543 	tree offset;
10544 	int unsignedp;
10545 	int volatilep = 0;
10546 	tree tem
10547 	  = get_inner_reference (treeop0, &bitsize, &bitpos,
10548 				 &offset, &mode1, &unsignedp, &volatilep,
10549 				 true);
10550 	rtx orig_op0;
10551 
10552 	/* ??? We should work harder and deal with non-zero offsets.  */
10553 	if (!offset
10554 	    && (bitpos % BITS_PER_UNIT) == 0
10555 	    && bitsize >= 0
10556 	    && compare_tree_int (TYPE_SIZE (type), bitsize) == 0)
10557 	  {
10558 	    /* See the normal_inner_ref case for the rationale.  */
10559 	    orig_op0
10560 	      = expand_expr_real (tem,
10561 				  (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
10562 				   && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
10563 				       != INTEGER_CST)
10564 				   && modifier != EXPAND_STACK_PARM
10565 				   ? target : NULL_RTX),
10566 				  VOIDmode,
10567 				  modifier == EXPAND_SUM ? EXPAND_NORMAL : modifier,
10568 				  NULL, true);
10569 
10570 	    if (MEM_P (orig_op0))
10571 	      {
10572 		op0 = orig_op0;
10573 
10574 		/* Get a reference to just this component.  */
10575 		if (modifier == EXPAND_CONST_ADDRESS
10576 		    || modifier == EXPAND_SUM
10577 		    || modifier == EXPAND_INITIALIZER)
10578 		  op0 = adjust_address_nv (op0, mode, bitpos / BITS_PER_UNIT);
10579 		else
10580 		  op0 = adjust_address (op0, mode, bitpos / BITS_PER_UNIT);
10581 
10582 		if (op0 == orig_op0)
10583 		  op0 = copy_rtx (op0);
10584 
10585 		set_mem_attributes (op0, treeop0, 0);
10586 		if (REG_P (XEXP (op0, 0)))
10587 		  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
10588 
10589 		MEM_VOLATILE_P (op0) |= volatilep;
10590 	      }
10591 	  }
10592       }
10593 
10594       if (!op0)
10595 	op0 = expand_expr_real (treeop0, NULL_RTX, VOIDmode, modifier,
10596 				NULL, inner_reference_p);
10597 
10598       /* If the input and output modes are both the same, we are done.  */
10599       if (mode == GET_MODE (op0))
10600 	;
10601       /* If neither mode is BLKmode, and both modes are the same size
10602 	 then we can use gen_lowpart.  */
10603       else if (mode != BLKmode && GET_MODE (op0) != BLKmode
10604 	       && (GET_MODE_PRECISION (mode)
10605 		   == GET_MODE_PRECISION (GET_MODE (op0)))
10606 	       && !COMPLEX_MODE_P (GET_MODE (op0)))
10607 	{
10608 	  if (GET_CODE (op0) == SUBREG)
10609 	    op0 = force_reg (GET_MODE (op0), op0);
10610 	  temp = gen_lowpart_common (mode, op0);
10611 	  if (temp)
10612 	    op0 = temp;
10613 	  else
10614 	    {
10615 	      if (!REG_P (op0) && !MEM_P (op0))
10616 		op0 = force_reg (GET_MODE (op0), op0);
10617 	      op0 = gen_lowpart (mode, op0);
10618 	    }
10619 	}
10620       /* If both types are integral, convert from one mode to the other.  */
10621       else if (INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (treeop0)))
10622 	op0 = convert_modes (mode, GET_MODE (op0), op0,
10623 			     TYPE_UNSIGNED (TREE_TYPE (treeop0)));
10624       /* If the output type is a bit-field type, do an extraction.  */
10625       else if (reduce_bit_field)
10626 	return extract_bit_field (op0, TYPE_PRECISION (type), 0,
10627 				  TYPE_UNSIGNED (type), NULL_RTX,
10628 				  mode, mode);
10629       /* As a last resort, spill op0 to memory, and reload it in a
10630 	 different mode.  */
10631       else if (!MEM_P (op0))
10632 	{
10633 	  /* If the operand is not a MEM, force it into memory.  Since we
10634 	     are going to be changing the mode of the MEM, don't call
10635 	     force_const_mem for constants because we don't allow pool
10636 	     constants to change mode.  */
10637 	  tree inner_type = TREE_TYPE (treeop0);
10638 
10639 	  gcc_assert (!TREE_ADDRESSABLE (exp));
10640 
10641 	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
10642 	    target
10643 	      = assign_stack_temp_for_type
10644 		(TYPE_MODE (inner_type),
10645 		 GET_MODE_SIZE (TYPE_MODE (inner_type)), inner_type);
10646 
10647 	  emit_move_insn (target, op0);
10648 	  op0 = target;
10649 	}
10650 
10651       /* If OP0 is (now) a MEM, we need to deal with alignment issues.  If the
10652 	 output type is such that the operand is known to be aligned, indicate
10653 	 that it is.  Otherwise, we need only be concerned about alignment for
10654 	 non-BLKmode results.  */
10655       if (MEM_P (op0))
10656 	{
10657 	  enum insn_code icode;
10658 
10659 	  if (TYPE_ALIGN_OK (type))
10660 	    {
10661 	      /* ??? Copying the MEM without substantially changing it might
10662 		 run afoul of the code handling volatile memory references in
10663 		 store_expr, which assumes that TARGET is returned unmodified
10664 		 if it has been used.  */
10665 	      op0 = copy_rtx (op0);
10666 	      set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
10667 	    }
10668 	  else if (modifier != EXPAND_WRITE
10669 		   && modifier != EXPAND_MEMORY
10670 		   && !inner_reference_p
10671 		   && mode != BLKmode
10672 		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode))
10673 	    {
10674 	      /* If the target does have special handling for unaligned
10675 		 loads of mode then use them.  */
10676 	      if ((icode = optab_handler (movmisalign_optab, mode))
10677 		  != CODE_FOR_nothing)
10678 		{
10679 		  rtx reg, insn;
10680 
10681 		  op0 = adjust_address (op0, mode, 0);
10682 		  /* We've already validated the memory, and we're creating a
10683 		     new pseudo destination.  The predicates really can't
10684 		     fail.  */
10685 		  reg = gen_reg_rtx (mode);
10686 
10687 		  /* Nor can the insn generator.  */
10688 		  insn = GEN_FCN (icode) (reg, op0);
10689 		  emit_insn (insn);
10690 		  return reg;
10691 		}
10692 	      else if (STRICT_ALIGNMENT)
10693 		{
10694 		  tree inner_type = TREE_TYPE (treeop0);
10695 		  HOST_WIDE_INT temp_size
10696 		    = MAX (int_size_in_bytes (inner_type),
10697 			   (HOST_WIDE_INT) GET_MODE_SIZE (mode));
10698 		  rtx new_rtx
10699 		    = assign_stack_temp_for_type (mode, temp_size, type);
10700 		  rtx new_with_op0_mode
10701 		    = adjust_address (new_rtx, GET_MODE (op0), 0);
10702 
10703 		  gcc_assert (!TREE_ADDRESSABLE (exp));
10704 
10705 		  if (GET_MODE (op0) == BLKmode)
10706 		    emit_block_move (new_with_op0_mode, op0,
10707 				     GEN_INT (GET_MODE_SIZE (mode)),
10708 				     (modifier == EXPAND_STACK_PARM
10709 				      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
10710 		  else
10711 		    emit_move_insn (new_with_op0_mode, op0);
10712 
10713 		  op0 = new_rtx;
10714 		}
10715 	    }
10716 
10717 	  op0 = adjust_address (op0, mode, 0);
10718 	}
10719 
10720       return op0;
10721 
10722     case MODIFY_EXPR:
10723       {
10724 	tree lhs = treeop0;
10725 	tree rhs = treeop1;
10726 	gcc_assert (ignore);
10727 
10728 	/* Check for |= or &= of a bitfield of size one into another bitfield
10729 	   of size one.  In this case, (unless we need the result of the
10730 	   assignment) we can do this more efficiently with a
10731 	   test followed by an assignment, if necessary.
10732 
10733 	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
10734 	   things change so we do, this code should be enhanced to
10735 	   support it.  */
10736 	if (TREE_CODE (lhs) == COMPONENT_REF
10737 	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
10738 		|| TREE_CODE (rhs) == BIT_AND_EXPR)
10739 	    && TREE_OPERAND (rhs, 0) == lhs
10740 	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
10741 	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
10742 	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
10743 	  {
10744 	    rtx_code_label *label = gen_label_rtx ();
10745 	    int value = TREE_CODE (rhs) == BIT_IOR_EXPR;
10746 	    do_jump (TREE_OPERAND (rhs, 1),
10747 		     value ? label : 0,
10748 		     value ? 0 : label, -1);
10749 	    expand_assignment (lhs, build_int_cst (TREE_TYPE (rhs), value),
10750 			       false);
10751 	    do_pending_stack_adjust ();
10752 	    emit_label (label);
10753 	    return const0_rtx;
10754 	  }
10755 
10756 	expand_assignment (lhs, rhs, false);
10757 	return const0_rtx;
10758       }
10759 
10760     case ADDR_EXPR:
10761       return expand_expr_addr_expr (exp, target, tmode, modifier);
10762 
10763     case REALPART_EXPR:
10764       op0 = expand_normal (treeop0);
10765       return read_complex_part (op0, false);
10766 
10767     case IMAGPART_EXPR:
10768       op0 = expand_normal (treeop0);
10769       return read_complex_part (op0, true);
10770 
10771     case RETURN_EXPR:
10772     case LABEL_EXPR:
10773     case GOTO_EXPR:
10774     case SWITCH_EXPR:
10775     case ASM_EXPR:
10776       /* Expanded in cfgexpand.c.  */
10777       gcc_unreachable ();
10778 
10779     case TRY_CATCH_EXPR:
10780     case CATCH_EXPR:
10781     case EH_FILTER_EXPR:
10782     case TRY_FINALLY_EXPR:
10783       /* Lowered by tree-eh.c.  */
10784       gcc_unreachable ();
10785 
10786     case WITH_CLEANUP_EXPR:
10787     case CLEANUP_POINT_EXPR:
10788     case TARGET_EXPR:
10789     case CASE_LABEL_EXPR:
10790     case VA_ARG_EXPR:
10791     case BIND_EXPR:
10792     case INIT_EXPR:
10793     case CONJ_EXPR:
10794     case COMPOUND_EXPR:
10795     case PREINCREMENT_EXPR:
10796     case PREDECREMENT_EXPR:
10797     case POSTINCREMENT_EXPR:
10798     case POSTDECREMENT_EXPR:
10799     case LOOP_EXPR:
10800     case EXIT_EXPR:
10801     case COMPOUND_LITERAL_EXPR:
10802       /* Lowered by gimplify.c.  */
10803       gcc_unreachable ();
10804 
10805     case FDESC_EXPR:
10806       /* Function descriptors are not valid except as
10807 	 initialization constants, and should not be expanded.  */
10808       gcc_unreachable ();
10809 
10810     case WITH_SIZE_EXPR:
10811       /* WITH_SIZE_EXPR expands to its first argument.  The caller should
10812 	 have pulled out the size to use in whatever context it needed.  */
10813       return expand_expr_real (treeop0, original_target, tmode,
10814 			       modifier, alt_rtl, inner_reference_p);
10815 
10816     default:
10817       return expand_expr_real_2 (&ops, target, tmode, modifier);
10818     }
10819 }
10820 
10821 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
10822    signedness of TYPE), possibly returning the result in TARGET.  */
10823 static rtx
10824 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
10825 {
10826   HOST_WIDE_INT prec = TYPE_PRECISION (type);
10827   if (target && GET_MODE (target) != GET_MODE (exp))
10828     target = 0;
10829   /* For constant values, reduce using build_int_cst_type. */
10830   if (CONST_INT_P (exp))
10831     {
10832       HOST_WIDE_INT value = INTVAL (exp);
10833       tree t = build_int_cst_type (type, value);
10834       return expand_expr (t, target, VOIDmode, EXPAND_NORMAL);
10835     }
10836   else if (TYPE_UNSIGNED (type))
10837     {
10838       machine_mode mode = GET_MODE (exp);
10839       rtx mask = immed_wide_int_const
10840 	(wi::mask (prec, false, GET_MODE_PRECISION (mode)), mode);
10841       return expand_and (mode, exp, mask, target);
10842     }
10843   else
10844     {
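      /* For signed types, sign-extend from PREC bits: shift the value to
	 the top of the mode and then arithmetically back down.  */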
10845       int count = GET_MODE_PRECISION (GET_MODE (exp)) - prec;
10846       exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp),
10847 			  exp, count, target, 0);
10848       return expand_shift (RSHIFT_EXPR, GET_MODE (exp),
10849 			   exp, count, target, 0);
10850     }
10851 }
10852 
10853 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
10854    when applied to the address of EXP produces an address known to be
10855    aligned more than BIGGEST_ALIGNMENT.  */
10856 
10857 static int
10858 is_aligning_offset (const_tree offset, const_tree exp)
10859 {
10860   /* Strip off any conversions.  */
10861   while (CONVERT_EXPR_P (offset))
10862     offset = TREE_OPERAND (offset, 0);
10863 
10864   /* We must now have a BIT_AND_EXPR with a constant that is one less than
10865      a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
10866   if (TREE_CODE (offset) != BIT_AND_EXPR
10867       || !tree_fits_uhwi_p (TREE_OPERAND (offset, 1))
10868       || compare_tree_int (TREE_OPERAND (offset, 1),
10869 			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
10870       || exact_log2 (tree_to_uhwi (TREE_OPERAND (offset, 1)) + 1) < 0)
10871     return 0;
10872 
10873   /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
10874      It must be NEGATE_EXPR.  Then strip any more conversions.  */
10875   offset = TREE_OPERAND (offset, 0);
10876   while (CONVERT_EXPR_P (offset))
10877     offset = TREE_OPERAND (offset, 0);
10878 
10879   if (TREE_CODE (offset) != NEGATE_EXPR)
10880     return 0;
10881 
10882   offset = TREE_OPERAND (offset, 0);
10883   while (CONVERT_EXPR_P (offset))
10884     offset = TREE_OPERAND (offset, 0);
10885 
10886   /* This must now be the address of EXP.  */
10887   return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
10888 }
10889 
10890 /* Return the tree node if ARG corresponds to a string constant, or zero
10891    if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
10892    in bytes within the string that ARG is accessing.  The type of the
10893    offset will be `sizetype'.  */
10894 
10895 tree
10896 string_constant (tree arg, tree *ptr_offset)
10897 {
10898   tree array, offset, lower_bound;
10899   STRIP_NOPS (arg);
10900 
10901   if (TREE_CODE (arg) == ADDR_EXPR)
10902     {
10903       if (TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
10904 	{
10905 	  *ptr_offset = size_zero_node;
10906 	  return TREE_OPERAND (arg, 0);
10907 	}
10908       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == VAR_DECL)
10909 	{
10910 	  array = TREE_OPERAND (arg, 0);
10911 	  offset = size_zero_node;
10912 	}
10913       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
10914 	{
10915 	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10916 	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10917 	  if (TREE_CODE (array) != STRING_CST
10918 	      && TREE_CODE (array) != VAR_DECL)
10919 	    return 0;
10920 
10921 	  /* Check if the array has a nonzero lower bound.  */
10922 	  lower_bound = array_ref_low_bound (TREE_OPERAND (arg, 0));
10923 	  if (!integer_zerop (lower_bound))
10924 	    {
10925 	      /* If the offset and base aren't both constants, return 0.  */
10926 	      if (TREE_CODE (lower_bound) != INTEGER_CST)
10927 	        return 0;
10928 	      if (TREE_CODE (offset) != INTEGER_CST)
10929 		return 0;
10930 	      /* Adjust offset by the lower bound.  */
10931 	      offset = size_diffop (fold_convert (sizetype, offset),
10932 				    fold_convert (sizetype, lower_bound));
10933 	    }
10934 	}
10935       else if (TREE_CODE (TREE_OPERAND (arg, 0)) == MEM_REF)
10936 	{
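	  /* For a MEM_REF, the first operand must be the address of a
	     STRING_CST or VAR_DECL and the second operand is the constant
	     byte offset.  */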
10937 	  array = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
10938 	  offset = TREE_OPERAND (TREE_OPERAND (arg, 0), 1);
10939 	  if (TREE_CODE (array) != ADDR_EXPR)
10940 	    return 0;
10941 	  array = TREE_OPERAND (array, 0);
10942 	  if (TREE_CODE (array) != STRING_CST
10943 	      && TREE_CODE (array) != VAR_DECL)
10944 	    return 0;
10945 	}
10946       else
10947 	return 0;
10948     }
10949   else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
10950     {
10951       tree arg0 = TREE_OPERAND (arg, 0);
10952       tree arg1 = TREE_OPERAND (arg, 1);
10953 
10954       STRIP_NOPS (arg0);
10955       STRIP_NOPS (arg1);
10956 
10957       if (TREE_CODE (arg0) == ADDR_EXPR
10958 	  && (TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST
10959 	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == VAR_DECL))
10960 	{
10961 	  array = TREE_OPERAND (arg0, 0);
10962 	  offset = arg1;
10963 	}
10964       else if (TREE_CODE (arg1) == ADDR_EXPR
10965 	       && (TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST
10966 		   || TREE_CODE (TREE_OPERAND (arg1, 0)) == VAR_DECL))
10967 	{
10968 	  array = TREE_OPERAND (arg1, 0);
10969 	  offset = arg0;
10970 	}
10971       else
10972 	return 0;
10973     }
10974   else
10975     return 0;
10976 
10977   if (TREE_CODE (array) == STRING_CST)
10978     {
10979       *ptr_offset = fold_convert (sizetype, offset);
10980       return array;
10981     }
10982   else if (TREE_CODE (array) == VAR_DECL
10983 	   || TREE_CODE (array) == CONST_DECL)
10984     {
10985       int length;
10986       tree init = ctor_for_folding (array);
10987 
10988       /* Variables initialized to string literals can be handled too.  */
10989       if (init == error_mark_node
10990 	  || !init
10991 	  || TREE_CODE (init) != STRING_CST)
10992 	return 0;
10993 
10994       /* Avoid const char foo[4] = "abcde";  */
10995       if (DECL_SIZE_UNIT (array) == NULL_TREE
10996 	  || TREE_CODE (DECL_SIZE_UNIT (array)) != INTEGER_CST
10997 	  || (length = TREE_STRING_LENGTH (init)) <= 0
10998 	  || compare_tree_int (DECL_SIZE_UNIT (array), length) < 0)
10999 	return 0;
11000 
11001       /* If the variable is bigger than the string literal, OFFSET must be
11002 	 constant and inside the bounds of the string literal.  */
11003       offset = fold_convert (sizetype, offset);
11004       if (compare_tree_int (DECL_SIZE_UNIT (array), length) > 0
11005 	  && (! tree_fits_uhwi_p (offset)
11006 	      || compare_tree_int (offset, length) >= 0))
11007 	return 0;
11008 
11009       *ptr_offset = offset;
11010       return init;
11011     }
11012 
11013   return 0;
11014 }
11015 
11016 /* Generate code to calculate OPS, an exploded expression,
11017    using a store-flag instruction and return an rtx for the result.
11018    OPS reflects a comparison.
11019 
11020    If TARGET is nonzero, store the result there if convenient.
11021 
11022    Return zero if there is no suitable set-flag instruction
11023    available on this machine.
11024 
11025    Once expand_expr has been called on the arguments of the comparison,
11026    we are committed to doing the store flag, since it is not safe to
11027    re-evaluate the expression.  We emit the store-flag insn by calling
11028    emit_store_flag, but only expand the arguments if we have a reason
11029    to believe that emit_store_flag will be successful.  If we think that
11030    it will, but it isn't, we have to simulate the store-flag with a
11031    set/jump/set sequence.  */
11032 
11033 static rtx
11034 do_store_flag (sepops ops, rtx target, machine_mode mode)
11035 {
11036   enum rtx_code code;
11037   tree arg0, arg1, type;
11038   tree tem;
11039   machine_mode operand_mode;
11040   int unsignedp;
11041   rtx op0, op1;
11042   rtx subtarget = target;
11043   location_t loc = ops->location;
11044 
11045   arg0 = ops->op0;
11046   arg1 = ops->op1;
11047 
11048   /* Don't crash if the comparison was erroneous.  */
11049   if (arg0 == error_mark_node || arg1 == error_mark_node)
11050     return const0_rtx;
11051 
11052   type = TREE_TYPE (arg0);
11053   operand_mode = TYPE_MODE (type);
11054   unsignedp = TYPE_UNSIGNED (type);
11055 
11056   /* We won't bother with BLKmode store-flag operations because it would mean
11057      passing a lot of information to emit_store_flag.  */
11058   if (operand_mode == BLKmode)
11059     return 0;
11060 
11061   /* We won't bother with store-flag operations involving function pointers
11062      when function pointers must be canonicalized before comparisons.  */
11063 #ifdef HAVE_canonicalize_funcptr_for_compare
11064   if (HAVE_canonicalize_funcptr_for_compare
11065       && ((TREE_CODE (TREE_TYPE (arg0)) == POINTER_TYPE
11066 	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg0)))
11067 	       == FUNCTION_TYPE))
11068 	  || (TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE
11069 	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (arg1)))
11070 		  == FUNCTION_TYPE))))
11071     return 0;
11072 #endif
11073 
11074   STRIP_NOPS (arg0);
11075   STRIP_NOPS (arg1);
11076 
11077   /* For vector typed comparisons emit code to generate the desired
11078      all-ones or all-zeros mask.  Conveniently use the VEC_COND_EXPR
11079      expander for this.  */
11080   if (TREE_CODE (ops->type) == VECTOR_TYPE)
11081     {
11082       tree ifexp = build2 (ops->code, ops->type, arg0, arg1);
11083       tree if_true = constant_boolean_node (true, ops->type);
11084       tree if_false = constant_boolean_node (false, ops->type);
11085       return expand_vec_cond_expr (ops->type, ifexp, if_true, if_false, target);
11086     }
11087 
11088   /* Get the rtx comparison code to use.  We know that EXP is a comparison
11089      operation of some type.  Some comparisons against 1 and -1 can be
11090      converted to comparisons with zero.  Do so here so that the tests
11091      below will be aware that we have a comparison with zero.   These
11092      tests will not catch constants in the first operand, but constants
11093      are rarely passed as the first operand.  */
11094 
11095   switch (ops->code)
11096     {
11097     case EQ_EXPR:
11098       code = EQ;
11099       break;
11100     case NE_EXPR:
11101       code = NE;
11102       break;
11103     case LT_EXPR:
11104       if (integer_onep (arg1))
11105 	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
11106       else
11107 	code = unsignedp ? LTU : LT;
11108       break;
11109     case LE_EXPR:
11110       if (! unsignedp && integer_all_onesp (arg1))
11111 	arg1 = integer_zero_node, code = LT;
11112       else
11113 	code = unsignedp ? LEU : LE;
11114       break;
11115     case GT_EXPR:
11116       if (! unsignedp && integer_all_onesp (arg1))
11117 	arg1 = integer_zero_node, code = GE;
11118       else
11119 	code = unsignedp ? GTU : GT;
11120       break;
11121     case GE_EXPR:
11122       if (integer_onep (arg1))
11123 	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
11124       else
11125 	code = unsignedp ? GEU : GE;
11126       break;
11127 
11128     case UNORDERED_EXPR:
11129       code = UNORDERED;
11130       break;
11131     case ORDERED_EXPR:
11132       code = ORDERED;
11133       break;
11134     case UNLT_EXPR:
11135       code = UNLT;
11136       break;
11137     case UNLE_EXPR:
11138       code = UNLE;
11139       break;
11140     case UNGT_EXPR:
11141       code = UNGT;
11142       break;
11143     case UNGE_EXPR:
11144       code = UNGE;
11145       break;
11146     case UNEQ_EXPR:
11147       code = UNEQ;
11148       break;
11149     case LTGT_EXPR:
11150       code = LTGT;
11151       break;
11152 
11153     default:
11154       gcc_unreachable ();
11155     }
11156 
11157   /* Put a constant second.  */
11158   if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST
11159       || TREE_CODE (arg0) == FIXED_CST)
11160     {
11161       tem = arg0; arg0 = arg1; arg1 = tem;
11162       code = swap_condition (code);
11163     }
11164 
11165   /* If this is an equality or inequality test of a single bit, we can
11166      do this by shifting the bit being tested to the low-order bit and
11167      masking the result with the constant 1.  If the condition was EQ,
11168      we xor it with 1.  This does not require an scc insn and is faster
11169      than an scc insn even if we have it.
11170 
11171      The code to make this transformation was moved into fold_single_bit_test,
11172      so we just call into the folder and expand its result.  */
11173 
11174   if ((code == NE || code == EQ)
11175       && integer_zerop (arg1)
11176       && (TYPE_PRECISION (ops->type) != 1 || TYPE_UNSIGNED (ops->type)))
11177     {
11178       gimple srcstmt = get_def_for_expr (arg0, BIT_AND_EXPR);
11179       if (srcstmt
11180 	  && integer_pow2p (gimple_assign_rhs2 (srcstmt)))
11181 	{
11182 	  enum tree_code tcode = code == NE ? NE_EXPR : EQ_EXPR;
11183 	  tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
11184 	  tree temp = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg1),
11185 				       gimple_assign_rhs1 (srcstmt),
11186 				       gimple_assign_rhs2 (srcstmt));
11187 	  temp = fold_single_bit_test (loc, tcode, temp, arg1, type);
11188 	  if (temp)
11189 	    return expand_expr (temp, target, VOIDmode, EXPAND_NORMAL);
11190 	}
11191     }
11192 
11193   if (! get_subtarget (target)
11194       || GET_MODE (subtarget) != operand_mode)
11195     subtarget = 0;
11196 
11197   expand_operands (arg0, arg1, subtarget, &op0, &op1, EXPAND_NORMAL);
11198 
11199   if (target == 0)
11200     target = gen_reg_rtx (mode);
11201 
11202   /* Try a cstore if possible.  */
11203   return emit_store_flag_force (target, code, op0, op1,
11204 				operand_mode, unsignedp,
11205 				(TYPE_PRECISION (ops->type) == 1
11206 				 && !TYPE_UNSIGNED (ops->type)) ? -1 : 1);
11207 }
11208 
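/* Illustrative sketch, not part of the original source: the single-bit
   path above means a comparison such as the one below can be expanded as
   a shift and mask, roughly (x >> 3) & 1, instead of needing a
   store-flag (scc) instruction.  */
#if 0
static int
bit3_set_p (unsigned int x)
{
  return (x & 8) != 0;	/* rewritten by fold_single_bit_test; for EQ the
			   result is additionally xor'd with 1 */
}
#endif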
11209 
11210 /* Stubs in case we haven't got a casesi insn.  */
11211 #ifndef HAVE_casesi
11212 # define HAVE_casesi 0
11213 # define gen_casesi(a, b, c, d, e) (0)
11214 # define CODE_FOR_casesi CODE_FOR_nothing
11215 #endif
11216 
11217 /* Attempt to generate a casesi instruction.  Returns 1 if successful,
11218    0 otherwise (i.e. if there is no casesi instruction).
11219 
11220    DEFAULT_PROBABILITY is the probability of jumping to the default
11221    label.  */
11222 int
11223 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
11224 	    rtx table_label, rtx default_label, rtx fallback_label,
11225             int default_probability)
11226 {
11227   struct expand_operand ops[5];
11228   machine_mode index_mode = SImode;
11229   rtx op1, op2, index;
11230 
11231   if (! HAVE_casesi)
11232     return 0;
11233 
11234   /* Convert the index to SImode.  */
11235   if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
11236     {
11237       machine_mode omode = TYPE_MODE (index_type);
11238       rtx rangertx = expand_normal (range);
11239 
11240       /* We must handle the endpoints in the original mode.  */
11241       index_expr = build2 (MINUS_EXPR, index_type,
11242 			   index_expr, minval);
11243       minval = integer_zero_node;
11244       index = expand_normal (index_expr);
11245       if (default_label)
11246         emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
11247 				 omode, 1, default_label,
11248                                  default_probability);
11249       /* Now we can safely truncate.  */
11250       index = convert_to_mode (index_mode, index, 0);
11251     }
11252   else
11253     {
11254       if (TYPE_MODE (index_type) != index_mode)
11255 	{
11256 	  index_type = lang_hooks.types.type_for_mode (index_mode, 0);
11257 	  index_expr = fold_convert (index_type, index_expr);
11258 	}
11259 
11260       index = expand_normal (index_expr);
11261     }
11262 
11263   do_pending_stack_adjust ();
11264 
11265   op1 = expand_normal (minval);
11266   op2 = expand_normal (range);
11267 
11268   create_input_operand (&ops[0], index, index_mode);
11269   create_convert_operand_from_type (&ops[1], op1, TREE_TYPE (minval));
11270   create_convert_operand_from_type (&ops[2], op2, TREE_TYPE (range));
11271   create_fixed_operand (&ops[3], table_label);
11272   create_fixed_operand (&ops[4], (default_label
11273 				  ? default_label
11274 				  : fallback_label));
11275   expand_jump_insn (CODE_FOR_casesi, 5, ops);
11276   return 1;
11277 }
11278 
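/* Illustrative sketch, not part of the original source: a dense switch
   like the one below is the kind of dispatch try_casesi can handle on a
   target that provides a casesi pattern; the index is biased by the
   minimum case value (here 10) and anything outside the range branches
   to the default (or fallback) label.  */
#if 0
static int
classify (int c)
{
  switch (c)
    {
    case 10: return 1;
    case 11: return 2;
    case 12: return 3;
    case 13: return 4;
    default: return 0;
    }
}
#endif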
11279 /* Attempt to generate a tablejump instruction; same concept.  */
11280 #ifndef HAVE_tablejump
11281 #define HAVE_tablejump 0
11282 #define gen_tablejump(x, y) (0)
11283 #endif
11284 
11285 /* Subroutine of the next function.
11286 
11287    INDEX is the value being switched on, with the lowest value
11288    in the table already subtracted.
11289    MODE is its expected mode (needed if INDEX is constant).
11290    RANGE is the length of the jump table.
11291    TABLE_LABEL is a CODE_LABEL rtx for the table itself.
11292 
11293    DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
11294    index value is out of range.
11295    DEFAULT_PROBABILITY is the probability of jumping to
11296    the default label.  */
11297 
11298 static void
11299 do_tablejump (rtx index, machine_mode mode, rtx range, rtx table_label,
11300 	      rtx default_label, int default_probability)
11301 {
11302   rtx temp, vector;
11303 
11304   if (INTVAL (range) > cfun->cfg->max_jumptable_ents)
11305     cfun->cfg->max_jumptable_ents = INTVAL (range);
11306 
11307   /* Do an unsigned comparison (in the proper mode) between the index
11308      expression and the value which represents the length of the range.
11309      Since we just finished subtracting the lower bound of the range
11310      from the index expression, this comparison allows us to simultaneously
11311      check that the original index expression value is both greater than
11312      or equal to the minimum value of the range and less than or equal to
11313      the maximum value of the range.  */
11314 
11315   if (default_label)
11316     emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
11317 			     default_label, default_probability);
11318 
11319 
11320   /* If index is in range, it must fit in Pmode.
11321      Convert to Pmode so we can index with it.  */
11322   if (mode != Pmode)
11323     index = convert_to_mode (Pmode, index, 1);
11324 
11325   /* Don't let a MEM slip through, because then INDEX that comes
11326      out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
11327      and break_out_memory_refs will go to work on it and mess it up.  */
11328 #ifdef PIC_CASE_VECTOR_ADDRESS
11329   if (flag_pic && !REG_P (index))
11330     index = copy_to_mode_reg (Pmode, index);
11331 #endif
11332 
11333   /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
11334      GET_MODE_SIZE, because this indicates how large insns are.  The other
11335      uses should all be Pmode, because they are addresses.  This code
11336      could fail if addresses and insns are not the same size.  */
11337   index = simplify_gen_binary (MULT, Pmode, index,
11338 			       gen_int_mode (GET_MODE_SIZE (CASE_VECTOR_MODE),
11339 					     Pmode));
11340   index = simplify_gen_binary (PLUS, Pmode, index,
11341 			       gen_rtx_LABEL_REF (Pmode, table_label));
11342 
11343 #ifdef PIC_CASE_VECTOR_ADDRESS
11344   if (flag_pic)
11345     index = PIC_CASE_VECTOR_ADDRESS (index);
11346   else
11347 #endif
11348     index = memory_address (CASE_VECTOR_MODE, index);
11349   temp = gen_reg_rtx (CASE_VECTOR_MODE);
11350   vector = gen_const_mem (CASE_VECTOR_MODE, index);
11351   convert_move (temp, vector, 0);
11352 
11353   emit_jump_insn (gen_tablejump (temp, table_label));
11354 
11355   /* If we are generating PIC code or if the table is PC-relative, the
11356      table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
11357   if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
11358     emit_barrier ();
11359 }
11360 
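/* Illustrative sketch, not part of the original source, using the GCC
   labels-as-values extension: the code emitted by do_tablejump behaves
   roughly like this hand-written dispatch, with an unsigned bounds check
   against the range followed by a load from the table and an indirect
   jump.  */
#if 0
static int
dispatch (unsigned int index)	/* lowest case value already subtracted */
{
  static const void *const table[] = { &&l0, &&l1, &&l2 };
  if (index > 2)		/* the GTU comparison against RANGE */
    goto dflt;
  goto *table[index];		/* load the table entry and jump */
 l0: return 0;
 l1: return 1;
 l2: return 2;
 dflt: return -1;
}
#endif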
11361 int
11362 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
11363 	       rtx table_label, rtx default_label, int default_probability)
11364 {
11365   rtx index;
11366 
11367   if (! HAVE_tablejump)
11368     return 0;
11369 
11370   index_expr = fold_build2 (MINUS_EXPR, index_type,
11371 			    fold_convert (index_type, index_expr),
11372 			    fold_convert (index_type, minval));
11373   index = expand_normal (index_expr);
11374   do_pending_stack_adjust ();
11375 
11376   do_tablejump (index, TYPE_MODE (index_type),
11377 		convert_modes (TYPE_MODE (index_type),
11378 			       TYPE_MODE (TREE_TYPE (range)),
11379 			       expand_normal (range),
11380 			       TYPE_UNSIGNED (TREE_TYPE (range))),
11381 		table_label, default_label, default_probability);
11382   return 1;
11383 }
11384 
11385 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
11386 static rtx
11387 const_vector_from_tree (tree exp)
11388 {
11389   rtvec v;
11390   unsigned i;
11391   int units;
11392   tree elt;
11393   machine_mode inner, mode;
11394 
11395   mode = TYPE_MODE (TREE_TYPE (exp));
11396 
11397   if (initializer_zerop (exp))
11398     return CONST0_RTX (mode);
11399 
11400   units = GET_MODE_NUNITS (mode);
11401   inner = GET_MODE_INNER (mode);
11402 
11403   v = rtvec_alloc (units);
11404 
11405   for (i = 0; i < VECTOR_CST_NELTS (exp); ++i)
11406     {
11407       elt = VECTOR_CST_ELT (exp, i);
11408 
11409       if (TREE_CODE (elt) == REAL_CST)
11410 	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
11411 							 inner);
11412       else if (TREE_CODE (elt) == FIXED_CST)
11413 	RTVEC_ELT (v, i) = CONST_FIXED_FROM_FIXED_VALUE (TREE_FIXED_CST (elt),
11414 							 inner);
11415       else
11416 	RTVEC_ELT (v, i) = immed_wide_int_const (elt, inner);
11417     }
11418 
11419   return gen_rtx_CONST_VECTOR (mode, v);
11420 }
11421 
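/* Illustrative sketch, not part of the original source, using the GCC
   vector extension on a target that supports a 4 x int vector mode:
   constant initializers like these reach the expander as VECTOR_CSTs;
   the all-zeros one is caught by the initializer_zerop short cut above,
   the other is built element by element into an rtvec.  */
#if 0
typedef int v4si __attribute__ ((vector_size (16)));
static v4si all_zeros = { 0, 0, 0, 0 };	/* handled via CONST0_RTX */
static v4si counting  = { 1, 2, 3, 4 };	/* per-element CONST_VECTOR */
#endif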
11422 /* Build a decl for a personality function given a language prefix.  */
11423 
11424 tree
11425 build_personality_function (const char *lang)
11426 {
11427   const char *unwind_and_version;
11428   tree decl, type;
11429   char *name;
11430 
11431   switch (targetm_common.except_unwind_info (&global_options))
11432     {
11433     case UI_NONE:
11434       return NULL;
11435     case UI_SJLJ:
11436       unwind_and_version = "_sj0";
11437       break;
11438     case UI_DWARF2:
11439     case UI_TARGET:
11440       unwind_and_version = "_v0";
11441       break;
11442     case UI_SEH:
11443       unwind_and_version = "_seh0";
11444       break;
11445     default:
11446       gcc_unreachable ();
11447     }
11448 
11449   name = ACONCAT (("__", lang, "_personality", unwind_and_version, NULL));
11450 
11451   type = build_function_type_list (integer_type_node, integer_type_node,
11452 				   long_long_unsigned_type_node,
11453 				   ptr_type_node, ptr_type_node, NULL_TREE);
11454   decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
11455 		     get_identifier (name), type);
11456   DECL_ARTIFICIAL (decl) = 1;
11457   DECL_EXTERNAL (decl) = 1;
11458   TREE_PUBLIC (decl) = 1;
11459 
11460   /* Zap the nonsensical SYMBOL_REF_DECL for this.  What we're left with
11461      are the flags assigned by targetm.encode_section_info.  */
11462   SET_SYMBOL_REF_DECL (XEXP (DECL_RTL (decl), 0), NULL);
11463 
11464   return decl;
11465 }
11466 
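/* Illustrative sketch, not part of the original source: the ACONCAT above
   pastes "__" + LANG + "_personality" + the unwind suffix, so the C++
   front end ("gxx") gets "__gxx_personality_v0" for DWARF2 unwinding and
   "__gxx_personality_sj0" for SJLJ.  */
#if 0
#include <stdio.h>

static void
show_personality_name (const char *lang, const char *unwind_and_version)
{
  printf ("__%s_personality%s\n", lang, unwind_and_version);
}
#endif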
11467 /* Extracts the personality function of DECL and returns the corresponding
11468    libfunc.  */
11469 
11470 rtx
11471 get_personality_function (tree decl)
11472 {
11473   tree personality = DECL_FUNCTION_PERSONALITY (decl);
11474   enum eh_personality_kind pk;
11475 
11476   pk = function_needs_eh_personality (DECL_STRUCT_FUNCTION (decl));
11477   if (pk == eh_personality_none)
11478     return NULL;
11479 
11480   if (!personality
11481       && pk == eh_personality_any)
11482     personality = lang_hooks.eh_personality ();
11483 
11484   if (pk == eh_personality_lang)
11485     gcc_assert (personality != NULL_TREE);
11486 
11487   return XEXP (DECL_RTL (personality), 0);
11488 }
11489 
11490 /* Returns a tree for the size of EXP in bytes.  */
11491 
11492 static tree
11493 tree_expr_size (const_tree exp)
11494 {
11495   if (DECL_P (exp)
11496       && DECL_SIZE_UNIT (exp) != 0)
11497     return DECL_SIZE_UNIT (exp);
11498   else
11499     return size_in_bytes (TREE_TYPE (exp));
11500 }
11501 
11502 /* Return an rtx for the size in bytes of the value of EXP.  */
11503 
11504 rtx
11505 expr_size (tree exp)
11506 {
11507   tree size;
11508 
11509   if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11510     size = TREE_OPERAND (exp, 1);
11511   else
11512     {
11513       size = tree_expr_size (exp);
11514       gcc_assert (size);
11515       gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
11516     }
11517 
11518   return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
11519 }
11520 
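/* Illustrative sketch, not part of the original source: for ordinary
   declarations like these the size comes straight from DECL_SIZE_UNIT,
   so expr_size is a compile-time constant (40 bytes here, assuming a
   target with 4-byte int); only objects whose size is not a compile-time
   constant (carried by WITH_SIZE_EXPR) need the placeholder path, and
   for those int_expr_size, defined just below, returns -1.  */
#if 0
static int fixed_array[10];
struct ten_ints { int e[10]; };
#endif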
11521 /* Return a wide integer for the size in bytes of the value of EXP, or -1
11522    if the size can vary or is larger than an integer.  */
11523 
11524 static HOST_WIDE_INT
11525 int_expr_size (tree exp)
11526 {
11527   tree size;
11528 
11529   if (TREE_CODE (exp) == WITH_SIZE_EXPR)
11530     size = TREE_OPERAND (exp, 1);
11531   else
11532     {
11533       size = tree_expr_size (exp);
11534       gcc_assert (size);
11535     }
11536 
11537   if (size == 0 || !tree_fits_shwi_p (size))
11538     return -1;
11539 
11540   return tree_to_shwi (size);
11541 }
11542 
11543 #include "gt-expr.h"
11544